1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "predict.h"
62 #include "vec.h"
63 #include "hashtab.h"
64 #include "hash-set.h"
65 #include "machmode.h"
66 #include "hard-reg-set.h"
67 #include "input.h"
68 #include "function.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "tree-eh.h"
73 #include "gimple-expr.h"
74 #include "is-a.h"
75 #include "gimple.h"
76 #include "gimplify.h"
77 #include "tree-dfa.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
79 #include "builtins.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "generic-match.h"
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
90 /* The following constants represent a bit-based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
110 };
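/* For example, each primitive outcome gets one bit -- LT = 1, EQ = 2,
   GT = 4, UNORD = 8 -- and every compound code is the bitwise OR of
   the outcomes it accepts:

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ                  (3)
     COMPCODE_LTGT == COMPCODE_LT | COMPCODE_GT                  (5)
     COMPCODE_ORD  == COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT    (7)
     COMPCODE_NE   == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD (13)

   Consequently, ANDing two codes yields the comparison that holds when
   both hold, and ORing yields the one that holds when either holds,
   which is what makes combining relational comparisons cheap.  */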
112 static bool negate_mathfn_p (enum built_in_function);
113 static bool negate_expr_p (tree);
114 static tree negate_expr (tree);
115 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
116 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
117 static tree const_binop (enum tree_code, tree, tree);
118 static enum comparison_code comparison_to_compcode (enum tree_code);
119 static enum tree_code compcode_to_comparison (enum comparison_code);
120 static int operand_equal_for_comparison_p (tree, tree, tree);
121 static int twoval_comparison_p (tree, tree *, tree *, int *);
122 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
123 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
124 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
125 static tree make_bit_field_ref (location_t, tree, tree,
126 HOST_WIDE_INT, HOST_WIDE_INT, int);
127 static tree optimize_bit_field_compare (location_t, enum tree_code,
128 tree, tree, tree);
129 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
130 HOST_WIDE_INT *,
131 machine_mode *, int *, int *,
132 tree *, tree *);
133 static tree sign_bit_p (tree, const_tree);
134 static int simple_operand_p (const_tree);
135 static bool simple_operand_p_2 (tree);
136 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
137 static tree range_predecessor (tree);
138 static tree range_successor (tree);
139 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
140 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
141 static tree unextend (tree, int, int, tree);
142 static tree optimize_minmax_comparison (location_t, enum tree_code,
143 tree, tree, tree);
144 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
145 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
146 static tree fold_binary_op_with_conditional_arg (location_t,
147 enum tree_code, tree,
148 tree, tree,
149 tree, tree, int);
150 static tree fold_mathfn_compare (location_t,
151 enum built_in_function, enum tree_code,
152 tree, tree, tree);
153 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
154 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
155 static bool reorder_operands_p (const_tree, const_tree);
156 static tree fold_negate_const (tree, tree);
157 static tree fold_not_const (const_tree, tree);
158 static tree fold_relational_const (enum tree_code, tree, tree, tree);
159 static tree fold_convert_const (enum tree_code, tree, tree);
161 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
162 Otherwise, return LOC. */
164 static location_t
165 expr_location_or (tree t, location_t loc)
167 location_t tloc = EXPR_LOCATION (t);
168 return tloc == UNKNOWN_LOCATION ? loc : tloc;
171 /* Similar to protected_set_expr_location, but never modify x in place;
172 if the location can and needs to be set, unshare it. */
174 static inline tree
175 protected_set_expr_location_unshare (tree x, location_t loc)
177 if (CAN_HAVE_LOCATION_P (x)
178 && EXPR_LOCATION (x) != loc
179 && !(TREE_CODE (x) == SAVE_EXPR
180 || TREE_CODE (x) == TARGET_EXPR
181 || TREE_CODE (x) == BIND_EXPR))
183 x = copy_node (x);
184 SET_EXPR_LOCATION (x, loc);
186 return x;
189 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
190 division and returns the quotient. Otherwise returns
191 NULL_TREE. */
193 tree
194 div_if_zero_remainder (const_tree arg1, const_tree arg2)
196 widest_int quo;
198 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
199 SIGNED, &quo))
200 return wide_int_to_tree (TREE_TYPE (arg1), quo);
202 return NULL_TREE;
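/* For example (a sketch, using build_int_cst to make the constants):

     div_if_zero_remainder (build_int_cst (integer_type_node, 12),
                            build_int_cst (integer_type_node, 4))

   returns the INTEGER_CST 3, since 12 % 4 == 0, while the same call
   with 13 in place of 12 returns NULL_TREE because 13 % 4 != 0.  */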
205 /* This is nonzero if we should defer warnings about undefined
206 overflow. This facility exists because these warnings are a
207 special case. The code to estimate loop iterations does not want
208 to issue any warnings, since it works with expressions which do not
209 occur in user code. Various bits of cleanup code call fold(), but
210 only use the result if it has certain characteristics (e.g., is a
211 constant); that code only wants to issue a warning if the result is
212 used. */
214 static int fold_deferring_overflow_warnings;
216 /* If a warning about undefined overflow is deferred, this is the
217 warning. Note that this may cause us to turn two warnings into
218 one, but that is fine since it is sufficient to only give one
219 warning per expression. */
221 static const char* fold_deferred_overflow_warning;
223 /* If a warning about undefined overflow is deferred, this is the
224 level at which the warning should be emitted. */
226 static enum warn_strict_overflow_code fold_deferred_overflow_code;
228 /* Start deferring overflow warnings. We could use a stack here to
229 permit nested calls, but at present it is not necessary. */
231 void
232 fold_defer_overflow_warnings (void)
234 ++fold_deferring_overflow_warnings;
237 /* Stop deferring overflow warnings. If there is a pending warning,
238 and ISSUE is true, then issue the warning if appropriate. STMT is
239 the statement with which the warning should be associated (used for
240 location information); STMT may be NULL. CODE is the level of the
241 warning--a warn_strict_overflow_code value. This function will use
242 the smaller of CODE and the deferred code when deciding whether to
243 issue the warning. CODE may be zero, meaning always use the
244 deferred code. */
246 void
247 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
249 const char *warnmsg;
250 location_t locus;
252 gcc_assert (fold_deferring_overflow_warnings > 0);
253 --fold_deferring_overflow_warnings;
254 if (fold_deferring_overflow_warnings > 0)
256 if (fold_deferred_overflow_warning != NULL
257 && code != 0
258 && code < (int) fold_deferred_overflow_code)
259 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
260 return;
263 warnmsg = fold_deferred_overflow_warning;
264 fold_deferred_overflow_warning = NULL;
266 if (!issue || warnmsg == NULL)
267 return;
269 if (gimple_no_warning_p (stmt))
270 return;
272 /* Use the smallest code level when deciding to issue the
273 warning. */
274 if (code == 0 || code > (int) fold_deferred_overflow_code)
275 code = fold_deferred_overflow_code;
277 if (!issue_strict_overflow_warning (code))
278 return;
280 if (stmt == NULL)
281 locus = input_location;
282 else
283 locus = gimple_location (stmt);
284 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
287 /* Stop deferring overflow warnings, ignoring any deferred
288 warnings. */
290 void
291 fold_undefer_and_ignore_overflow_warnings (void)
293 fold_undefer_overflow_warnings (false, NULL, 0);
296 /* Whether we are deferring overflow warnings. */
298 bool
299 fold_deferring_overflow_warnings_p (void)
301 return fold_deferring_overflow_warnings > 0;
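/* A hypothetical caller that wants the warning only when it actually
   keeps the folded result could use the machinery like this:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool keep = TREE_CODE (folded) == INTEGER_CST;
     fold_undefer_overflow_warnings (keep, stmt, 0);

   Here STMT may be NULL and passing 0 as CODE means "use the deferred
   warning level".  This mirrors the cleanup-code scenario described
   above, where only results with certain characteristics are used.  */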
304 /* This is called when we fold something based on the fact that signed
305 overflow is undefined. */
307 static void
308 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
310 if (fold_deferring_overflow_warnings > 0)
312 if (fold_deferred_overflow_warning == NULL
313 || wc < fold_deferred_overflow_code)
315 fold_deferred_overflow_warning = gmsgid;
316 fold_deferred_overflow_code = wc;
319 else if (issue_strict_overflow_warning (wc))
320 warning (OPT_Wstrict_overflow, gmsgid);
323 /* Return true if the built-in mathematical function specified by CODE
324 is odd, i.e. -f(x) == f(-x). */
326 static bool
327 negate_mathfn_p (enum built_in_function code)
329 switch (code)
331 CASE_FLT_FN (BUILT_IN_ASIN):
332 CASE_FLT_FN (BUILT_IN_ASINH):
333 CASE_FLT_FN (BUILT_IN_ATAN):
334 CASE_FLT_FN (BUILT_IN_ATANH):
335 CASE_FLT_FN (BUILT_IN_CASIN):
336 CASE_FLT_FN (BUILT_IN_CASINH):
337 CASE_FLT_FN (BUILT_IN_CATAN):
338 CASE_FLT_FN (BUILT_IN_CATANH):
339 CASE_FLT_FN (BUILT_IN_CBRT):
340 CASE_FLT_FN (BUILT_IN_CPROJ):
341 CASE_FLT_FN (BUILT_IN_CSIN):
342 CASE_FLT_FN (BUILT_IN_CSINH):
343 CASE_FLT_FN (BUILT_IN_CTAN):
344 CASE_FLT_FN (BUILT_IN_CTANH):
345 CASE_FLT_FN (BUILT_IN_ERF):
346 CASE_FLT_FN (BUILT_IN_LLROUND):
347 CASE_FLT_FN (BUILT_IN_LROUND):
348 CASE_FLT_FN (BUILT_IN_ROUND):
349 CASE_FLT_FN (BUILT_IN_SIN):
350 CASE_FLT_FN (BUILT_IN_SINH):
351 CASE_FLT_FN (BUILT_IN_TAN):
352 CASE_FLT_FN (BUILT_IN_TANH):
353 CASE_FLT_FN (BUILT_IN_TRUNC):
354 return true;
356 CASE_FLT_FN (BUILT_IN_LLRINT):
357 CASE_FLT_FN (BUILT_IN_LRINT):
358 CASE_FLT_FN (BUILT_IN_NEARBYINT):
359 CASE_FLT_FN (BUILT_IN_RINT):
360 return !flag_rounding_math;
362 default:
363 break;
365 return false;
368 /* Check whether we may negate an integer constant T without causing
369 overflow. */
371 bool
372 may_negate_without_overflow_p (const_tree t)
374 tree type;
376 gcc_assert (TREE_CODE (t) == INTEGER_CST);
378 type = TREE_TYPE (t);
379 if (TYPE_UNSIGNED (type))
380 return false;
382 return !wi::only_sign_bit_p (t);
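/* For example, in a signed 32-bit type the only value rejected here is
   INT_MIN: its negation, 2147483648, is not representable, and
   wi::only_sign_bit_p is true precisely for that bit pattern.  Every
   other value, including INT_MAX, negates without overflow.  */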
385 /* Determine whether an expression T can be cheaply negated using
386 the function negate_expr without introducing undefined overflow. */
388 static bool
389 negate_expr_p (tree t)
391 tree type;
393 if (t == 0)
394 return false;
396 type = TREE_TYPE (t);
398 STRIP_SIGN_NOPS (t);
399 switch (TREE_CODE (t))
401 case INTEGER_CST:
402 if (TYPE_OVERFLOW_WRAPS (type))
403 return true;
405 /* Check that -CST will not overflow type. */
406 return may_negate_without_overflow_p (t);
407 case BIT_NOT_EXPR:
408 return (INTEGRAL_TYPE_P (type)
409 && TYPE_OVERFLOW_WRAPS (type));
411 case FIXED_CST:
412 case NEGATE_EXPR:
413 return true;
415 case REAL_CST:
416 /* We want to canonicalize to positive real constants. Pretend
417 that only negative ones can be easily negated. */
418 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
420 case COMPLEX_CST:
421 return negate_expr_p (TREE_REALPART (t))
422 && negate_expr_p (TREE_IMAGPART (t));
424 case VECTOR_CST:
426 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
427 return true;
429 int count = TYPE_VECTOR_SUBPARTS (type), i;
431 for (i = 0; i < count; i++)
432 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
433 return false;
435 return true;
438 case COMPLEX_EXPR:
439 return negate_expr_p (TREE_OPERAND (t, 0))
440 && negate_expr_p (TREE_OPERAND (t, 1));
442 case CONJ_EXPR:
443 return negate_expr_p (TREE_OPERAND (t, 0));
445 case PLUS_EXPR:
446 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
447 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
448 return false;
449 /* -(A + B) -> (-B) - A. */
450 if (negate_expr_p (TREE_OPERAND (t, 1))
451 && reorder_operands_p (TREE_OPERAND (t, 0),
452 TREE_OPERAND (t, 1)))
453 return true;
454 /* -(A + B) -> (-A) - B. */
455 return negate_expr_p (TREE_OPERAND (t, 0));
457 case MINUS_EXPR:
458 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
459 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
461 && reorder_operands_p (TREE_OPERAND (t, 0),
462 TREE_OPERAND (t, 1));
464 case MULT_EXPR:
465 if (TYPE_UNSIGNED (TREE_TYPE (t)))
466 break;
468 /* Fall through. */
470 case RDIV_EXPR:
471 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
472 return negate_expr_p (TREE_OPERAND (t, 1))
473 || negate_expr_p (TREE_OPERAND (t, 0));
474 break;
476 case TRUNC_DIV_EXPR:
477 case ROUND_DIV_EXPR:
478 case EXACT_DIV_EXPR:
479 /* In general we can't negate A / B, because if A is INT_MIN and
480 B is 1, we may turn this into INT_MIN / -1 which is undefined
481 and actually traps on some architectures. But if overflow is
482 undefined, we can negate, because - (INT_MIN / 1) is an
483 overflow. */
484 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
486 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
487 break;
488 /* If overflow is undefined then we have to be careful because
489 we ask whether it's ok to associate the negate with the
490 division which is not ok for example for
491 -((a - b) / c) where (-(a - b)) / c may invoke undefined
492 overflow because of negating INT_MIN. So do not use
493 negate_expr_p here but open-code the two important cases. */
494 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
495 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
496 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
497 return true;
499 else if (negate_expr_p (TREE_OPERAND (t, 0)))
500 return true;
501 return negate_expr_p (TREE_OPERAND (t, 1));
503 case NOP_EXPR:
504 /* Negate -((double)float) as (double)(-float). */
505 if (TREE_CODE (type) == REAL_TYPE)
507 tree tem = strip_float_extensions (t);
508 if (tem != t)
509 return negate_expr_p (tem);
511 break;
513 case CALL_EXPR:
514 /* Negate -f(x) as f(-x). */
515 if (negate_mathfn_p (builtin_mathfn_code (t)))
516 return negate_expr_p (CALL_EXPR_ARG (t, 0));
517 break;
519 case RSHIFT_EXPR:
520 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
521 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
523 tree op1 = TREE_OPERAND (t, 1);
524 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
525 return true;
527 break;
529 default:
530 break;
532 return false;
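/* For example, for a signed integer variable A, negate_expr_p (A + 5)
   is true: the constant operand 5 can be negated safely, so
   fold_negate_expr below may rewrite -(A + 5) as (-5) - A.  For two
   arbitrary variables, negate_expr_p (A + B) is true only if one of
   the operands is itself negatable, e.g. already a NEGATE_EXPR.  */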
535 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
536 simplification is possible.
537 If negate_expr_p would return true for T, NULL_TREE will never be
538 returned. */
540 static tree
541 fold_negate_expr (location_t loc, tree t)
543 tree type = TREE_TYPE (t);
544 tree tem;
546 switch (TREE_CODE (t))
548 /* Convert - (~A) to A + 1. */
549 case BIT_NOT_EXPR:
550 if (INTEGRAL_TYPE_P (type))
551 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
552 build_one_cst (type));
553 break;
555 case INTEGER_CST:
556 tem = fold_negate_const (t, type);
557 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
558 || !TYPE_OVERFLOW_TRAPS (type))
559 return tem;
560 break;
562 case REAL_CST:
563 tem = fold_negate_const (t, type);
564 /* Two's complement FP formats, such as c4x, may overflow. */
565 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
566 return tem;
567 break;
569 case FIXED_CST:
570 tem = fold_negate_const (t, type);
571 return tem;
573 case COMPLEX_CST:
575 tree rpart = negate_expr (TREE_REALPART (t));
576 tree ipart = negate_expr (TREE_IMAGPART (t));
578 if ((TREE_CODE (rpart) == REAL_CST
579 && TREE_CODE (ipart) == REAL_CST)
580 || (TREE_CODE (rpart) == INTEGER_CST
581 && TREE_CODE (ipart) == INTEGER_CST))
582 return build_complex (type, rpart, ipart);
584 break;
586 case VECTOR_CST:
588 int count = TYPE_VECTOR_SUBPARTS (type), i;
589 tree *elts = XALLOCAVEC (tree, count);
591 for (i = 0; i < count; i++)
593 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
594 if (elts[i] == NULL_TREE)
595 return NULL_TREE;
598 return build_vector (type, elts);
601 case COMPLEX_EXPR:
602 if (negate_expr_p (t))
603 return fold_build2_loc (loc, COMPLEX_EXPR, type,
604 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
605 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
606 break;
608 case CONJ_EXPR:
609 if (negate_expr_p (t))
610 return fold_build1_loc (loc, CONJ_EXPR, type,
611 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
612 break;
614 case NEGATE_EXPR:
615 return TREE_OPERAND (t, 0);
617 case PLUS_EXPR:
618 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
619 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
621 /* -(A + B) -> (-B) - A. */
622 if (negate_expr_p (TREE_OPERAND (t, 1))
623 && reorder_operands_p (TREE_OPERAND (t, 0),
624 TREE_OPERAND (t, 1)))
626 tem = negate_expr (TREE_OPERAND (t, 1));
627 return fold_build2_loc (loc, MINUS_EXPR, type,
628 tem, TREE_OPERAND (t, 0));
631 /* -(A + B) -> (-A) - B. */
632 if (negate_expr_p (TREE_OPERAND (t, 0)))
634 tem = negate_expr (TREE_OPERAND (t, 0));
635 return fold_build2_loc (loc, MINUS_EXPR, type,
636 tem, TREE_OPERAND (t, 1));
639 break;
641 case MINUS_EXPR:
642 /* - (A - B) -> B - A */
643 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
644 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
645 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
646 return fold_build2_loc (loc, MINUS_EXPR, type,
647 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
648 break;
650 case MULT_EXPR:
651 if (TYPE_UNSIGNED (type))
652 break;
654 /* Fall through. */
656 case RDIV_EXPR:
657 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
659 tem = TREE_OPERAND (t, 1);
660 if (negate_expr_p (tem))
661 return fold_build2_loc (loc, TREE_CODE (t), type,
662 TREE_OPERAND (t, 0), negate_expr (tem));
663 tem = TREE_OPERAND (t, 0);
664 if (negate_expr_p (tem))
665 return fold_build2_loc (loc, TREE_CODE (t), type,
666 negate_expr (tem), TREE_OPERAND (t, 1));
668 break;
670 case TRUNC_DIV_EXPR:
671 case ROUND_DIV_EXPR:
672 case EXACT_DIV_EXPR:
673 /* In general we can't negate A / B, because if A is INT_MIN and
674 B is 1, we may turn this into INT_MIN / -1 which is undefined
675 and actually traps on some architectures. But if overflow is
676 undefined, we can negate, because - (INT_MIN / 1) is an
677 overflow. */
678 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
680 const char * const warnmsg = G_("assuming signed overflow does not "
681 "occur when negating a division");
682 tem = TREE_OPERAND (t, 1);
683 if (negate_expr_p (tem))
685 if (INTEGRAL_TYPE_P (type)
686 && (TREE_CODE (tem) != INTEGER_CST
687 || integer_onep (tem)))
688 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
689 return fold_build2_loc (loc, TREE_CODE (t), type,
690 TREE_OPERAND (t, 0), negate_expr (tem));
692 /* If overflow is undefined then we have to be careful because
693 we ask whether it's ok to associate the negate with the
694 division which is not ok for example for
695 -((a - b) / c) where (-(a - b)) / c may invoke undefined
696 overflow because of negating INT_MIN. So do not use
697 negate_expr_p here but open-code the two important cases. */
698 tem = TREE_OPERAND (t, 0);
699 if ((INTEGRAL_TYPE_P (type)
700 && (TREE_CODE (tem) == NEGATE_EXPR
701 || (TREE_CODE (tem) == INTEGER_CST
702 && may_negate_without_overflow_p (tem))))
703 || !INTEGRAL_TYPE_P (type))
704 return fold_build2_loc (loc, TREE_CODE (t), type,
705 negate_expr (tem), TREE_OPERAND (t, 1));
707 break;
709 case NOP_EXPR:
710 /* Convert -((double)float) into (double)(-float). */
711 if (TREE_CODE (type) == REAL_TYPE)
713 tem = strip_float_extensions (t);
714 if (tem != t && negate_expr_p (tem))
715 return fold_convert_loc (loc, type, negate_expr (tem));
717 break;
719 case CALL_EXPR:
720 /* Negate -f(x) as f(-x). */
721 if (negate_mathfn_p (builtin_mathfn_code (t))
722 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
724 tree fndecl, arg;
726 fndecl = get_callee_fndecl (t);
727 arg = negate_expr (CALL_EXPR_ARG (t, 0));
728 return build_call_expr_loc (loc, fndecl, 1, arg);
730 break;
732 case RSHIFT_EXPR:
733 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
734 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
736 tree op1 = TREE_OPERAND (t, 1);
737 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
739 tree ntype = TYPE_UNSIGNED (type)
740 ? signed_type_for (type)
741 : unsigned_type_for (type);
742 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
743 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
744 return fold_convert_loc (loc, type, temp);
747 break;
749 default:
750 break;
753 return NULL_TREE;
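/* Worked example for the RSHIFT_EXPR case above: for a 32-bit int,
   (int) x >> 31 is 0 or -1 depending on the sign bit (GCC defines
   signed right shift as arithmetic), so its negation is 0 or 1 --
   exactly the value of (unsigned) x >> 31.  Flipping the signedness
   of the shift therefore absorbs the negation for free.  */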
756 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
757 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
758 return NULL_TREE. */
760 static tree
761 negate_expr (tree t)
763 tree type, tem;
764 location_t loc;
766 if (t == NULL_TREE)
767 return NULL_TREE;
769 loc = EXPR_LOCATION (t);
770 type = TREE_TYPE (t);
771 STRIP_SIGN_NOPS (t);
773 tem = fold_negate_expr (loc, t);
774 if (!tem)
775 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
776 return fold_convert_loc (loc, type, tem);
779 /* Split a tree IN into constant, literal and variable parts that could be
780 combined with CODE to make IN. "constant" means an expression with
781 TREE_CONSTANT but that isn't an actual constant. CODE must be a
782 commutative arithmetic operation. Store the constant part into *CONP,
783 the literal in *LITP and return the variable part. If a part isn't
784 present, set it to null. If the tree does not decompose in this way,
785 return the entire tree as the variable part and the other parts as null.
787 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
788 case, we negate an operand that was subtracted. Except if it is a
789 literal for which we use *MINUS_LITP instead.
791 If NEGATE_P is true, we are negating all of IN, again except a literal
792 for which we use *MINUS_LITP instead.
794 If IN is itself a literal or constant, return it as appropriate.
796 Note that we do not guarantee that any of the three values will be the
797 same type as IN, but they will have the same signedness and mode. */
799 static tree
800 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
801 tree *minus_litp, int negate_p)
803 tree var = 0;
805 *conp = 0;
806 *litp = 0;
807 *minus_litp = 0;
809 /* Strip any conversions that don't change the machine mode or signedness. */
810 STRIP_SIGN_NOPS (in);
812 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
813 || TREE_CODE (in) == FIXED_CST)
814 *litp = in;
815 else if (TREE_CODE (in) == code
816 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
817 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
818 /* We can associate addition and subtraction together (even
819 though the C standard doesn't say so) for integers because
820 the value is not affected. For reals, the value might be
821 affected, so we can't. */
822 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
823 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
825 tree op0 = TREE_OPERAND (in, 0);
826 tree op1 = TREE_OPERAND (in, 1);
827 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
828 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
830 /* First see if either of the operands is a literal, then a constant. */
831 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
832 || TREE_CODE (op0) == FIXED_CST)
833 *litp = op0, op0 = 0;
834 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
835 || TREE_CODE (op1) == FIXED_CST)
836 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
838 if (op0 != 0 && TREE_CONSTANT (op0))
839 *conp = op0, op0 = 0;
840 else if (op1 != 0 && TREE_CONSTANT (op1))
841 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
843 /* If we haven't dealt with either operand, this is not a case we can
844 decompose. Otherwise, VAR is either of the ones remaining, if any. */
845 if (op0 != 0 && op1 != 0)
846 var = in;
847 else if (op0 != 0)
848 var = op0;
849 else
850 var = op1, neg_var_p = neg1_p;
852 /* Now do any needed negations. */
853 if (neg_litp_p)
854 *minus_litp = *litp, *litp = 0;
855 if (neg_conp_p)
856 *conp = negate_expr (*conp);
857 if (neg_var_p)
858 var = negate_expr (var);
860 else if (TREE_CODE (in) == BIT_NOT_EXPR
861 && code == PLUS_EXPR)
863 /* -X - 1 is folded to ~X, undo that here. */
864 *minus_litp = build_one_cst (TREE_TYPE (in));
865 var = negate_expr (TREE_OPERAND (in, 0));
867 else if (TREE_CONSTANT (in))
868 *conp = in;
869 else
870 var = in;
872 if (negate_p)
874 if (*litp)
875 *minus_litp = *litp, *litp = 0;
876 else if (*minus_litp)
877 *litp = *minus_litp, *minus_litp = 0;
878 *conp = negate_expr (*conp);
879 var = negate_expr (var);
882 return var;
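/* For example, splitting IN = A + 4 with CODE == PLUS_EXPR stores 4 in
   *LITP, leaves *CONP null and returns A as the variable part, while
   IN = A - 4 stores the 4 in *MINUS_LITP instead.  Splitting ~A with
   CODE == PLUS_EXPR takes the BIT_NOT_EXPR case above: since ~A is
   -A - 1, it returns -A with *MINUS_LITP set to 1.  */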
885 /* Re-associate trees split by the above function. T1 and T2 are
886 either expressions to associate or null. Return the new
887 expression, if any. LOC is the location of the new expression. If
888 we build an operation, do it in TYPE and with CODE. */
890 static tree
891 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
893 if (t1 == 0)
894 return t2;
895 else if (t2 == 0)
896 return t1;
898 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
899 try to fold this since we will have infinite recursion. But do
900 deal with any NEGATE_EXPRs. */
901 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
902 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
904 if (code == PLUS_EXPR)
906 if (TREE_CODE (t1) == NEGATE_EXPR)
907 return build2_loc (loc, MINUS_EXPR, type,
908 fold_convert_loc (loc, type, t2),
909 fold_convert_loc (loc, type,
910 TREE_OPERAND (t1, 0)));
911 else if (TREE_CODE (t2) == NEGATE_EXPR)
912 return build2_loc (loc, MINUS_EXPR, type,
913 fold_convert_loc (loc, type, t1),
914 fold_convert_loc (loc, type,
915 TREE_OPERAND (t2, 0)));
916 else if (integer_zerop (t2))
917 return fold_convert_loc (loc, type, t1);
919 else if (code == MINUS_EXPR)
921 if (integer_zerop (t2))
922 return fold_convert_loc (loc, type, t1);
925 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
926 fold_convert_loc (loc, type, t2));
929 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
930 fold_convert_loc (loc, type, t2));
933 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
934 for use in int_const_binop, size_binop and size_diffop. */
936 static bool
937 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
939 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
940 return false;
941 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
942 return false;
944 switch (code)
946 case LSHIFT_EXPR:
947 case RSHIFT_EXPR:
948 case LROTATE_EXPR:
949 case RROTATE_EXPR:
950 return true;
952 default:
953 break;
956 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
957 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
958 && TYPE_MODE (type1) == TYPE_MODE (type2);
962 /* Combine two integer constants ARG1 and ARG2 under operation CODE
963 to produce a new constant. Return NULL_TREE if we don't know how
964 to evaluate CODE at compile-time. */
966 static tree
967 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
968 int overflowable)
970 wide_int res;
971 tree t;
972 tree type = TREE_TYPE (arg1);
973 signop sign = TYPE_SIGN (type);
974 bool overflow = false;
976 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
977 TYPE_SIGN (TREE_TYPE (parg2)));
979 switch (code)
981 case BIT_IOR_EXPR:
982 res = wi::bit_or (arg1, arg2);
983 break;
985 case BIT_XOR_EXPR:
986 res = wi::bit_xor (arg1, arg2);
987 break;
989 case BIT_AND_EXPR:
990 res = wi::bit_and (arg1, arg2);
991 break;
993 case RSHIFT_EXPR:
994 case LSHIFT_EXPR:
995 if (wi::neg_p (arg2))
997 arg2 = -arg2;
998 if (code == RSHIFT_EXPR)
999 code = LSHIFT_EXPR;
1000 else
1001 code = RSHIFT_EXPR;
1004 if (code == RSHIFT_EXPR)
1005 /* It's unclear from the C standard whether shifts can overflow.
1006 The following code ignores overflow; perhaps a C standard
1007 interpretation ruling is needed. */
1008 res = wi::rshift (arg1, arg2, sign);
1009 else
1010 res = wi::lshift (arg1, arg2);
1011 break;
1013 case RROTATE_EXPR:
1014 case LROTATE_EXPR:
1015 if (wi::neg_p (arg2))
1017 arg2 = -arg2;
1018 if (code == RROTATE_EXPR)
1019 code = LROTATE_EXPR;
1020 else
1021 code = RROTATE_EXPR;
1024 if (code == RROTATE_EXPR)
1025 res = wi::rrotate (arg1, arg2);
1026 else
1027 res = wi::lrotate (arg1, arg2);
1028 break;
1030 case PLUS_EXPR:
1031 res = wi::add (arg1, arg2, sign, &overflow);
1032 break;
1034 case MINUS_EXPR:
1035 res = wi::sub (arg1, arg2, sign, &overflow);
1036 break;
1038 case MULT_EXPR:
1039 res = wi::mul (arg1, arg2, sign, &overflow);
1040 break;
1042 case MULT_HIGHPART_EXPR:
1043 res = wi::mul_high (arg1, arg2, sign);
1044 break;
1046 case TRUNC_DIV_EXPR:
1047 case EXACT_DIV_EXPR:
1048 if (arg2 == 0)
1049 return NULL_TREE;
1050 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1051 break;
1053 case FLOOR_DIV_EXPR:
1054 if (arg2 == 0)
1055 return NULL_TREE;
1056 res = wi::div_floor (arg1, arg2, sign, &overflow);
1057 break;
1059 case CEIL_DIV_EXPR:
1060 if (arg2 == 0)
1061 return NULL_TREE;
1062 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1063 break;
1065 case ROUND_DIV_EXPR:
1066 if (arg2 == 0)
1067 return NULL_TREE;
1068 res = wi::div_round (arg1, arg2, sign, &overflow);
1069 break;
1071 case TRUNC_MOD_EXPR:
1072 if (arg2 == 0)
1073 return NULL_TREE;
1074 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1075 break;
1077 case FLOOR_MOD_EXPR:
1078 if (arg2 == 0)
1079 return NULL_TREE;
1080 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1081 break;
1083 case CEIL_MOD_EXPR:
1084 if (arg2 == 0)
1085 return NULL_TREE;
1086 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1087 break;
1089 case ROUND_MOD_EXPR:
1090 if (arg2 == 0)
1091 return NULL_TREE;
1092 res = wi::mod_round (arg1, arg2, sign, &overflow);
1093 break;
1095 case MIN_EXPR:
1096 res = wi::min (arg1, arg2, sign);
1097 break;
1099 case MAX_EXPR:
1100 res = wi::max (arg1, arg2, sign);
1101 break;
1103 default:
1104 return NULL_TREE;
1107 t = force_fit_type (type, res, overflowable,
1108 (((sign == SIGNED || overflowable == -1)
1109 && overflow)
1110 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1112 return t;
1115 tree
1116 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1118 return int_const_binop_1 (code, arg1, arg2, 1);
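/* For example, in a signed 32-bit type int_const_binop (PLUS_EXPR,
   INT_MAX, 1) still yields a constant (the value wraps to INT_MIN),
   but with TREE_OVERFLOW set because the signed addition overflowed;
   any division or modulus by a zero constant instead returns
   NULL_TREE.  */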
1121 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1122 constant. We assume ARG1 and ARG2 have the same data type, or at least
1123 are the same kind of constant and the same machine mode. Return zero if
1124 combining the constants is not allowed in the current operating mode. */
1126 static tree
1127 const_binop (enum tree_code code, tree arg1, tree arg2)
1129 /* Sanity check for the recursive cases. */
1130 if (!arg1 || !arg2)
1131 return NULL_TREE;
1133 STRIP_NOPS (arg1);
1134 STRIP_NOPS (arg2);
1136 if (TREE_CODE (arg1) == INTEGER_CST)
1137 return int_const_binop (code, arg1, arg2);
1139 if (TREE_CODE (arg1) == REAL_CST)
1141 machine_mode mode;
1142 REAL_VALUE_TYPE d1;
1143 REAL_VALUE_TYPE d2;
1144 REAL_VALUE_TYPE value;
1145 REAL_VALUE_TYPE result;
1146 bool inexact;
1147 tree t, type;
1149 /* The following codes are handled by real_arithmetic. */
1150 switch (code)
1152 case PLUS_EXPR:
1153 case MINUS_EXPR:
1154 case MULT_EXPR:
1155 case RDIV_EXPR:
1156 case MIN_EXPR:
1157 case MAX_EXPR:
1158 break;
1160 default:
1161 return NULL_TREE;
1164 d1 = TREE_REAL_CST (arg1);
1165 d2 = TREE_REAL_CST (arg2);
1167 type = TREE_TYPE (arg1);
1168 mode = TYPE_MODE (type);
1170 /* Don't perform operation if we honor signaling NaNs and
1171 either operand is a NaN. */
1172 if (HONOR_SNANS (mode)
1173 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1174 return NULL_TREE;
1176 /* Don't perform operation if it would raise a division
1177 by zero exception. */
1178 if (code == RDIV_EXPR
1179 && REAL_VALUES_EQUAL (d2, dconst0)
1180 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1181 return NULL_TREE;
1183 /* If either operand is a NaN, just return it. Otherwise, set up
1184 for floating-point trap; we return an overflow. */
1185 if (REAL_VALUE_ISNAN (d1))
1186 return arg1;
1187 else if (REAL_VALUE_ISNAN (d2))
1188 return arg2;
1190 inexact = real_arithmetic (&value, code, &d1, &d2);
1191 real_convert (&result, mode, &value);
1193 /* Don't constant fold this floating point operation if
1194 the result has overflowed and flag_trapping_math is set. */
1195 if (flag_trapping_math
1196 && MODE_HAS_INFINITIES (mode)
1197 && REAL_VALUE_ISINF (result)
1198 && !REAL_VALUE_ISINF (d1)
1199 && !REAL_VALUE_ISINF (d2))
1200 return NULL_TREE;
1202 /* Don't constant fold this floating point operation if the
1203 result may depend upon the run-time rounding mode and
1204 flag_rounding_math is set, or if GCC's software emulation
1205 is unable to accurately represent the result. */
1206 if ((flag_rounding_math
1207 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1208 && (inexact || !real_identical (&result, &value)))
1209 return NULL_TREE;
1211 t = build_real (type, result);
1213 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1214 return t;
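/* For example, 1.0 + 2.0 is exact and folds to 3.0, but 1.0 / 3.0 is
   inexact: with flag_rounding_math set its value depends on the
   run-time rounding mode, so by the check above the division is left
   unfolded.  */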
1217 if (TREE_CODE (arg1) == FIXED_CST)
1219 FIXED_VALUE_TYPE f1;
1220 FIXED_VALUE_TYPE f2;
1221 FIXED_VALUE_TYPE result;
1222 tree t, type;
1223 int sat_p;
1224 bool overflow_p;
1226 /* The following codes are handled by fixed_arithmetic. */
1227 switch (code)
1229 case PLUS_EXPR:
1230 case MINUS_EXPR:
1231 case MULT_EXPR:
1232 case TRUNC_DIV_EXPR:
1233 f2 = TREE_FIXED_CST (arg2);
1234 break;
1236 case LSHIFT_EXPR:
1237 case RSHIFT_EXPR:
1239 wide_int w2 = arg2;
1240 f2.data.high = w2.elt (1);
1241 f2.data.low = w2.elt (0);
1242 f2.mode = SImode;
1244 break;
1246 default:
1247 return NULL_TREE;
1250 f1 = TREE_FIXED_CST (arg1);
1251 type = TREE_TYPE (arg1);
1252 sat_p = TYPE_SATURATING (type);
1253 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1254 t = build_fixed (type, result);
1255 /* Propagate overflow flags. */
1256 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1257 TREE_OVERFLOW (t) = 1;
1258 return t;
1261 if (TREE_CODE (arg1) == COMPLEX_CST)
1263 tree type = TREE_TYPE (arg1);
1264 tree r1 = TREE_REALPART (arg1);
1265 tree i1 = TREE_IMAGPART (arg1);
1266 tree r2 = TREE_REALPART (arg2);
1267 tree i2 = TREE_IMAGPART (arg2);
1268 tree real, imag;
1270 switch (code)
1272 case PLUS_EXPR:
1273 case MINUS_EXPR:
1274 real = const_binop (code, r1, r2);
1275 imag = const_binop (code, i1, i2);
1276 break;
1278 case MULT_EXPR:
1279 if (COMPLEX_FLOAT_TYPE_P (type))
1280 return do_mpc_arg2 (arg1, arg2, type,
1281 /* do_nonfinite= */ folding_initializer,
1282 mpc_mul);
1284 real = const_binop (MINUS_EXPR,
1285 const_binop (MULT_EXPR, r1, r2),
1286 const_binop (MULT_EXPR, i1, i2));
1287 imag = const_binop (PLUS_EXPR,
1288 const_binop (MULT_EXPR, r1, i2),
1289 const_binop (MULT_EXPR, i1, r2));
1290 break;
1292 case RDIV_EXPR:
1293 if (COMPLEX_FLOAT_TYPE_P (type))
1294 return do_mpc_arg2 (arg1, arg2, type,
1295 /* do_nonfinite= */ folding_initializer,
1296 mpc_div);
1297 /* Fallthru ... */
1298 case TRUNC_DIV_EXPR:
1299 case CEIL_DIV_EXPR:
1300 case FLOOR_DIV_EXPR:
1301 case ROUND_DIV_EXPR:
1302 if (flag_complex_method == 0)
1304 /* Keep this algorithm in sync with
1305 tree-complex.c:expand_complex_div_straight().
1307 Expand complex division to scalars, straightforward algorithm.
1308 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1309 t = br*br + bi*bi
1310 */
1311 tree magsquared
1312 = const_binop (PLUS_EXPR,
1313 const_binop (MULT_EXPR, r2, r2),
1314 const_binop (MULT_EXPR, i2, i2));
1315 tree t1
1316 = const_binop (PLUS_EXPR,
1317 const_binop (MULT_EXPR, r1, r2),
1318 const_binop (MULT_EXPR, i1, i2));
1319 tree t2
1320 = const_binop (MINUS_EXPR,
1321 const_binop (MULT_EXPR, i1, r2),
1322 const_binop (MULT_EXPR, r1, i2));
1324 real = const_binop (code, t1, magsquared);
1325 imag = const_binop (code, t2, magsquared);
1327 else
1329 /* Keep this algorithm in sync with
1330 tree-complex.c:expand_complex_div_wide().
1332 Expand complex division to scalars, modified algorithm to minimize
1333 overflow with wide input ranges. */
1334 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1335 fold_abs_const (r2, TREE_TYPE (type)),
1336 fold_abs_const (i2, TREE_TYPE (type)));
1338 if (integer_nonzerop (compare))
1340 /* In the TRUE branch, we compute
1341 ratio = br/bi;
1342 div = (br * ratio) + bi;
1343 tr = (ar * ratio) + ai;
1344 ti = (ai * ratio) - ar;
1345 tr = tr / div;
1346 ti = ti / div; */
1347 tree ratio = const_binop (code, r2, i2);
1348 tree div = const_binop (PLUS_EXPR, i2,
1349 const_binop (MULT_EXPR, r2, ratio));
1350 real = const_binop (MULT_EXPR, r1, ratio);
1351 real = const_binop (PLUS_EXPR, real, i1);
1352 real = const_binop (code, real, div);
1354 imag = const_binop (MULT_EXPR, i1, ratio);
1355 imag = const_binop (MINUS_EXPR, imag, r1);
1356 imag = const_binop (code, imag, div);
1358 else
1360 /* In the FALSE branch, we compute
1361 ratio = bi/br;
1362 div = (bi * ratio) + br;
1363 tr = (ai * ratio) + ar;
1364 ti = ai - (ar * ratio);
1365 tr = tr / div;
1366 ti = ti / div; */
1367 tree ratio = const_binop (code, i2, r2);
1368 tree div = const_binop (PLUS_EXPR, r2,
1369 const_binop (MULT_EXPR, i2, ratio));
1371 real = const_binop (MULT_EXPR, i1, ratio);
1372 real = const_binop (PLUS_EXPR, real, r1);
1373 real = const_binop (code, real, div);
1375 imag = const_binop (MULT_EXPR, r1, ratio);
1376 imag = const_binop (MINUS_EXPR, i1, imag);
1377 imag = const_binop (code, imag, div);
1380 break;
1382 default:
1383 return NULL_TREE;
1386 if (real && imag)
1387 return build_complex (type, real, imag);
1390 if (TREE_CODE (arg1) == VECTOR_CST
1391 && TREE_CODE (arg2) == VECTOR_CST)
1393 tree type = TREE_TYPE (arg1);
1394 int count = TYPE_VECTOR_SUBPARTS (type), i;
1395 tree *elts = XALLOCAVEC (tree, count);
1397 for (i = 0; i < count; i++)
1399 tree elem1 = VECTOR_CST_ELT (arg1, i);
1400 tree elem2 = VECTOR_CST_ELT (arg2, i);
1402 elts[i] = const_binop (code, elem1, elem2);
1404 /* It is possible that const_binop cannot handle the given
1405 code and returns NULL_TREE. */
1406 if (elts[i] == NULL_TREE)
1407 return NULL_TREE;
1410 return build_vector (type, elts);
1413 /* Shifts allow a scalar offset for a vector. */
1414 if (TREE_CODE (arg1) == VECTOR_CST
1415 && TREE_CODE (arg2) == INTEGER_CST)
1417 tree type = TREE_TYPE (arg1);
1418 int count = TYPE_VECTOR_SUBPARTS (type), i;
1419 tree *elts = XALLOCAVEC (tree, count);
1421 if (code == VEC_RSHIFT_EXPR)
1423 if (!tree_fits_uhwi_p (arg2))
1424 return NULL_TREE;
1426 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1427 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1428 unsigned HOST_WIDE_INT innerc
1429 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1430 if (shiftc >= outerc || (shiftc % innerc) != 0)
1431 return NULL_TREE;
1432 int offset = shiftc / innerc;
1433 /* The direction of VEC_RSHIFT_EXPR is endian dependent.
1434 For reductions, the compiler picks the first vector element if
1435 !BYTES_BIG_ENDIAN, but the last element if BYTES_BIG_ENDIAN. */
1436 if (BYTES_BIG_ENDIAN)
1437 offset = -offset;
1438 tree zero = build_zero_cst (TREE_TYPE (type));
1439 for (i = 0; i < count; i++)
1441 if (i + offset < 0 || i + offset >= count)
1442 elts[i] = zero;
1443 else
1444 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1447 else
1448 for (i = 0; i < count; i++)
1450 tree elem1 = VECTOR_CST_ELT (arg1, i);
1452 elts[i] = const_binop (code, elem1, arg2);
1454 /* It is possible that const_binop cannot handle the given
1455 code and returns NULL_TREE. */
1456 if (elts[i] == NULL_TREE)
1457 return NULL_TREE;
1460 return build_vector (type, elts);
1462 return NULL_TREE;
1465 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1466 indicates which particular sizetype to create. */
1468 tree
1469 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1471 return build_int_cst (sizetype_tab[(int) kind], number);
1474 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1475 is a tree code. The type of the result is taken from the operands.
1476 Both must be equivalent integer types, ala int_binop_types_match_p.
1477 If the operands are constant, so is the result. */
1479 tree
1480 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1482 tree type = TREE_TYPE (arg0);
1484 if (arg0 == error_mark_node || arg1 == error_mark_node)
1485 return error_mark_node;
1487 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1488 TREE_TYPE (arg1)));
1490 /* Handle the special case of two integer constants faster. */
1491 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1493 /* And some specific cases even faster than that. */
1494 if (code == PLUS_EXPR)
1496 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1497 return arg1;
1498 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1499 return arg0;
1501 else if (code == MINUS_EXPR)
1503 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1504 return arg0;
1506 else if (code == MULT_EXPR)
1508 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1509 return arg1;
1512 /* Handle general case of two integer constants. For sizetype
1513 constant calculations we always want to know about overflow,
1514 even in the unsigned case. */
1515 return int_const_binop_1 (code, arg0, arg1, -1);
1518 return fold_build2_loc (loc, code, type, arg0, arg1);
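/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds to the sizetype constant 12 via int_const_binop_1, called
   with OVERFLOWABLE == -1 so that even unsigned sizetype overflow is
   flagged; size_binop (PLUS_EXPR, size_zero_node, C) for a constant
   C returns C directly through the fast path.  */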
1521 /* Given two values, either both of sizetype or both of bitsizetype,
1522 compute the difference between the two values. Return the value
1523 in the signed type corresponding to the type of the operands. */
1525 tree
1526 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1528 tree type = TREE_TYPE (arg0);
1529 tree ctype;
1531 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1532 TREE_TYPE (arg1)));
1534 /* If the type is already signed, just do the simple thing. */
1535 if (!TYPE_UNSIGNED (type))
1536 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1538 if (type == sizetype)
1539 ctype = ssizetype;
1540 else if (type == bitsizetype)
1541 ctype = sbitsizetype;
1542 else
1543 ctype = signed_type_for (type);
1545 /* If either operand is not a constant, do the conversions to the signed
1546 type and subtract. The hardware will do the right thing with any
1547 overflow in the subtraction. */
1548 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1549 return size_binop_loc (loc, MINUS_EXPR,
1550 fold_convert_loc (loc, ctype, arg0),
1551 fold_convert_loc (loc, ctype, arg1));
1553 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1554 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1555 overflow) and negate (which can't either). Special-case a result
1556 of zero while we're here. */
1557 if (tree_int_cst_equal (arg0, arg1))
1558 return build_int_cst (ctype, 0);
1559 else if (tree_int_cst_lt (arg1, arg0))
1560 return fold_convert_loc (loc, ctype,
1561 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1562 else
1563 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1564 fold_convert_loc (loc, ctype,
1565 size_binop_loc (loc,
1566 MINUS_EXPR,
1567 arg1, arg0)));
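/* For example, for the sizetype constants 4 and 8, size_diffop_loc
   returns the ssizetype constant -4 rather than a huge unsigned
   value: the smaller operand is subtracted from the larger, converted
   to ssizetype, and the result negated, none of which can overflow.  */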
1570 /* A subroutine of fold_convert_const handling conversions of an
1571 INTEGER_CST to another integer type. */
1573 static tree
1574 fold_convert_const_int_from_int (tree type, const_tree arg1)
1576 /* Given an integer constant, make new constant with new type,
1577 appropriately sign-extended or truncated. Use widest_int
1578 so that any extension is done according to ARG1's type. */
1579 return force_fit_type (type, wi::to_widest (arg1),
1580 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1581 TREE_OVERFLOW (arg1));
1584 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1585 to an integer type. */
1587 static tree
1588 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1590 bool overflow = false;
1591 tree t;
1593 /* The following code implements the floating point to integer
1594 conversion rules required by the Java Language Specification,
1595 that IEEE NaNs are mapped to zero and values that overflow
1596 the target precision saturate, i.e. values greater than
1597 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1598 are mapped to INT_MIN. These semantics are allowed by the
1599 C and C++ standards that simply state that the behavior of
1600 FP-to-integer conversion is unspecified upon overflow. */
1602 wide_int val;
1603 REAL_VALUE_TYPE r;
1604 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1606 switch (code)
1608 case FIX_TRUNC_EXPR:
1609 real_trunc (&r, VOIDmode, &x);
1610 break;
1612 default:
1613 gcc_unreachable ();
1616 /* If R is NaN, return zero and show we have an overflow. */
1617 if (REAL_VALUE_ISNAN (r))
1619 overflow = true;
1620 val = wi::zero (TYPE_PRECISION (type));
1623 /* See if R is less than the lower bound or greater than the
1624 upper bound. */
1626 if (! overflow)
1628 tree lt = TYPE_MIN_VALUE (type);
1629 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1630 if (REAL_VALUES_LESS (r, l))
1632 overflow = true;
1633 val = lt;
1637 if (! overflow)
1639 tree ut = TYPE_MAX_VALUE (type);
1640 if (ut)
1642 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1643 if (REAL_VALUES_LESS (u, r))
1645 overflow = true;
1646 val = ut;
1651 if (! overflow)
1652 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1654 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1655 return t;
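/* For example, converting the REAL_CST 1.0e30 to a signed 32-bit
   integer type saturates: the value exceeds TYPE_MAX_VALUE, so the
   result is INT_MAX with TREE_OVERFLOW set.  Converting a NaN yields
   0, likewise flagged as an overflow.  */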
1658 /* A subroutine of fold_convert_const handling conversions of a
1659 FIXED_CST to an integer type. */
1661 static tree
1662 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1664 tree t;
1665 double_int temp, temp_trunc;
1666 unsigned int mode;
1668 /* Right shift FIXED_CST to temp by fbit. */
1669 temp = TREE_FIXED_CST (arg1).data;
1670 mode = TREE_FIXED_CST (arg1).mode;
1671 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1673 temp = temp.rshift (GET_MODE_FBIT (mode),
1674 HOST_BITS_PER_DOUBLE_INT,
1675 SIGNED_FIXED_POINT_MODE_P (mode));
1677 /* Left shift temp to temp_trunc by fbit. */
1678 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1679 HOST_BITS_PER_DOUBLE_INT,
1680 SIGNED_FIXED_POINT_MODE_P (mode));
1682 else
1684 temp = double_int_zero;
1685 temp_trunc = double_int_zero;
1688 /* If FIXED_CST is negative, we need to round the value toward 0:
1689 if the fractional bits are not zero, add 1 to temp. */
1690 if (SIGNED_FIXED_POINT_MODE_P (mode)
1691 && temp_trunc.is_negative ()
1692 && TREE_FIXED_CST (arg1).data != temp_trunc)
1693 temp += double_int_one;
1695 /* Given a fixed-point constant, make new constant with new type,
1696 appropriately sign-extended or truncated. */
1697 t = force_fit_type (type, temp, -1,
1698 (temp.is_negative ()
1699 && (TYPE_UNSIGNED (type)
1700 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1701 | TREE_OVERFLOW (arg1));
1703 return t;
1706 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1707 to another floating point type. */
1709 static tree
1710 fold_convert_const_real_from_real (tree type, const_tree arg1)
1712 REAL_VALUE_TYPE value;
1713 tree t;
1715 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1716 t = build_real (type, value);
1718 /* If converting an infinity or NAN to a representation that doesn't
1719 have one, set the overflow bit so that we can produce some kind of
1720 error message at the appropriate point if necessary. It's not the
1721 most user-friendly message, but it's better than nothing. */
1722 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1723 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1724 TREE_OVERFLOW (t) = 1;
1725 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1726 && !MODE_HAS_NANS (TYPE_MODE (type)))
1727 TREE_OVERFLOW (t) = 1;
1728 /* Regular overflow, conversion produced an infinity in a mode that
1729 can't represent them. */
1730 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1731 && REAL_VALUE_ISINF (value)
1732 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1733 TREE_OVERFLOW (t) = 1;
1734 else
1735 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1736 return t;
1739 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1740 to a floating point type. */
1742 static tree
1743 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1745 REAL_VALUE_TYPE value;
1746 tree t;
1748 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1749 t = build_real (type, value);
1751 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1752 return t;
1755 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1756 to another fixed-point type. */
1758 static tree
1759 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1761 FIXED_VALUE_TYPE value;
1762 tree t;
1763 bool overflow_p;
1765 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1766 TYPE_SATURATING (type));
1767 t = build_fixed (type, value);
1769 /* Propagate overflow flags. */
1770 if (overflow_p | TREE_OVERFLOW (arg1))
1771 TREE_OVERFLOW (t) = 1;
1772 return t;
1775 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1776 to a fixed-point type. */
1778 static tree
1779 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1781 FIXED_VALUE_TYPE value;
1782 tree t;
1783 bool overflow_p;
1784 double_int di;
1786 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1788 di.low = TREE_INT_CST_ELT (arg1, 0);
1789 if (TREE_INT_CST_NUNITS (arg1) == 1)
1790 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1791 else
1792 di.high = TREE_INT_CST_ELT (arg1, 1);
1794 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1795 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1796 TYPE_SATURATING (type));
1797 t = build_fixed (type, value);
1799 /* Propagate overflow flags. */
1800 if (overflow_p | TREE_OVERFLOW (arg1))
1801 TREE_OVERFLOW (t) = 1;
1802 return t;
1805 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1806 to a fixed-point type. */
1808 static tree
1809 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1811 FIXED_VALUE_TYPE value;
1812 tree t;
1813 bool overflow_p;
1815 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1816 &TREE_REAL_CST (arg1),
1817 TYPE_SATURATING (type));
1818 t = build_fixed (type, value);
1820 /* Propagate overflow flags. */
1821 if (overflow_p | TREE_OVERFLOW (arg1))
1822 TREE_OVERFLOW (t) = 1;
1823 return t;
1826 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1827 type TYPE. If no simplification can be done return NULL_TREE. */
1829 static tree
1830 fold_convert_const (enum tree_code code, tree type, tree arg1)
1832 if (TREE_TYPE (arg1) == type)
1833 return arg1;
1835 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1836 || TREE_CODE (type) == OFFSET_TYPE)
1838 if (TREE_CODE (arg1) == INTEGER_CST)
1839 return fold_convert_const_int_from_int (type, arg1);
1840 else if (TREE_CODE (arg1) == REAL_CST)
1841 return fold_convert_const_int_from_real (code, type, arg1);
1842 else if (TREE_CODE (arg1) == FIXED_CST)
1843 return fold_convert_const_int_from_fixed (type, arg1);
1845 else if (TREE_CODE (type) == REAL_TYPE)
1847 if (TREE_CODE (arg1) == INTEGER_CST)
1848 return build_real_from_int_cst (type, arg1);
1849 else if (TREE_CODE (arg1) == REAL_CST)
1850 return fold_convert_const_real_from_real (type, arg1);
1851 else if (TREE_CODE (arg1) == FIXED_CST)
1852 return fold_convert_const_real_from_fixed (type, arg1);
1854 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1856 if (TREE_CODE (arg1) == FIXED_CST)
1857 return fold_convert_const_fixed_from_fixed (type, arg1);
1858 else if (TREE_CODE (arg1) == INTEGER_CST)
1859 return fold_convert_const_fixed_from_int (type, arg1);
1860 else if (TREE_CODE (arg1) == REAL_CST)
1861 return fold_convert_const_fixed_from_real (type, arg1);
1863 return NULL_TREE;
1866 /* Construct a vector of zero elements of vector type TYPE. */
1868 static tree
1869 build_zero_vector (tree type)
1871 tree t;
1873 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1874 return build_vector_from_val (type, t);
1877 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1879 bool
1880 fold_convertible_p (const_tree type, const_tree arg)
1882 tree orig = TREE_TYPE (arg);
1884 if (type == orig)
1885 return true;
1887 if (TREE_CODE (arg) == ERROR_MARK
1888 || TREE_CODE (type) == ERROR_MARK
1889 || TREE_CODE (orig) == ERROR_MARK)
1890 return false;
1892 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1893 return true;
1895 switch (TREE_CODE (type))
1897 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1898 case POINTER_TYPE: case REFERENCE_TYPE:
1899 case OFFSET_TYPE:
1900 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1901 || TREE_CODE (orig) == OFFSET_TYPE)
1902 return true;
1903 return (TREE_CODE (orig) == VECTOR_TYPE
1904 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1906 case REAL_TYPE:
1907 case FIXED_POINT_TYPE:
1908 case COMPLEX_TYPE:
1909 case VECTOR_TYPE:
1910 case VOID_TYPE:
1911 return TREE_CODE (type) == TREE_CODE (orig);
1913 default:
1914 return false;
1918 /* Convert expression ARG to type TYPE. Used by the middle-end for
1919 simple conversions in preference to calling the front-end's convert. */
1921 tree
1922 fold_convert_loc (location_t loc, tree type, tree arg)
1924 tree orig = TREE_TYPE (arg);
1925 tree tem;
1927 if (type == orig)
1928 return arg;
1930 if (TREE_CODE (arg) == ERROR_MARK
1931 || TREE_CODE (type) == ERROR_MARK
1932 || TREE_CODE (orig) == ERROR_MARK)
1933 return error_mark_node;
1935 switch (TREE_CODE (type))
1937 case POINTER_TYPE:
1938 case REFERENCE_TYPE:
1939 /* Handle conversions between pointers to different address spaces. */
1940 if (POINTER_TYPE_P (orig)
1941 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1942 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1943 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1944 /* fall through */
1946 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1947 case OFFSET_TYPE:
1948 if (TREE_CODE (arg) == INTEGER_CST)
1950 tem = fold_convert_const (NOP_EXPR, type, arg);
1951 if (tem != NULL_TREE)
1952 return tem;
1954 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1955 || TREE_CODE (orig) == OFFSET_TYPE)
1956 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1957 if (TREE_CODE (orig) == COMPLEX_TYPE)
1958 return fold_convert_loc (loc, type,
1959 fold_build1_loc (loc, REALPART_EXPR,
1960 TREE_TYPE (orig), arg));
1961 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1962 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1963 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1965 case REAL_TYPE:
1966 if (TREE_CODE (arg) == INTEGER_CST)
1968 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1969 if (tem != NULL_TREE)
1970 return tem;
1972 else if (TREE_CODE (arg) == REAL_CST)
1974 tem = fold_convert_const (NOP_EXPR, type, arg);
1975 if (tem != NULL_TREE)
1976 return tem;
1978 else if (TREE_CODE (arg) == FIXED_CST)
1980 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1981 if (tem != NULL_TREE)
1982 return tem;
1985 switch (TREE_CODE (orig))
1987 case INTEGER_TYPE:
1988 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1989 case POINTER_TYPE: case REFERENCE_TYPE:
1990 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1992 case REAL_TYPE:
1993 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1995 case FIXED_POINT_TYPE:
1996 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1998 case COMPLEX_TYPE:
1999 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2000 return fold_convert_loc (loc, type, tem);
2002 default:
2003 gcc_unreachable ();
2006 case FIXED_POINT_TYPE:
2007 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2008 || TREE_CODE (arg) == REAL_CST)
2010 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2011 if (tem != NULL_TREE)
2012 goto fold_convert_exit;
2015 switch (TREE_CODE (orig))
2017 case FIXED_POINT_TYPE:
2018 case INTEGER_TYPE:
2019 case ENUMERAL_TYPE:
2020 case BOOLEAN_TYPE:
2021 case REAL_TYPE:
2022 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2024 case COMPLEX_TYPE:
2025 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2026 return fold_convert_loc (loc, type, tem);
2028 default:
2029 gcc_unreachable ();
2032 case COMPLEX_TYPE:
2033 switch (TREE_CODE (orig))
2035 case INTEGER_TYPE:
2036 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2037 case POINTER_TYPE: case REFERENCE_TYPE:
2038 case REAL_TYPE:
2039 case FIXED_POINT_TYPE:
2040 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2041 fold_convert_loc (loc, TREE_TYPE (type), arg),
2042 fold_convert_loc (loc, TREE_TYPE (type),
2043 integer_zero_node));
2044 case COMPLEX_TYPE:
2046 tree rpart, ipart;
2048 if (TREE_CODE (arg) == COMPLEX_EXPR)
2050 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2051 TREE_OPERAND (arg, 0));
2052 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2053 TREE_OPERAND (arg, 1));
2054 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2057 arg = save_expr (arg);
2058 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2059 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2060 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2061 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2062 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2065 default:
2066 gcc_unreachable ();
2069 case VECTOR_TYPE:
2070 if (integer_zerop (arg))
2071 return build_zero_vector (type);
2072 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2073 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2074 || TREE_CODE (orig) == VECTOR_TYPE);
2075 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2077 case VOID_TYPE:
2078 tem = fold_ignored_result (arg);
2079 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2081 default:
2082 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2083 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2084 gcc_unreachable ();
2086 fold_convert_exit:
2087 protected_set_expr_location_unshare (tem, loc);
2088 return tem;
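/* Illustrative examples of the dispatch above (a sketch; V4SI_TYPE
   stands for a hypothetical vector-of-four-int type node):

     fold_convert_loc (loc, double_type_node, c)  // C has COMPLEX_TYPE
       => (double) REALPART_EXPR <c>
     fold_convert_loc (loc, v4si_type, integer_zero_node)
       => build_zero_vector (v4si_type)           // i.e. {0, 0, 0, 0}
*/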
2091 /* Return false if expr can be assumed not to be an lvalue, true
2092 otherwise. */
2094 static bool
2095 maybe_lvalue_p (const_tree x)
2097 /* We only need to wrap lvalue tree codes. */
2098 switch (TREE_CODE (x))
2100 case VAR_DECL:
2101 case PARM_DECL:
2102 case RESULT_DECL:
2103 case LABEL_DECL:
2104 case FUNCTION_DECL:
2105 case SSA_NAME:
2107 case COMPONENT_REF:
2108 case MEM_REF:
2109 case INDIRECT_REF:
2110 case ARRAY_REF:
2111 case ARRAY_RANGE_REF:
2112 case BIT_FIELD_REF:
2113 case OBJ_TYPE_REF:
2115 case REALPART_EXPR:
2116 case IMAGPART_EXPR:
2117 case PREINCREMENT_EXPR:
2118 case PREDECREMENT_EXPR:
2119 case SAVE_EXPR:
2120 case TRY_CATCH_EXPR:
2121 case WITH_CLEANUP_EXPR:
2122 case COMPOUND_EXPR:
2123 case MODIFY_EXPR:
2124 case TARGET_EXPR:
2125 case COND_EXPR:
2126 case BIND_EXPR:
2127 break;
2129 default:
2130 /* Assume the worst for front-end tree codes. */
2131 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2132 break;
2133 return false;
2136 return true;
2139 /* Return an expr equal to X but certainly not valid as an lvalue. */
2141 tree
2142 non_lvalue_loc (location_t loc, tree x)
2144 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2145 us. */
2146 if (in_gimple_form)
2147 return x;
2149 if (! maybe_lvalue_p (x))
2150 return x;
2151 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2154 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2155 Zero means allow extended lvalues. */
2157 int pedantic_lvalues;
2159 /* When pedantic, return an expr equal to X but certainly not valid as a
2160 pedantic lvalue. Otherwise, return X. */
2162 static tree
2163 pedantic_non_lvalue_loc (location_t loc, tree x)
2165 if (pedantic_lvalues)
2166 return non_lvalue_loc (loc, x);
2168 return protected_set_expr_location_unshare (x, loc);
2171 /* Given a tree comparison code, return the code that is the logical inverse.
2172 It is generally not safe to do this for floating-point comparisons, except
2173 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2174 ERROR_MARK in this case. */
2176 enum tree_code
2177 invert_tree_comparison (enum tree_code code, bool honor_nans)
2179 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2180 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2181 return ERROR_MARK;
2183 switch (code)
2185 case EQ_EXPR:
2186 return NE_EXPR;
2187 case NE_EXPR:
2188 return EQ_EXPR;
2189 case GT_EXPR:
2190 return honor_nans ? UNLE_EXPR : LE_EXPR;
2191 case GE_EXPR:
2192 return honor_nans ? UNLT_EXPR : LT_EXPR;
2193 case LT_EXPR:
2194 return honor_nans ? UNGE_EXPR : GE_EXPR;
2195 case LE_EXPR:
2196 return honor_nans ? UNGT_EXPR : GT_EXPR;
2197 case LTGT_EXPR:
2198 return UNEQ_EXPR;
2199 case UNEQ_EXPR:
2200 return LTGT_EXPR;
2201 case UNGT_EXPR:
2202 return LE_EXPR;
2203 case UNGE_EXPR:
2204 return LT_EXPR;
2205 case UNLT_EXPR:
2206 return GE_EXPR;
2207 case UNLE_EXPR:
2208 return GT_EXPR;
2209 case ORDERED_EXPR:
2210 return UNORDERED_EXPR;
2211 case UNORDERED_EXPR:
2212 return ORDERED_EXPR;
2213 default:
2214 gcc_unreachable ();
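/* For example: invert_tree_comparison (LT_EXPR, false) is GE_EXPR, but
   invert_tree_comparison (LT_EXPR, true) is UNGE_EXPR, since !(x < y)
   must also be true when either operand is a NaN.  When
   flag_trapping_math is set the latter yields ERROR_MARK instead,
   because UNGE_EXPR would not raise the invalid exception on unordered
   operands the way LT_EXPR does.  */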
2218 /* Similar, but return the comparison that results if the operands are
2219 swapped. This is safe for floating-point. */
2221 enum tree_code
2222 swap_tree_comparison (enum tree_code code)
2224 switch (code)
2226 case EQ_EXPR:
2227 case NE_EXPR:
2228 case ORDERED_EXPR:
2229 case UNORDERED_EXPR:
2230 case LTGT_EXPR:
2231 case UNEQ_EXPR:
2232 return code;
2233 case GT_EXPR:
2234 return LT_EXPR;
2235 case GE_EXPR:
2236 return LE_EXPR;
2237 case LT_EXPR:
2238 return GT_EXPR;
2239 case LE_EXPR:
2240 return GE_EXPR;
2241 case UNGT_EXPR:
2242 return UNLT_EXPR;
2243 case UNGE_EXPR:
2244 return UNLE_EXPR;
2245 case UNLT_EXPR:
2246 return UNGT_EXPR;
2247 case UNLE_EXPR:
2248 return UNGE_EXPR;
2249 default:
2250 gcc_unreachable ();
2255 /* Convert a comparison tree code from an enum tree_code representation
2256 into a compcode bit-based encoding. This function is the inverse of
2257 compcode_to_comparison. */
2259 static enum comparison_code
2260 comparison_to_compcode (enum tree_code code)
2262 switch (code)
2264 case LT_EXPR:
2265 return COMPCODE_LT;
2266 case EQ_EXPR:
2267 return COMPCODE_EQ;
2268 case LE_EXPR:
2269 return COMPCODE_LE;
2270 case GT_EXPR:
2271 return COMPCODE_GT;
2272 case NE_EXPR:
2273 return COMPCODE_NE;
2274 case GE_EXPR:
2275 return COMPCODE_GE;
2276 case ORDERED_EXPR:
2277 return COMPCODE_ORD;
2278 case UNORDERED_EXPR:
2279 return COMPCODE_UNORD;
2280 case UNLT_EXPR:
2281 return COMPCODE_UNLT;
2282 case UNEQ_EXPR:
2283 return COMPCODE_UNEQ;
2284 case UNLE_EXPR:
2285 return COMPCODE_UNLE;
2286 case UNGT_EXPR:
2287 return COMPCODE_UNGT;
2288 case LTGT_EXPR:
2289 return COMPCODE_LTGT;
2290 case UNGE_EXPR:
2291 return COMPCODE_UNGE;
2292 default:
2293 gcc_unreachable ();
2297 /* Convert a compcode bit-based encoding of a comparison operator back
2298 to GCC's enum tree_code representation. This function is the
2299 inverse of comparison_to_compcode. */
2301 static enum tree_code
2302 compcode_to_comparison (enum comparison_code code)
2304 switch (code)
2306 case COMPCODE_LT:
2307 return LT_EXPR;
2308 case COMPCODE_EQ:
2309 return EQ_EXPR;
2310 case COMPCODE_LE:
2311 return LE_EXPR;
2312 case COMPCODE_GT:
2313 return GT_EXPR;
2314 case COMPCODE_NE:
2315 return NE_EXPR;
2316 case COMPCODE_GE:
2317 return GE_EXPR;
2318 case COMPCODE_ORD:
2319 return ORDERED_EXPR;
2320 case COMPCODE_UNORD:
2321 return UNORDERED_EXPR;
2322 case COMPCODE_UNLT:
2323 return UNLT_EXPR;
2324 case COMPCODE_UNEQ:
2325 return UNEQ_EXPR;
2326 case COMPCODE_UNLE:
2327 return UNLE_EXPR;
2328 case COMPCODE_UNGT:
2329 return UNGT_EXPR;
2330 case COMPCODE_LTGT:
2331 return LTGT_EXPR;
2332 case COMPCODE_UNGE:
2333 return UNGE_EXPR;
2334 default:
2335 gcc_unreachable ();
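/* The bit-based encoding turns logical combination of comparisons into
   plain mask arithmetic, e.g.

     comparison_to_compcode (LT_EXPR) | comparison_to_compcode (EQ_EXPR)
       == COMPCODE_LE      (a < b) || (a == b)   <=>  a <= b
     comparison_to_compcode (LE_EXPR) & comparison_to_compcode (GE_EXPR)
       == COMPCODE_EQ      (a <= b) && (a >= b)  <=>  a == b

   combine_comparisons below relies on exactly this property.  */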
2339 /* Return a tree for the comparison which is the combination of
2340 doing the AND or OR (depending on CODE) of the two operations LCODE
2341 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2342 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2343 if this makes the transformation invalid. */
2345 tree
2346 combine_comparisons (location_t loc,
2347 enum tree_code code, enum tree_code lcode,
2348 enum tree_code rcode, tree truth_type,
2349 tree ll_arg, tree lr_arg)
2351 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2352 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2353 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2354 int compcode;
2356 switch (code)
2358 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2359 compcode = lcompcode & rcompcode;
2360 break;
2362 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2363 compcode = lcompcode | rcompcode;
2364 break;
2366 default:
2367 return NULL_TREE;
2370 if (!honor_nans)
2372 /* Eliminate unordered comparisons, as well as LTGT and ORD
2373 which are not used unless the mode has NaNs. */
2374 compcode &= ~COMPCODE_UNORD;
2375 if (compcode == COMPCODE_LTGT)
2376 compcode = COMPCODE_NE;
2377 else if (compcode == COMPCODE_ORD)
2378 compcode = COMPCODE_TRUE;
2380 else if (flag_trapping_math)
2382 /* Check that the original operation and the optimized ones will trap
2383 under the same condition. */
2384 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2385 && (lcompcode != COMPCODE_EQ)
2386 && (lcompcode != COMPCODE_ORD);
2387 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2388 && (rcompcode != COMPCODE_EQ)
2389 && (rcompcode != COMPCODE_ORD);
2390 bool trap = (compcode & COMPCODE_UNORD) == 0
2391 && (compcode != COMPCODE_EQ)
2392 && (compcode != COMPCODE_ORD);
2394 /* In a short-circuited boolean expression the LHS might be
2395 such that the RHS, if evaluated, will never trap. For
2396 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2397 if neither x nor y is NaN. (This is a mixed blessing: for
2398 example, the expression above will never trap, hence
2399 optimizing it to x < y would be invalid). */
2400 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2401 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2402 rtrap = false;
2404 /* If the comparison was short-circuited, and only the RHS
2405 trapped, we may now generate a spurious trap. */
2406 if (rtrap && !ltrap
2407 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2408 return NULL_TREE;
2410 /* If we changed the conditions that cause a trap, we lose. */
2411 if ((ltrap || rtrap) != trap)
2412 return NULL_TREE;
2415 if (compcode == COMPCODE_TRUE)
2416 return constant_boolean_node (true, truth_type);
2417 else if (compcode == COMPCODE_FALSE)
2418 return constant_boolean_node (false, truth_type);
2419 else
2421 enum tree_code tcode;
2423 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2424 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
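/* A worked example (sketch; X and Y are floating-point operands):

     combine_comparisons (loc, TRUTH_ANDIF_EXPR, LE_EXPR, GE_EXPR,
                          boolean_type_node, x, y)

   folds to x == y when flag_trapping_math is clear, but returns
   NULL_TREE when traps are honored: LE_EXPR and GE_EXPR raise the
   invalid exception on unordered operands while EQ_EXPR does not, so
   the combined form would change which conditions trap.  */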
2428 /* Return nonzero if two operands (typically of the same tree node)
2429 are necessarily equal. If either argument has side-effects this
2430 function returns zero. FLAGS modifies behavior as follows:
2432 If OEP_ONLY_CONST is set, only return nonzero for constants.
2433 This function tests whether the operands are indistinguishable;
2434 it does not test whether they are equal using C's == operation.
2435 The distinction is important for IEEE floating point, because
2436 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2437 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2439 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2440 even though it may hold multiple values during a function.
2441 This is because a GCC tree node guarantees that nothing else is
2442 executed between the evaluation of its "operands" (which may often
2443 be evaluated in arbitrary order). Hence if the operands themselves
2444 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2445 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2446 unset means assuming isochronic (or instantaneous) tree equivalence.
2447 Unless comparing arbitrary expression trees, such as from different
2448 statements, this flag can usually be left unset.
2450 If OEP_PURE_SAME is set, then pure functions with identical arguments
2451 are considered the same. It is used when the caller has other ways
2452 to ensure that global memory is unchanged in between. */
2454 int
2455 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2457 /* If either is ERROR_MARK, they aren't equal. */
2458 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2459 || TREE_TYPE (arg0) == error_mark_node
2460 || TREE_TYPE (arg1) == error_mark_node)
2461 return 0;
2463 /* Similar, if either does not have a type (like a released SSA name),
2464 they aren't equal. */
2465 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2466 return 0;
2468 /* Check equality of integer constants before bailing out due to
2469 precision differences. */
2470 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2471 return tree_int_cst_equal (arg0, arg1);
2473 /* If both types don't have the same signedness, then we can't consider
2474 them equal. We must check this before the STRIP_NOPS calls
2475 because they may change the signedness of the arguments. As pointers
2476 strictly don't have a signedness, require either two pointers or
2477 two non-pointers as well. */
2478 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2479 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2480 return 0;
2482 /* We cannot consider pointers to different address spaces equal. */
2483 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2484 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2485 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2486 return 0;
2488 /* If both types don't have the same precision, then it is not safe
2489 to strip NOPs. */
2490 if (element_precision (TREE_TYPE (arg0))
2491 != element_precision (TREE_TYPE (arg1)))
2492 return 0;
2494 STRIP_NOPS (arg0);
2495 STRIP_NOPS (arg1);
2497 /* In case both args are comparisons but with different comparison
2498 code, try to swap the comparison operands of one arg to produce
2499 a match and compare that variant. */
2500 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2501 && COMPARISON_CLASS_P (arg0)
2502 && COMPARISON_CLASS_P (arg1))
2504 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2506 if (TREE_CODE (arg0) == swap_code)
2507 return operand_equal_p (TREE_OPERAND (arg0, 0),
2508 TREE_OPERAND (arg1, 1), flags)
2509 && operand_equal_p (TREE_OPERAND (arg0, 1),
2510 TREE_OPERAND (arg1, 0), flags);
2513 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2514 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2515 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2516 return 0;
2518 /* This is needed for conversions and for COMPONENT_REF.
2519 Might as well play it safe and always test this. */
2520 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2521 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2522 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2523 return 0;
2525 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2526 We don't care about side effects in that case because the SAVE_EXPR
2527 takes care of that for us. In all other cases, two expressions are
2528 equal if they have no side effects. If we have two identical
2529 expressions with side effects that should be treated the same due
2530 to the only side effects being identical SAVE_EXPR's, that will
2531 be detected in the recursive calls below.
2532 If we are taking an invariant address of two identical objects
2533 they are necessarily equal as well. */
2534 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2535 && (TREE_CODE (arg0) == SAVE_EXPR
2536 || (flags & OEP_CONSTANT_ADDRESS_OF)
2537 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2538 return 1;
2540 /* Next handle constant cases, those for which we can return 1 even
2541 if ONLY_CONST is set. */
2542 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2543 switch (TREE_CODE (arg0))
2545 case INTEGER_CST:
2546 return tree_int_cst_equal (arg0, arg1);
2548 case FIXED_CST:
2549 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2550 TREE_FIXED_CST (arg1));
2552 case REAL_CST:
2553 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2554 TREE_REAL_CST (arg1)))
2555 return 1;
2558 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2560 /* If we do not distinguish between signed and unsigned zero,
2561 consider them equal. */
2562 if (real_zerop (arg0) && real_zerop (arg1))
2563 return 1;
2565 return 0;
2567 case VECTOR_CST:
2569 unsigned i;
2571 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2572 return 0;
2574 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2576 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2577 VECTOR_CST_ELT (arg1, i), flags))
2578 return 0;
2580 return 1;
2583 case COMPLEX_CST:
2584 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2585 flags)
2586 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2587 flags));
2589 case STRING_CST:
2590 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2591 && ! memcmp (TREE_STRING_POINTER (arg0),
2592 TREE_STRING_POINTER (arg1),
2593 TREE_STRING_LENGTH (arg0)));
2595 case ADDR_EXPR:
2596 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2597 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2598 ? OEP_CONSTANT_ADDRESS_OF : 0);
2599 default:
2600 break;
2603 if (flags & OEP_ONLY_CONST)
2604 return 0;
2606 /* Define macros to test an operand from arg0 and arg1 for equality and a
2607 variant that allows null and views null as being different from any
2608 non-null value. In the latter case, if either is null, then both
2609 must be; otherwise, do the normal comparison. */
2610 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2611 TREE_OPERAND (arg1, N), flags)
2613 #define OP_SAME_WITH_NULL(N) \
2614 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2615 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2617 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2619 case tcc_unary:
2620 /* Two conversions are equal only if signedness and modes match. */
2621 switch (TREE_CODE (arg0))
2623 CASE_CONVERT:
2624 case FIX_TRUNC_EXPR:
2625 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2626 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2627 return 0;
2628 break;
2629 default:
2630 break;
2633 return OP_SAME (0);
2636 case tcc_comparison:
2637 case tcc_binary:
2638 if (OP_SAME (0) && OP_SAME (1))
2639 return 1;
2641 /* For commutative ops, allow the other order. */
2642 return (commutative_tree_code (TREE_CODE (arg0))
2643 && operand_equal_p (TREE_OPERAND (arg0, 0),
2644 TREE_OPERAND (arg1, 1), flags)
2645 && operand_equal_p (TREE_OPERAND (arg0, 1),
2646 TREE_OPERAND (arg1, 0), flags));
2648 case tcc_reference:
2649 /* If either of the pointer (or reference) expressions we are
2650 dereferencing contains a side effect, these cannot be equal,
2651 but their addresses can be. */
2652 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2653 && (TREE_SIDE_EFFECTS (arg0)
2654 || TREE_SIDE_EFFECTS (arg1)))
2655 return 0;
2657 switch (TREE_CODE (arg0))
2659 case INDIRECT_REF:
2660 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2661 return OP_SAME (0);
2663 case REALPART_EXPR:
2664 case IMAGPART_EXPR:
2665 return OP_SAME (0);
2667 case TARGET_MEM_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 /* Require equal extra operands and then fall through to MEM_REF
2670 handling of the two common operands. */
2671 if (!OP_SAME_WITH_NULL (2)
2672 || !OP_SAME_WITH_NULL (3)
2673 || !OP_SAME_WITH_NULL (4))
2674 return 0;
2675 /* Fallthru. */
2676 case MEM_REF:
2677 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2678 /* Require equal access sizes, and similar pointer types.
2679 We can have incomplete types for array references of
2680 variable-sized arrays from the Fortran frontend
2681 though. Also verify the types are compatible. */
2682 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2683 || (TYPE_SIZE (TREE_TYPE (arg0))
2684 && TYPE_SIZE (TREE_TYPE (arg1))
2685 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2686 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2687 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2688 && alias_ptr_types_compatible_p
2689 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2690 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2691 && OP_SAME (0) && OP_SAME (1));
2693 case ARRAY_REF:
2694 case ARRAY_RANGE_REF:
2695 /* Operands 2 and 3 may be null.
2696 Compare the array index by value first if it is constant, as we
2697 may have different types but the same value here. */
2698 if (!OP_SAME (0))
2699 return 0;
2700 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2701 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2702 TREE_OPERAND (arg1, 1))
2703 || OP_SAME (1))
2704 && OP_SAME_WITH_NULL (2)
2705 && OP_SAME_WITH_NULL (3));
2707 case COMPONENT_REF:
2708 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2709 may be NULL when we're called to compare MEM_EXPRs. */
2710 if (!OP_SAME_WITH_NULL (0)
2711 || !OP_SAME (1))
2712 return 0;
2713 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2714 return OP_SAME_WITH_NULL (2);
2716 case BIT_FIELD_REF:
2717 if (!OP_SAME (0))
2718 return 0;
2719 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2720 return OP_SAME (1) && OP_SAME (2);
2722 default:
2723 return 0;
2726 case tcc_expression:
2727 switch (TREE_CODE (arg0))
2729 case ADDR_EXPR:
2730 case TRUTH_NOT_EXPR:
2731 return OP_SAME (0);
2733 case TRUTH_ANDIF_EXPR:
2734 case TRUTH_ORIF_EXPR:
2735 return OP_SAME (0) && OP_SAME (1);
2737 case FMA_EXPR:
2738 case WIDEN_MULT_PLUS_EXPR:
2739 case WIDEN_MULT_MINUS_EXPR:
2740 if (!OP_SAME (2))
2741 return 0;
2742 /* The multiplication operands are commutative. */
2743 /* FALLTHRU */
2745 case TRUTH_AND_EXPR:
2746 case TRUTH_OR_EXPR:
2747 case TRUTH_XOR_EXPR:
2748 if (OP_SAME (0) && OP_SAME (1))
2749 return 1;
2751 /* Otherwise take into account this is a commutative operation. */
2752 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2753 TREE_OPERAND (arg1, 1), flags)
2754 && operand_equal_p (TREE_OPERAND (arg0, 1),
2755 TREE_OPERAND (arg1, 0), flags));
2757 case COND_EXPR:
2758 case VEC_COND_EXPR:
2759 case DOT_PROD_EXPR:
2760 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2762 default:
2763 return 0;
2766 case tcc_vl_exp:
2767 switch (TREE_CODE (arg0))
2769 case CALL_EXPR:
2770 /* If the CALL_EXPRs call different functions, then they
2771 clearly cannot be equal. */
2772 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2773 flags))
2774 return 0;
2777 unsigned int cef = call_expr_flags (arg0);
2778 if (flags & OEP_PURE_SAME)
2779 cef &= ECF_CONST | ECF_PURE;
2780 else
2781 cef &= ECF_CONST;
2782 if (!cef)
2783 return 0;
2786 /* Now see if all the arguments are the same. */
2788 const_call_expr_arg_iterator iter0, iter1;
2789 const_tree a0, a1;
2790 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2791 a1 = first_const_call_expr_arg (arg1, &iter1);
2792 a0 && a1;
2793 a0 = next_const_call_expr_arg (&iter0),
2794 a1 = next_const_call_expr_arg (&iter1))
2795 if (! operand_equal_p (a0, a1, flags))
2796 return 0;
2798 /* If we get here and both argument lists are exhausted
2799 then the CALL_EXPRs are equal. */
2800 return ! (a0 || a1);
2802 default:
2803 return 0;
2806 case tcc_declaration:
2807 /* Consider __builtin_sqrt equal to sqrt. */
2808 return (TREE_CODE (arg0) == FUNCTION_DECL
2809 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2810 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2811 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2813 default:
2814 return 0;
2817 #undef OP_SAME
2818 #undef OP_SAME_WITH_NULL
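/* Typical queries (sketch; A and B are arbitrary trees):

     operand_equal_p (a, b, 0);               // structural equality
     operand_equal_p (a, b, OEP_ONLY_CONST);  // only equal constants
     operand_equal_p (a, b, OEP_PURE_SAME);   // pure calls with equal
                                              // arguments also match

   As documented above, a nonzero result is never returned for operands
   with side effects, apart from the SAVE_EXPR and invariant-address
   cases.  */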
2821 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2822 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2824 When in doubt, return 0. */
2826 static int
2827 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2829 int unsignedp1, unsignedpo;
2830 tree primarg0, primarg1, primother;
2831 unsigned int correct_width;
2833 if (operand_equal_p (arg0, arg1, 0))
2834 return 1;
2836 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2837 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2838 return 0;
2840 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2841 and see if the inner values are the same. This removes any
2842 signedness comparison, which doesn't matter here. */
2843 primarg0 = arg0, primarg1 = arg1;
2844 STRIP_NOPS (primarg0);
2845 STRIP_NOPS (primarg1);
2846 if (operand_equal_p (primarg0, primarg1, 0))
2847 return 1;
2849 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2850 actual comparison operand, ARG0.
2852 First throw away any conversions to wider types
2853 already present in the operands. */
2855 primarg1 = get_narrower (arg1, &unsignedp1);
2856 primother = get_narrower (other, &unsignedpo);
2858 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2859 if (unsignedp1 == unsignedpo
2860 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2861 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2863 tree type = TREE_TYPE (arg0);
2865 /* Make sure shorter operand is extended the right way
2866 to match the longer operand. */
2867 primarg1 = fold_convert (signed_or_unsigned_type_for
2868 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2870 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2871 return 1;
2874 return 0;
2877 /* See if ARG is an expression that is either a comparison or is performing
2878 arithmetic on comparisons. The comparisons must only be comparing
2879 two different values, which will be stored in *CVAL1 and *CVAL2; if
2880 they are nonzero it means that some operands have already been found.
2881 No variables may be used anywhere else in the expression except in the
2882 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2883 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2885 If this is true, return 1. Otherwise, return zero. */
2887 static int
2888 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2890 enum tree_code code = TREE_CODE (arg);
2891 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2893 /* We can handle some of the tcc_expression cases here. */
2894 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2895 tclass = tcc_unary;
2896 else if (tclass == tcc_expression
2897 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2898 || code == COMPOUND_EXPR))
2899 tclass = tcc_binary;
2901 else if (tclass == tcc_expression && code == SAVE_EXPR
2902 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2904 /* If we've already found a CVAL1 or CVAL2, this expression is
2905 too complex to handle. */
2906 if (*cval1 || *cval2)
2907 return 0;
2909 tclass = tcc_unary;
2910 *save_p = 1;
2913 switch (tclass)
2915 case tcc_unary:
2916 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2918 case tcc_binary:
2919 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2920 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2921 cval1, cval2, save_p));
2923 case tcc_constant:
2924 return 1;
2926 case tcc_expression:
2927 if (code == COND_EXPR)
2928 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2929 cval1, cval2, save_p)
2930 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2931 cval1, cval2, save_p)
2932 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2933 cval1, cval2, save_p));
2934 return 0;
2936 case tcc_comparison:
2937 /* First see if we can handle the first operand, then the second. For
2938 the second operand, we know *CVAL1 can't be zero. It must be that
2939 one side of the comparison is each of the values; test for the
2940 case where this isn't true by failing if the two operands
2941 are the same. */
2943 if (operand_equal_p (TREE_OPERAND (arg, 0),
2944 TREE_OPERAND (arg, 1), 0))
2945 return 0;
2947 if (*cval1 == 0)
2948 *cval1 = TREE_OPERAND (arg, 0);
2949 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2951 else if (*cval2 == 0)
2952 *cval2 = TREE_OPERAND (arg, 0);
2953 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2955 else
2956 return 0;
2958 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2960 else if (*cval2 == 0)
2961 *cval2 = TREE_OPERAND (arg, 1);
2962 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2964 else
2965 return 0;
2967 return 1;
2969 default:
2970 return 0;
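/* For example: with ARG = (x < y) || (x == y) this returns 1 and sets
   *CVAL1 = x, *CVAL2 = y, whereas (x < y) || (x == z) returns 0 because
   the comparisons mention three distinct values.  */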
2974 /* ARG is a tree that is known to contain just arithmetic operations and
2975 comparisons. Evaluate the operations in the tree substituting NEW0 for
2976 any occurrence of OLD0 as an operand of a comparison and likewise for
2977 NEW1 and OLD1. */
2979 static tree
2980 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2981 tree old1, tree new1)
2983 tree type = TREE_TYPE (arg);
2984 enum tree_code code = TREE_CODE (arg);
2985 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2987 /* We can handle some of the tcc_expression cases here. */
2988 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2989 tclass = tcc_unary;
2990 else if (tclass == tcc_expression
2991 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2992 tclass = tcc_binary;
2994 switch (tclass)
2996 case tcc_unary:
2997 return fold_build1_loc (loc, code, type,
2998 eval_subst (loc, TREE_OPERAND (arg, 0),
2999 old0, new0, old1, new1));
3001 case tcc_binary:
3002 return fold_build2_loc (loc, code, type,
3003 eval_subst (loc, TREE_OPERAND (arg, 0),
3004 old0, new0, old1, new1),
3005 eval_subst (loc, TREE_OPERAND (arg, 1),
3006 old0, new0, old1, new1));
3008 case tcc_expression:
3009 switch (code)
3011 case SAVE_EXPR:
3012 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3013 old1, new1);
3015 case COMPOUND_EXPR:
3016 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3017 old1, new1);
3019 case COND_EXPR:
3020 return fold_build3_loc (loc, code, type,
3021 eval_subst (loc, TREE_OPERAND (arg, 0),
3022 old0, new0, old1, new1),
3023 eval_subst (loc, TREE_OPERAND (arg, 1),
3024 old0, new0, old1, new1),
3025 eval_subst (loc, TREE_OPERAND (arg, 2),
3026 old0, new0, old1, new1));
3027 default:
3028 break;
3030 /* Fall through - ??? */
3032 case tcc_comparison:
3034 tree arg0 = TREE_OPERAND (arg, 0);
3035 tree arg1 = TREE_OPERAND (arg, 1);
3037 /* We need to check both for exact equality and tree equality. The
3038 former will be true if the operand has a side-effect. In that
3039 case, we know the operand occurred exactly once. */
3041 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3042 arg0 = new0;
3043 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3044 arg0 = new1;
3046 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3047 arg1 = new0;
3048 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3049 arg1 = new1;
3051 return fold_build2_loc (loc, code, type, arg0, arg1);
3054 default:
3055 return arg;
3059 /* Return a tree for the case when the result of an expression is RESULT
3060 converted to TYPE and OMITTED was previously an operand of the expression
3061 but is now not needed (e.g., we folded OMITTED * 0).
3063 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3064 the conversion of RESULT to TYPE. */
3066 tree
3067 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3069 tree t = fold_convert_loc (loc, type, result);
3071 /* If the resulting operand is an empty statement, just return the omitted
3072 statement cast to void. */
3073 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3074 return build1_loc (loc, NOP_EXPR, void_type_node,
3075 fold_ignored_result (omitted));
3077 if (TREE_SIDE_EFFECTS (omitted))
3078 return build2_loc (loc, COMPOUND_EXPR, type,
3079 fold_ignored_result (omitted), t);
3081 return non_lvalue_loc (loc, t);
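/* For example (sketch): when folding f () * 0, the call cannot simply
   be dropped if it has side effects, so the folder emits

     omit_one_operand_loc (loc, type, integer_zero_node, call);

   which builds COMPOUND_EXPR <f (), 0>, i.e. the C expression
   (f (), 0).  */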
3084 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3086 static tree
3087 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3088 tree omitted)
3090 tree t = fold_convert_loc (loc, type, result);
3092 /* If the resulting operand is an empty statement, just return the omitted
3093 statement cast to void. */
3094 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3095 return build1_loc (loc, NOP_EXPR, void_type_node,
3096 fold_ignored_result (omitted));
3098 if (TREE_SIDE_EFFECTS (omitted))
3099 return build2_loc (loc, COMPOUND_EXPR, type,
3100 fold_ignored_result (omitted), t);
3102 return pedantic_non_lvalue_loc (loc, t);
3105 /* Return a tree for the case when the result of an expression is RESULT
3106 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3107 of the expression but are now not needed.
3109 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3110 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3111 evaluated before OMITTED2. Otherwise, if neither has side effects,
3112 just do the conversion of RESULT to TYPE. */
3114 tree
3115 omit_two_operands_loc (location_t loc, tree type, tree result,
3116 tree omitted1, tree omitted2)
3118 tree t = fold_convert_loc (loc, type, result);
3120 if (TREE_SIDE_EFFECTS (omitted2))
3121 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3122 if (TREE_SIDE_EFFECTS (omitted1))
3123 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3125 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3129 /* Return a simplified tree node for the truth-negation of ARG. This
3130 never alters ARG itself. We assume that ARG is an operation that
3131 returns a truth value (0 or 1).
3133 FIXME: one would think we would fold the result, but it causes
3134 problems with the dominator optimizer. */
3136 static tree
3137 fold_truth_not_expr (location_t loc, tree arg)
3139 tree type = TREE_TYPE (arg);
3140 enum tree_code code = TREE_CODE (arg);
3141 location_t loc1, loc2;
3143 /* If this is a comparison, we can simply invert it, except for
3144 floating-point non-equality comparisons, in which case we just
3145 enclose a TRUTH_NOT_EXPR around what we have. */
3147 if (TREE_CODE_CLASS (code) == tcc_comparison)
3149 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3150 if (FLOAT_TYPE_P (op_type)
3151 && flag_trapping_math
3152 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3153 && code != NE_EXPR && code != EQ_EXPR)
3154 return NULL_TREE;
3156 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3157 if (code == ERROR_MARK)
3158 return NULL_TREE;
3160 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3161 TREE_OPERAND (arg, 1));
3164 switch (code)
3166 case INTEGER_CST:
3167 return constant_boolean_node (integer_zerop (arg), type);
3169 case TRUTH_AND_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3172 return build2_loc (loc, TRUTH_OR_EXPR, type,
3173 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3174 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3176 case TRUTH_OR_EXPR:
3177 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3178 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3179 return build2_loc (loc, TRUTH_AND_EXPR, type,
3180 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3181 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3183 case TRUTH_XOR_EXPR:
3184 /* Here we can invert either operand. We invert the first operand
3185 unless the second operand is a TRUTH_NOT_EXPR in which case our
3186 result is the XOR of the first operand with the inside of the
3187 negation of the second operand. */
3189 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3190 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3191 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3192 else
3193 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3194 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3195 TREE_OPERAND (arg, 1));
3197 case TRUTH_ANDIF_EXPR:
3198 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3199 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3200 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3201 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3202 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3204 case TRUTH_ORIF_EXPR:
3205 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3206 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3207 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3208 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3209 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3211 case TRUTH_NOT_EXPR:
3212 return TREE_OPERAND (arg, 0);
3214 case COND_EXPR:
3216 tree arg1 = TREE_OPERAND (arg, 1);
3217 tree arg2 = TREE_OPERAND (arg, 2);
3219 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3220 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3222 /* A COND_EXPR may have a throw as one operand, which
3223 then has void type. Just leave void operands
3224 as they are. */
3225 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3226 VOID_TYPE_P (TREE_TYPE (arg1))
3227 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3228 VOID_TYPE_P (TREE_TYPE (arg2))
3229 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3232 case COMPOUND_EXPR:
3233 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3234 return build2_loc (loc, COMPOUND_EXPR, type,
3235 TREE_OPERAND (arg, 0),
3236 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3238 case NON_LVALUE_EXPR:
3239 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3240 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3242 CASE_CONVERT:
3243 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3244 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3246 /* ... fall through ... */
3248 case FLOAT_EXPR:
3249 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3250 return build1_loc (loc, TREE_CODE (arg), type,
3251 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3253 case BIT_AND_EXPR:
3254 if (!integer_onep (TREE_OPERAND (arg, 1)))
3255 return NULL_TREE;
3256 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3258 case SAVE_EXPR:
3259 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3261 case CLEANUP_POINT_EXPR:
3262 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3263 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3264 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3266 default:
3267 return NULL_TREE;
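/* The TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's laws
   at the tree level:

     ! (a && b)  ->  !a || !b
     ! (a || b)  ->  !a && !b

   while the comparison case merely flips the code, so ! (x < y) becomes
   x >= y (UNGE_EXPR when NaNs must be honored).  */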
3271 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3272 assume that ARG is an operation that returns a truth value (0 or 1
3273 for scalars, 0 or -1 for vectors). Return the folded expression if
3274 folding is successful. Otherwise, return NULL_TREE. */
3276 static tree
3277 fold_invert_truthvalue (location_t loc, tree arg)
3279 tree type = TREE_TYPE (arg);
3280 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3281 ? BIT_NOT_EXPR
3282 : TRUTH_NOT_EXPR,
3283 type, arg);
3286 /* Return a simplified tree node for the truth-negation of ARG. This
3287 never alters ARG itself. We assume that ARG is an operation that
3288 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3290 tree
3291 invert_truthvalue_loc (location_t loc, tree arg)
3293 if (TREE_CODE (arg) == ERROR_MARK)
3294 return arg;
3296 tree type = TREE_TYPE (arg);
3297 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3298 ? BIT_NOT_EXPR
3299 : TRUTH_NOT_EXPR,
3300 type, arg);
3303 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3304 operands are another bit-wise operation with a common input. If so,
3305 distribute the bit operations to save an operation and possibly two if
3306 constants are involved. For example, convert
3307 (A | B) & (A | C) into A | (B & C)
3308 Further simplification will occur if B and C are constants.
3310 If this optimization cannot be done, 0 will be returned. */
3312 static tree
3313 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3314 tree arg0, tree arg1)
3316 tree common;
3317 tree left, right;
3319 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3320 || TREE_CODE (arg0) == code
3321 || (TREE_CODE (arg0) != BIT_AND_EXPR
3322 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3323 return 0;
3325 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3327 common = TREE_OPERAND (arg0, 0);
3328 left = TREE_OPERAND (arg0, 1);
3329 right = TREE_OPERAND (arg1, 1);
3331 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3333 common = TREE_OPERAND (arg0, 0);
3334 left = TREE_OPERAND (arg0, 1);
3335 right = TREE_OPERAND (arg1, 0);
3337 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3339 common = TREE_OPERAND (arg0, 1);
3340 left = TREE_OPERAND (arg0, 0);
3341 right = TREE_OPERAND (arg1, 1);
3343 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3345 common = TREE_OPERAND (arg0, 1);
3346 left = TREE_OPERAND (arg0, 0);
3347 right = TREE_OPERAND (arg1, 0);
3349 else
3350 return 0;
3352 common = fold_convert_loc (loc, type, common);
3353 left = fold_convert_loc (loc, type, left);
3354 right = fold_convert_loc (loc, type, right);
3355 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3356 fold_build2_loc (loc, code, type, left, right));
3359 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3360 with code CODE. This optimization is unsafe. */
3361 static tree
3362 distribute_real_division (location_t loc, enum tree_code code, tree type,
3363 tree arg0, tree arg1)
3365 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3366 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3368 /* (A / C) +- (B / C) -> (A +- B) / C. */
3369 if (mul0 == mul1
3370 && operand_equal_p (TREE_OPERAND (arg0, 1),
3371 TREE_OPERAND (arg1, 1), 0))
3372 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3373 fold_build2_loc (loc, code, type,
3374 TREE_OPERAND (arg0, 0),
3375 TREE_OPERAND (arg1, 0)),
3376 TREE_OPERAND (arg0, 1));
3378 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3379 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3380 TREE_OPERAND (arg1, 0), 0)
3381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3382 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3384 REAL_VALUE_TYPE r0, r1;
3385 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3386 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3387 if (!mul0)
3388 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3389 if (!mul1)
3390 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3391 real_arithmetic (&r0, code, &r0, &r1);
3392 return fold_build2_loc (loc, MULT_EXPR, type,
3393 TREE_OPERAND (arg0, 0),
3394 build_real (type, r0));
3397 return NULL_TREE;
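/* Worked examples (sketch; callers typically guard this with
   flag_unsafe_math_optimizations, which is why the comment above calls
   it unsafe):

     a/5.0 + b/5.0  ->  (a + b) / 5.0                    first pattern
     a/2.0 - a/4.0  ->  a * (0.5 - 0.25)  ->  a * 0.25   second pattern

   Rounding of the intermediate results can differ, so this is not an
   IEEE-preserving rewrite.  */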
3400 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3401 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3403 static tree
3404 make_bit_field_ref (location_t loc, tree inner, tree type,
3405 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3407 tree result, bftype;
3409 if (bitpos == 0)
3411 tree size = TYPE_SIZE (TREE_TYPE (inner));
3412 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3413 || POINTER_TYPE_P (TREE_TYPE (inner)))
3414 && tree_fits_shwi_p (size)
3415 && tree_to_shwi (size) == bitsize)
3416 return fold_convert_loc (loc, type, inner);
3419 bftype = type;
3420 if (TYPE_PRECISION (bftype) != bitsize
3421 || TYPE_UNSIGNED (bftype) == !unsignedp)
3422 bftype = build_nonstandard_integer_type (bitsize, 0);
3424 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3425 size_int (bitsize), bitsize_int (bitpos));
3427 if (bftype != type)
3428 result = fold_convert_loc (loc, type, result);
3430 return result;
3433 /* Optimize a bit-field compare.
3435 There are two cases: the first is a compare against a constant, and the
3436 second is a comparison of two items where the fields are at the same
3437 bit position relative to the start of a chunk (byte, halfword, word)
3438 large enough to contain it. In these cases we can avoid the shift
3439 implicit in bitfield extractions.
3441 For constants, we emit a compare of the shifted constant with the
3442 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3443 compared. For two fields at the same position, we do the ANDs with the
3444 similar mask and compare the result of the ANDs.
3446 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3447 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3448 are the left and right operands of the comparison, respectively.
3450 If the optimization described above can be done, we return the resulting
3451 tree. Otherwise we return zero. */
3453 static tree
3454 optimize_bit_field_compare (location_t loc, enum tree_code code,
3455 tree compare_type, tree lhs, tree rhs)
3457 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3458 tree type = TREE_TYPE (lhs);
3459 tree unsigned_type;
3460 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3461 machine_mode lmode, rmode, nmode;
3462 int lunsignedp, runsignedp;
3463 int lvolatilep = 0, rvolatilep = 0;
3464 tree linner, rinner = NULL_TREE;
3465 tree mask;
3466 tree offset;
3468 /* Get all the information about the extractions being done. If the bit size
3469 is the same as the size of the underlying object, we aren't doing an
3470 extraction at all and so can do nothing. We also don't want to
3471 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3472 then will no longer be able to replace it. */
3473 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3474 &lunsignedp, &lvolatilep, false);
3475 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3476 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3477 return 0;
3479 if (!const_p)
3481 /* If this is not a constant, we can only do something if bit positions,
3482 sizes, and signedness are the same. */
3483 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3484 &runsignedp, &rvolatilep, false);
3486 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3487 || lunsignedp != runsignedp || offset != 0
3488 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3489 return 0;
3492 /* See if we can find a mode to refer to this field. We should be able to,
3493 but fail if we can't. */
3494 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3495 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3496 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3497 TYPE_ALIGN (TREE_TYPE (rinner))),
3498 word_mode, false);
3499 if (nmode == VOIDmode)
3500 return 0;
3502 /* Set signed and unsigned types of the precision of this mode for the
3503 shifts below. */
3504 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3506 /* Compute the bit position and size for the new reference and our offset
3507 within it. If the new reference is the same size as the original, we
3508 won't optimize anything, so return zero. */
3509 nbitsize = GET_MODE_BITSIZE (nmode);
3510 nbitpos = lbitpos & ~ (nbitsize - 1);
3511 lbitpos -= nbitpos;
3512 if (nbitsize == lbitsize)
3513 return 0;
3515 if (BYTES_BIG_ENDIAN)
3516 lbitpos = nbitsize - lbitsize - lbitpos;
3518 /* Make the mask to be used against the extracted field. */
3519 mask = build_int_cst_type (unsigned_type, -1);
3520 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3521 mask = const_binop (RSHIFT_EXPR, mask,
3522 size_int (nbitsize - lbitsize - lbitpos));
3524 if (! const_p)
3525 /* If not comparing with constant, just rework the comparison
3526 and return. */
3527 return fold_build2_loc (loc, code, compare_type,
3528 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3529 make_bit_field_ref (loc, linner,
3530 unsigned_type,
3531 nbitsize, nbitpos,
3532 1),
3533 mask),
3534 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3535 make_bit_field_ref (loc, rinner,
3536 unsigned_type,
3537 nbitsize, nbitpos,
3538 1),
3539 mask));
3541 /* Otherwise, we are handling the constant case. See if the constant is too
3542 big for the field. Warn and return a tree for 0 (false) if so. We do
3543 this not only for its own sake, but to avoid having to test for this
3544 error case below. If we didn't, we might generate wrong code.
3546 For unsigned fields, the constant shifted right by the field length should
3547 be all zero. For signed fields, the high-order bits should agree with
3548 the sign bit. */
3550 if (lunsignedp)
3552 if (wi::lrshift (rhs, lbitsize) != 0)
3554 warning (0, "comparison is always %d due to width of bit-field",
3555 code == NE_EXPR);
3556 return constant_boolean_node (code == NE_EXPR, compare_type);
3559 else
3561 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3562 if (tem != 0 && tem != -1)
3564 warning (0, "comparison is always %d due to width of bit-field",
3565 code == NE_EXPR);
3566 return constant_boolean_node (code == NE_EXPR, compare_type);
3570 /* Single-bit compares should always be against zero. */
3571 if (lbitsize == 1 && ! integer_zerop (rhs))
3573 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3574 rhs = build_int_cst (type, 0);
3577 /* Make a new bitfield reference, shift the constant over the
3578 appropriate number of bits and mask it with the computed mask
3579 (in case this was a signed field). If we changed it, make a new one. */
3580 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3582 rhs = const_binop (BIT_AND_EXPR,
3583 const_binop (LSHIFT_EXPR,
3584 fold_convert_loc (loc, unsigned_type, rhs),
3585 size_int (lbitpos)),
3586 mask);
3588 lhs = build2_loc (loc, code, compare_type,
3589 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3590 return lhs;
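/* A worked example (sketch, little-endian layout assumed): for a 3-bit
   unsigned bit-field F placed at bit 2 of a byte, the comparison
   s.f == 5 becomes roughly

     (BIT_FIELD_REF <s, 8, 0> & 0x1c) == 0x14

   i.e. a single masked compare of the containing byte with no shift:
   the mask is 0x07 << 2 and the constant is 5 << 2.  */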
3593 /* Subroutine for fold_truth_andor_1: decode a field reference.
3595 If EXP is a comparison reference, we return the innermost reference.
3597 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3598 set to the starting bit number.
3600 If the innermost field can be completely contained in a mode-sized
3601 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3603 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3604 otherwise it is not changed.
3606 *PUNSIGNEDP is set to the signedness of the field.
3608 *PMASK is set to the mask used. This is either contained in a
3609 BIT_AND_EXPR or derived from the width of the field.
3611 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3613 Return 0 if this is not a component reference or is one that we can't
3614 do anything with. */
3616 static tree
3617 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3618 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3619 int *punsignedp, int *pvolatilep,
3620 tree *pmask, tree *pand_mask)
3622 tree outer_type = 0;
3623 tree and_mask = 0;
3624 tree mask, inner, offset;
3625 tree unsigned_type;
3626 unsigned int precision;
3628 /* All the optimizations using this function assume integer fields.
3629 There are problems with FP fields since the type_for_size call
3630 below can fail for, e.g., XFmode. */
3631 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3632 return 0;
3634 /* We are interested in the bare arrangement of bits, so strip everything
3635 that doesn't affect the machine mode. However, record the type of the
3636 outermost expression if it may matter below. */
3637 if (CONVERT_EXPR_P (exp)
3638 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3639 outer_type = TREE_TYPE (exp);
3640 STRIP_NOPS (exp);
3642 if (TREE_CODE (exp) == BIT_AND_EXPR)
3644 and_mask = TREE_OPERAND (exp, 1);
3645 exp = TREE_OPERAND (exp, 0);
3646 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3647 if (TREE_CODE (and_mask) != INTEGER_CST)
3648 return 0;
3651 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3652 punsignedp, pvolatilep, false);
3653 if ((inner == exp && and_mask == 0)
3654 || *pbitsize < 0 || offset != 0
3655 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3656 return 0;
3658 /* If the number of bits in the reference is the same as the bitsize of
3659 the outer type, then the outer type gives the signedness. Otherwise
3660 (in case of a small bitfield) the signedness is unchanged. */
3661 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3662 *punsignedp = TYPE_UNSIGNED (outer_type);
3664 /* Compute the mask to access the bitfield. */
3665 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3666 precision = TYPE_PRECISION (unsigned_type);
3668 mask = build_int_cst_type (unsigned_type, -1);
3670 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3671 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3673 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3674 if (and_mask != 0)
3675 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3676 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3678 *pmask = mask;
3679 *pand_mask = and_mask;
3680 return inner;
3683 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3684 bit positions and the type of MASK is signed. */
3686 static int
3687 all_ones_mask_p (const_tree mask, unsigned int size)
3689 tree type = TREE_TYPE (mask);
3690 unsigned int precision = TYPE_PRECISION (type);
3692 /* If this function returns true when the type of the mask is
3693 UNSIGNED, then there will be errors. In particular see
3694 gcc.c-torture/execute/990326-1.c. There does not appear to be
3695 any documentation paper trail as to why this is so. But the pre
3696 wide-int worked with that restriction and it has been preserved
3697 here. */
3698 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3699 return false;
3701 return wi::mask (size, false, precision) == mask;
3704 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3705 represents the sign bit of EXP's type. If EXP represents a sign
3706 or zero extension, also test VAL against the unextended type.
3707 The return value is the (sub)expression whose sign bit is VAL,
3708 or NULL_TREE otherwise. */
3710 static tree
3711 sign_bit_p (tree exp, const_tree val)
3713 int width;
3714 tree t;
3716 /* Tree EXP must have an integral type. */
3717 t = TREE_TYPE (exp);
3718 if (! INTEGRAL_TYPE_P (t))
3719 return NULL_TREE;
3721 /* Tree VAL must be an integer constant. */
3722 if (TREE_CODE (val) != INTEGER_CST
3723 || TREE_OVERFLOW (val))
3724 return NULL_TREE;
3726 width = TYPE_PRECISION (t);
3727 if (wi::only_sign_bit_p (val, width))
3728 return exp;
3730 /* Handle extension from a narrower type. */
3731 if (TREE_CODE (exp) == NOP_EXPR
3732 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3733 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3735 return NULL_TREE;
3738 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3739 to be evaluated unconditionally. */
3741 static int
3742 simple_operand_p (const_tree exp)
3744 /* Strip any conversions that don't change the machine mode. */
3745 STRIP_NOPS (exp);
3747 return (CONSTANT_CLASS_P (exp)
3748 || TREE_CODE (exp) == SSA_NAME
3749 || (DECL_P (exp)
3750 && ! TREE_ADDRESSABLE (exp)
3751 && ! TREE_THIS_VOLATILE (exp)
3752 && ! DECL_NONLOCAL (exp)
3753 /* Don't regard global variables as simple. They may be
3754 allocated in ways unknown to the compiler (shared memory,
3755 #pragma weak, etc). */
3756 && ! TREE_PUBLIC (exp)
3757 && ! DECL_EXTERNAL (exp)
3758 /* Weakrefs are not safe to be read, since they can be NULL.
3759 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3760 have DECL_WEAK flag set. */
3761 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3762 /* Loading a static variable is unduly expensive, but global
3763 registers aren't expensive. */
3764 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3767 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3768 to be evaluated unconditionally.
3769 In addition to simple_operand_p, we assume that comparisons, conversions,
3770 and logic-not operations are simple, if their operands are simple, too. */
3772 static bool
3773 simple_operand_p_2 (tree exp)
3775 enum tree_code code;
3777 if (TREE_SIDE_EFFECTS (exp)
3778 || tree_could_trap_p (exp))
3779 return false;
3781 while (CONVERT_EXPR_P (exp))
3782 exp = TREE_OPERAND (exp, 0);
3784 code = TREE_CODE (exp);
3786 if (TREE_CODE_CLASS (code) == tcc_comparison)
3787 return (simple_operand_p (TREE_OPERAND (exp, 0))
3788 && simple_operand_p (TREE_OPERAND (exp, 1)));
3790 if (code == TRUTH_NOT_EXPR)
3791 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3793 return simple_operand_p (exp);
3797 /* The following functions are subroutines to fold_range_test and allow it to
3798 try to change a logical combination of comparisons into a range test.
3800 For example, both
3801 X == 2 || X == 3 || X == 4 || X == 5
3802 and
3803 X >= 2 && X <= 5
3804 are converted to
3805 (unsigned) (X - 2) <= 3
3807 We describe each set of comparisons as being either inside or outside
3808 a range, using a variable named like IN_P, and then describe the
3809 range with a lower and upper bound. If one of the bounds is omitted,
3810 it represents either the highest or lowest value of the type.
3812 In the comments below, we represent a range by two numbers in brackets
3813 preceded by a "+" to designate being inside that range, or a "-" to
3814 designate being outside that range, so the condition can be inverted by
3815 flipping the prefix. An omitted bound is represented by a "-". For
3816 example, "- [-, 10]" means being outside the range starting at the lowest
3817 possible value and ending at 10, in other words, being greater than 10.
3818 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3819 always false.
3821 We set up things so that the missing bounds are handled in a consistent
3822 manner so neither a missing bound nor "true" and "false" need to be
3823 handled using a special case. */
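/* Illustrative sketch, not part of this file: the two forms above
   collapse into one unsigned comparison because the subtraction
   wraps every value below 2 around to a large unsigned number.  */

static int
example_range_test (int x)
{
  /* X >= 2 && X <= 5, rewritten as (unsigned) (X - 2) <= 3.  */
  return (unsigned int) x - 2u <= 3u;
}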
3825 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3826 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3827 and UPPER1_P are nonzero if the respective argument is an upper bound
3828 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3829 must be specified for a comparison. ARG1 will be converted to ARG0's
3830 type if both are specified. */
3832 static tree
3833 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3834 tree arg1, int upper1_p)
3836 tree tem;
3837 int result;
3838 int sgn0, sgn1;
3840 /* If neither arg represents infinity, do the normal operation.
3841 Else, if not a comparison, return infinity. Else handle the special
3842 comparison rules. Note that most of the cases below won't occur, but
3843 are handled for consistency. */
3845 if (arg0 != 0 && arg1 != 0)
3847 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3848 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3849 STRIP_NOPS (tem);
3850 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3853 if (TREE_CODE_CLASS (code) != tcc_comparison)
3854 return 0;
3856 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3857 for neither. In real maths, open ended ranges cannot be compared.
3858 But this is computer arithmetic, where numbers are finite, so we
3859 can stand in for any missing bound with a value Z greater than any
3860 representable number. This permits us to treat unbounded ranges
3861 as equal.
3862 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3863 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3864 switch (code)
3866 case EQ_EXPR:
3867 result = sgn0 == sgn1;
3868 break;
3869 case NE_EXPR:
3870 result = sgn0 != sgn1;
3871 break;
3872 case LT_EXPR:
3873 result = sgn0 < sgn1;
3874 break;
3875 case LE_EXPR:
3876 result = sgn0 <= sgn1;
3877 break;
3878 case GT_EXPR:
3879 result = sgn0 > sgn1;
3880 break;
3881 case GE_EXPR:
3882 result = sgn0 >= sgn1;
3883 break;
3884 default:
3885 gcc_unreachable ();
3888 return constant_boolean_node (result, type);
3891 /* Helper routine for make_range. Perform one step for it, return
3892 new expression if the loop should continue or NULL_TREE if it should
3893 stop. */
3895 tree
3896 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3897 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3898 bool *strict_overflow_p)
3900 tree arg0_type = TREE_TYPE (arg0);
3901 tree n_low, n_high, low = *p_low, high = *p_high;
3902 int in_p = *p_in_p, n_in_p;
3904 switch (code)
3906 case TRUTH_NOT_EXPR:
3907 /* We can only do something if the range is testing for zero. */
3908 if (low == NULL_TREE || high == NULL_TREE
3909 || ! integer_zerop (low) || ! integer_zerop (high))
3910 return NULL_TREE;
3911 *p_in_p = ! in_p;
3912 return arg0;
3914 case EQ_EXPR: case NE_EXPR:
3915 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3916 /* We can only do something if the range is testing for zero
3917 and if the second operand is an integer constant. Note that
3918 saying something is "in" the range we make is done by
3919 complementing IN_P, since it will be set in the initial case of
3920 being not equal to zero; "out" is leaving it alone. */
3921 if (low == NULL_TREE || high == NULL_TREE
3922 || ! integer_zerop (low) || ! integer_zerop (high)
3923 || TREE_CODE (arg1) != INTEGER_CST)
3924 return NULL_TREE;
3926 switch (code)
3928 case NE_EXPR: /* - [c, c] */
3929 low = high = arg1;
3930 break;
3931 case EQ_EXPR: /* + [c, c] */
3932 in_p = ! in_p, low = high = arg1;
3933 break;
3934 case GT_EXPR: /* - [-, c] */
3935 low = 0, high = arg1;
3936 break;
3937 case GE_EXPR: /* + [c, -] */
3938 in_p = ! in_p, low = arg1, high = 0;
3939 break;
3940 case LT_EXPR: /* - [c, -] */
3941 low = arg1, high = 0;
3942 break;
3943 case LE_EXPR: /* + [-, c] */
3944 in_p = ! in_p, low = 0, high = arg1;
3945 break;
3946 default:
3947 gcc_unreachable ();
3950 /* If this is an unsigned comparison, we also know that EXP is
3951 greater than or equal to zero. We base the range tests we make
3952 on that fact, so we record it here so we can parse existing
3953 range tests. We test arg0_type since often the return type
3954 of, e.g. EQ_EXPR, is boolean. */
3955 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3957 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3958 in_p, low, high, 1,
3959 build_int_cst (arg0_type, 0),
3960 NULL_TREE))
3961 return NULL_TREE;
3963 in_p = n_in_p, low = n_low, high = n_high;
3965 /* If the high bound is missing, but we have a nonzero low
3966 bound, reverse the range so it goes from zero to the low bound
3967 minus 1. */
3968 if (high == 0 && low && ! integer_zerop (low))
3970 in_p = ! in_p;
3971 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3972 build_int_cst (TREE_TYPE (low), 1), 0);
3973 low = build_int_cst (arg0_type, 0);
3977 *p_low = low;
3978 *p_high = high;
3979 *p_in_p = in_p;
3980 return arg0;
3982 case NEGATE_EXPR:
3983 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3984 low and high are non-NULL, then normalize will DTRT. */
3985 if (!TYPE_UNSIGNED (arg0_type)
3986 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3988 if (low == NULL_TREE)
3989 low = TYPE_MIN_VALUE (arg0_type);
3990 if (high == NULL_TREE)
3991 high = TYPE_MAX_VALUE (arg0_type);
3994 /* (-x) IN [a,b] -> x in [-b, -a] */
3995 n_low = range_binop (MINUS_EXPR, exp_type,
3996 build_int_cst (exp_type, 0),
3997 0, high, 1);
3998 n_high = range_binop (MINUS_EXPR, exp_type,
3999 build_int_cst (exp_type, 0),
4000 0, low, 0);
4001 if (n_high != 0 && TREE_OVERFLOW (n_high))
4002 return NULL_TREE;
4003 goto normalize;
4005 case BIT_NOT_EXPR:
4006 /* ~ X -> -X - 1 */
4007 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4008 build_int_cst (exp_type, 1));
4010 case PLUS_EXPR:
4011 case MINUS_EXPR:
4012 if (TREE_CODE (arg1) != INTEGER_CST)
4013 return NULL_TREE;
4015 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4016 move a constant to the other side. */
4017 if (!TYPE_UNSIGNED (arg0_type)
4018 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4019 return NULL_TREE;
4021 /* If EXP is signed, any overflow in the computation is undefined,
4022 so we don't worry about it so long as our computations on
4023 the bounds don't overflow. For unsigned, overflow is defined
4024 and this is exactly the right thing. */
4025 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4026 arg0_type, low, 0, arg1, 0);
4027 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4028 arg0_type, high, 1, arg1, 0);
4029 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4030 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4031 return NULL_TREE;
4033 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4034 *strict_overflow_p = true;
4036 normalize:
4037 /* Check for an unsigned range which has wrapped around the maximum
4038 value thus making n_high < n_low, and normalize it. */
4039 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4041 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4042 build_int_cst (TREE_TYPE (n_high), 1), 0);
4043 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4044 build_int_cst (TREE_TYPE (n_low), 1), 0);
4046 /* If the range is of the form +/- [ x+1, x ], we won't
4047 be able to normalize it. But then, it represents the
4048 whole range or the empty set, so make it
4049 +/- [ -, - ]. */
4050 if (tree_int_cst_equal (n_low, low)
4051 && tree_int_cst_equal (n_high, high))
4052 low = high = 0;
4053 else
4054 in_p = ! in_p;
4056 else
4057 low = n_low, high = n_high;
4059 *p_low = low;
4060 *p_high = high;
4061 *p_in_p = in_p;
4062 return arg0;
4064 CASE_CONVERT:
4065 case NON_LVALUE_EXPR:
4066 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4067 return NULL_TREE;
4069 if (! INTEGRAL_TYPE_P (arg0_type)
4070 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4071 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4072 return NULL_TREE;
4074 n_low = low, n_high = high;
4076 if (n_low != 0)
4077 n_low = fold_convert_loc (loc, arg0_type, n_low);
4079 if (n_high != 0)
4080 n_high = fold_convert_loc (loc, arg0_type, n_high);
4082 /* If we're converting arg0 from an unsigned type to exp's
4083 signed type, we will be doing the comparison as unsigned.
4084 The tests above have already verified that LOW and HIGH
4085 are both positive.
4087 So we have to ensure that we will handle large unsigned
4088 values the same way that the current signed bounds treat
4089 negative values. */
4091 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4093 tree high_positive;
4094 tree equiv_type;
4095 /* For fixed-point modes, we need to pass the saturating flag
4096 as the 2nd parameter. */
4097 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4098 equiv_type
4099 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4100 TYPE_SATURATING (arg0_type));
4101 else
4102 equiv_type
4103 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4105 /* A range without an upper bound is, naturally, unbounded.
4106 Since convert would have cropped a very large value, use
4107 the max value for the destination type. */
4108 high_positive
4109 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4110 : TYPE_MAX_VALUE (arg0_type);
4112 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4113 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4114 fold_convert_loc (loc, arg0_type,
4115 high_positive),
4116 build_int_cst (arg0_type, 1));
4118 /* If the low bound is specified, "and" the range with the
4119 range for which the original unsigned value will be
4120 positive. */
4121 if (low != 0)
4123 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4124 1, fold_convert_loc (loc, arg0_type,
4125 integer_zero_node),
4126 high_positive))
4127 return NULL_TREE;
4129 in_p = (n_in_p == in_p);
4131 else
4133 /* Otherwise, "or" the range with the range of the input
4134 that will be interpreted as negative. */
4135 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4136 1, fold_convert_loc (loc, arg0_type,
4137 integer_zero_node),
4138 high_positive))
4139 return NULL_TREE;
4141 in_p = (in_p != n_in_p);
4145 *p_low = n_low;
4146 *p_high = n_high;
4147 *p_in_p = in_p;
4148 return arg0;
4150 default:
4151 return NULL_TREE;
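/* Illustrative sketch, not part of this file: in the PLUS_EXPR case
   above the constant migrates into the bounds, and "normalize" fixes
   up unsigned wrap-around.  For unsigned char X, "x + 20 <= 30"
   describes the wrapped range + [236, 10], which is normalized to
   the outside-range - [11, 235].  The equivalence, spelled out:  */

static int
example_wrapped_range (unsigned char x)
{
  return ((unsigned char) (x + 20) <= 30) == ! (x >= 11 && x <= 235);
  /* Always 1.  */
}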
4155 /* Given EXP, a logical expression, set the range it is testing into
4156 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4157 actually being tested. *PLOW and *PHIGH will be made of the same
4158 type as the returned expression. If EXP is not a comparison, we
4159 will most likely not be returning a useful value and range. Set
4160 *STRICT_OVERFLOW_P to true if the return value is only valid
4161 because signed overflow is undefined; otherwise, do not change
4162 *STRICT_OVERFLOW_P. */
4164 tree
4165 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4166 bool *strict_overflow_p)
4168 enum tree_code code;
4169 tree arg0, arg1 = NULL_TREE;
4170 tree exp_type, nexp;
4171 int in_p;
4172 tree low, high;
4173 location_t loc = EXPR_LOCATION (exp);
4175 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4176 and see if we can refine the range. Some of the cases below may not
4177 happen, but it doesn't seem worth worrying about this. We "continue"
4178 the outer loop when we've changed something; otherwise we "break"
4179 the switch, which will "break" the while. */
4181 in_p = 0;
4182 low = high = build_int_cst (TREE_TYPE (exp), 0);
4184 while (1)
4186 code = TREE_CODE (exp);
4187 exp_type = TREE_TYPE (exp);
4188 arg0 = NULL_TREE;
4190 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4192 if (TREE_OPERAND_LENGTH (exp) > 0)
4193 arg0 = TREE_OPERAND (exp, 0);
4194 if (TREE_CODE_CLASS (code) == tcc_binary
4195 || TREE_CODE_CLASS (code) == tcc_comparison
4196 || (TREE_CODE_CLASS (code) == tcc_expression
4197 && TREE_OPERAND_LENGTH (exp) > 1))
4198 arg1 = TREE_OPERAND (exp, 1);
4200 if (arg0 == NULL_TREE)
4201 break;
4203 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4204 &high, &in_p, strict_overflow_p);
4205 if (nexp == NULL_TREE)
4206 break;
4207 exp = nexp;
4210 /* If EXP is a constant, we can evaluate whether this is true or false. */
4211 if (TREE_CODE (exp) == INTEGER_CST)
4213 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4214 exp, 0, low, 0))
4215 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4216 exp, 1, high, 1)));
4217 low = high = 0;
4218 exp = 0;
4221 *pin_p = in_p, *plow = low, *phigh = high;
4222 return exp;
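/* Illustrative sketch, not part of this file: in the encoding used
   here "x > 10" is described as - [-, 10] (outside the range from
   the type minimum to 10), and a surrounding TRUTH_NOT_EXPR merely
   flips IN_P, yielding + [-, 10].  */

static int
example_in_p_flip (int x)
{
  return (! (x > 10)) == (x <= 10);   /* always 1 */
}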
4225 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4226 type, TYPE, return an expression to test if EXP is in (or out of, depending
4227 on IN_P) the range. Return 0 if the test couldn't be created. */
4229 tree
4230 build_range_check (location_t loc, tree type, tree exp, int in_p,
4231 tree low, tree high)
4233 tree etype = TREE_TYPE (exp), value;
4235 #ifdef HAVE_canonicalize_funcptr_for_compare
4236 /* Disable this optimization for function pointer expressions
4237 on targets that require function pointer canonicalization. */
4238 if (HAVE_canonicalize_funcptr_for_compare
4239 && TREE_CODE (etype) == POINTER_TYPE
4240 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4241 return NULL_TREE;
4242 #endif
4244 if (! in_p)
4246 value = build_range_check (loc, type, exp, 1, low, high);
4247 if (value != 0)
4248 return invert_truthvalue_loc (loc, value);
4250 return 0;
4253 if (low == 0 && high == 0)
4254 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4256 if (low == 0)
4257 return fold_build2_loc (loc, LE_EXPR, type, exp,
4258 fold_convert_loc (loc, etype, high));
4260 if (high == 0)
4261 return fold_build2_loc (loc, GE_EXPR, type, exp,
4262 fold_convert_loc (loc, etype, low));
4264 if (operand_equal_p (low, high, 0))
4265 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4266 fold_convert_loc (loc, etype, low));
4268 if (integer_zerop (low))
4270 if (! TYPE_UNSIGNED (etype))
4272 etype = unsigned_type_for (etype);
4273 high = fold_convert_loc (loc, etype, high);
4274 exp = fold_convert_loc (loc, etype, exp);
4276 return build_range_check (loc, type, exp, 1, 0, high);
4279 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4280 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4282 int prec = TYPE_PRECISION (etype);
4284 if (wi::mask (prec - 1, false, prec) == high)
4286 if (TYPE_UNSIGNED (etype))
4288 tree signed_etype = signed_type_for (etype);
4289 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4290 etype
4291 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4292 else
4293 etype = signed_etype;
4294 exp = fold_convert_loc (loc, etype, exp);
4296 return fold_build2_loc (loc, GT_EXPR, type, exp,
4297 build_int_cst (etype, 0));
4301 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4302 This requires wrap-around arithmetic for the type of the expression.
4303 First make sure that arithmetic in this type is valid, then make sure
4304 that it wraps around. */
4305 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4306 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4307 TYPE_UNSIGNED (etype));
4309 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4311 tree utype, minv, maxv;
4313 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4314 for the type in question, as we rely on this here. */
4315 utype = unsigned_type_for (etype);
4316 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4317 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4318 build_int_cst (TREE_TYPE (maxv), 1), 1);
4319 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4321 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4322 minv, 1, maxv, 1)))
4323 etype = utype;
4324 else
4325 return 0;
4328 high = fold_convert_loc (loc, etype, high);
4329 low = fold_convert_loc (loc, etype, low);
4330 exp = fold_convert_loc (loc, etype, exp);
4332 value = const_binop (MINUS_EXPR, high, low);
4335 if (POINTER_TYPE_P (etype))
4337 if (value != 0 && !TREE_OVERFLOW (value))
4339 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4340 return build_range_check (loc, type,
4341 fold_build_pointer_plus_loc (loc, exp, low),
4342 1, build_int_cst (etype, 0), value);
4344 return 0;
4347 if (value != 0 && !TREE_OVERFLOW (value))
4348 return build_range_check (loc, type,
4349 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4350 1, build_int_cst (etype, 0), value);
4352 return 0;
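/* Illustrative sketch, not part of this file: the general case above
   emits EXP - LOW compared against HIGH - LOW in an unsigned type
   with wrap-around.  Assuming low <= high:  */

static int
example_range_check (unsigned int c, unsigned int low, unsigned int high)
{
  /* Equivalent to c >= low && c <= high: values below LOW wrap
     around to something larger than HIGH - LOW.  */
  return c - low <= high - low;
}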
4355 /* Return the predecessor of VAL in its type, handling the infinite case. */
4357 static tree
4358 range_predecessor (tree val)
4360 tree type = TREE_TYPE (val);
4362 if (INTEGRAL_TYPE_P (type)
4363 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4364 return 0;
4365 else
4366 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4367 build_int_cst (TREE_TYPE (val), 1), 0);
4370 /* Return the successor of VAL in its type, handling the infinite case. */
4372 static tree
4373 range_successor (tree val)
4375 tree type = TREE_TYPE (val);
4377 if (INTEGRAL_TYPE_P (type)
4378 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4379 return 0;
4380 else
4381 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4382 build_int_cst (TREE_TYPE (val), 1), 0);
4385 /* Given two ranges, see if we can merge them into one. Return 1 if we
4386 can, 0 if we can't. Set the output range into the specified parameters. */
4388 bool
4389 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4390 tree high0, int in1_p, tree low1, tree high1)
4392 int no_overlap;
4393 int subset;
4394 int temp;
4395 tree tem;
4396 int in_p;
4397 tree low, high;
4398 int lowequal = ((low0 == 0 && low1 == 0)
4399 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4400 low0, 0, low1, 0)));
4401 int highequal = ((high0 == 0 && high1 == 0)
4402 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4403 high0, 1, high1, 1)));
4405 /* Make range 0 be the range that starts first, or ends last if they
4406 start at the same value; swap them if that is not the case. */
4407 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4408 low0, 0, low1, 0))
4409 || (lowequal
4410 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4411 high1, 1, high0, 1))))
4413 temp = in0_p, in0_p = in1_p, in1_p = temp;
4414 tem = low0, low0 = low1, low1 = tem;
4415 tem = high0, high0 = high1, high1 = tem;
4418 /* Now flag two cases, whether the ranges are disjoint or whether the
4419 second range is totally subsumed in the first. Note that the tests
4420 below are simplified by the ones above. */
4421 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4422 high0, 1, low1, 0));
4423 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4424 high1, 1, high0, 1));
4426 /* We now have four cases, depending on whether we are including or
4427 excluding the two ranges. */
4428 if (in0_p && in1_p)
4430 /* If they don't overlap, the result is false. If the second range
4431 is a subset it is the result. Otherwise, the range is from the start
4432 of the second to the end of the first. */
4433 if (no_overlap)
4434 in_p = 0, low = high = 0;
4435 else if (subset)
4436 in_p = 1, low = low1, high = high1;
4437 else
4438 in_p = 1, low = low1, high = high0;
4441 else if (in0_p && ! in1_p)
4443 /* If they don't overlap, the result is the first range. If they are
4444 equal, the result is false. If the second range is a subset of the
4445 first, and the ranges begin at the same place, we go from just after
4446 the end of the second range to the end of the first. If the second
4447 range is not a subset of the first, or if it is a subset and both
4448 ranges end at the same place, the range starts at the start of the
4449 first range and ends just before the second range.
4450 Otherwise, we can't describe this as a single range. */
4451 if (no_overlap)
4452 in_p = 1, low = low0, high = high0;
4453 else if (lowequal && highequal)
4454 in_p = 0, low = high = 0;
4455 else if (subset && lowequal)
4457 low = range_successor (high1);
4458 high = high0;
4459 in_p = 1;
4460 if (low == 0)
4462 /* We are in the weird situation where high0 > high1 but
4463 high1 has no successor. Punt. */
4464 return 0;
4467 else if (! subset || highequal)
4469 low = low0;
4470 high = range_predecessor (low1);
4471 in_p = 1;
4472 if (high == 0)
4474 /* low0 < low1 but low1 has no predecessor. Punt. */
4475 return 0;
4478 else
4479 return 0;
4482 else if (! in0_p && in1_p)
4484 /* If they don't overlap, the result is the second range. If the second
4485 is a subset of the first, the result is false. Otherwise,
4486 the range starts just after the first range and ends at the
4487 end of the second. */
4488 if (no_overlap)
4489 in_p = 1, low = low1, high = high1;
4490 else if (subset || highequal)
4491 in_p = 0, low = high = 0;
4492 else
4494 low = range_successor (high0);
4495 high = high1;
4496 in_p = 1;
4497 if (low == 0)
4499 /* high1 > high0 but high0 has no successor. Punt. */
4500 return 0;
4505 else
4507 /* The case where we are excluding both ranges. Here the complex case
4508 is if they don't overlap. In that case, the only time we have a
4509 range is if they are adjacent. If the second is a subset of the
4510 first, the result is the first. Otherwise, the range to exclude
4511 starts at the beginning of the first range and ends at the end of the
4512 second. */
4513 if (no_overlap)
4515 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4516 range_successor (high0),
4517 1, low1, 0)))
4518 in_p = 0, low = low0, high = high1;
4519 else
4521 /* Canonicalize - [min, x] into - [-, x]. */
4522 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4523 switch (TREE_CODE (TREE_TYPE (low0)))
4525 case ENUMERAL_TYPE:
4526 if (TYPE_PRECISION (TREE_TYPE (low0))
4527 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4528 break;
4529 /* FALLTHROUGH */
4530 case INTEGER_TYPE:
4531 if (tree_int_cst_equal (low0,
4532 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4533 low0 = 0;
4534 break;
4535 case POINTER_TYPE:
4536 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4537 && integer_zerop (low0))
4538 low0 = 0;
4539 break;
4540 default:
4541 break;
4544 /* Canonicalize - [x, max] into - [x, -]. */
4545 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4546 switch (TREE_CODE (TREE_TYPE (high1)))
4548 case ENUMERAL_TYPE:
4549 if (TYPE_PRECISION (TREE_TYPE (high1))
4550 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4551 break;
4552 /* FALLTHROUGH */
4553 case INTEGER_TYPE:
4554 if (tree_int_cst_equal (high1,
4555 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4556 high1 = 0;
4557 break;
4558 case POINTER_TYPE:
4559 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4560 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4561 high1, 1,
4562 build_int_cst (TREE_TYPE (high1), 1),
4563 1)))
4564 high1 = 0;
4565 break;
4566 default:
4567 break;
4570 /* The ranges might be also adjacent between the maximum and
4571 minimum values of the given type. For
4572 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4573 return + [x + 1, y - 1]. */
4574 if (low0 == 0 && high1 == 0)
4576 low = range_successor (high0);
4577 high = range_predecessor (low1);
4578 if (low == 0 || high == 0)
4579 return 0;
4581 in_p = 1;
4583 else
4584 return 0;
4587 else if (subset)
4588 in_p = 0, low = low0, high = high0;
4589 else
4590 in_p = 0, low = low0, high = high1;
4593 *pin_p = in_p, *plow = low, *phigh = high;
4594 return 1;
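/* Illustrative sketch, not part of this file: ANDing the "in" ranges
   + [2, 8] and + [5, 12] hits the overlapping, non-subset case above,
   so the result runs from the start of the second range to the end
   of the first: + [5, 8].  */

static int
example_merge_ranges (int x)
{
  return ((x >= 2 && x <= 8) && (x >= 5 && x <= 12))
         == (x >= 5 && x <= 8);   /* always 1 */
}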
4598 /* Subroutine of fold, looking inside expressions of the form
4599 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4600 of the COND_EXPR. This function is also used to optimize
4601 A op B ? C : A by reversing the comparison first.
4603 Return a folded expression whose code is not a COND_EXPR
4604 anymore, or NULL_TREE if no folding opportunity is found. */
4606 static tree
4607 fold_cond_expr_with_comparison (location_t loc, tree type,
4608 tree arg0, tree arg1, tree arg2)
4610 enum tree_code comp_code = TREE_CODE (arg0);
4611 tree arg00 = TREE_OPERAND (arg0, 0);
4612 tree arg01 = TREE_OPERAND (arg0, 1);
4613 tree arg1_type = TREE_TYPE (arg1);
4614 tree tem;
4616 STRIP_NOPS (arg1);
4617 STRIP_NOPS (arg2);
4619 /* If we have A op 0 ? A : -A, consider applying the following
4620 transformations:
4622 A == 0? A : -A same as -A
4623 A != 0? A : -A same as A
4624 A >= 0? A : -A same as abs (A)
4625 A > 0? A : -A same as abs (A)
4626 A <= 0? A : -A same as -abs (A)
4627 A < 0? A : -A same as -abs (A)
4629 None of these transformations work for modes with signed
4630 zeros. If A is +/-0, the first two transformations will
4631 change the sign of the result (from +0 to -0, or vice
4632 versa). The last four will fix the sign of the result,
4633 even though the original expressions could be positive or
4634 negative, depending on the sign of A.
4636 Note that all these transformations are correct if A is
4637 NaN, since the two alternatives (A and -A) are also NaNs. */
4638 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4639 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4640 ? real_zerop (arg01)
4641 : integer_zerop (arg01))
4642 && ((TREE_CODE (arg2) == NEGATE_EXPR
4643 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4644 /* In the case that A is of the form X-Y, '-A' (arg2) may
4645 have already been folded to Y-X, check for that. */
4646 || (TREE_CODE (arg1) == MINUS_EXPR
4647 && TREE_CODE (arg2) == MINUS_EXPR
4648 && operand_equal_p (TREE_OPERAND (arg1, 0),
4649 TREE_OPERAND (arg2, 1), 0)
4650 && operand_equal_p (TREE_OPERAND (arg1, 1),
4651 TREE_OPERAND (arg2, 0), 0))))
4652 switch (comp_code)
4654 case EQ_EXPR:
4655 case UNEQ_EXPR:
4656 tem = fold_convert_loc (loc, arg1_type, arg1);
4657 return pedantic_non_lvalue_loc (loc,
4658 fold_convert_loc (loc, type,
4659 negate_expr (tem)));
4660 case NE_EXPR:
4661 case LTGT_EXPR:
4662 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4663 case UNGE_EXPR:
4664 case UNGT_EXPR:
4665 if (flag_trapping_math)
4666 break;
4667 /* Fall through. */
4668 case GE_EXPR:
4669 case GT_EXPR:
4670 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4671 arg1 = fold_convert_loc (loc, signed_type_for
4672 (TREE_TYPE (arg1)), arg1);
4673 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4674 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4675 case UNLE_EXPR:
4676 case UNLT_EXPR:
4677 if (flag_trapping_math)
4678 break;
4679 case LE_EXPR:
4680 case LT_EXPR:
4681 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4682 arg1 = fold_convert_loc (loc, signed_type_for
4683 (TREE_TYPE (arg1)), arg1);
4684 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4685 return negate_expr (fold_convert_loc (loc, type, tem));
4686 default:
4687 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4688 break;
4691 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4692 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4693 both transformations are correct when A is NaN: A != 0
4694 is then true, and A == 0 is false. */
4696 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4697 && integer_zerop (arg01) && integer_zerop (arg2))
4699 if (comp_code == NE_EXPR)
4700 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4701 else if (comp_code == EQ_EXPR)
4702 return build_zero_cst (type);
4705 /* Try some transformations of A op B ? A : B.
4707 A == B? A : B same as B
4708 A != B? A : B same as A
4709 A >= B? A : B same as max (A, B)
4710 A > B? A : B same as max (B, A)
4711 A <= B? A : B same as min (A, B)
4712 A < B? A : B same as min (B, A)
4714 As above, these transformations don't work in the presence
4715 of signed zeros. For example, if A and B are zeros of
4716 opposite sign, the first two transformations will change
4717 the sign of the result. In the last four, the original
4718 expressions give different results for (A=+0, B=-0) and
4719 (A=-0, B=+0), but the transformed expressions do not.
4721 The first two transformations are correct if either A or B
4722 is a NaN. In the first transformation, the condition will
4723 be false, and B will indeed be chosen. In the case of the
4724 second transformation, the condition A != B will be true,
4725 and A will be chosen.
4727 The conversions to max() and min() are not correct if B is
4728 a number and A is not. The conditions in the original
4729 expressions will be false, so all four give B. The min()
4730 and max() versions would give a NaN instead. */
4731 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4732 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4733 /* Avoid these transformations if the COND_EXPR may be used
4734 as an lvalue in the C++ front-end. PR c++/19199. */
4735 && (in_gimple_form
4736 || VECTOR_TYPE_P (type)
4737 || (strcmp (lang_hooks.name, "GNU C++") != 0
4738 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4739 || ! maybe_lvalue_p (arg1)
4740 || ! maybe_lvalue_p (arg2)))
4742 tree comp_op0 = arg00;
4743 tree comp_op1 = arg01;
4744 tree comp_type = TREE_TYPE (comp_op0);
4746 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4747 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4749 comp_type = type;
4750 comp_op0 = arg1;
4751 comp_op1 = arg2;
4754 switch (comp_code)
4756 case EQ_EXPR:
4757 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4758 case NE_EXPR:
4759 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4760 case LE_EXPR:
4761 case LT_EXPR:
4762 case UNLE_EXPR:
4763 case UNLT_EXPR:
4764 /* In C++ a ?: expression can be an lvalue, so put the
4765 operand which will be used if they are equal first
4766 so that we can convert this back to the
4767 corresponding COND_EXPR. */
4768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4770 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4771 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4772 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4773 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4774 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4775 comp_op1, comp_op0);
4776 return pedantic_non_lvalue_loc (loc,
4777 fold_convert_loc (loc, type, tem));
4779 break;
4780 case GE_EXPR:
4781 case GT_EXPR:
4782 case UNGE_EXPR:
4783 case UNGT_EXPR:
4784 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4786 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4787 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4788 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4789 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4790 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4791 comp_op1, comp_op0);
4792 return pedantic_non_lvalue_loc (loc,
4793 fold_convert_loc (loc, type, tem));
4795 break;
4796 case UNEQ_EXPR:
4797 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4798 return pedantic_non_lvalue_loc (loc,
4799 fold_convert_loc (loc, type, arg2));
4800 break;
4801 case LTGT_EXPR:
4802 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4803 return pedantic_non_lvalue_loc (loc,
4804 fold_convert_loc (loc, type, arg1));
4805 break;
4806 default:
4807 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4808 break;
4812 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4813 we might still be able to simplify this. For example,
4814 if C1 is one less or one more than C2, this might have started
4815 out as a MIN or MAX and been transformed by this function.
4816 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4818 if (INTEGRAL_TYPE_P (type)
4819 && TREE_CODE (arg01) == INTEGER_CST
4820 && TREE_CODE (arg2) == INTEGER_CST)
4821 switch (comp_code)
4823 case EQ_EXPR:
4824 if (TREE_CODE (arg1) == INTEGER_CST)
4825 break;
4826 /* We can replace A with C1 in this case. */
4827 arg1 = fold_convert_loc (loc, type, arg01);
4828 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4830 case LT_EXPR:
4831 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4832 MIN_EXPR, to preserve the signedness of the comparison. */
4833 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4834 OEP_ONLY_CONST)
4835 && operand_equal_p (arg01,
4836 const_binop (PLUS_EXPR, arg2,
4837 build_int_cst (type, 1)),
4838 OEP_ONLY_CONST))
4840 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4841 fold_convert_loc (loc, TREE_TYPE (arg00),
4842 arg2));
4843 return pedantic_non_lvalue_loc (loc,
4844 fold_convert_loc (loc, type, tem));
4846 break;
4848 case LE_EXPR:
4849 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4850 as above. */
4851 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4852 OEP_ONLY_CONST)
4853 && operand_equal_p (arg01,
4854 const_binop (MINUS_EXPR, arg2,
4855 build_int_cst (type, 1)),
4856 OEP_ONLY_CONST))
4858 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4859 fold_convert_loc (loc, TREE_TYPE (arg00),
4860 arg2));
4861 return pedantic_non_lvalue_loc (loc,
4862 fold_convert_loc (loc, type, tem));
4864 break;
4866 case GT_EXPR:
4867 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4868 MAX_EXPR, to preserve the signedness of the comparison. */
4869 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4870 OEP_ONLY_CONST)
4871 && operand_equal_p (arg01,
4872 const_binop (MINUS_EXPR, arg2,
4873 build_int_cst (type, 1)),
4874 OEP_ONLY_CONST))
4876 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4877 fold_convert_loc (loc, TREE_TYPE (arg00),
4878 arg2));
4879 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4881 break;
4883 case GE_EXPR:
4884 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4885 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4886 OEP_ONLY_CONST)
4887 && operand_equal_p (arg01,
4888 const_binop (PLUS_EXPR, arg2,
4889 build_int_cst (type, 1)),
4890 OEP_ONLY_CONST))
4892 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4893 fold_convert_loc (loc, TREE_TYPE (arg00),
4894 arg2));
4895 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4897 break;
4898 case NE_EXPR:
4899 break;
4900 default:
4901 gcc_unreachable ();
4904 return NULL_TREE;
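/* Illustrative sketch, not part of this file: two of the shapes
   recognized above, written for plain ints, where signed zeros and
   NaNs are not a concern (and leaving INT_MIN aside for the
   negation).  */

static int
example_abs_fold (int a)
{
  return a >= 0 ? a : -a;   /* folds to ABS_EXPR <a> */
}

static int
example_min_fold (int a, int b)
{
  return a <= b ? a : b;    /* folds to MIN_EXPR <a, b> */
}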
4909 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4910 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4911 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4912 false) >= 2)
4913 #endif
4915 /* EXP is some logical combination of boolean tests. See if we can
4916 merge it into some range test. Return the new tree if so. */
4918 static tree
4919 fold_range_test (location_t loc, enum tree_code code, tree type,
4920 tree op0, tree op1)
4922 int or_op = (code == TRUTH_ORIF_EXPR
4923 || code == TRUTH_OR_EXPR);
4924 int in0_p, in1_p, in_p;
4925 tree low0, low1, low, high0, high1, high;
4926 bool strict_overflow_p = false;
4927 tree tem, lhs, rhs;
4928 const char * const warnmsg = G_("assuming signed overflow does not occur "
4929 "when simplifying range test");
4931 if (!INTEGRAL_TYPE_P (type))
4932 return 0;
4934 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4935 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4937 /* If this is an OR operation, invert both sides; we will invert
4938 again at the end. */
4939 if (or_op)
4940 in0_p = ! in0_p, in1_p = ! in1_p;
4942 /* If both expressions are the same, if we can merge the ranges, and we
4943 can build the range test, return it or it inverted. If one of the
4944 ranges is always true or always false, consider it to be the same
4945 expression as the other. */
4946 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4947 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4948 in1_p, low1, high1)
4949 && 0 != (tem = (build_range_check (loc, type,
4950 lhs != 0 ? lhs
4951 : rhs != 0 ? rhs : integer_zero_node,
4952 in_p, low, high))))
4954 if (strict_overflow_p)
4955 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4956 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4959 /* On machines where the branch cost is expensive, if this is a
4960 short-circuited branch and the underlying object on both sides
4961 is the same, make a non-short-circuit operation. */
4962 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4963 && lhs != 0 && rhs != 0
4964 && (code == TRUTH_ANDIF_EXPR
4965 || code == TRUTH_ORIF_EXPR)
4966 && operand_equal_p (lhs, rhs, 0))
4968 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4969 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4970 which cases we can't do this. */
4971 if (simple_operand_p (lhs))
4972 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4973 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4974 type, op0, op1);
4976 else if (!lang_hooks.decls.global_bindings_p ()
4977 && !CONTAINS_PLACEHOLDER_P (lhs))
4979 tree common = save_expr (lhs);
4981 if (0 != (lhs = build_range_check (loc, type, common,
4982 or_op ? ! in0_p : in0_p,
4983 low0, high0))
4984 && (0 != (rhs = build_range_check (loc, type, common,
4985 or_op ? ! in1_p : in1_p,
4986 low1, high1))))
4988 if (strict_overflow_p)
4989 fold_overflow_warning (warnmsg,
4990 WARN_STRICT_OVERFLOW_COMPARISON);
4991 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4992 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4993 type, lhs, rhs);
4998 return 0;
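/* Illustrative sketch, not part of this file: the canonical payoff
   of this routine is folding a short-circuit character-class test
   into a single unsigned comparison.  */

static int
example_is_digit (char ch)
{
  /* ch >= '0' && ch <= '9' becomes one range check.  */
  return (unsigned char) (ch - '0') <= 9;
}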
5001 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5002 bit value. Arrange things so the extra bits will be set to zero if and
5003 only if C is sign-extended to its full width. If MASK is nonzero,
5004 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5006 static tree
5007 unextend (tree c, int p, int unsignedp, tree mask)
5009 tree type = TREE_TYPE (c);
5010 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5011 tree temp;
5013 if (p == modesize || unsignedp)
5014 return c;
5016 /* We work by getting just the sign bit into the low-order bit, then
5017 into the high-order bit, then sign-extend. We then XOR that value
5018 with C. */
5019 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5021 /* We must use a signed type in order to get an arithmetic right shift.
5022 However, we must also avoid introducing accidental overflows, so that
5023 a subsequent call to integer_zerop will work. Hence we must
5024 do the type conversion here. At this point, the constant is either
5025 zero or one, and the conversion to a signed type can never overflow.
5026 We could get an overflow if this conversion is done anywhere else. */
5027 if (TYPE_UNSIGNED (type))
5028 temp = fold_convert (signed_type_for (type), temp);
5030 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5031 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5032 if (mask != 0)
5033 temp = const_binop (BIT_AND_EXPR, temp,
5034 fold_convert (TREE_TYPE (c), mask));
5035 /* If necessary, convert the type back to match the type of C. */
5036 if (TYPE_UNSIGNED (type))
5037 temp = fold_convert (type, temp);
5039 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
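/* Illustrative sketch, not part of this file: the shift pair above
   replicates the sign bit of a P-bit value across the bits above it;
   XORing that with C zeroes the extra bits exactly when C was
   already sign-extended.  For P == 8 in a 32-bit mode, assuming an
   arithmetic right shift on int (as GCC provides):  */

static int
example_sign_extended_p (int c)
{
  int sign = (c >> 7) & 1;                              /* sign bit in the low-order bit */
  int temp = (int) ((unsigned int) sign << 31) >> 23;   /* replicated into bits 8..31 */
  return ((c ^ temp) >> 8) == 0;                        /* extra bits zero iff sign-extended */
}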
5042 /* For an expression that has the form
5043 (A && B) || ~B
5044 or
5045 (A || B) && ~B,
5046 we can drop one of the inner expressions and simplify to
5047 A || ~B
5048 or
5049 A && ~B
5050 LOC is the location of the resulting expression. OP is the inner
5051 logical operation; the left-hand side in the examples above, while CMPOP
5052 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5053 removing a condition that guards another, as in
5054 (A != NULL && A->...) || A == NULL
5055 which we must not transform. If RHS_ONLY is true, only eliminate the
5056 right-most operand of the inner logical operation. */
5058 static tree
5059 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5060 bool rhs_only)
5062 tree type = TREE_TYPE (cmpop);
5063 enum tree_code code = TREE_CODE (cmpop);
5064 enum tree_code truthop_code = TREE_CODE (op);
5065 tree lhs = TREE_OPERAND (op, 0);
5066 tree rhs = TREE_OPERAND (op, 1);
5067 tree orig_lhs = lhs, orig_rhs = rhs;
5068 enum tree_code rhs_code = TREE_CODE (rhs);
5069 enum tree_code lhs_code = TREE_CODE (lhs);
5070 enum tree_code inv_code;
5072 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5073 return NULL_TREE;
5075 if (TREE_CODE_CLASS (code) != tcc_comparison)
5076 return NULL_TREE;
5078 if (rhs_code == truthop_code)
5080 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5081 if (newrhs != NULL_TREE)
5083 rhs = newrhs;
5084 rhs_code = TREE_CODE (rhs);
5087 if (lhs_code == truthop_code && !rhs_only)
5089 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5090 if (newlhs != NULL_TREE)
5092 lhs = newlhs;
5093 lhs_code = TREE_CODE (lhs);
5097 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5098 if (inv_code == rhs_code
5099 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5100 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5101 return lhs;
5102 if (!rhs_only && inv_code == lhs_code
5103 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5104 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5105 return rhs;
5106 if (rhs != orig_rhs || lhs != orig_lhs)
5107 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5108 lhs, rhs);
5109 return NULL_TREE;
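/* Illustrative sketch, not part of this file: the simplification
   performed above, spelled out for plain ints.  */

static int
example_opposite_arm (int a, int b)
{
  /* (a && b) || !b simplifies to a || !b; dropping the inner B is
     only safe because !b is its exact inverse.  */
  return ((a && b) || !b) == (a || !b);   /* always 1 */
}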
5112 /* Find ways of folding logical expressions of LHS and RHS:
5113 Try to merge two comparisons to the same innermost item.
5114 Look for range tests like "ch >= '0' && ch <= '9'".
5115 Look for combinations of simple terms on machines with expensive branches
5116 and evaluate the RHS unconditionally.
5118 For example, if we have p->a == 2 && p->b == 4 and we can make an
5119 object large enough to span both A and B, we can do this with a comparison
5120 against the object ANDed with a mask.
5122 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5123 operations to do this with one comparison.
5125 We check for both normal comparisons and the BIT_AND_EXPRs made by
5126 this function and the one above.
5128 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5129 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5131 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5132 two operands.
5134 We return the simplified tree or 0 if no optimization is possible. */
5136 static tree
5137 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5138 tree lhs, tree rhs)
5140 /* If this is the "or" of two comparisons, we can do something if
5141 the comparisons are NE_EXPR. If this is the "and", we can do something
5142 if the comparisons are EQ_EXPR. I.e.,
5143 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5145 WANTED_CODE is this operation code. For single bit fields, we can
5146 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5147 comparison for one-bit fields. */
5149 enum tree_code wanted_code;
5150 enum tree_code lcode, rcode;
5151 tree ll_arg, lr_arg, rl_arg, rr_arg;
5152 tree ll_inner, lr_inner, rl_inner, rr_inner;
5153 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5154 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5155 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5156 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5157 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5158 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5159 machine_mode lnmode, rnmode;
5160 tree ll_mask, lr_mask, rl_mask, rr_mask;
5161 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5162 tree l_const, r_const;
5163 tree lntype, rntype, result;
5164 HOST_WIDE_INT first_bit, end_bit;
5165 int volatilep;
5167 /* Start by getting the comparison codes. Fail if anything is volatile.
5168 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5169 it were surrounded with a NE_EXPR. */
5171 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5172 return 0;
5174 lcode = TREE_CODE (lhs);
5175 rcode = TREE_CODE (rhs);
5177 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5179 lhs = build2 (NE_EXPR, truth_type, lhs,
5180 build_int_cst (TREE_TYPE (lhs), 0));
5181 lcode = NE_EXPR;
5184 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5186 rhs = build2 (NE_EXPR, truth_type, rhs,
5187 build_int_cst (TREE_TYPE (rhs), 0));
5188 rcode = NE_EXPR;
5191 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5192 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5193 return 0;
5195 ll_arg = TREE_OPERAND (lhs, 0);
5196 lr_arg = TREE_OPERAND (lhs, 1);
5197 rl_arg = TREE_OPERAND (rhs, 0);
5198 rr_arg = TREE_OPERAND (rhs, 1);
5200 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5201 if (simple_operand_p (ll_arg)
5202 && simple_operand_p (lr_arg))
5204 if (operand_equal_p (ll_arg, rl_arg, 0)
5205 && operand_equal_p (lr_arg, rr_arg, 0))
5207 result = combine_comparisons (loc, code, lcode, rcode,
5208 truth_type, ll_arg, lr_arg);
5209 if (result)
5210 return result;
5212 else if (operand_equal_p (ll_arg, rr_arg, 0)
5213 && operand_equal_p (lr_arg, rl_arg, 0))
5215 result = combine_comparisons (loc, code, lcode,
5216 swap_tree_comparison (rcode),
5217 truth_type, ll_arg, lr_arg);
5218 if (result)
5219 return result;
5223 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5224 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5226 /* If the RHS can be evaluated unconditionally and its operands are
5227 simple, it wins to evaluate the RHS unconditionally on machines
5228 with expensive branches. In this case, this isn't a comparison
5229 that can be merged. */
5231 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5232 false) >= 2
5233 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5234 && simple_operand_p (rl_arg)
5235 && simple_operand_p (rr_arg))
5237 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5238 if (code == TRUTH_OR_EXPR
5239 && lcode == NE_EXPR && integer_zerop (lr_arg)
5240 && rcode == NE_EXPR && integer_zerop (rr_arg)
5241 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5242 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5243 return build2_loc (loc, NE_EXPR, truth_type,
5244 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5245 ll_arg, rl_arg),
5246 build_int_cst (TREE_TYPE (ll_arg), 0));
5248 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5249 if (code == TRUTH_AND_EXPR
5250 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5251 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5252 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5253 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5254 return build2_loc (loc, EQ_EXPR, truth_type,
5255 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5256 ll_arg, rl_arg),
5257 build_int_cst (TREE_TYPE (ll_arg), 0));
5260 /* See if the comparisons can be merged. Then get all the parameters for
5261 each side. */
5263 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5264 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5265 return 0;
5267 volatilep = 0;
5268 ll_inner = decode_field_reference (loc, ll_arg,
5269 &ll_bitsize, &ll_bitpos, &ll_mode,
5270 &ll_unsignedp, &volatilep, &ll_mask,
5271 &ll_and_mask);
5272 lr_inner = decode_field_reference (loc, lr_arg,
5273 &lr_bitsize, &lr_bitpos, &lr_mode,
5274 &lr_unsignedp, &volatilep, &lr_mask,
5275 &lr_and_mask);
5276 rl_inner = decode_field_reference (loc, rl_arg,
5277 &rl_bitsize, &rl_bitpos, &rl_mode,
5278 &rl_unsignedp, &volatilep, &rl_mask,
5279 &rl_and_mask);
5280 rr_inner = decode_field_reference (loc, rr_arg,
5281 &rr_bitsize, &rr_bitpos, &rr_mode,
5282 &rr_unsignedp, &volatilep, &rr_mask,
5283 &rr_and_mask);
5285 /* The inner operation on the lhs of each comparison must be the
5286 same if we are to be able to do anything.
5287 Then see if we have constants. If not, the same must be true for
5288 the rhs's. */
5289 if (volatilep || ll_inner == 0 || rl_inner == 0
5290 || ! operand_equal_p (ll_inner, rl_inner, 0))
5291 return 0;
5293 if (TREE_CODE (lr_arg) == INTEGER_CST
5294 && TREE_CODE (rr_arg) == INTEGER_CST)
5295 l_const = lr_arg, r_const = rr_arg;
5296 else if (lr_inner == 0 || rr_inner == 0
5297 || ! operand_equal_p (lr_inner, rr_inner, 0))
5298 return 0;
5299 else
5300 l_const = r_const = 0;
5302 /* If either comparison code is not correct for our logical operation,
5303 fail. However, we can convert a one-bit comparison against zero into
5304 the opposite comparison against that bit being set in the field. */
5306 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5307 if (lcode != wanted_code)
5309 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5311 /* Make the left operand unsigned, since we are only interested
5312 in the value of one bit. Otherwise we are doing the wrong
5313 thing below. */
5314 ll_unsignedp = 1;
5315 l_const = ll_mask;
5317 else
5318 return 0;
5321 /* This is analogous to the code for l_const above. */
5322 if (rcode != wanted_code)
5324 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5326 rl_unsignedp = 1;
5327 r_const = rl_mask;
5329 else
5330 return 0;
5333 /* See if we can find a mode that contains both fields being compared on
5334 the left. If we can't, fail. Otherwise, update all constants and masks
5335 to be relative to a field of that size. */
5336 first_bit = MIN (ll_bitpos, rl_bitpos);
5337 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5338 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5339 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5340 volatilep);
5341 if (lnmode == VOIDmode)
5342 return 0;
5344 lnbitsize = GET_MODE_BITSIZE (lnmode);
5345 lnbitpos = first_bit & ~ (lnbitsize - 1);
5346 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5347 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5349 if (BYTES_BIG_ENDIAN)
5351 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5352 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5355 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5356 size_int (xll_bitpos));
5357 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5358 size_int (xrl_bitpos));
5360 if (l_const)
5362 l_const = fold_convert_loc (loc, lntype, l_const);
5363 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5364 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5365 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5366 fold_build1_loc (loc, BIT_NOT_EXPR,
5367 lntype, ll_mask))))
5369 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5371 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5374 if (r_const)
5376 r_const = fold_convert_loc (loc, lntype, r_const);
5377 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5378 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5379 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5380 fold_build1_loc (loc, BIT_NOT_EXPR,
5381 lntype, rl_mask))))
5383 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5385 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5389 /* If the right sides are not constant, do the same for them. Also,
5390 disallow this optimization if a size or signedness mismatch occurs
5391 between the left and right sides. */
5392 if (l_const == 0)
5394 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5395 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5396 /* Make sure the two fields on the right
5397 correspond to the left without being swapped. */
5398 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5399 return 0;
5401 first_bit = MIN (lr_bitpos, rr_bitpos);
5402 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5403 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5404 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5405 volatilep);
5406 if (rnmode == VOIDmode)
5407 return 0;
5409 rnbitsize = GET_MODE_BITSIZE (rnmode);
5410 rnbitpos = first_bit & ~ (rnbitsize - 1);
5411 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5412 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5414 if (BYTES_BIG_ENDIAN)
5416 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5417 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5420 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5421 rntype, lr_mask),
5422 size_int (xlr_bitpos));
5423 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5424 rntype, rr_mask),
5425 size_int (xrr_bitpos));
5427 /* Make a mask that corresponds to both fields being compared.
5428 Do this for both items being compared. If the operands are the
5429 same size and the bits being compared are in the same position
5430 then we can do this by masking both and comparing the masked
5431 results. */
5432 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5433 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5434 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5436 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5437 ll_unsignedp || rl_unsignedp);
5438 if (! all_ones_mask_p (ll_mask, lnbitsize))
5439 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5441 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5442 lr_unsignedp || rr_unsignedp);
5443 if (! all_ones_mask_p (lr_mask, rnbitsize))
5444 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5446 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5449 /* There is still another way we can do something: If both pairs of
5450 fields being compared are adjacent, we may be able to make a wider
5451 field containing them both.
5453 Note that we still must mask the lhs/rhs expressions. Furthermore,
5454 the mask must be shifted to account for the shift done by
5455 make_bit_field_ref. */
5456 if ((ll_bitsize + ll_bitpos == rl_bitpos
5457 && lr_bitsize + lr_bitpos == rr_bitpos)
5458 || (ll_bitpos == rl_bitpos + rl_bitsize
5459 && lr_bitpos == rr_bitpos + rr_bitsize))
5461 tree type;
5463 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5464 ll_bitsize + rl_bitsize,
5465 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5466 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5467 lr_bitsize + rr_bitsize,
5468 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5470 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5471 size_int (MIN (xll_bitpos, xrl_bitpos)));
5472 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5473 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5475 /* Convert to the smaller type before masking out unwanted bits. */
5476 type = lntype;
5477 if (lntype != rntype)
5479 if (lnbitsize > rnbitsize)
5481 lhs = fold_convert_loc (loc, rntype, lhs);
5482 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5483 type = rntype;
5485 else if (lnbitsize < rnbitsize)
5487 rhs = fold_convert_loc (loc, lntype, rhs);
5488 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5489 type = lntype;
5493 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5494 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5496 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5497 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5499 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5502 return 0;
5505 /* Handle the case of comparisons with constants. If there is something in
5506 common between the masks, those bits of the constants must be the same.
5507 If not, the condition is always false. Test for this to avoid generating
5508 incorrect code below. */
5509 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5510 if (! integer_zerop (result)
5511 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5512 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5514 if (wanted_code == NE_EXPR)
5516 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5517 return constant_boolean_node (true, truth_type);
5519 else
5521 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5522 return constant_boolean_node (false, truth_type);
5526 /* Construct the expression we will return. First get the component
5527 reference we will make. Unless the mask is all ones the width of
5528 that field, perform the mask operation. Then compare with the
5529 merged constant. */
5530 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5531 ll_unsignedp || rl_unsignedp);
5533 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5534 if (! all_ones_mask_p (ll_mask, lnbitsize))
5535 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5537 return build2_loc (loc, wanted_code, truth_type, result,
5538 const_binop (BIT_IOR_EXPR, l_const, r_const));
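 /* Editor's illustration (a sketch, not part of GCC): what the bit-field
 comparison merging above achieves at the source level. Given adjacent
 bit-fields

 struct s { unsigned a : 4; unsigned b : 4; };

 a test such as s.a == 3 && s.b == 5 can be loaded as one byte and
 folded to a single masked compare, conceptually

 (*(unsigned char *) &s) == ((5 << 4) | 3)

 assuming a little-endian bit-field layout; the constant is the merged
 l_const/r_const, and since the mask covers the whole field the
 BIT_AND_EXPR is omitted. Bit positions on big-endian targets are
 handled by the BYTES_BIG_ENDIAN adjustments above. */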
5541 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5542 constant. */
5544 static tree
5545 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5546 tree op0, tree op1)
5548 tree arg0 = op0;
5549 enum tree_code op_code;
5550 tree comp_const;
5551 tree minmax_const;
5552 int consts_equal, consts_lt;
5553 tree inner;
5555 STRIP_SIGN_NOPS (arg0);
5557 op_code = TREE_CODE (arg0);
5558 minmax_const = TREE_OPERAND (arg0, 1);
5559 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5560 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5561 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5562 inner = TREE_OPERAND (arg0, 0);
5564 /* If something does not permit us to optimize, return the original tree. */
5565 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5566 || TREE_CODE (comp_const) != INTEGER_CST
5567 || TREE_OVERFLOW (comp_const)
5568 || TREE_CODE (minmax_const) != INTEGER_CST
5569 || TREE_OVERFLOW (minmax_const))
5570 return NULL_TREE;
5572 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5573 and GT_EXPR, doing the rest with recursive calls using logical
5574 simplifications. */
5575 switch (code)
5577 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5579 tree tem
5580 = optimize_minmax_comparison (loc,
5581 invert_tree_comparison (code, false),
5582 type, op0, op1);
5583 if (tem)
5584 return invert_truthvalue_loc (loc, tem);
5585 return NULL_TREE;
5588 case GE_EXPR:
5589 return
5590 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5591 optimize_minmax_comparison
5592 (loc, EQ_EXPR, type, arg0, comp_const),
5593 optimize_minmax_comparison
5594 (loc, GT_EXPR, type, arg0, comp_const));
5596 case EQ_EXPR:
5597 if (op_code == MAX_EXPR && consts_equal)
5598 /* MAX (X, 0) == 0 -> X <= 0 */
5599 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5601 else if (op_code == MAX_EXPR && consts_lt)
5602 /* MAX (X, 0) == 5 -> X == 5 */
5603 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5605 else if (op_code == MAX_EXPR)
5606 /* MAX (X, 0) == -1 -> false */
5607 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5609 else if (consts_equal)
5610 /* MIN (X, 0) == 0 -> X >= 0 */
5611 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5613 else if (consts_lt)
5614 /* MIN (X, 0) == 5 -> false */
5615 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5617 else
5618 /* MIN (X, 0) == -1 -> X == -1 */
5619 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5621 case GT_EXPR:
5622 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5623 /* MAX (X, 0) > 0 -> X > 0
5624 MAX (X, 0) > 5 -> X > 5 */
5625 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5627 else if (op_code == MAX_EXPR)
5628 /* MAX (X, 0) > -1 -> true */
5629 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5631 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5632 /* MIN (X, 0) > 0 -> false
5633 MIN (X, 0) > 5 -> false */
5634 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5636 else
5637 /* MIN (X, 0) > -1 -> X > -1 */
5638 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5640 default:
5641 return NULL_TREE;
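 /* Editor's illustration (not part of GCC): the effect of the folds
 above on user code. For signed int x,

 MAX (x, 10) > 10 folds to x > 10
 MAX (x, 10) > 9 folds to 1 (always true)
 MIN (x, 10) == 12 folds to 0 (always false)

 The NE/LT/LE and GE cases are derived from EQ/GT by inverting or
 OR-ing the results, as the recursive calls above show. */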
5645 /* T is an integer expression that is being multiplied, divided, or taken a
5646 modulus (CODE says which and what kind of divide or modulus) by a
5647 constant C. See if we can eliminate that operation by folding it with
5648 other operations already in T. WIDE_TYPE, if non-null, is a type that
5649 should be used for the computation if wider than our type.
5651 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5652 (X * 2) + (Y * 4). We must, however, be assured that either the original
5653 expression would not overflow or that overflow is undefined for the type
5654 in the language in question.
5656 If we return a non-null expression, it is an equivalent form of the
5657 original computation, but need not be in the original type.
5659 We set *STRICT_OVERFLOW_P to true if the return value depends on
5660 signed overflow being undefined. Otherwise we do not change
5661 *STRICT_OVERFLOW_P. */
5663 static tree
5664 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5665 bool *strict_overflow_p)
5667 /* To avoid exponential search depth, refuse to allow recursion past
5668 three levels. Beyond that (1) it's highly unlikely that we'll find
5669 something interesting and (2) we've probably processed it before
5670 when we built the inner expression. */
5672 static int depth;
5673 tree ret;
5675 if (depth > 3)
5676 return NULL;
5678 depth++;
5679 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5680 depth--;
5682 return ret;
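 /* Editor's worked example (not part of GCC): why the overflow caveat
 in the comment above matters. With 32-bit unsigned x,

 (x * 8 + 16) / 4

 may not be rewritten as x * 2 + 4: for x = 0x20000000 the product
 x * 8 wraps to 0, so the left side is 4 while the right side is
 0x40000004. The rewrite is therefore done only when the original
 expression provably does not overflow, or when overflow is undefined
 for the type, which is what *STRICT_OVERFLOW_P records. */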
5685 static tree
5686 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5687 bool *strict_overflow_p)
5689 tree type = TREE_TYPE (t);
5690 enum tree_code tcode = TREE_CODE (t);
5691 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5692 > GET_MODE_SIZE (TYPE_MODE (type)))
5693 ? wide_type : type);
5694 tree t1, t2;
5695 int same_p = tcode == code;
5696 tree op0 = NULL_TREE, op1 = NULL_TREE;
5697 bool sub_strict_overflow_p;
5699 /* Don't deal with constants of zero here; they confuse the code below. */
5700 if (integer_zerop (c))
5701 return NULL_TREE;
5703 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5704 op0 = TREE_OPERAND (t, 0);
5706 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5707 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5709 /* Note that we need not handle conditional operations here since fold
5710 already handles those cases. So just do arithmetic here. */
5711 switch (tcode)
5713 case INTEGER_CST:
5714 /* For a constant, we can always simplify if we are a multiply
5715 or (for divide and modulus) if it is a multiple of our constant. */
5716 if (code == MULT_EXPR
5717 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5718 return const_binop (code, fold_convert (ctype, t),
5719 fold_convert (ctype, c));
5720 break;
5722 CASE_CONVERT: case NON_LVALUE_EXPR:
5723 /* If op0 is an expression ... */
5724 if ((COMPARISON_CLASS_P (op0)
5725 || UNARY_CLASS_P (op0)
5726 || BINARY_CLASS_P (op0)
5727 || VL_EXP_CLASS_P (op0)
5728 || EXPRESSION_CLASS_P (op0))
5729 /* ... and has wrapping overflow, and its type is smaller
5730 than ctype, then we cannot pass through as widening. */
5731 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5732 && (TYPE_PRECISION (ctype)
5733 > TYPE_PRECISION (TREE_TYPE (op0))))
5734 /* ... or this is a truncation (t is narrower than op0),
5735 then we cannot pass through this narrowing. */
5736 || (TYPE_PRECISION (type)
5737 < TYPE_PRECISION (TREE_TYPE (op0)))
5738 /* ... or signedness changes for division or modulus,
5739 then we cannot pass through this conversion. */
5740 || (code != MULT_EXPR
5741 && (TYPE_UNSIGNED (ctype)
5742 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5743 /* ... or has undefined overflow while the type it is
5744 converted to has not, we cannot do the operation in the inner
5745 type as that would introduce undefined overflow. */
5746 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5747 && !TYPE_OVERFLOW_UNDEFINED (type))))
5748 break;
5750 /* Pass the constant down and see if we can make a simplification. If
5751 we can, replace this expression with the inner simplification for
5752 possible later conversion to our or some other type. */
5753 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5754 && TREE_CODE (t2) == INTEGER_CST
5755 && !TREE_OVERFLOW (t2)
5756 && (0 != (t1 = extract_muldiv (op0, t2, code,
5757 code == MULT_EXPR
5758 ? ctype : NULL_TREE,
5759 strict_overflow_p))))
5760 return t1;
5761 break;
5763 case ABS_EXPR:
5764 /* If widening the type changes it from signed to unsigned, then we
5765 must avoid building ABS_EXPR itself as unsigned. */
5766 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5768 tree cstype = (*signed_type_for) (ctype);
5769 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5770 != 0)
5772 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5773 return fold_convert (ctype, t1);
5775 break;
5777 /* If the constant is negative, we cannot simplify this. */
5778 if (tree_int_cst_sgn (c) == -1)
5779 break;
5780 /* FALLTHROUGH */
5781 case NEGATE_EXPR:
5782 /* For division and modulus, type can't be unsigned, as e.g.
5783 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5784 For signed types, even with wrapping overflow, this is fine. */
5785 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5786 break;
5787 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5788 != 0)
5789 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5790 break;
5792 case MIN_EXPR: case MAX_EXPR:
5793 /* If widening the type changes the signedness, then we can't perform
5794 this optimization as that changes the result. */
5795 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5796 break;
5798 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5799 sub_strict_overflow_p = false;
5800 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5801 &sub_strict_overflow_p)) != 0
5802 && (t2 = extract_muldiv (op1, c, code, wide_type,
5803 &sub_strict_overflow_p)) != 0)
5805 if (tree_int_cst_sgn (c) < 0)
5806 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5807 if (sub_strict_overflow_p)
5808 *strict_overflow_p = true;
5809 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5810 fold_convert (ctype, t2));
5812 break;
5814 case LSHIFT_EXPR: case RSHIFT_EXPR:
5815 /* If the second operand is constant, this is a multiplication
5816 or floor division by a power of two, so we can treat it that
5817 way unless the multiplier or divisor overflows. Signed
5818 left-shift overflow is implementation-defined rather than
5819 undefined in C90, so do not convert signed left shift into
5820 multiplication. */
5821 if (TREE_CODE (op1) == INTEGER_CST
5822 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5823 /* const_binop may not detect overflow correctly,
5824 so check for it explicitly here. */
5825 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5826 && 0 != (t1 = fold_convert (ctype,
5827 const_binop (LSHIFT_EXPR,
5828 size_one_node,
5829 op1)))
5830 && !TREE_OVERFLOW (t1))
5831 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5832 ? MULT_EXPR : FLOOR_DIV_EXPR,
5833 ctype,
5834 fold_convert (ctype, op0),
5835 t1),
5836 c, code, wide_type, strict_overflow_p);
5837 break;
5839 case PLUS_EXPR: case MINUS_EXPR:
5840 /* See if we can eliminate the operation on both sides. If we can, we
5841 can return a new PLUS or MINUS. If we can't, the only remaining
5842 cases where we can do anything are if the second operand is a
5843 constant. */
5844 sub_strict_overflow_p = false;
5845 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5846 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5847 if (t1 != 0 && t2 != 0
5848 && (code == MULT_EXPR
5849 /* If not multiplication, we can only do this if both operands
5850 are divisible by c. */
5851 || (multiple_of_p (ctype, op0, c)
5852 && multiple_of_p (ctype, op1, c))))
5854 if (sub_strict_overflow_p)
5855 *strict_overflow_p = true;
5856 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5857 fold_convert (ctype, t2));
5860 /* If this was a subtraction, negate OP1 and set it to be an addition.
5861 This simplifies the logic below. */
5862 if (tcode == MINUS_EXPR)
5864 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5865 /* If OP1 was not easily negatable, the constant may be OP0. */
5866 if (TREE_CODE (op0) == INTEGER_CST)
5868 tree tem = op0;
5869 op0 = op1;
5870 op1 = tem;
5871 tem = t1;
5872 t1 = t2;
5873 t2 = tem;
5877 if (TREE_CODE (op1) != INTEGER_CST)
5878 break;
5880 /* If either OP1 or C are negative, this optimization is not safe for
5881 some of the division and remainder types while for others we need
5882 to change the code. */
5883 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5885 if (code == CEIL_DIV_EXPR)
5886 code = FLOOR_DIV_EXPR;
5887 else if (code == FLOOR_DIV_EXPR)
5888 code = CEIL_DIV_EXPR;
5889 else if (code != MULT_EXPR
5890 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5891 break;
5894 /* If it's a multiply or a division/modulus operation of a multiple
5895 of our constant, do the operation and verify it doesn't overflow. */
5896 if (code == MULT_EXPR
5897 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5899 op1 = const_binop (code, fold_convert (ctype, op1),
5900 fold_convert (ctype, c));
5901 /* We allow the constant to overflow with wrapping semantics. */
5902 if (op1 == 0
5903 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5904 break;
5906 else
5907 break;
5909 /* If we have an unsigned type, we cannot widen the operation since it
5910 will change the result if the original computation overflowed. */
5911 if (TYPE_UNSIGNED (ctype) && ctype != type)
5912 break;
5914 /* If we were able to eliminate our operation from the first side,
5915 apply our operation to the second side and reform the PLUS. */
5916 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5917 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5919 /* The last case is if we are a multiply. In that case, we can
5920 apply the distributive law to commute the multiply and addition
5921 if the multiplication of the constants doesn't overflow
5922 and overflow is defined. With undefined overflow
5923 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5924 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5925 return fold_build2 (tcode, ctype,
5926 fold_build2 (code, ctype,
5927 fold_convert (ctype, op0),
5928 fold_convert (ctype, c)),
5929 op1);
5931 break;
5933 case MULT_EXPR:
5934 /* We have a special case here if we are doing something like
5935 (C * 8) % 4 since we know that's zero. */
5936 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5937 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5938 /* If the multiplication can overflow we cannot optimize this. */
5939 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5940 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5941 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5943 *strict_overflow_p = true;
5944 return omit_one_operand (type, integer_zero_node, op0);
5947 /* ... fall through ... */
5949 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5950 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5951 /* If we can extract our operation from the LHS, do so and return a
5952 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5953 do something only if the second operand is a constant. */
5954 if (same_p
5955 && (t1 = extract_muldiv (op0, c, code, wide_type,
5956 strict_overflow_p)) != 0)
5957 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5958 fold_convert (ctype, op1));
5959 else if (tcode == MULT_EXPR && code == MULT_EXPR
5960 && (t1 = extract_muldiv (op1, c, code, wide_type,
5961 strict_overflow_p)) != 0)
5962 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5963 fold_convert (ctype, t1));
5964 else if (TREE_CODE (op1) != INTEGER_CST)
5965 return 0;
5967 /* If these are the same operation types, we can associate them
5968 assuming no overflow. */
5969 if (tcode == code)
5971 bool overflow_p = false;
5972 bool overflow_mul_p;
5973 signop sign = TYPE_SIGN (ctype);
5974 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5975 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5976 if (overflow_mul_p
5977 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5978 overflow_p = true;
5979 if (!overflow_p)
5980 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5981 wide_int_to_tree (ctype, mul));
5984 /* If these operations "cancel" each other, we have the main
5985 optimizations of this pass, which occur when either constant is a
5986 multiple of the other, in which case we replace this with an
5987 operation of either CODE or TCODE.
5989 If we have an unsigned type, we cannot do this since it will change
5990 the result if the original computation overflowed. */
5991 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5992 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5993 || (tcode == MULT_EXPR
5994 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5995 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5996 && code != MULT_EXPR)))
5998 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6000 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6001 *strict_overflow_p = true;
6002 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6003 fold_convert (ctype,
6004 const_binop (TRUNC_DIV_EXPR,
6005 op1, c)));
6007 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6009 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6010 *strict_overflow_p = true;
6011 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6012 fold_convert (ctype,
6013 const_binop (TRUNC_DIV_EXPR,
6014 c, op1)));
6017 break;
6019 default:
6020 break;
6023 return 0;
6026 /* Return a node which has the indicated constant VALUE (either 0 or
6027 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6028 and is of the indicated TYPE. */
6030 tree
6031 constant_boolean_node (bool value, tree type)
6033 if (type == integer_type_node)
6034 return value ? integer_one_node : integer_zero_node;
6035 else if (type == boolean_type_node)
6036 return value ? boolean_true_node : boolean_false_node;
6037 else if (TREE_CODE (type) == VECTOR_TYPE)
6038 return build_vector_from_val (type,
6039 build_int_cst (TREE_TYPE (type),
6040 value ? -1 : 0));
6041 else
6042 return fold_convert (type, value ? integer_one_node : integer_zero_node);
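 /* Editor's note (illustration, not part of GCC): for vector types the
 canonical "true" is all-ones per element, so for e.g. a four-element
 signed-int mask type, constant_boolean_node (true, type) yields
 { -1, -1, -1, -1 }, matching the result of vector comparisons. */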
6046 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6047 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6048 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6049 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6050 COND is the first argument to CODE; otherwise (as in the example
6051 given here), it is the second argument. TYPE is the type of the
6052 original expression. Return NULL_TREE if no simplification is
6053 possible. */
6055 static tree
6056 fold_binary_op_with_conditional_arg (location_t loc,
6057 enum tree_code code,
6058 tree type, tree op0, tree op1,
6059 tree cond, tree arg, int cond_first_p)
6061 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6062 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6063 tree test, true_value, false_value;
6064 tree lhs = NULL_TREE;
6065 tree rhs = NULL_TREE;
6066 enum tree_code cond_code = COND_EXPR;
6068 if (TREE_CODE (cond) == COND_EXPR
6069 || TREE_CODE (cond) == VEC_COND_EXPR)
6071 test = TREE_OPERAND (cond, 0);
6072 true_value = TREE_OPERAND (cond, 1);
6073 false_value = TREE_OPERAND (cond, 2);
6074 /* If this operand is an expression that throws, and hence has
6075 void type, then it does not make sense to try to perform a
6076 logical or arithmetic operation involving it. */
6077 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6078 lhs = true_value;
6079 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6080 rhs = false_value;
6082 else
6084 tree testtype = TREE_TYPE (cond);
6085 test = cond;
6086 true_value = constant_boolean_node (true, testtype);
6087 false_value = constant_boolean_node (false, testtype);
6090 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6091 cond_code = VEC_COND_EXPR;
6093 /* This transformation is only worthwhile if we don't have to wrap ARG
6094 in a SAVE_EXPR and the operation can be simplified without recursing
6095 on at least one of the branches once it is pushed inside the COND_EXPR. */
6096 if (!TREE_CONSTANT (arg)
6097 && (TREE_SIDE_EFFECTS (arg)
6098 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6099 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6100 return NULL_TREE;
6102 arg = fold_convert_loc (loc, arg_type, arg);
6103 if (lhs == 0)
6105 true_value = fold_convert_loc (loc, cond_type, true_value);
6106 if (cond_first_p)
6107 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6108 else
6109 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6111 if (rhs == 0)
6113 false_value = fold_convert_loc (loc, cond_type, false_value);
6114 if (cond_first_p)
6115 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6116 else
6117 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6120 /* Check that we have simplified at least one of the branches. */
6121 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6122 return NULL_TREE;
6124 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
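 /* Editor's illustration (not part of GCC): with a constant ARG the
 transformation above turns

 5 + (b ? x : y) into b ? (5 + x) : (5 + y)

 giving fold a chance to simplify each arm. For a non-constant ARG
 the checks above refuse the rewrite whenever ARG would have to be
 wrapped in a SAVE_EXPR or no arm could simplify. */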
6128 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6130 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6131 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6132 ADDEND is the same as X.
6134 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6135 and finite. The problematic cases are when X is zero, and its mode
6136 has signed zeros. In the case of rounding towards -infinity,
6137 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6138 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6140 bool
6141 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6143 if (!real_zerop (addend))
6144 return false;
6146 /* Don't allow the fold with -fsignaling-nans. */
6147 if (HONOR_SNANS (TYPE_MODE (type)))
6148 return false;
6150 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6151 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6152 return true;
6154 /* In a vector or complex, we would need to check the sign of all zeros. */
6155 if (TREE_CODE (addend) != REAL_CST)
6156 return false;
6158 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6159 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6160 negate = !negate;
6162 /* The mode has signed zeros, and we have to honor their sign.
6163 In this situation, there is only one case we can return true for.
6164 X - 0 is the same as X unless rounding towards -infinity is
6165 supported. */
6166 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
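 /* Editor's worked example (not part of GCC): why the sign of zero
 blocks the fold. Under the default round-to-nearest mode,

 -0.0 + 0.0 == +0.0,

 so X + 0.0 is not X when X is -0.0; but -0.0 - 0.0 == -0.0, so
 X - 0.0 is safe unless rounding towards -infinity is in effect,
 which is exactly the NEGATE && !HONOR_SIGN_DEPENDENT_ROUNDING
 condition above. */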
6169 /* Subroutine of fold() that checks comparisons of built-in math
6170 functions against real constants.
6172 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6173 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6174 is the type of the result and ARG0 and ARG1 are the operands of the
6175 comparison. ARG1 must be a TREE_REAL_CST.
6177 The function returns the constant folded tree if a simplification
6178 can be made, and NULL_TREE otherwise. */
6180 static tree
6181 fold_mathfn_compare (location_t loc,
6182 enum built_in_function fcode, enum tree_code code,
6183 tree type, tree arg0, tree arg1)
6185 REAL_VALUE_TYPE c;
6187 if (BUILTIN_SQRT_P (fcode))
6189 tree arg = CALL_EXPR_ARG (arg0, 0);
6190 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6192 c = TREE_REAL_CST (arg1);
6193 if (REAL_VALUE_NEGATIVE (c))
6195 /* sqrt(x) < y is always false, if y is negative. */
6196 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6197 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6199 /* sqrt(x) > y is always true, if y is negative and we
6200 don't care about NaNs, i.e. negative values of x. */
6201 if (code == NE_EXPR || !HONOR_NANS (mode))
6202 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6204 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6205 return fold_build2_loc (loc, GE_EXPR, type, arg,
6206 build_real (TREE_TYPE (arg), dconst0));
6208 else if (code == GT_EXPR || code == GE_EXPR)
6210 REAL_VALUE_TYPE c2;
6212 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6213 real_convert (&c2, mode, &c2);
6215 if (REAL_VALUE_ISINF (c2))
6217 /* sqrt(x) > y is x == +Inf, when y is very large. */
6218 if (HONOR_INFINITIES (mode))
6219 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6220 build_real (TREE_TYPE (arg), c2));
6222 /* sqrt(x) > y is always false, when y is very large
6223 and we don't care about infinities. */
6224 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6227 /* sqrt(x) > c is the same as x > c*c. */
6228 return fold_build2_loc (loc, code, type, arg,
6229 build_real (TREE_TYPE (arg), c2));
6231 else if (code == LT_EXPR || code == LE_EXPR)
6233 REAL_VALUE_TYPE c2;
6235 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6236 real_convert (&c2, mode, &c2);
6238 if (REAL_VALUE_ISINF (c2))
6240 /* sqrt(x) < y is always true, when y is a very large
6241 value and we don't care about NaNs or Infinities. */
6242 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6243 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6245 /* sqrt(x) < y is x != +Inf when y is very large and we
6246 don't care about NaNs. */
6247 if (! HONOR_NANS (mode))
6248 return fold_build2_loc (loc, NE_EXPR, type, arg,
6249 build_real (TREE_TYPE (arg), c2));
6251 /* sqrt(x) < y is x >= 0 when y is very large and we
6252 don't care about Infinities. */
6253 if (! HONOR_INFINITIES (mode))
6254 return fold_build2_loc (loc, GE_EXPR, type, arg,
6255 build_real (TREE_TYPE (arg), dconst0));
6257 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6258 arg = save_expr (arg);
6259 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6260 fold_build2_loc (loc, GE_EXPR, type, arg,
6261 build_real (TREE_TYPE (arg),
6262 dconst0)),
6263 fold_build2_loc (loc, NE_EXPR, type, arg,
6264 build_real (TREE_TYPE (arg),
6265 c2)));
6268 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6269 if (! HONOR_NANS (mode))
6270 return fold_build2_loc (loc, code, type, arg,
6271 build_real (TREE_TYPE (arg), c2));
6273 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6274 arg = save_expr (arg);
6275 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6276 fold_build2_loc (loc, GE_EXPR, type, arg,
6277 build_real (TREE_TYPE (arg),
6278 dconst0)),
6279 fold_build2_loc (loc, code, type, arg,
6280 build_real (TREE_TYPE (arg),
6281 c2)));
6285 return NULL_TREE;
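 /* Editor's illustration (not part of GCC): sample folds produced
 above when NaNs and infinities may be ignored (e.g. -ffast-math):

 sqrt (x) > 3.0 becomes x > 9.0
 sqrt (x) < 3.0 becomes x < 9.0
 sqrt (x) < -1.0 becomes 0 (always false)

 When NaNs must be honored, sqrt (x) < 3.0 instead becomes
 x >= 0.0 && x < 9.0, since sqrt of a negative yields NaN and all
 ordered comparisons with NaN are false. */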
6288 /* Subroutine of fold() that optimizes comparisons against Infinities,
6289 either +Inf or -Inf.
6291 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6292 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6293 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6295 The function returns the constant folded tree if a simplification
6296 can be made, and NULL_TREE otherwise. */
6298 static tree
6299 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6300 tree arg0, tree arg1)
6302 machine_mode mode;
6303 REAL_VALUE_TYPE max;
6304 tree temp;
6305 bool neg;
6307 mode = TYPE_MODE (TREE_TYPE (arg0));
6309 /* For negative infinity swap the sense of the comparison. */
6310 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6311 if (neg)
6312 code = swap_tree_comparison (code);
6314 switch (code)
6316 case GT_EXPR:
6317 /* x > +Inf is always false, if we ignore sNaNs. */
6318 if (HONOR_SNANS (mode))
6319 return NULL_TREE;
6320 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6322 case LE_EXPR:
6323 /* x <= +Inf is always true, if we don't care about NaNs. */
6324 if (! HONOR_NANS (mode))
6325 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6327 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6328 arg0 = save_expr (arg0);
6329 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6331 case EQ_EXPR:
6332 case GE_EXPR:
6333 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6334 real_maxval (&max, neg, mode);
6335 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6336 arg0, build_real (TREE_TYPE (arg0), max));
6338 case LT_EXPR:
6339 /* x < +Inf is always equal to x <= DBL_MAX. */
6340 real_maxval (&max, neg, mode);
6341 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6344 case NE_EXPR:
6345 /* x != +Inf is always equal to !(x > DBL_MAX). */
6346 real_maxval (&max, neg, mode);
6347 if (! HONOR_NANS (mode))
6348 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6349 arg0, build_real (TREE_TYPE (arg0), max));
6351 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6352 arg0, build_real (TREE_TYPE (arg0), max));
6353 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6355 default:
6356 break;
6359 return NULL_TREE;
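 /* Editor's illustration (not part of GCC): for IEEE double these
 folds read, e.g.,

 x < HUGE_VAL becomes x <= DBL_MAX
 x >= HUGE_VAL becomes x > DBL_MAX
 x <= HUGE_VAL becomes x == x (false only for NaN)

 and comparisons against -HUGE_VAL are handled by swapping the
 comparison sense first. */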
6362 /* Subroutine of fold() that optimizes comparisons of a division by
6363 a nonzero integer constant against an integer constant, i.e.
6364 X/C1 op C2.
6366 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6367 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6368 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6370 The function returns the constant folded tree if a simplification
6371 can be made, and NULL_TREE otherwise. */
6373 static tree
6374 fold_div_compare (location_t loc,
6375 enum tree_code code, tree type, tree arg0, tree arg1)
6377 tree prod, tmp, hi, lo;
6378 tree arg00 = TREE_OPERAND (arg0, 0);
6379 tree arg01 = TREE_OPERAND (arg0, 1);
6380 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6381 bool neg_overflow = false;
6382 bool overflow;
6384 /* We have to do this the hard way to detect unsigned overflow.
6385 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6386 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6387 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6388 neg_overflow = false;
6390 if (sign == UNSIGNED)
6392 tmp = int_const_binop (MINUS_EXPR, arg01,
6393 build_int_cst (TREE_TYPE (arg01), 1));
6394 lo = prod;
6396 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6397 val = wi::add (prod, tmp, sign, &overflow);
6398 hi = force_fit_type (TREE_TYPE (arg00), val,
6399 -1, overflow | TREE_OVERFLOW (prod));
6401 else if (tree_int_cst_sgn (arg01) >= 0)
6403 tmp = int_const_binop (MINUS_EXPR, arg01,
6404 build_int_cst (TREE_TYPE (arg01), 1));
6405 switch (tree_int_cst_sgn (arg1))
6407 case -1:
6408 neg_overflow = true;
6409 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6410 hi = prod;
6411 break;
6413 case 0:
6414 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6415 hi = tmp;
6416 break;
6418 case 1:
6419 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6420 lo = prod;
6421 break;
6423 default:
6424 gcc_unreachable ();
6427 else
6429 /* A negative divisor reverses the relational operators. */
6430 code = swap_tree_comparison (code);
6432 tmp = int_const_binop (PLUS_EXPR, arg01,
6433 build_int_cst (TREE_TYPE (arg01), 1));
6434 switch (tree_int_cst_sgn (arg1))
6436 case -1:
6437 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6438 lo = prod;
6439 break;
6441 case 0:
6442 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6443 lo = tmp;
6444 break;
6446 case 1:
6447 neg_overflow = true;
6448 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6449 hi = prod;
6450 break;
6452 default:
6453 gcc_unreachable ();
6457 switch (code)
6459 case EQ_EXPR:
6460 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6461 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6462 if (TREE_OVERFLOW (hi))
6463 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6464 if (TREE_OVERFLOW (lo))
6465 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6466 return build_range_check (loc, type, arg00, 1, lo, hi);
6468 case NE_EXPR:
6469 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6470 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6471 if (TREE_OVERFLOW (hi))
6472 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6473 if (TREE_OVERFLOW (lo))
6474 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6475 return build_range_check (loc, type, arg00, 0, lo, hi);
6477 case LT_EXPR:
6478 if (TREE_OVERFLOW (lo))
6480 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6481 return omit_one_operand_loc (loc, type, tmp, arg00);
6483 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6485 case LE_EXPR:
6486 if (TREE_OVERFLOW (hi))
6488 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6489 return omit_one_operand_loc (loc, type, tmp, arg00);
6491 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6493 case GT_EXPR:
6494 if (TREE_OVERFLOW (hi))
6496 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6497 return omit_one_operand_loc (loc, type, tmp, arg00);
6499 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6501 case GE_EXPR:
6502 if (TREE_OVERFLOW (lo))
6504 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6505 return omit_one_operand_loc (loc, type, tmp, arg00);
6507 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6509 default:
6510 break;
6513 return NULL_TREE;
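 /* Editor's worked example (not part of GCC): for signed int x,

 x / 4 == 3

 holds exactly for x in [12, 15], so it folds to the range check
 12 <= x && x <= 15 via build_range_check; x / 4 != 3 folds to the
 complementary check. Negative divisors swap the comparison sense,
 and the TREE_OVERFLOW tests above catch bounds that wrap. */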
6517 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6518 equality/inequality test, then return a simplified form of the test
6519 using a sign test. Otherwise return NULL. TYPE is the desired
6520 result type. */
6522 static tree
6523 fold_single_bit_test_into_sign_test (location_t loc,
6524 enum tree_code code, tree arg0, tree arg1,
6525 tree result_type)
6527 /* If this is testing a single bit, we can optimize the test. */
6528 if ((code == NE_EXPR || code == EQ_EXPR)
6529 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6530 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6532 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6533 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6534 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6536 if (arg00 != NULL_TREE
6537 /* This is only a win if casting to a signed type is cheap,
6538 i.e. when arg00's type is not a partial mode. */
6539 && TYPE_PRECISION (TREE_TYPE (arg00))
6540 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6542 tree stype = signed_type_for (TREE_TYPE (arg00));
6543 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6544 result_type,
6545 fold_convert_loc (loc, stype, arg00),
6546 build_int_cst (stype, 0));
6550 return NULL_TREE;
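 /* Editor's illustration (not part of GCC): when the tested bit is
 the sign bit, e.g. for 32-bit unsigned x,

 (x & 0x80000000) != 0 becomes (int) x < 0
 (x & 0x80000000) == 0 becomes (int) x >= 0

 trading a mask-and-compare for a sign test. */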
6553 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6554 equality/inequality test, then return a simplified form of
6555 the test using shifts and logical operations. Otherwise return
6556 NULL. TYPE is the desired result type. */
6558 tree
6559 fold_single_bit_test (location_t loc, enum tree_code code,
6560 tree arg0, tree arg1, tree result_type)
6562 /* If this is testing a single bit, we can optimize the test. */
6563 if ((code == NE_EXPR || code == EQ_EXPR)
6564 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6565 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6567 tree inner = TREE_OPERAND (arg0, 0);
6568 tree type = TREE_TYPE (arg0);
6569 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6570 machine_mode operand_mode = TYPE_MODE (type);
6571 int ops_unsigned;
6572 tree signed_type, unsigned_type, intermediate_type;
6573 tree tem, one;
6575 /* First, see if we can fold the single bit test into a sign-bit
6576 test. */
6577 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6578 result_type);
6579 if (tem)
6580 return tem;
6582 /* Otherwise we have (A & C) != 0 where C is a single bit,
6583 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6584 Similarly for (A & C) == 0. */
6586 /* If INNER is a right shift of a constant and it plus BITNUM does
6587 not overflow, adjust BITNUM and INNER. */
6588 if (TREE_CODE (inner) == RSHIFT_EXPR
6589 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6590 && bitnum < TYPE_PRECISION (type)
6591 && wi::ltu_p (TREE_OPERAND (inner, 1),
6592 TYPE_PRECISION (type) - bitnum))
6594 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6595 inner = TREE_OPERAND (inner, 0);
6598 /* If we are going to be able to omit the AND below, we must do our
6599 operations as unsigned. If we must use the AND, we have a choice.
6600 Normally unsigned is faster, but for some machines signed is. */
6601 #ifdef LOAD_EXTEND_OP
6602 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6603 && !flag_syntax_only) ? 0 : 1;
6604 #else
6605 ops_unsigned = 1;
6606 #endif
6608 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6609 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6610 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6611 inner = fold_convert_loc (loc, intermediate_type, inner);
6613 if (bitnum != 0)
6614 inner = build2 (RSHIFT_EXPR, intermediate_type,
6615 inner, size_int (bitnum));
6617 one = build_int_cst (intermediate_type, 1);
6619 if (code == EQ_EXPR)
6620 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6622 /* Put the AND last so it can combine with more things. */
6623 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6625 /* Make sure to return the proper type. */
6626 inner = fold_convert_loc (loc, result_type, inner);
6628 return inner;
6630 return NULL_TREE;
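 /* Editor's illustration (not part of GCC): for a non-sign bit the
 shift form above gives, e.g.,

 (x & 8) != 0 becomes (x >> 3) & 1
 (x & 8) == 0 becomes ((x >> 3) ^ 1) & 1

 with the AND placed last so later folds can combine it. */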
6633 /* Check whether we are allowed to reorder operands arg0 and arg1,
6634 such that the evaluation of arg1 occurs before arg0. */
6636 static bool
6637 reorder_operands_p (const_tree arg0, const_tree arg1)
6639 if (! flag_evaluation_order)
6640 return true;
6641 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6642 return true;
6643 return ! TREE_SIDE_EFFECTS (arg0)
6644 && ! TREE_SIDE_EFFECTS (arg1);
6647 /* Test whether it is preferable to swap two operands, ARG0 and
6648 ARG1, for example because ARG0 is an integer constant and ARG1
6649 isn't. If REORDER is true, only recommend swapping if we can
6650 evaluate the operands in reverse order. */
6652 bool
6653 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6655 if (CONSTANT_CLASS_P (arg1))
6656 return 0;
6657 if (CONSTANT_CLASS_P (arg0))
6658 return 1;
6660 STRIP_SIGN_NOPS (arg0);
6661 STRIP_SIGN_NOPS (arg1);
6663 if (TREE_CONSTANT (arg1))
6664 return 0;
6665 if (TREE_CONSTANT (arg0))
6666 return 1;
6668 if (reorder && flag_evaluation_order
6669 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6670 return 0;
6672 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6673 for commutative and comparison operators. Ensuring a canonical
6674 form allows the optimizers to find additional redundancies without
6675 having to explicitly check for both orderings. */
6676 if (TREE_CODE (arg0) == SSA_NAME
6677 && TREE_CODE (arg1) == SSA_NAME
6678 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6679 return 1;
6681 /* Put SSA_NAMEs last. */
6682 if (TREE_CODE (arg1) == SSA_NAME)
6683 return 0;
6684 if (TREE_CODE (arg0) == SSA_NAME)
6685 return 1;
6687 /* Put variables last. */
6688 if (DECL_P (arg1))
6689 return 0;
6690 if (DECL_P (arg0))
6691 return 1;
6693 return 0;
6696 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6697 ARG0 is extended to a wider type. */
6699 static tree
6700 fold_widened_comparison (location_t loc, enum tree_code code,
6701 tree type, tree arg0, tree arg1)
6703 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6704 tree arg1_unw;
6705 tree shorter_type, outer_type;
6706 tree min, max;
6707 bool above, below;
6709 if (arg0_unw == arg0)
6710 return NULL_TREE;
6711 shorter_type = TREE_TYPE (arg0_unw);
6713 #ifdef HAVE_canonicalize_funcptr_for_compare
6714 /* Disable this optimization if we're casting a function pointer
6715 type on targets that require function pointer canonicalization. */
6716 if (HAVE_canonicalize_funcptr_for_compare
6717 && TREE_CODE (shorter_type) == POINTER_TYPE
6718 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6719 return NULL_TREE;
6720 #endif
6722 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6723 return NULL_TREE;
6725 arg1_unw = get_unwidened (arg1, NULL_TREE);
6727 /* If possible, express the comparison in the shorter mode. */
6728 if ((code == EQ_EXPR || code == NE_EXPR
6729 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6730 && (TREE_TYPE (arg1_unw) == shorter_type
6731 || ((TYPE_PRECISION (shorter_type)
6732 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6733 && (TYPE_UNSIGNED (shorter_type)
6734 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6735 || (TREE_CODE (arg1_unw) == INTEGER_CST
6736 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6737 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6738 && int_fits_type_p (arg1_unw, shorter_type))))
6739 return fold_build2_loc (loc, code, type, arg0_unw,
6740 fold_convert_loc (loc, shorter_type, arg1_unw));
6742 if (TREE_CODE (arg1_unw) != INTEGER_CST
6743 || TREE_CODE (shorter_type) != INTEGER_TYPE
6744 || !int_fits_type_p (arg1_unw, shorter_type))
6745 return NULL_TREE;
6747 /* If we are comparing with an integer that does not fit into the range
6748 of the shorter type, the result is known. */
6749 outer_type = TREE_TYPE (arg1_unw);
6750 min = lower_bound_in_type (outer_type, shorter_type);
6751 max = upper_bound_in_type (outer_type, shorter_type);
6753 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6754 max, arg1_unw));
6755 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6756 arg1_unw, min));
6758 switch (code)
6760 case EQ_EXPR:
6761 if (above || below)
6762 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6763 break;
6765 case NE_EXPR:
6766 if (above || below)
6767 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6768 break;
6770 case LT_EXPR:
6771 case LE_EXPR:
6772 if (above)
6773 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6774 else if (below)
6775 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6777 case GT_EXPR:
6778 case GE_EXPR:
6779 if (above)
6780 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6781 else if (below)
6782 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6784 default:
6785 break;
6788 return NULL_TREE;
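 /* Editor's illustration (not part of GCC): with a short operand s
 widened for the comparison,

 (int) s == 100000 folds to 0 (always false)
 (int) s < 100000 folds to 1 (always true)
 (int) s == 1000 folds to s == (short) 1000

 since 100000 lies above the range of short, while 1000 fits and the
 comparison can be done in the narrower type. */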
6791 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6792 ARG0 just the signedness is changed. */
6794 static tree
6795 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6796 tree arg0, tree arg1)
6798 tree arg0_inner;
6799 tree inner_type, outer_type;
6801 if (!CONVERT_EXPR_P (arg0))
6802 return NULL_TREE;
6804 outer_type = TREE_TYPE (arg0);
6805 arg0_inner = TREE_OPERAND (arg0, 0);
6806 inner_type = TREE_TYPE (arg0_inner);
6808 #ifdef HAVE_canonicalize_funcptr_for_compare
6809 /* Disable this optimization if we're casting a function pointer
6810 type on targets that require function pointer canonicalization. */
6811 if (HAVE_canonicalize_funcptr_for_compare
6812 && TREE_CODE (inner_type) == POINTER_TYPE
6813 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6814 return NULL_TREE;
6815 #endif
6817 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6818 return NULL_TREE;
6820 if (TREE_CODE (arg1) != INTEGER_CST
6821 && !(CONVERT_EXPR_P (arg1)
6822 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6823 return NULL_TREE;
6825 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6826 && code != NE_EXPR
6827 && code != EQ_EXPR)
6828 return NULL_TREE;
6830 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6831 return NULL_TREE;
6833 if (TREE_CODE (arg1) == INTEGER_CST)
6834 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6835 TREE_OVERFLOW (arg1));
6836 else
6837 arg1 = fold_convert_loc (loc, inner_type, arg1);
6839 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6843 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6844 means A >= Y && A != MAX, but in this case we know that
6845 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6847 static tree
6848 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6850 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6852 if (TREE_CODE (bound) == LT_EXPR)
6853 a = TREE_OPERAND (bound, 0);
6854 else if (TREE_CODE (bound) == GT_EXPR)
6855 a = TREE_OPERAND (bound, 1);
6856 else
6857 return NULL_TREE;
6859 typea = TREE_TYPE (a);
6860 if (!INTEGRAL_TYPE_P (typea)
6861 && !POINTER_TYPE_P (typea))
6862 return NULL_TREE;
6864 if (TREE_CODE (ineq) == LT_EXPR)
6866 a1 = TREE_OPERAND (ineq, 1);
6867 y = TREE_OPERAND (ineq, 0);
6869 else if (TREE_CODE (ineq) == GT_EXPR)
6871 a1 = TREE_OPERAND (ineq, 0);
6872 y = TREE_OPERAND (ineq, 1);
6874 else
6875 return NULL_TREE;
6877 if (TREE_TYPE (a1) != typea)
6878 return NULL_TREE;
6880 if (POINTER_TYPE_P (typea))
6882 /* Convert the pointer types into integers before taking the difference. */
6883 tree ta = fold_convert_loc (loc, ssizetype, a);
6884 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6885 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6887 else
6888 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6890 if (!diff || !integer_onep (diff))
6891 return NULL_TREE;
6893 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6896 /* Fold a sum or difference of at least one multiplication.
6897 Returns the folded tree or NULL if no simplification could be made. */
6899 static tree
6900 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6901 tree arg0, tree arg1)
6903 tree arg00, arg01, arg10, arg11;
6904 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6906 /* (A * C) +- (B * C) -> (A+-B) * C.
6907 (A * C) +- A -> A * (C+-1).
6908 We are most concerned about the case where C is a constant,
6909 but other combinations show up during loop reduction. Since
6910 it is not difficult, try all four possibilities. */
6912 if (TREE_CODE (arg0) == MULT_EXPR)
6914 arg00 = TREE_OPERAND (arg0, 0);
6915 arg01 = TREE_OPERAND (arg0, 1);
6917 else if (TREE_CODE (arg0) == INTEGER_CST)
6919 arg00 = build_one_cst (type);
6920 arg01 = arg0;
6922 else
6924 /* We cannot generate constant 1 for fract. */
6925 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6926 return NULL_TREE;
6927 arg00 = arg0;
6928 arg01 = build_one_cst (type);
6930 if (TREE_CODE (arg1) == MULT_EXPR)
6932 arg10 = TREE_OPERAND (arg1, 0);
6933 arg11 = TREE_OPERAND (arg1, 1);
6935 else if (TREE_CODE (arg1) == INTEGER_CST)
6937 arg10 = build_one_cst (type);
6938 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6939 the purpose of this canonicalization. */
6940 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6941 && negate_expr_p (arg1)
6942 && code == PLUS_EXPR)
6944 arg11 = negate_expr (arg1);
6945 code = MINUS_EXPR;
6947 else
6948 arg11 = arg1;
6950 else
6952 /* We cannot generate constant 1 for fract. */
6953 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6954 return NULL_TREE;
6955 arg10 = arg1;
6956 arg11 = build_one_cst (type);
6958 same = NULL_TREE;
6960 if (operand_equal_p (arg01, arg11, 0))
6961 same = arg01, alt0 = arg00, alt1 = arg10;
6962 else if (operand_equal_p (arg00, arg10, 0))
6963 same = arg00, alt0 = arg01, alt1 = arg11;
6964 else if (operand_equal_p (arg00, arg11, 0))
6965 same = arg00, alt0 = arg01, alt1 = arg10;
6966 else if (operand_equal_p (arg01, arg10, 0))
6967 same = arg01, alt0 = arg00, alt1 = arg11;
6969 /* No identical multiplicands; see if we can find a common
6970 power-of-two factor in non-power-of-two multiplies. This
6971 can help in multi-dimensional array access. */
6972 else if (tree_fits_shwi_p (arg01)
6973 && tree_fits_shwi_p (arg11))
6975 HOST_WIDE_INT int01, int11, tmp;
6976 bool swap = false;
6977 tree maybe_same;
6978 int01 = tree_to_shwi (arg01);
6979 int11 = tree_to_shwi (arg11);
6981 /* Move min of absolute values to int11. */
6982 if (absu_hwi (int01) < absu_hwi (int11))
6984 tmp = int01, int01 = int11, int11 = tmp;
6985 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6986 maybe_same = arg01;
6987 swap = true;
6989 else
6990 maybe_same = arg11;
6992 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6993 /* The remainder should not be a constant, otherwise we
6994 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6995 increase the number of multiplications necessary. */
6996 && TREE_CODE (arg10) != INTEGER_CST)
6998 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6999 build_int_cst (TREE_TYPE (arg00),
7000 int01 / int11));
7001 alt1 = arg10;
7002 same = maybe_same;
7003 if (swap)
7004 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7008 if (same)
7009 return fold_build2_loc (loc, MULT_EXPR, type,
7010 fold_build2_loc (loc, code, type,
7011 fold_convert_loc (loc, type, alt0),
7012 fold_convert_loc (loc, type, alt1)),
7013 fold_convert_loc (loc, type, same));
7015 return NULL_TREE;
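 /* Editor's illustration (not part of GCC): typical folds from the
 factoring above, for integer i and j:

 i * 4 + j * 4 becomes (i + j) * 4 (identical multiplicand)
 i * 12 + j * 4 becomes (i * 3 + j) * 4 (common power-of-two factor)
 i * 4 + i becomes i * 5 (A treated as A * 1)

 the last two relying on the build_one_cst and exact_log2 paths
 above. */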
7018 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7019 specified by EXPR into the buffer PTR of length LEN bytes.
7020 Return the number of bytes placed in the buffer, or zero
7021 upon failure. */
7023 static int
7024 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7026 tree type = TREE_TYPE (expr);
7027 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7028 int byte, offset, word, words;
7029 unsigned char value;
7031 if ((off == -1 && total_bytes > len)
7032 || off >= total_bytes)
7033 return 0;
7034 if (off == -1)
7035 off = 0;
7036 words = total_bytes / UNITS_PER_WORD;
7038 for (byte = 0; byte < total_bytes; byte++)
7040 int bitpos = byte * BITS_PER_UNIT;
7041 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7042 number of bytes. */
7043 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7045 if (total_bytes > UNITS_PER_WORD)
7047 word = byte / UNITS_PER_WORD;
7048 if (WORDS_BIG_ENDIAN)
7049 word = (words - 1) - word;
7050 offset = word * UNITS_PER_WORD;
7051 if (BYTES_BIG_ENDIAN)
7052 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7053 else
7054 offset += byte % UNITS_PER_WORD;
7056 else
7057 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7058 if (offset >= off
7059 && offset - off < len)
7060 ptr[offset - off] = value;
7062 return MIN (len, total_bytes - off);
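 /* Editor's worked example (not part of GCC): encoding the 32-bit
 INTEGER_CST 0x01020304 for a little-endian target fills PTR with

 04 03 02 01

 while a big-endian target yields 01 02 03 04; the word/byte
 shuffling above also covers targets where WORDS_BIG_ENDIAN differs
 from BYTES_BIG_ENDIAN. */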
7066 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7067 specified by EXPR into the buffer PTR of length LEN bytes.
7068 Return the number of bytes placed in the buffer, or zero
7069 upon failure. */
7071 static int
7072 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7074 tree type = TREE_TYPE (expr);
7075 machine_mode mode = TYPE_MODE (type);
7076 int total_bytes = GET_MODE_SIZE (mode);
7077 FIXED_VALUE_TYPE value;
7078 tree i_value, i_type;
7080 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7081 return 0;
7083 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7085 if (NULL_TREE == i_type
7086 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7087 return 0;
7089 value = TREE_FIXED_CST (expr);
7090 i_value = double_int_to_tree (i_type, value.data);
7092 return native_encode_int (i_value, ptr, len, off);
7096 /* Subroutine of native_encode_expr. Encode the REAL_CST
7097 specified by EXPR into the buffer PTR of length LEN bytes.
7098 Return the number of bytes placed in the buffer, or zero
7099 upon failure. */
7101 static int
7102 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7104 tree type = TREE_TYPE (expr);
7105 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7106 int byte, offset, word, words, bitpos;
7107 unsigned char value;
7109 /* There are always 32 bits in each long, no matter the size of
7110 the host's long. We handle floating point representations with
7111 up to 192 bits. */
7112 long tmp[6];
7114 if ((off == -1 && total_bytes > len)
7115 || off >= total_bytes)
7116 return 0;
7117 if (off == -1)
7118 off = 0;
7119 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7121 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7123 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7124 bitpos += BITS_PER_UNIT)
7126 byte = (bitpos / BITS_PER_UNIT) & 3;
7127 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7129 if (UNITS_PER_WORD < 4)
7131 word = byte / UNITS_PER_WORD;
7132 if (WORDS_BIG_ENDIAN)
7133 word = (words - 1) - word;
7134 offset = word * UNITS_PER_WORD;
7135 if (BYTES_BIG_ENDIAN)
7136 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7137 else
7138 offset += byte % UNITS_PER_WORD;
7140 else
7141 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7142 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7143 if (offset >= off
7144 && offset - off < len)
7145 ptr[offset - off] = value;
7147 return MIN (len, total_bytes - off);
7150 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7151 specified by EXPR into the buffer PTR of length LEN bytes.
7152 Return the number of bytes placed in the buffer, or zero
7153 upon failure. */
7155 static int
7156 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7158 int rsize, isize;
7159 tree part;
7161 part = TREE_REALPART (expr);
7162 rsize = native_encode_expr (part, ptr, len, off);
7163 if (off == -1
7164 && rsize == 0)
7165 return 0;
7166 part = TREE_IMAGPART (expr);
7167 if (off != -1)
7168 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7169 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7170 if (off == -1
7171 && isize != rsize)
7172 return 0;
7173 return rsize + isize;
7177 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7178 specified by EXPR into the buffer PTR of length LEN bytes.
7179 Return the number of bytes placed in the buffer, or zero
7180 upon failure. */
7182 static int
7183 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7185 unsigned i, count;
7186 int size, offset;
7187 tree itype, elem;
7189 offset = 0;
7190 count = VECTOR_CST_NELTS (expr);
7191 itype = TREE_TYPE (TREE_TYPE (expr));
7192 size = GET_MODE_SIZE (TYPE_MODE (itype));
7193 for (i = 0; i < count; i++)
7195 if (off >= size)
7197 off -= size;
7198 continue;
7200 elem = VECTOR_CST_ELT (expr, i);
7201 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7202 if ((off == -1 && res != size)
7203 || res == 0)
7204 return 0;
7205 offset += res;
7206 if (offset >= len)
7207 return offset;
7208 if (off != -1)
7209 off = 0;
7211 return offset;
7215 /* Subroutine of native_encode_expr. Encode the STRING_CST
7216 specified by EXPR into the buffer PTR of length LEN bytes.
7217 Return the number of bytes placed in the buffer, or zero
7218 upon failure. */
7220 static int
7221 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7223 tree type = TREE_TYPE (expr);
7224 HOST_WIDE_INT total_bytes;
7226 if (TREE_CODE (type) != ARRAY_TYPE
7227 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7228 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7229 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7230 return 0;
7231 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7232 if ((off == -1 && total_bytes > len)
7233 || off >= total_bytes)
7234 return 0;
7235 if (off == -1)
7236 off = 0;
7237 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7239 int written = 0;
7240 if (off < TREE_STRING_LENGTH (expr))
7242 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7243 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7245 memset (ptr + written, 0,
7246 MIN (total_bytes - written, len - written));
7248 else
7249 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7250 return MIN (total_bytes - off, len);
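/* Annotation: a STRING_CST may be shorter than its array type, in which
   case the tail of the image is zero-filled.  E.g. for
   char a[8] = "hi", only TREE_STRING_LENGTH == 3 bytes ("hi\0") come
   from the literal; the remaining 5 bytes are memset to zero.  */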
7254 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7255 FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7256 buffer PTR of length LEN bytes. If OFF is not -1 then start
7257 the encoding at byte offset OFF and encode at most LEN bytes.
7258 Return the number of bytes placed in the buffer, or zero upon failure. */
7260 int
7261 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7263 switch (TREE_CODE (expr))
7265 case INTEGER_CST:
7266 return native_encode_int (expr, ptr, len, off);
7268 case REAL_CST:
7269 return native_encode_real (expr, ptr, len, off);
7271 case FIXED_CST:
7272 return native_encode_fixed (expr, ptr, len, off);
7274 case COMPLEX_CST:
7275 return native_encode_complex (expr, ptr, len, off);
7277 case VECTOR_CST:
7278 return native_encode_vector (expr, ptr, len, off);
7280 case STRING_CST:
7281 return native_encode_string (expr, ptr, len, off);
7283 default:
7284 return 0;
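/* Illustrative sketch (annotation, not part of fold-const.c): one way a
   caller can drive the OFF/LEN protocol above to pull a byte sub-range
   out of a constant's target image and reinterpret it.  The helper name
   is hypothetical; it assumes BITS_PER_UNIT == 8 and the usual
   fold-const declarations in scope, so it is kept #if 0'd.  */
#if 0
static tree
native_extract_sketch (tree type, tree cst, int byte_off, int nbytes)
{
  unsigned char buf[64];

  if (nbytes <= 0 || nbytes > (int) sizeof (buf))
    return NULL_TREE;
  /* Encode NBYTES bytes of CST's image, starting at byte BYTE_OFF.  */
  int got = native_encode_expr (cst, buf, nbytes, byte_off);
  if (got != nbytes)
    return NULL_TREE;
  /* Read the bytes back as a constant of TYPE.  */
  return native_interpret_expr (type, buf, got);
}
#endif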
7289 /* Subroutine of native_interpret_expr. Interpret the contents of
7290 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7291 If the buffer cannot be interpreted, return NULL_TREE. */
7293 static tree
7294 native_interpret_int (tree type, const unsigned char *ptr, int len)
7296 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7298 if (total_bytes > len
7299 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7300 return NULL_TREE;
7302 wide_int result = wi::from_buffer (ptr, total_bytes);
7304 return wide_int_to_tree (type, result);
7308 /* Subroutine of native_interpret_expr. Interpret the contents of
7309 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7310 If the buffer cannot be interpreted, return NULL_TREE. */
7312 static tree
7313 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7315 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7316 double_int result;
7317 FIXED_VALUE_TYPE fixed_value;
7319 if (total_bytes > len
7320 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7321 return NULL_TREE;
7323 result = double_int::from_buffer (ptr, total_bytes);
7324 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7326 return build_fixed (type, fixed_value);
7330 /* Subroutine of native_interpret_expr. Interpret the contents of
7331 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7332 If the buffer cannot be interpreted, return NULL_TREE. */
7334 static tree
7335 native_interpret_real (tree type, const unsigned char *ptr, int len)
7337 machine_mode mode = TYPE_MODE (type);
7338 int total_bytes = GET_MODE_SIZE (mode);
7339 int byte, offset, word, words, bitpos;
7340 unsigned char value;
7341 /* There are always 32 bits in each long, no matter the size of
7342 the host's long. We handle floating point representations with
7343 up to 192 bits. */
7344 REAL_VALUE_TYPE r;
7345 long tmp[6];
7347 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7348 if (total_bytes > len || total_bytes > 24)
7349 return NULL_TREE;
7350 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7352 memset (tmp, 0, sizeof (tmp));
7353 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7354 bitpos += BITS_PER_UNIT)
7356 byte = (bitpos / BITS_PER_UNIT) & 3;
7357 if (UNITS_PER_WORD < 4)
7359 word = byte / UNITS_PER_WORD;
7360 if (WORDS_BIG_ENDIAN)
7361 word = (words - 1) - word;
7362 offset = word * UNITS_PER_WORD;
7363 if (BYTES_BIG_ENDIAN)
7364 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7365 else
7366 offset += byte % UNITS_PER_WORD;
7368 else
7369 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7370 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7372 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7375 real_from_target (&r, tmp, mode);
7376 return build_real (type, r);
7380 /* Subroutine of native_interpret_expr. Interpret the contents of
7381 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7382 If the buffer cannot be interpreted, return NULL_TREE. */
7384 static tree
7385 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7387 tree etype, rpart, ipart;
7388 int size;
7390 etype = TREE_TYPE (type);
7391 size = GET_MODE_SIZE (TYPE_MODE (etype));
7392 if (size * 2 > len)
7393 return NULL_TREE;
7394 rpart = native_interpret_expr (etype, ptr, size);
7395 if (!rpart)
7396 return NULL_TREE;
7397 ipart = native_interpret_expr (etype, ptr+size, size);
7398 if (!ipart)
7399 return NULL_TREE;
7400 return build_complex (type, rpart, ipart);
7404 /* Subroutine of native_interpret_expr. Interpret the contents of
7405 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7406 If the buffer cannot be interpreted, return NULL_TREE. */
7408 static tree
7409 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7411 tree etype, elem;
7412 int i, size, count;
7413 tree *elements;
7415 etype = TREE_TYPE (type);
7416 size = GET_MODE_SIZE (TYPE_MODE (etype));
7417 count = TYPE_VECTOR_SUBPARTS (type);
7418 if (size * count > len)
7419 return NULL_TREE;
7421 elements = XALLOCAVEC (tree, count);
7422 for (i = count - 1; i >= 0; i--)
7424 elem = native_interpret_expr (etype, ptr+(i*size), size);
7425 if (!elem)
7426 return NULL_TREE;
7427 elements[i] = elem;
7429 return build_vector (type, elements);
7433 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a constant of type TYPE. For
7435 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7436 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7437 return NULL_TREE. */
7439 tree
7440 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7442 switch (TREE_CODE (type))
7444 case INTEGER_TYPE:
7445 case ENUMERAL_TYPE:
7446 case BOOLEAN_TYPE:
7447 case POINTER_TYPE:
7448 case REFERENCE_TYPE:
7449 return native_interpret_int (type, ptr, len);
7451 case REAL_TYPE:
7452 return native_interpret_real (type, ptr, len);
7454 case FIXED_POINT_TYPE:
7455 return native_interpret_fixed (type, ptr, len);
7457 case COMPLEX_TYPE:
7458 return native_interpret_complex (type, ptr, len);
7460 case VECTOR_TYPE:
7461 return native_interpret_vector (type, ptr, len);
7463 default:
7464 return NULL_TREE;
7468 /* Returns true if we can interpret the contents of a native encoding
7469 as TYPE. */
7471 static bool
7472 can_native_interpret_type_p (tree type)
7474 switch (TREE_CODE (type))
7476 case INTEGER_TYPE:
7477 case ENUMERAL_TYPE:
7478 case BOOLEAN_TYPE:
7479 case POINTER_TYPE:
7480 case REFERENCE_TYPE:
7481 case FIXED_POINT_TYPE:
7482 case REAL_TYPE:
7483 case COMPLEX_TYPE:
7484 case VECTOR_TYPE:
7485 return true;
7486 default:
7487 return false;
7491 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7492 TYPE at compile-time. If we're unable to perform the conversion
7493 return NULL_TREE. */
7495 static tree
7496 fold_view_convert_expr (tree type, tree expr)
7498 /* We support up to 512-bit values (for V8DFmode). */
7499 unsigned char buffer[64];
7500 int len;
7502 /* Check that the host and target are sane. */
7503 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7504 return NULL_TREE;
7506 len = native_encode_expr (expr, buffer, sizeof (buffer));
7507 if (len == 0)
7508 return NULL_TREE;
7510 return native_interpret_expr (type, buffer, len);
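/* Example (annotation): on a little-endian target,
   VIEW_CONVERT_EXPR <int> (1.0f) folds here to 0x3f800000 -- the
   REAL_CST is rendered into BUFFER by native_encode_expr and reread as
   an INTEGER_CST by native_interpret_expr.  */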
7513 /* Build an expression for the address of T. Folds away INDIRECT_REF
7514 to avoid confusing the gimplify process. */
7516 tree
7517 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7519 /* The size of the object is not relevant when talking about its address. */
7520 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7521 t = TREE_OPERAND (t, 0);
7523 if (TREE_CODE (t) == INDIRECT_REF)
7525 t = TREE_OPERAND (t, 0);
7527 if (TREE_TYPE (t) != ptrtype)
7528 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7530 else if (TREE_CODE (t) == MEM_REF
7531 && integer_zerop (TREE_OPERAND (t, 1)))
7532 return TREE_OPERAND (t, 0);
7533 else if (TREE_CODE (t) == MEM_REF
7534 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7535 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7536 TREE_OPERAND (t, 0),
7537 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7538 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7540 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7542 if (TREE_TYPE (t) != ptrtype)
7543 t = fold_convert_loc (loc, ptrtype, t);
7545 else
7546 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7548 return t;
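/* Annotation: the cases above mean, e.g., that taking the address of *P
   folds back to P (plus a no-op cast if the pointer types differ), and
   &MEM_REF [P, 0] likewise collapses to P, so gimplification never sees
   a pointless dereference-then-address pair.  */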
7551 /* Build an expression for the address of T. */
7553 tree
7554 build_fold_addr_expr_loc (location_t loc, tree t)
7556 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7558 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7561 static bool vec_cst_ctor_to_array (tree, tree *);
7563 /* Fold a unary expression of code CODE and type TYPE with operand
7564 OP0. Return the folded expression if folding is successful.
7565 Otherwise, return NULL_TREE. */
7567 tree
7568 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7570 tree tem;
7571 tree arg0;
7572 enum tree_code_class kind = TREE_CODE_CLASS (code);
7574 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7575 && TREE_CODE_LENGTH (code) == 1);
7577 tem = generic_simplify (loc, code, type, op0);
7578 if (tem)
7579 return tem;
7581 arg0 = op0;
7582 if (arg0)
7584 if (CONVERT_EXPR_CODE_P (code)
7585 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7587 /* Don't use STRIP_NOPS, because signedness of argument type
7588 matters. */
7589 STRIP_SIGN_NOPS (arg0);
7591 else
7593 /* Strip any conversions that don't change the mode. This
7594 is safe for every expression, except for a comparison
7595 expression because its signedness is derived from its
7596 operands.
7598 Note that this is done as an internal manipulation within
7599 the constant folder, in order to find the simplest
7600 representation of the arguments so that their form can be
7601 studied. In any case, the appropriate type conversions
7602 should be put back in the tree that will get out of the
7603 constant folder. */
7604 STRIP_NOPS (arg0);
7608 if (TREE_CODE_CLASS (code) == tcc_unary)
7610 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7611 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7612 fold_build1_loc (loc, code, type,
7613 fold_convert_loc (loc, TREE_TYPE (op0),
7614 TREE_OPERAND (arg0, 1))));
7615 else if (TREE_CODE (arg0) == COND_EXPR)
7617 tree arg01 = TREE_OPERAND (arg0, 1);
7618 tree arg02 = TREE_OPERAND (arg0, 2);
7619 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7620 arg01 = fold_build1_loc (loc, code, type,
7621 fold_convert_loc (loc,
7622 TREE_TYPE (op0), arg01));
7623 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7624 arg02 = fold_build1_loc (loc, code, type,
7625 fold_convert_loc (loc,
7626 TREE_TYPE (op0), arg02));
7627 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7628 arg01, arg02);
7630 /* If this was a conversion, and all we did was to move it
7631 inside the COND_EXPR, bring it back out. But leave it if
7632 it is a conversion from integer to integer and the
7633 result precision is no wider than a word since such a
7634 conversion is cheap and may be optimized away by combine,
7635 while it couldn't if it were outside the COND_EXPR. Then return
7636 so we don't get into an infinite recursion loop taking the
7637 conversion out and then back in. */
7639 if ((CONVERT_EXPR_CODE_P (code)
7640 || code == NON_LVALUE_EXPR)
7641 && TREE_CODE (tem) == COND_EXPR
7642 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7643 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7644 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7645 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7646 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7647 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7648 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7649 && (INTEGRAL_TYPE_P
7650 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7651 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7652 || flag_syntax_only))
7653 tem = build1_loc (loc, code, type,
7654 build3 (COND_EXPR,
7655 TREE_TYPE (TREE_OPERAND
7656 (TREE_OPERAND (tem, 1), 0)),
7657 TREE_OPERAND (tem, 0),
7658 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7659 TREE_OPERAND (TREE_OPERAND (tem, 2),
7660 0)));
7661 return tem;
7665 switch (code)
7667 case NON_LVALUE_EXPR:
7668 if (!maybe_lvalue_p (op0))
7669 return fold_convert_loc (loc, type, op0);
7670 return NULL_TREE;
7672 CASE_CONVERT:
7673 case FLOAT_EXPR:
7674 case FIX_TRUNC_EXPR:
7675 if (COMPARISON_CLASS_P (op0))
7677 /* If we have (type) (a CMP b) and type is an integral type, return
7678 new expression involving the new type. Canonicalize
7679 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7680 non-integral type.
7681 Do not fold the result as that would not simplify further, also
7682 folding again results in recursions. */
7683 if (TREE_CODE (type) == BOOLEAN_TYPE)
7684 return build2_loc (loc, TREE_CODE (op0), type,
7685 TREE_OPERAND (op0, 0),
7686 TREE_OPERAND (op0, 1));
7687 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7688 && TREE_CODE (type) != VECTOR_TYPE)
7689 return build3_loc (loc, COND_EXPR, type, op0,
7690 constant_boolean_node (true, type),
7691 constant_boolean_node (false, type));
7694 /* Handle cases of two conversions in a row. */
7695 if (CONVERT_EXPR_P (op0))
7697 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7698 tree inter_type = TREE_TYPE (op0);
7699 int inside_int = INTEGRAL_TYPE_P (inside_type);
7700 int inside_ptr = POINTER_TYPE_P (inside_type);
7701 int inside_float = FLOAT_TYPE_P (inside_type);
7702 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7703 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7704 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7705 int inter_int = INTEGRAL_TYPE_P (inter_type);
7706 int inter_ptr = POINTER_TYPE_P (inter_type);
7707 int inter_float = FLOAT_TYPE_P (inter_type);
7708 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7709 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7710 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7711 int final_int = INTEGRAL_TYPE_P (type);
7712 int final_ptr = POINTER_TYPE_P (type);
7713 int final_float = FLOAT_TYPE_P (type);
7714 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7715 unsigned int final_prec = TYPE_PRECISION (type);
7716 int final_unsignedp = TYPE_UNSIGNED (type);
7718 /* Check for cases specific to UPC, involving pointer types. */
7719 if (final_ptr || inter_ptr || inside_ptr)
7721 int final_pts = final_ptr
7722 && upc_shared_type_p (TREE_TYPE (type));
7723 int inter_pts = inter_ptr
7724 && upc_shared_type_p (TREE_TYPE (inter_type));
7725 int inside_pts = inside_ptr
7726 && upc_shared_type_p (TREE_TYPE (inside_type));
7727 if (final_pts || inter_pts || inside_pts)
7729 if (!((final_pts && inter_pts)
7730 && TREE_TYPE (type) == TREE_TYPE (inter_type))
7731 || ((inter_pts && inside_pts)
7732 && (TREE_TYPE (inter_type)
7733 == TREE_TYPE (inside_type))))
7734 return NULL_TREE;
7738 /* In addition to the cases of two conversions in a row
7739 handled below, if we are converting something to its own
7740 type via an object of identical or wider precision, neither
7741 conversion is needed. */
7742 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7743 && (((inter_int || inter_ptr) && final_int)
7744 || (inter_float && final_float))
7745 && inter_prec >= final_prec)
7746 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7748 /* Likewise, if the intermediate and initial types are either both
7749 float or both integer, we don't need the middle conversion if the
7750 former is wider than the latter and doesn't change the signedness
7751 (for integers). Avoid this if the final type is a pointer since
7752 then we sometimes need the middle conversion. Likewise if the
7753 final type has a precision not equal to the size of its mode. */
7754 if (((inter_int && inside_int)
7755 || (inter_float && inside_float)
7756 || (inter_vec && inside_vec))
7757 && inter_prec >= inside_prec
7758 && (inter_float || inter_vec
7759 || inter_unsignedp == inside_unsignedp)
7760 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7761 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7762 && ! final_ptr
7763 && (! final_vec || inter_prec == inside_prec))
7764 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7766 /* If we have a sign-extension of a zero-extended value, we can
7767 replace that by a single zero-extension. Likewise if the
7768 final conversion does not change precision we can drop the
7769 intermediate conversion. */
7770 if (inside_int && inter_int && final_int
7771 && ((inside_prec < inter_prec && inter_prec < final_prec
7772 && inside_unsignedp && !inter_unsignedp)
7773 || final_prec == inter_prec))
7774 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7776 /* Two conversions in a row are not needed unless:
7777 - some conversion is floating-point (overstrict for now), or
7778 - some conversion is a vector (overstrict for now), or
7779 - the intermediate type is narrower than both initial and
7780 final, or
7781 - the intermediate type and innermost type differ in signedness,
7782 and the outermost type is wider than the intermediate, or
7783 - the initial type is a pointer type and the precisions of the
7784 intermediate and final types differ, or
7785 - the final type is a pointer type and the precisions of the
7786 initial and intermediate types differ. */
7787 if (! inside_float && ! inter_float && ! final_float
7788 && ! inside_vec && ! inter_vec && ! final_vec
7789 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7790 && ! (inside_int && inter_int
7791 && inter_unsignedp != inside_unsignedp
7792 && inter_prec < final_prec)
7793 && ((inter_unsignedp && inter_prec > inside_prec)
7794 == (final_unsignedp && final_prec > inter_prec))
7795 && ! (inside_ptr && inter_prec != final_prec)
7796 && ! (final_ptr && inside_prec != inter_prec)
7797 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7798 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7799 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7802 /* Handle (T *)&A.B.C for A being of type T and B and C
7803 living at offset zero. This occurs frequently in
7804 C++ upcasting and then accessing the base. */
7805 if (TREE_CODE (op0) == ADDR_EXPR
7806 && POINTER_TYPE_P (type)
7807 && handled_component_p (TREE_OPERAND (op0, 0)))
7809 HOST_WIDE_INT bitsize, bitpos;
7810 tree offset;
7811 machine_mode mode;
7812 int unsignedp, volatilep;
7813 tree base = TREE_OPERAND (op0, 0);
7814 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7815 &mode, &unsignedp, &volatilep, false);
7816 /* If the reference was to a (constant) zero offset, we can use
7817 the address of the base if it has the same base type
7818 as the result type and the pointer type is unqualified. */
7819 if (! offset && bitpos == 0
7820 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7821 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7822 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7823 return fold_convert_loc (loc, type,
7824 build_fold_addr_expr_loc (loc, base));
7827 if (TREE_CODE (op0) == MODIFY_EXPR
7828 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7829 /* Detect assigning a bitfield. */
7830 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7831 && DECL_BIT_FIELD
7832 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7834 /* Don't leave an assignment inside a conversion
7835 unless assigning a bitfield. */
7836 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7837 /* First do the assignment, then return converted constant. */
7838 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7839 TREE_NO_WARNING (tem) = 1;
7840 TREE_USED (tem) = 1;
7841 return tem;
7844 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7845 constant (if x has signed type, the sign bit cannot be set
7846 in c). This folds extension into the BIT_AND_EXPR.
7847 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7848 very likely don't have maximal range for their precision and this
7849 transformation effectively doesn't preserve non-maximal ranges. */
7850 if (TREE_CODE (type) == INTEGER_TYPE
7851 && TREE_CODE (op0) == BIT_AND_EXPR
7852 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7854 tree and_expr = op0;
7855 tree and0 = TREE_OPERAND (and_expr, 0);
7856 tree and1 = TREE_OPERAND (and_expr, 1);
7857 int change = 0;
7859 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7860 || (TYPE_PRECISION (type)
7861 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7862 change = 1;
7863 else if (TYPE_PRECISION (TREE_TYPE (and1))
7864 <= HOST_BITS_PER_WIDE_INT
7865 && tree_fits_uhwi_p (and1))
7867 unsigned HOST_WIDE_INT cst;
7869 cst = tree_to_uhwi (and1);
7870 cst &= HOST_WIDE_INT_M1U
7871 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7872 change = (cst == 0);
7873 #ifdef LOAD_EXTEND_OP
7874 if (change
7875 && !flag_syntax_only
7876 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7877 == ZERO_EXTEND))
7879 tree uns = unsigned_type_for (TREE_TYPE (and0));
7880 and0 = fold_convert_loc (loc, uns, and0);
7881 and1 = fold_convert_loc (loc, uns, and1);
7883 #endif
7885 if (change)
7887 tem = force_fit_type (type, wi::to_widest (and1), 0,
7888 TREE_OVERFLOW (and1));
7889 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7890 fold_convert_loc (loc, type, and0), tem);
7894 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7895 when one of the new casts will fold away. Conservatively we assume
7896 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7897 if (POINTER_TYPE_P (type)
7898 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7899 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7900 && !upc_shared_type_p (TREE_TYPE (type))
7901 && !upc_shared_type_p (TREE_TYPE (
7902 TREE_TYPE (TREE_OPERAND (arg0, 0))))
7903 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7904 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7905 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7907 tree arg00 = TREE_OPERAND (arg0, 0);
7908 tree arg01 = TREE_OPERAND (arg0, 1);
7910 return fold_build_pointer_plus_loc
7911 (loc, fold_convert_loc (loc, type, arg00), arg01);
7914 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7915 of the same precision, and X is an integer type not narrower than
7916 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7917 if (INTEGRAL_TYPE_P (type)
7918 && TREE_CODE (op0) == BIT_NOT_EXPR
7919 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7920 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7921 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7923 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7924 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7925 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7926 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7927 fold_convert_loc (loc, type, tem));
7930 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7931 type of X and Y (integer types only). */
7932 if (INTEGRAL_TYPE_P (type)
7933 && TREE_CODE (op0) == MULT_EXPR
7934 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7935 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7937 /* Be careful not to introduce new overflows. */
7938 tree mult_type;
7939 if (TYPE_OVERFLOW_WRAPS (type))
7940 mult_type = type;
7941 else
7942 mult_type = unsigned_type_for (type);
7944 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7946 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7947 fold_convert_loc (loc, mult_type,
7948 TREE_OPERAND (op0, 0)),
7949 fold_convert_loc (loc, mult_type,
7950 TREE_OPERAND (op0, 1)));
7951 return fold_convert_loc (loc, type, tem);
7955 tem = fold_convert_const (code, type, arg0);
7956 return tem ? tem : NULL_TREE;
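/* Worked examples (annotation) for the chained-conversion rules above,
   assuming 8-bit char, 16-bit short and 32-bit int:
   (int) (unsigned int) x drops the middle cast because the final and
   intermediate precisions match, while (int) (short) (unsigned char) c
   -- a sign-extension of a zero-extended value -- becomes the single
   zero-extension (int) (unsigned char) c.  */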
7958 case ADDR_SPACE_CONVERT_EXPR:
7959 if (integer_zerop (arg0))
7960 return fold_convert_const (code, type, arg0);
7961 return NULL_TREE;
7963 case FIXED_CONVERT_EXPR:
7964 tem = fold_convert_const (code, type, arg0);
7965 return tem ? tem : NULL_TREE;
7967 case VIEW_CONVERT_EXPR:
7968 if (TREE_CODE (op0) == MEM_REF)
7969 return fold_build2_loc (loc, MEM_REF, type,
7970 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7972 return fold_view_convert_expr (type, op0);
7974 case NEGATE_EXPR:
7975 tem = fold_negate_expr (loc, arg0);
7976 if (tem)
7977 return fold_convert_loc (loc, type, tem);
7978 return NULL_TREE;
7980 case ABS_EXPR:
7981 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7982 return fold_abs_const (arg0, type);
7983 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7984 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7985 /* Convert fabs((double)float) into (double)fabsf(float). */
7986 else if (TREE_CODE (arg0) == NOP_EXPR
7987 && TREE_CODE (type) == REAL_TYPE)
7989 tree targ0 = strip_float_extensions (arg0);
7990 if (targ0 != arg0)
7991 return fold_convert_loc (loc, type,
7992 fold_build1_loc (loc, ABS_EXPR,
7993 TREE_TYPE (targ0),
7994 targ0));
7996 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7997 else if (TREE_CODE (arg0) == ABS_EXPR)
7998 return arg0;
7999 else if (tree_expr_nonnegative_p (arg0))
8000 return arg0;
8002 /* Strip sign ops from argument. */
8003 if (TREE_CODE (type) == REAL_TYPE)
8005 tem = fold_strip_sign_ops (arg0);
8006 if (tem)
8007 return fold_build1_loc (loc, ABS_EXPR, type,
8008 fold_convert_loc (loc, type, tem));
8010 return NULL_TREE;
8012 case CONJ_EXPR:
8013 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8014 return fold_convert_loc (loc, type, arg0);
8015 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8017 tree itype = TREE_TYPE (type);
8018 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8019 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8020 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8021 negate_expr (ipart));
8023 if (TREE_CODE (arg0) == COMPLEX_CST)
8025 tree itype = TREE_TYPE (type);
8026 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8027 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8028 return build_complex (type, rpart, negate_expr (ipart));
8030 if (TREE_CODE (arg0) == CONJ_EXPR)
8031 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8032 return NULL_TREE;
8034 case BIT_NOT_EXPR:
8035 if (TREE_CODE (arg0) == INTEGER_CST)
8036 return fold_not_const (arg0, type);
8037 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8038 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8039 /* Convert ~ (-A) to A - 1. */
8040 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8041 return fold_build2_loc (loc, MINUS_EXPR, type,
8042 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8043 build_int_cst (type, 1));
8044 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8045 else if (INTEGRAL_TYPE_P (type)
8046 && ((TREE_CODE (arg0) == MINUS_EXPR
8047 && integer_onep (TREE_OPERAND (arg0, 1)))
8048 || (TREE_CODE (arg0) == PLUS_EXPR
8049 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8050 return fold_build1_loc (loc, NEGATE_EXPR, type,
8051 fold_convert_loc (loc, type,
8052 TREE_OPERAND (arg0, 0)));
8053 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8054 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8055 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8056 fold_convert_loc (loc, type,
8057 TREE_OPERAND (arg0, 0)))))
8058 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8059 fold_convert_loc (loc, type,
8060 TREE_OPERAND (arg0, 1)));
8061 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8062 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8063 fold_convert_loc (loc, type,
8064 TREE_OPERAND (arg0, 1)))))
8065 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8066 fold_convert_loc (loc, type,
8067 TREE_OPERAND (arg0, 0)), tem);
8068 /* Perform BIT_NOT_EXPR on each element individually. */
8069 else if (TREE_CODE (arg0) == VECTOR_CST)
8071 tree *elements;
8072 tree elem;
8073 unsigned count = VECTOR_CST_NELTS (arg0), i;
8075 elements = XALLOCAVEC (tree, count);
8076 for (i = 0; i < count; i++)
8078 elem = VECTOR_CST_ELT (arg0, i);
8079 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8080 if (elem == NULL_TREE)
8081 break;
8082 elements[i] = elem;
8084 if (i == count)
8085 return build_vector (type, elements);
8087 else if (COMPARISON_CLASS_P (arg0)
8088 && (VECTOR_TYPE_P (type)
8089 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8091 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8092 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8093 HONOR_NANS (TYPE_MODE (op_type)));
8094 if (subcode != ERROR_MARK)
8095 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8096 TREE_OPERAND (arg0, 1));
8100 return NULL_TREE;
8102 case TRUTH_NOT_EXPR:
8103 /* Note that the operand of this must be an int
8104 and its values must be 0 or 1.
8105 ("true" is a fixed value perhaps depending on the language,
8106 but we don't handle values other than 1 correctly yet.) */
8107 tem = fold_truth_not_expr (loc, arg0);
8108 if (!tem)
8109 return NULL_TREE;
8110 return fold_convert_loc (loc, type, tem);
8112 case REALPART_EXPR:
8113 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8114 return fold_convert_loc (loc, type, arg0);
8115 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8116 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8117 TREE_OPERAND (arg0, 1));
8118 if (TREE_CODE (arg0) == COMPLEX_CST)
8119 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8120 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8122 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8123 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8124 fold_build1_loc (loc, REALPART_EXPR, itype,
8125 TREE_OPERAND (arg0, 0)),
8126 fold_build1_loc (loc, REALPART_EXPR, itype,
8127 TREE_OPERAND (arg0, 1)));
8128 return fold_convert_loc (loc, type, tem);
8130 if (TREE_CODE (arg0) == CONJ_EXPR)
8132 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8133 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8134 TREE_OPERAND (arg0, 0));
8135 return fold_convert_loc (loc, type, tem);
8137 if (TREE_CODE (arg0) == CALL_EXPR)
8139 tree fn = get_callee_fndecl (arg0);
8140 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8141 switch (DECL_FUNCTION_CODE (fn))
8143 CASE_FLT_FN (BUILT_IN_CEXPI):
8144 fn = mathfn_built_in (type, BUILT_IN_COS);
8145 if (fn)
8146 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8147 break;
8149 default:
8150 break;
8153 return NULL_TREE;
8155 case IMAGPART_EXPR:
8156 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8157 return build_zero_cst (type);
8158 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8159 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8160 TREE_OPERAND (arg0, 0));
8161 if (TREE_CODE (arg0) == COMPLEX_CST)
8162 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8163 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8165 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8166 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8167 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8168 TREE_OPERAND (arg0, 0)),
8169 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8170 TREE_OPERAND (arg0, 1)));
8171 return fold_convert_loc (loc, type, tem);
8173 if (TREE_CODE (arg0) == CONJ_EXPR)
8175 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8176 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8177 return fold_convert_loc (loc, type, negate_expr (tem));
8179 if (TREE_CODE (arg0) == CALL_EXPR)
8181 tree fn = get_callee_fndecl (arg0);
8182 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8183 switch (DECL_FUNCTION_CODE (fn))
8185 CASE_FLT_FN (BUILT_IN_CEXPI):
8186 fn = mathfn_built_in (type, BUILT_IN_SIN);
8187 if (fn)
8188 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8189 break;
8191 default:
8192 break;
8195 return NULL_TREE;
8197 case INDIRECT_REF:
8198 /* Fold *&X to X if X is an lvalue. */
8199 if (TREE_CODE (op0) == ADDR_EXPR)
8201 tree op00 = TREE_OPERAND (op0, 0);
8202 if ((TREE_CODE (op00) == VAR_DECL
8203 || TREE_CODE (op00) == PARM_DECL
8204 || TREE_CODE (op00) == RESULT_DECL)
8205 && !TREE_READONLY (op00))
8206 return op00;
8208 return NULL_TREE;
8210 case VEC_UNPACK_LO_EXPR:
8211 case VEC_UNPACK_HI_EXPR:
8212 case VEC_UNPACK_FLOAT_LO_EXPR:
8213 case VEC_UNPACK_FLOAT_HI_EXPR:
8215 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8216 tree *elts;
8217 enum tree_code subcode;
8219 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8220 if (TREE_CODE (arg0) != VECTOR_CST)
8221 return NULL_TREE;
8223 elts = XALLOCAVEC (tree, nelts * 2);
8224 if (!vec_cst_ctor_to_array (arg0, elts))
8225 return NULL_TREE;
8227 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8228 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8229 elts += nelts;
8231 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8232 subcode = NOP_EXPR;
8233 else
8234 subcode = FLOAT_EXPR;
8236 for (i = 0; i < nelts; i++)
8238 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8239 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8240 return NULL_TREE;
8243 return build_vector (type, elts);
8246 case REDUC_MIN_EXPR:
8247 case REDUC_MAX_EXPR:
8248 case REDUC_PLUS_EXPR:
8250 unsigned int nelts, i;
8251 tree *elts;
8252 enum tree_code subcode;
8254 if (TREE_CODE (op0) != VECTOR_CST)
8255 return NULL_TREE;
8256 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8258 elts = XALLOCAVEC (tree, nelts);
8259 if (!vec_cst_ctor_to_array (op0, elts))
8260 return NULL_TREE;
8262 switch (code)
8264 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8265 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8266 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8267 default: gcc_unreachable ();
8270 for (i = 1; i < nelts; i++)
8272 elts[0] = const_binop (subcode, elts[0], elts[i]);
8273 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8274 return NULL_TREE;
8277 return elts[0];
8280 default:
8281 return NULL_TREE;
8282 } /* switch (code) */
8286 /* If the operation was a conversion do _not_ mark a resulting constant
8287 with TREE_OVERFLOW if the original constant was not. These conversions
8288 have implementation defined behavior and retaining the TREE_OVERFLOW
8289 flag here would confuse later passes such as VRP. */
8290 tree
8291 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8292 tree type, tree op0)
8294 tree res = fold_unary_loc (loc, code, type, op0);
8295 if (res
8296 && TREE_CODE (res) == INTEGER_CST
8297 && TREE_CODE (op0) == INTEGER_CST
8298 && CONVERT_EXPR_CODE_P (code))
8299 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8301 return res;
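/* Annotation: e.g. folding (signed char) 300, where 300 carries no
   TREE_OVERFLOW, produces 44; if the generic folder flagged the
   narrowing as overflowed, the wrapper above copies the operand's clear
   flag back so VRP does not treat the result as undefined.  */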
8304 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8305 operands OP0 and OP1. LOC is the location of the resulting expression.
8306 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8307 Return the folded expression if folding is successful. Otherwise,
8308 return NULL_TREE. */
8309 static tree
8310 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8311 tree arg0, tree arg1, tree op0, tree op1)
8313 tree tem;
8315 /* We only do these simplifications if we are optimizing. */
8316 if (!optimize)
8317 return NULL_TREE;
8319 /* Check for things like (A || B) && (A || C). We can convert this
8320 to A || (B && C). Note that either operator can be any of the four
8321 truth and/or operations and the transformation will still be
8322 valid. Also note that we only care about order for the
8323 ANDIF and ORIF operators. If B contains side effects, this
8324 might change the truth-value of A. */
8325 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8326 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8327 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8328 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8329 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8330 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8332 tree a00 = TREE_OPERAND (arg0, 0);
8333 tree a01 = TREE_OPERAND (arg0, 1);
8334 tree a10 = TREE_OPERAND (arg1, 0);
8335 tree a11 = TREE_OPERAND (arg1, 1);
8336 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8337 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8338 && (code == TRUTH_AND_EXPR
8339 || code == TRUTH_OR_EXPR));
8341 if (operand_equal_p (a00, a10, 0))
8342 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8343 fold_build2_loc (loc, code, type, a01, a11));
8344 else if (commutative && operand_equal_p (a00, a11, 0))
8345 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8346 fold_build2_loc (loc, code, type, a01, a10));
8347 else if (commutative && operand_equal_p (a01, a10, 0))
8348 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8349 fold_build2_loc (loc, code, type, a00, a11));
8351 /* This case is tricky because we must either have commutative
8352 operators or else A10 must not have side-effects. */
8354 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8355 && operand_equal_p (a01, a11, 0))
8356 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8357 fold_build2_loc (loc, code, type, a00, a10),
8358 a01);
8361 /* See if we can build a range comparison. */
8362 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8363 return tem;
8365 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8366 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8368 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8369 if (tem)
8370 return fold_build2_loc (loc, code, type, tem, arg1);
8373 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8374 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8376 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8377 if (tem)
8378 return fold_build2_loc (loc, code, type, arg0, tem);
8381 /* Check for the possibility of merging component references. If our
8382 lhs is another similar operation, try to merge its rhs with our
8383 rhs. Then try to merge our lhs and rhs. */
8384 if (TREE_CODE (arg0) == code
8385 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8386 TREE_OPERAND (arg0, 1), arg1)))
8387 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8389 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8390 return tem;
8392 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8393 && (code == TRUTH_AND_EXPR
8394 || code == TRUTH_ANDIF_EXPR
8395 || code == TRUTH_OR_EXPR
8396 || code == TRUTH_ORIF_EXPR))
8398 enum tree_code ncode, icode;
8400 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8401 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8402 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8404 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8405 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8406 We don't want to pack more than two leafs to a non-IF AND/OR
8407 expression.
8408 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8409 isn't equal to IF-CODE, we don't want to add the right-hand operand.
8410 If the inner right-hand side of the left-hand operand has
8411 side effects, or isn't simple, we can't add to it,
8412 as otherwise we might destroy the if-sequence. */
8413 if (TREE_CODE (arg0) == icode
8414 && simple_operand_p_2 (arg1)
8415 /* Needed for sequence points to handle trappings, and
8416 side-effects. */
8417 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8419 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8420 arg1);
8421 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8422 tem);
8424 /* Same as above, but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8425 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8426 else if (TREE_CODE (arg1) == icode
8427 && simple_operand_p_2 (arg0)
8428 /* Needed for sequence points to handle trappings, and
8429 side-effects. */
8430 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8432 tem = fold_build2_loc (loc, ncode, type,
8433 arg0, TREE_OPERAND (arg1, 0));
8434 return fold_build2_loc (loc, icode, type, tem,
8435 TREE_OPERAND (arg1, 1));
8437 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8438 into (A OR B).
8439 For sequence point consistency, we need to check for trapping,
8440 and side-effects. */
8441 else if (code == icode && simple_operand_p_2 (arg0)
8442 && simple_operand_p_2 (arg1))
8443 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8446 return NULL_TREE;
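/* Annotation: concretely, (a || b) && (a || c) distributes to
   a || (b && c) above, provided b has no side effects; and on targets
   where LOGICAL_OP_NON_SHORT_CIRCUIT holds, ((a ANDIF b) AND c) is
   repacked as (a ANDIF (b AND c)) when b and c are simple and free of
   side effects, keeping at most two leaves per non-branching AND/OR.  */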
8449 /* Fold a binary expression of code CODE and type TYPE with operands
8450 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8451 Return the folded expression if folding is successful. Otherwise,
8452 return NULL_TREE. */
8454 static tree
8455 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8457 enum tree_code compl_code;
8459 if (code == MIN_EXPR)
8460 compl_code = MAX_EXPR;
8461 else if (code == MAX_EXPR)
8462 compl_code = MIN_EXPR;
8463 else
8464 gcc_unreachable ();
8466 /* MIN (MAX (a, b), b) == b. */
8467 if (TREE_CODE (op0) == compl_code
8468 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8469 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8471 /* MIN (MAX (b, a), b) == b. */
8472 if (TREE_CODE (op0) == compl_code
8473 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8474 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8475 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8477 /* MIN (a, MAX (a, b)) == a. */
8478 if (TREE_CODE (op1) == compl_code
8479 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8480 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8481 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8483 /* MIN (a, MAX (b, a)) == a. */
8484 if (TREE_CODE (op1) == compl_code
8485 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8486 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8487 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8489 return NULL_TREE;
8492 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8493 by changing CODE to reduce the magnitude of constants involved in
8494 ARG0 of the comparison.
8495 Returns a canonicalized comparison tree if a simplification was
8496 possible, otherwise returns NULL_TREE.
8497 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8498 valid if signed overflow is undefined. */
8500 static tree
8501 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8502 tree arg0, tree arg1,
8503 bool *strict_overflow_p)
8505 enum tree_code code0 = TREE_CODE (arg0);
8506 tree t, cst0 = NULL_TREE;
8507 int sgn0;
8508 bool swap = false;
8510 /* Match A +- CST code arg1 and CST code arg1. We can change the
8511 first form only if overflow is undefined. */
8512 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8513 /* In principle pointers also have undefined overflow behavior,
8514 but that causes problems elsewhere. */
8515 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8516 && (code0 == MINUS_EXPR
8517 || code0 == PLUS_EXPR)
8518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8519 || code0 == INTEGER_CST))
8520 return NULL_TREE;
8522 /* Identify the constant in arg0 and its sign. */
8523 if (code0 == INTEGER_CST)
8524 cst0 = arg0;
8525 else
8526 cst0 = TREE_OPERAND (arg0, 1);
8527 sgn0 = tree_int_cst_sgn (cst0);
8529 /* Overflowed constants and zero will cause problems. */
8530 if (integer_zerop (cst0)
8531 || TREE_OVERFLOW (cst0))
8532 return NULL_TREE;
8534 /* See if we can reduce the magnitude of the constant in
8535 arg0 by changing the comparison code. */
8536 if (code0 == INTEGER_CST)
8538 /* CST <= arg1 -> CST-1 < arg1. */
8539 if (code == LE_EXPR && sgn0 == 1)
8540 code = LT_EXPR;
8541 /* -CST < arg1 -> -CST-1 <= arg1. */
8542 else if (code == LT_EXPR && sgn0 == -1)
8543 code = LE_EXPR;
8544 /* CST > arg1 -> CST-1 >= arg1. */
8545 else if (code == GT_EXPR && sgn0 == 1)
8546 code = GE_EXPR;
8547 /* -CST >= arg1 -> -CST-1 > arg1. */
8548 else if (code == GE_EXPR && sgn0 == -1)
8549 code = GT_EXPR;
8550 else
8551 return NULL_TREE;
8552 /* arg1 code' CST' might be more canonical. */
8553 swap = true;
8555 else
8557 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8558 if (code == LT_EXPR
8559 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8560 code = LE_EXPR;
8561 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8562 else if (code == GT_EXPR
8563 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8564 code = GE_EXPR;
8565 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8566 else if (code == LE_EXPR
8567 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8568 code = LT_EXPR;
8569 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8570 else if (code == GE_EXPR
8571 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8572 code = GT_EXPR;
8573 else
8574 return NULL_TREE;
8575 *strict_overflow_p = true;
8578 /* Now build the constant reduced in magnitude. But not if that
8579 would produce one outside of its type's range. */
8580 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8581 && ((sgn0 == 1
8582 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8583 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8584 || (sgn0 == -1
8585 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8586 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8587 /* We cannot swap the comparison here as that would cause us to
8588 endlessly recurse. */
8589 return NULL_TREE;
8591 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8592 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8593 if (code0 != INTEGER_CST)
8594 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8595 t = fold_convert (TREE_TYPE (arg1), t);
8597 /* If swapping might yield a more canonical form, do so. */
8598 if (swap)
8599 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8600 else
8601 return fold_build2_loc (loc, code, type, t, arg1);
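/* Worked examples (annotation): for signed x, "x + 2 > y" becomes
   "x + 1 >= y" (and *STRICT_OVERFLOW_P is set, since this is only valid
   when signed overflow is undefined), while the constant-only form
   "3 <= y" reduces to "2 < y" and is then swapped into the more
   canonical "y > 2".  */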
8604 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8605 overflow further. Try to decrease the magnitude of constants involved
8606 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8607 and put sole constants at the second argument position.
8608 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8610 static tree
8611 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8612 tree arg0, tree arg1)
8614 tree t;
8615 bool strict_overflow_p;
8616 const char * const warnmsg = G_("assuming signed overflow does not occur "
8617 "when reducing constant in comparison");
8619 /* Try canonicalization by simplifying arg0. */
8620 strict_overflow_p = false;
8621 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8622 &strict_overflow_p);
8623 if (t)
8625 if (strict_overflow_p)
8626 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8627 return t;
8630 /* Try canonicalization by simplifying arg1 using the swapped
8631 comparison. */
8632 code = swap_tree_comparison (code);
8633 strict_overflow_p = false;
8634 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8635 &strict_overflow_p);
8636 if (t && strict_overflow_p)
8637 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8638 return t;
8641 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8642 space. This is used to avoid issuing overflow warnings for
8643 expressions like &p->x which cannot wrap. */
8645 static bool
8646 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8648 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8649 return true;
8651 if (bitpos < 0)
8652 return true;
8654 wide_int wi_offset;
8655 int precision = TYPE_PRECISION (TREE_TYPE (base));
8656 if (offset == NULL_TREE)
8657 wi_offset = wi::zero (precision);
8658 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8659 return true;
8660 else
8661 wi_offset = offset;
8663 bool overflow;
8664 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8665 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8666 if (overflow)
8667 return true;
8669 if (!wi::fits_uhwi_p (total))
8670 return true;
8672 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8673 if (size <= 0)
8674 return true;
8676 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8677 array. */
8678 if (TREE_CODE (base) == ADDR_EXPR)
8680 HOST_WIDE_INT base_size;
8682 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8683 if (base_size > 0 && size < base_size)
8684 size = base_size;
8687 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
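/* Annotation: e.g. for &p->x with a small constant field offset, the
   OFFSET + BITPOS total stays within the size of *p and the function
   answers false, so no bogus overflow warning is issued; a symbolic or
   overflowed OFFSET conservatively answers true.  */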
8690 /* Return the least significant HOST_WIDE_INT's worth of bits of T,
8691 an INTEGER_CST of sizetype kind. This makes sure to properly
8692 sign-extend the constant. */
8694 static HOST_WIDE_INT
8695 size_low_cst (const_tree t)
8697 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8698 int prec = TYPE_PRECISION (TREE_TYPE (t));
8699 if (prec < HOST_BITS_PER_WIDE_INT)
8700 return sext_hwi (w, prec);
8701 return w;
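/* Annotation: with a 32-bit sizetype on a 64-bit host, an offset whose
   low element is 0xffffffff comes back as -1 rather than 4294967295;
   the pointer comparison folding below depends on that to treat
   negative offsets correctly.  */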
8704 /* Subroutine of fold_binary. This routine performs all of the
8705 transformations that are common to the equality/inequality
8706 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8707 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8708 fold_binary should call fold_binary. Fold a comparison with
8709 tree code CODE and type TYPE with operands OP0 and OP1. Return
8710 the folded comparison or NULL_TREE. */
8712 static tree
8713 fold_comparison (location_t loc, enum tree_code code, tree type,
8714 tree op0, tree op1)
8716 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8717 tree arg0, arg1, tem;
8719 arg0 = op0;
8720 arg1 = op1;
8722 STRIP_SIGN_NOPS (arg0);
8723 STRIP_SIGN_NOPS (arg1);
8725 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8726 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8727 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8728 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8729 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8730 && TREE_CODE (arg1) == INTEGER_CST
8731 && !TREE_OVERFLOW (arg1))
8733 const enum tree_code
8734 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8735 tree const1 = TREE_OPERAND (arg0, 1);
8736 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8737 tree variable = TREE_OPERAND (arg0, 0);
8738 tree new_const = int_const_binop (reverse_op, const2, const1);
8740 /* If the constant operation overflowed this can be
8741 simplified as a comparison against INT_MAX/INT_MIN. */
8742 if (TREE_OVERFLOW (new_const)
8743 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8745 int const1_sgn = tree_int_cst_sgn (const1);
8746 enum tree_code code2 = code;
8748 /* Get the sign of the constant on the lhs if the
8749 operation were VARIABLE + CONST1. */
8750 if (TREE_CODE (arg0) == MINUS_EXPR)
8751 const1_sgn = -const1_sgn;
8753 /* The sign of the constant determines if we overflowed
8754 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8755 Canonicalize to the INT_MIN overflow by swapping the comparison
8756 if necessary. */
8757 if (const1_sgn == -1)
8758 code2 = swap_tree_comparison (code);
8760 /* We now can look at the canonicalized case
8761 VARIABLE + 1 CODE2 INT_MIN
8762 and decide on the result. */
8763 switch (code2)
8765 case EQ_EXPR:
8766 case LT_EXPR:
8767 case LE_EXPR:
8768 return
8769 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8771 case NE_EXPR:
8772 case GE_EXPR:
8773 case GT_EXPR:
8774 return
8775 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8777 default:
8778 gcc_unreachable ();
8781 else
8783 if (!equality_code)
8784 fold_overflow_warning ("assuming signed overflow does not occur "
8785 "when changing X +- C1 cmp C2 to "
8786 "X cmp C2 -+ C1",
8787 WARN_STRICT_OVERFLOW_COMPARISON);
8788 return fold_build2_loc (loc, code, type, variable, new_const);
8792 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8793 if (TREE_CODE (arg0) == MINUS_EXPR
8794 && equality_code
8795 && integer_zerop (arg1))
8797 /* ??? The transformation is valid for the other operators if overflow
8798 is undefined for the type, but performing it here badly interacts
8799 with the transformation in fold_cond_expr_with_comparison which
8800 attempts to synthesize ABS_EXPR. */
8801 if (!equality_code)
8802 fold_overflow_warning ("assuming signed overflow does not occur "
8803 "when changing X - Y cmp 0 to X cmp Y",
8804 WARN_STRICT_OVERFLOW_COMPARISON);
8805 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8806 TREE_OPERAND (arg0, 1));
8809 /* For comparisons of pointers we can decompose it to a compile time
8810 comparison of the base objects and the offsets into the object.
8811 This requires at least one operand being an ADDR_EXPR or a
8812 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8813 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8814 && (TREE_CODE (arg0) == ADDR_EXPR
8815 || TREE_CODE (arg1) == ADDR_EXPR
8816 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8817 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8819 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8820 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8821 machine_mode mode;
8822 int volatilep, unsignedp;
8823 bool indirect_base0 = false, indirect_base1 = false;
8825 /* Get base and offset for the access. Strip ADDR_EXPR for
8826 get_inner_reference, but put it back by stripping INDIRECT_REF
8827 off the base object if possible. indirect_baseN will be true
8828 if baseN is not an address but refers to the object itself. */
8829 base0 = arg0;
8830 if (TREE_CODE (arg0) == ADDR_EXPR)
8832 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8833 &bitsize, &bitpos0, &offset0, &mode,
8834 &unsignedp, &volatilep, false);
8835 if (TREE_CODE (base0) == INDIRECT_REF)
8836 base0 = TREE_OPERAND (base0, 0);
8837 else
8838 indirect_base0 = true;
8840 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8842 base0 = TREE_OPERAND (arg0, 0);
8843 STRIP_SIGN_NOPS (base0);
8844 if (TREE_CODE (base0) == ADDR_EXPR)
8846 base0 = TREE_OPERAND (base0, 0);
8847 indirect_base0 = true;
8849 offset0 = TREE_OPERAND (arg0, 1);
8850 if (tree_fits_shwi_p (offset0))
8852 HOST_WIDE_INT off = size_low_cst (offset0);
8853 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8854 * BITS_PER_UNIT)
8855 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8857 bitpos0 = off * BITS_PER_UNIT;
8858 offset0 = NULL_TREE;
8863 base1 = arg1;
8864 if (TREE_CODE (arg1) == ADDR_EXPR)
8866 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8867 &bitsize, &bitpos1, &offset1, &mode,
8868 &unsignedp, &volatilep, false);
8869 if (TREE_CODE (base1) == INDIRECT_REF)
8870 base1 = TREE_OPERAND (base1, 0);
8871 else
8872 indirect_base1 = true;
8874 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8876 base1 = TREE_OPERAND (arg1, 0);
8877 STRIP_SIGN_NOPS (base1);
8878 if (TREE_CODE (base1) == ADDR_EXPR)
8880 base1 = TREE_OPERAND (base1, 0);
8881 indirect_base1 = true;
8883 offset1 = TREE_OPERAND (arg1, 1);
8884 if (tree_fits_shwi_p (offset1))
8886 HOST_WIDE_INT off = size_low_cst (offset1);
8887 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8888 * BITS_PER_UNIT)
8889 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8891 bitpos1 = off * BITS_PER_UNIT;
8892 offset1 = NULL_TREE;
8897 /* A local variable can never be pointed to by
8898 the default SSA name of an incoming parameter. */
8899 if ((TREE_CODE (arg0) == ADDR_EXPR
8900 && indirect_base0
8901 && TREE_CODE (base0) == VAR_DECL
8902 && auto_var_in_fn_p (base0, current_function_decl)
8903 && !indirect_base1
8904 && TREE_CODE (base1) == SSA_NAME
8905 && SSA_NAME_IS_DEFAULT_DEF (base1)
8906 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8907 || (TREE_CODE (arg1) == ADDR_EXPR
8908 && indirect_base1
8909 && TREE_CODE (base1) == VAR_DECL
8910 && auto_var_in_fn_p (base1, current_function_decl)
8911 && !indirect_base0
8912 && TREE_CODE (base0) == SSA_NAME
8913 && SSA_NAME_IS_DEFAULT_DEF (base0)
8914 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8916 if (code == NE_EXPR)
8917 return constant_boolean_node (1, type);
8918 else if (code == EQ_EXPR)
8919 return constant_boolean_node (0, type);
8921 /* If we have equivalent bases we might be able to simplify. */
8922 else if (indirect_base0 == indirect_base1
8923 && operand_equal_p (base0, base1, 0))
8925 /* We can fold this expression to a constant if the non-constant
8926 offset parts are equal. */
8927 if ((offset0 == offset1
8928 || (offset0 && offset1
8929 && operand_equal_p (offset0, offset1, 0)))
8930 && (code == EQ_EXPR
8931 || code == NE_EXPR
8932 || (indirect_base0 && DECL_P (base0))
8933 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8936 if (!equality_code
8937 && bitpos0 != bitpos1
8938 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8939 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8940 fold_overflow_warning (("assuming pointer wraparound does not "
8941 "occur when comparing P +- C1 with "
8942 "P +- C2"),
8943 WARN_STRICT_OVERFLOW_CONDITIONAL);
8945 switch (code)
8947 case EQ_EXPR:
8948 return constant_boolean_node (bitpos0 == bitpos1, type);
8949 case NE_EXPR:
8950 return constant_boolean_node (bitpos0 != bitpos1, type);
8951 case LT_EXPR:
8952 return constant_boolean_node (bitpos0 < bitpos1, type);
8953 case LE_EXPR:
8954 return constant_boolean_node (bitpos0 <= bitpos1, type);
8955 case GE_EXPR:
8956 return constant_boolean_node (bitpos0 >= bitpos1, type);
8957 case GT_EXPR:
8958 return constant_boolean_node (bitpos0 > bitpos1, type);
8959 default:;
8962 /* We can simplify the comparison to a comparison of the variable
8963 offset parts if the constant offset parts are equal.
8964 Be careful to use signed sizetype here because otherwise we
8965 mess with array offsets in the wrong way. This is possible
8966 because pointer arithmetic is restricted to remain within an
8967 object and overflow on pointer differences is undefined as of
8968 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8969 else if (bitpos0 == bitpos1
8970 && (equality_code
8971 || (indirect_base0 && DECL_P (base0))
8972 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8974 /* By converting to signed sizetype we cover middle-end pointer
8975 arithmetic which operates on unsigned pointer types of size
8976 type size and ARRAY_REF offsets which are properly sign or
8977 zero extended from their type in case it is narrower than
8978 sizetype. */
8979 if (offset0 == NULL_TREE)
8980 offset0 = build_int_cst (ssizetype, 0);
8981 else
8982 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8983 if (offset1 == NULL_TREE)
8984 offset1 = build_int_cst (ssizetype, 0);
8985 else
8986 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8988 if (!equality_code
8989 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8990 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8991 fold_overflow_warning (("assuming pointer wraparound does not "
8992 "occur when comparing P +- C1 with "
8993 "P +- C2"),
8994 WARN_STRICT_OVERFLOW_COMPARISON);
8996 return fold_build2_loc (loc, code, type, offset0, offset1);
8999 /* For non-equal bases we can simplify if they are addresses
9000 of local binding decls or constants. */
9001 else if (indirect_base0 && indirect_base1
9002 /* We know that !operand_equal_p (base0, base1, 0)
9003 because the if condition was false. But make
9004 sure two decls are not the same. */
9005 && base0 != base1
9006 && TREE_CODE (arg0) == ADDR_EXPR
9007 && TREE_CODE (arg1) == ADDR_EXPR
9008 && (((TREE_CODE (base0) == VAR_DECL
9009 || TREE_CODE (base0) == PARM_DECL)
9010 && (targetm.binds_local_p (base0)
9011 || CONSTANT_CLASS_P (base1)))
9012 || CONSTANT_CLASS_P (base0))
9013 && (((TREE_CODE (base1) == VAR_DECL
9014 || TREE_CODE (base1) == PARM_DECL)
9015 && (targetm.binds_local_p (base1)
9016 || CONSTANT_CLASS_P (base0)))
9017 || CONSTANT_CLASS_P (base1)))
9019 if (code == EQ_EXPR)
9020 return omit_two_operands_loc (loc, type, boolean_false_node,
9021 arg0, arg1);
9022 else if (code == NE_EXPR)
9023 return omit_two_operands_loc (loc, type, boolean_true_node,
9024 arg0, arg1);
9026 /* For equal offsets we can simplify to a comparison of the
9027 base addresses. */
9028 else if (bitpos0 == bitpos1
9029 && (indirect_base0
9030 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9031 && (indirect_base1
9032 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9033 && ((offset0 == offset1)
9034 || (offset0 && offset1
9035 && operand_equal_p (offset0, offset1, 0))))
9037 if (indirect_base0)
9038 base0 = build_fold_addr_expr_loc (loc, base0);
9039 if (indirect_base1)
9040 base1 = build_fold_addr_expr_loc (loc, base1);
9041 return fold_build2_loc (loc, code, type, base0, base1);
9045 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9046 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9047 the resulting offset is smaller in absolute value than the
9048 original one and has the same sign. */
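/* For instance, X + 2 < Y + 5 becomes X < Y + 3: the combined
   constant 3 is smaller in absolute value than 5 and has the same
   sign, so the rewritten addition cannot overflow where the
   original one did not.  */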
9049 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9050 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9051 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9052 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9053 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9054 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9055 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9057 tree const1 = TREE_OPERAND (arg0, 1);
9058 tree const2 = TREE_OPERAND (arg1, 1);
9059 tree variable1 = TREE_OPERAND (arg0, 0);
9060 tree variable2 = TREE_OPERAND (arg1, 0);
9061 tree cst;
9062 const char * const warnmsg = G_("assuming signed overflow does not "
9063 "occur when combining constants around "
9064 "a comparison");
9066 /* Put the constant on the side where it doesn't overflow and is
9067 of lower absolute value and of the same sign as before. */
9068 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9069 ? MINUS_EXPR : PLUS_EXPR,
9070 const2, const1);
9071 if (!TREE_OVERFLOW (cst)
9072 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9073 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9075 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9076 return fold_build2_loc (loc, code, type,
9077 variable1,
9078 fold_build2_loc (loc, TREE_CODE (arg1),
9079 TREE_TYPE (arg1),
9080 variable2, cst));
9083 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9084 ? MINUS_EXPR : PLUS_EXPR,
9085 const1, const2);
9086 if (!TREE_OVERFLOW (cst)
9087 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9088 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9090 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9091 return fold_build2_loc (loc, code, type,
9092 fold_build2_loc (loc, TREE_CODE (arg0),
9093 TREE_TYPE (arg0),
9094 variable1, cst),
9095 variable2);
9099 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9100 signed arithmetic case. That form is created by the compiler
9101 often enough for folding it to be of value. One example is in
9102 computing loop trip counts after Operator Strength Reduction. */
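/* For instance, X * 4 > 0 becomes X > 0, while X * -4 > 0 becomes
   X < 0, since multiplying by a negative constant swaps the sense
   of the comparison.  */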
9103 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9104 && TREE_CODE (arg0) == MULT_EXPR
9105 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9106 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9107 && integer_zerop (arg1))
9109 tree const1 = TREE_OPERAND (arg0, 1);
9110 tree const2 = arg1; /* zero */
9111 tree variable1 = TREE_OPERAND (arg0, 0);
9112 enum tree_code cmp_code = code;
9114 /* Handle unfolded multiplication by zero. */
9115 if (integer_zerop (const1))
9116 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9118 fold_overflow_warning (("assuming signed overflow does not occur when "
9119 "eliminating multiplication in comparison "
9120 "with zero"),
9121 WARN_STRICT_OVERFLOW_COMPARISON);
9123 /* If const1 is negative we swap the sense of the comparison. */
9124 if (tree_int_cst_sgn (const1) < 0)
9125 cmp_code = swap_tree_comparison (cmp_code);
9127 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9130 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9131 if (tem)
9132 return tem;
9134 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9136 tree targ0 = strip_float_extensions (arg0);
9137 tree targ1 = strip_float_extensions (arg1);
9138 tree newtype = TREE_TYPE (targ0);
9140 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9141 newtype = TREE_TYPE (targ1);
9143 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9144 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9145 return fold_build2_loc (loc, code, type,
9146 fold_convert_loc (loc, newtype, targ0),
9147 fold_convert_loc (loc, newtype, targ1));
9149 /* (-a) CMP (-b) -> b CMP a */
9150 if (TREE_CODE (arg0) == NEGATE_EXPR
9151 && TREE_CODE (arg1) == NEGATE_EXPR)
9152 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9153 TREE_OPERAND (arg0, 0));
9155 if (TREE_CODE (arg1) == REAL_CST)
9157 REAL_VALUE_TYPE cst;
9158 cst = TREE_REAL_CST (arg1);
9160 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9161 if (TREE_CODE (arg0) == NEGATE_EXPR)
9162 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9163 TREE_OPERAND (arg0, 0),
9164 build_real (TREE_TYPE (arg1),
9165 real_value_negate (&cst)));
9167 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9168 /* a CMP (-0) -> a CMP 0 */
9169 if (REAL_VALUE_MINUS_ZERO (cst))
9170 return fold_build2_loc (loc, code, type, arg0,
9171 build_real (TREE_TYPE (arg1), dconst0));
9173 /* x != NaN is always true, other ops are always false. */
9174 if (REAL_VALUE_ISNAN (cst)
9175 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9177 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9178 return omit_one_operand_loc (loc, type, tem, arg0);
9181 /* Fold comparisons against infinity. */
9182 if (REAL_VALUE_ISINF (cst)
9183 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9185 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9186 if (tem != NULL_TREE)
9187 return tem;
9191 /* If this is a comparison of a real constant with a PLUS_EXPR
9192 or a MINUS_EXPR of a real constant, we can convert it into a
9193 comparison with a revised real constant as long as no overflow
9194 occurs when unsafe_math_optimizations are enabled. */
9195 if (flag_unsafe_math_optimizations
9196 && TREE_CODE (arg1) == REAL_CST
9197 && (TREE_CODE (arg0) == PLUS_EXPR
9198 || TREE_CODE (arg0) == MINUS_EXPR)
9199 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9200 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9201 ? MINUS_EXPR : PLUS_EXPR,
9202 arg1, TREE_OPERAND (arg0, 1)))
9203 && !TREE_OVERFLOW (tem))
9204 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9206 /* Likewise, we can simplify a comparison of a real constant with
9207 a MINUS_EXPR whose first operand is also a real constant, i.e.
9208 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9209 floating-point types only if -fassociative-math is set. */
9210 if (flag_associative_math
9211 && TREE_CODE (arg1) == REAL_CST
9212 && TREE_CODE (arg0) == MINUS_EXPR
9213 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9214 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9215 arg1))
9216 && !TREE_OVERFLOW (tem))
9217 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9218 TREE_OPERAND (arg0, 1), tem);
9220 /* Fold comparisons against built-in math functions. */
9221 if (TREE_CODE (arg1) == REAL_CST
9222 && flag_unsafe_math_optimizations
9223 && ! flag_errno_math)
9225 enum built_in_function fcode = builtin_mathfn_code (arg0);
9227 if (fcode != END_BUILTINS)
9229 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9230 if (tem != NULL_TREE)
9231 return tem;
9236 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9237 && CONVERT_EXPR_P (arg0))
9239 /* If we are widening one operand of an integer comparison,
9240 see if the other operand is similarly being widened. Perhaps we
9241 can do the comparison in the narrower type. */
9242 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9243 if (tem)
9244 return tem;
9246 /* Or if we are changing signedness. */
9247 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9248 if (tem)
9249 return tem;
9252 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9253 constant, we can simplify it. */
9254 if (TREE_CODE (arg1) == INTEGER_CST
9255 && (TREE_CODE (arg0) == MIN_EXPR
9256 || TREE_CODE (arg0) == MAX_EXPR)
9257 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9259 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9260 if (tem)
9261 return tem;
9264 /* Simplify comparison of something with itself. (For IEEE
9265 floating-point, we can only do some of these simplifications.) */
9266 if (operand_equal_p (arg0, arg1, 0))
9268 switch (code)
9270 case EQ_EXPR:
9271 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9272 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9273 return constant_boolean_node (1, type);
9274 break;
9276 case GE_EXPR:
9277 case LE_EXPR:
9278 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9279 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9280 return constant_boolean_node (1, type);
9281 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9283 case NE_EXPR:
9284 /* For NE, we can only do this simplification if integer
9285 or we don't honor IEEE floating point NaNs. */
9286 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9287 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9288 break;
9289 /* ... fall through ... */
9290 case GT_EXPR:
9291 case LT_EXPR:
9292 return constant_boolean_node (0, type);
9293 default:
9294 gcc_unreachable ();
9298 /* If we are comparing an expression that just has comparisons
9299 of two integer values, arithmetic expressions of those comparisons,
9300 and constants, we can simplify it. There are only three cases
9301 to check: the two values can either be equal, the first can be
9302 greater, or the second can be greater. Fold the expression for
9303 those three values. Since each value must be 0 or 1, we have
9304 eight possibilities, each of which corresponds to the constant 0
9305 or 1 or one of the six possible comparisons.
9307 This handles common cases like (a > b) == 0 but also handles
9308 expressions like ((x > y) - (y > x)) > 0, which supposedly
9309 occur in macroized code. */
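/* For instance, for (a > b) == 0 the three trial values yield
   high_result = 0, equal_result = 1 and low_result = 1, i.e. the
   mask 011, which selects LE_EXPR: the expression folds to
   a <= b.  */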
9311 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9313 tree cval1 = 0, cval2 = 0;
9314 int save_p = 0;
9316 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9317 /* Don't handle degenerate cases here; they should already
9318 have been handled anyway. */
9319 && cval1 != 0 && cval2 != 0
9320 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9321 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9322 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9323 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9324 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9325 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9326 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9328 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9329 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9331 /* We can't just pass T to eval_subst in case cval1 or cval2
9332 was the same as ARG1. */
9334 tree high_result
9335 = fold_build2_loc (loc, code, type,
9336 eval_subst (loc, arg0, cval1, maxval,
9337 cval2, minval),
9338 arg1);
9339 tree equal_result
9340 = fold_build2_loc (loc, code, type,
9341 eval_subst (loc, arg0, cval1, maxval,
9342 cval2, maxval),
9343 arg1);
9344 tree low_result
9345 = fold_build2_loc (loc, code, type,
9346 eval_subst (loc, arg0, cval1, minval,
9347 cval2, maxval),
9348 arg1);
9350 /* All three of these results should be 0 or 1. Confirm they are.
9351 Then use those values to select the proper code to use. */
9353 if (TREE_CODE (high_result) == INTEGER_CST
9354 && TREE_CODE (equal_result) == INTEGER_CST
9355 && TREE_CODE (low_result) == INTEGER_CST)
9357 /* Make a 3-bit mask with the high-order bit being the
9358 value for `>', the next for `=', and the low for `<'. */
9359 switch ((integer_onep (high_result) * 4)
9360 + (integer_onep (equal_result) * 2)
9361 + integer_onep (low_result))
9363 case 0:
9364 /* Always false. */
9365 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9366 case 1:
9367 code = LT_EXPR;
9368 break;
9369 case 2:
9370 code = EQ_EXPR;
9371 break;
9372 case 3:
9373 code = LE_EXPR;
9374 break;
9375 case 4:
9376 code = GT_EXPR;
9377 break;
9378 case 5:
9379 code = NE_EXPR;
9380 break;
9381 case 6:
9382 code = GE_EXPR;
9383 break;
9384 case 7:
9385 /* Always true. */
9386 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9389 if (save_p)
9391 tem = save_expr (build2 (code, type, cval1, cval2));
9392 SET_EXPR_LOCATION (tem, loc);
9393 return tem;
9395 return fold_build2_loc (loc, code, type, cval1, cval2);
9400 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9401 into a single range test. */
9402 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9403 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9404 && TREE_CODE (arg1) == INTEGER_CST
9405 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9406 && !integer_zerop (TREE_OPERAND (arg0, 1))
9407 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9408 && !TREE_OVERFLOW (arg1))
9410 tem = fold_div_compare (loc, code, type, arg0, arg1);
9411 if (tem != NULL_TREE)
9412 return tem;
9415 /* Fold ~X op ~Y as Y op X. */
9416 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9417 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9419 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9420 return fold_build2_loc (loc, code, type,
9421 fold_convert_loc (loc, cmp_type,
9422 TREE_OPERAND (arg1, 0)),
9423 TREE_OPERAND (arg0, 0));
9426 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
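/* For instance, ~x < 5 becomes x > ~5, i.e. x > -6 for signed x,
   since bitwise negation reverses the order of both signed and
   unsigned values.  */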
9427 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9428 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9430 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9431 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9432 TREE_OPERAND (arg0, 0),
9433 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9434 fold_convert_loc (loc, cmp_type, arg1)));
9437 return NULL_TREE;
9441 /* Subroutine of fold_binary. Optimize complex multiplications of the
9442 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9443 argument EXPR represents the expression "z" of type TYPE. */
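/* For z = a + b*i this uses z * conj(z) = (a + b*i) * (a - b*i)
   = a*a + b*b, a complex value with zero imaginary part.  */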
9445 static tree
9446 fold_mult_zconjz (location_t loc, tree type, tree expr)
9448 tree itype = TREE_TYPE (type);
9449 tree rpart, ipart, tem;
9451 if (TREE_CODE (expr) == COMPLEX_EXPR)
9453 rpart = TREE_OPERAND (expr, 0);
9454 ipart = TREE_OPERAND (expr, 1);
9456 else if (TREE_CODE (expr) == COMPLEX_CST)
9458 rpart = TREE_REALPART (expr);
9459 ipart = TREE_IMAGPART (expr);
9461 else
9463 expr = save_expr (expr);
9464 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9465 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9468 rpart = save_expr (rpart);
9469 ipart = save_expr (ipart);
9470 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9471 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9472 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9473 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9474 build_zero_cst (itype));
9478 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9479 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9480 guarantees that P and N have the same least significant log2(M) bits.
9481 N is not otherwise constrained. In particular, N is not normalized to
9482 0 <= N < M as is common. In general, the precise value of P is unknown.
9483 M is chosen as large as possible such that constant N can be determined.
9485 Returns M and sets *RESIDUE to N.
9487 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9488 account. This is not always possible due to PR 35705.
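/* For instance, if EXPR is the address of a 16-byte-aligned object
   plus a constant offset of 4, the result is M = 16 with *RESIDUE
   set to 4: the pointer value is known to be congruent to 4
   modulo 16.  */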
9491 static unsigned HOST_WIDE_INT
9492 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9493 bool allow_func_align)
9495 enum tree_code code;
9497 *residue = 0;
9499 code = TREE_CODE (expr);
9500 if (code == ADDR_EXPR)
9502 unsigned int bitalign;
9503 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9504 *residue /= BITS_PER_UNIT;
9505 return bitalign / BITS_PER_UNIT;
9507 else if (code == POINTER_PLUS_EXPR)
9509 tree op0, op1;
9510 unsigned HOST_WIDE_INT modulus;
9511 enum tree_code inner_code;
9513 op0 = TREE_OPERAND (expr, 0);
9514 STRIP_NOPS (op0);
9515 modulus = get_pointer_modulus_and_residue (op0, residue,
9516 allow_func_align);
9518 op1 = TREE_OPERAND (expr, 1);
9519 STRIP_NOPS (op1);
9520 inner_code = TREE_CODE (op1);
9521 if (inner_code == INTEGER_CST)
9523 *residue += TREE_INT_CST_LOW (op1);
9524 return modulus;
9526 else if (inner_code == MULT_EXPR)
9528 op1 = TREE_OPERAND (op1, 1);
9529 if (TREE_CODE (op1) == INTEGER_CST)
9531 unsigned HOST_WIDE_INT align;
9533 /* Compute the greatest power-of-2 divisor of op1. */
9534 align = TREE_INT_CST_LOW (op1);
9535 align &= -align;
9537 /* If align is non-zero and less than *modulus, replace
9538 *modulus with align.  If align is 0, then either op1 is 0
9539 or the greatest power-of-2 divisor of op1 doesn't fit in an
9540 unsigned HOST_WIDE_INT. In either case, no additional
9541 constraint is imposed. */
9542 if (align)
9543 modulus = MIN (modulus, align);
9545 return modulus;
9550 /* If we get here, we were unable to determine anything useful about the
9551 expression. */
9552 return 1;
9555 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9556 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9558 static bool
9559 vec_cst_ctor_to_array (tree arg, tree *elts)
9561 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9563 if (TREE_CODE (arg) == VECTOR_CST)
9565 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9566 elts[i] = VECTOR_CST_ELT (arg, i);
9568 else if (TREE_CODE (arg) == CONSTRUCTOR)
9570 constructor_elt *elt;
9572 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9573 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9574 return false;
9575 else
9576 elts[i] = elt->value;
9578 else
9579 return false;
9580 for (; i < nelts; i++)
9581 elts[i]
9582 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9583 return true;
9586 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9587 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9588 NULL_TREE otherwise. */
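/* For instance, with four-element vectors, SEL = {0, 4, 1, 5}
   interleaves the low halves of ARG0 and ARG1: selector values
   below NELTS pick elements of ARG0, larger ones pick from
   ARG1.  */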
9590 static tree
9591 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9593 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9594 tree *elts;
9595 bool need_ctor = false;
9597 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9598 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9599 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9600 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9601 return NULL_TREE;
9603 elts = XALLOCAVEC (tree, nelts * 3);
9604 if (!vec_cst_ctor_to_array (arg0, elts)
9605 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9606 return NULL_TREE;
9608 for (i = 0; i < nelts; i++)
9610 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9611 need_ctor = true;
9612 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9615 if (need_ctor)
9617 vec<constructor_elt, va_gc> *v;
9618 vec_alloc (v, nelts);
9619 for (i = 0; i < nelts; i++)
9620 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9621 return build_constructor (type, v);
9623 else
9624 return build_vector (type, &elts[2 * nelts]);
9627 /* Try to fold a pointer difference of type TYPE between two address expressions of
9628 array references AREF0 and AREF1 using location LOC. Return a
9629 simplified expression for the difference or NULL_TREE. */
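/* For instance, the address difference &a[i] - &a[j] folds to
   (i - j) * sizeof (a[0]) bytes; bases that are themselves array
   references are handled by recursing on them first.  */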
9631 static tree
9632 fold_addr_of_array_ref_difference (location_t loc, tree type,
9633 tree aref0, tree aref1)
9635 tree base0 = TREE_OPERAND (aref0, 0);
9636 tree base1 = TREE_OPERAND (aref1, 0);
9637 tree base_offset = build_int_cst (type, 0);
9639 /* If the bases are array references as well, recurse. If the bases
9640 are pointer indirections, compute the difference of the pointers.
9641 If the bases are equal, we are set. */
9642 if ((TREE_CODE (base0) == ARRAY_REF
9643 && TREE_CODE (base1) == ARRAY_REF
9644 && (base_offset
9645 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9646 || (INDIRECT_REF_P (base0)
9647 && INDIRECT_REF_P (base1)
9648 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9649 TREE_OPERAND (base0, 0),
9650 TREE_OPERAND (base1, 0))))
9651 || operand_equal_p (base0, base1, 0))
9653 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9654 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9655 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9656 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9657 return fold_build2_loc (loc, PLUS_EXPR, type,
9658 base_offset,
9659 fold_build2_loc (loc, MULT_EXPR, type,
9660 diff, esz));
9662 return NULL_TREE;
9665 /* If the real or vector real constant CST of type TYPE has an exact
9666 inverse, return it, else return NULL. */
9668 static tree
9669 exact_inverse (tree type, tree cst)
9671 REAL_VALUE_TYPE r;
9672 tree unit_type, *elts;
9673 machine_mode mode;
9674 unsigned vec_nelts, i;
9676 switch (TREE_CODE (cst))
9678 case REAL_CST:
9679 r = TREE_REAL_CST (cst);
9681 if (exact_real_inverse (TYPE_MODE (type), &r))
9682 return build_real (type, r);
9684 return NULL_TREE;
9686 case VECTOR_CST:
9687 vec_nelts = VECTOR_CST_NELTS (cst);
9688 elts = XALLOCAVEC (tree, vec_nelts);
9689 unit_type = TREE_TYPE (type);
9690 mode = TYPE_MODE (unit_type);
9692 for (i = 0; i < vec_nelts; i++)
9694 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9695 if (!exact_real_inverse (mode, &r))
9696 return NULL_TREE;
9697 elts[i] = build_real (unit_type, r);
9700 return build_vector (type, elts);
9702 default:
9703 return NULL_TREE;
9707 /* Mask out the tz least significant bits of X of type TYPE where
9708 tz is the number of trailing zeroes in Y. */
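/* For instance, Y = 24 (binary 11000) has three trailing zeroes,
   so the result is X & ~7, i.e. X with its low three bits
   cleared.  */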
9709 static wide_int
9710 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9712 int tz = wi::ctz (y);
9713 if (tz > 0)
9714 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9715 return x;
9718 /* Return true when T is an address and is known to be nonzero.
9719 For floating point we further ensure that T is not denormal.
9720 Similar logic is present in nonzero_address in rtlanal.h.
9722 If the return value is based on the assumption that signed overflow
9723 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9724 change *STRICT_OVERFLOW_P. */
9726 static bool
9727 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9729 tree type = TREE_TYPE (t);
9730 enum tree_code code;
9732 /* Doing something useful for floating point would need more work. */
9733 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9734 return false;
9736 code = TREE_CODE (t);
9737 switch (TREE_CODE_CLASS (code))
9739 case tcc_unary:
9740 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9741 strict_overflow_p);
9742 case tcc_binary:
9743 case tcc_comparison:
9744 return tree_binary_nonzero_warnv_p (code, type,
9745 TREE_OPERAND (t, 0),
9746 TREE_OPERAND (t, 1),
9747 strict_overflow_p);
9748 case tcc_constant:
9749 case tcc_declaration:
9750 case tcc_reference:
9751 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9753 default:
9754 break;
9757 switch (code)
9759 case TRUTH_NOT_EXPR:
9760 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9761 strict_overflow_p);
9763 case TRUTH_AND_EXPR:
9764 case TRUTH_OR_EXPR:
9765 case TRUTH_XOR_EXPR:
9766 return tree_binary_nonzero_warnv_p (code, type,
9767 TREE_OPERAND (t, 0),
9768 TREE_OPERAND (t, 1),
9769 strict_overflow_p);
9771 case COND_EXPR:
9772 case CONSTRUCTOR:
9773 case OBJ_TYPE_REF:
9774 case ASSERT_EXPR:
9775 case ADDR_EXPR:
9776 case WITH_SIZE_EXPR:
9777 case SSA_NAME:
9778 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9780 case COMPOUND_EXPR:
9781 case MODIFY_EXPR:
9782 case BIND_EXPR:
9783 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9784 strict_overflow_p);
9786 case SAVE_EXPR:
9787 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9788 strict_overflow_p);
9790 case CALL_EXPR:
9792 tree fndecl = get_callee_fndecl (t);
9793 if (!fndecl) return false;
9794 if (flag_delete_null_pointer_checks && !flag_check_new
9795 && DECL_IS_OPERATOR_NEW (fndecl)
9796 && !TREE_NOTHROW (fndecl))
9797 return true;
9798 if (flag_delete_null_pointer_checks
9799 && lookup_attribute ("returns_nonnull",
9800 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9801 return true;
9802 return alloca_call_p (t);
9805 default:
9806 break;
9808 return false;
9811 /* Return true when T is an address and is known to be nonzero.
9812 Handle warnings about undefined signed overflow. */
9814 static bool
9815 tree_expr_nonzero_p (tree t)
9817 bool ret, strict_overflow_p;
9819 strict_overflow_p = false;
9820 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9821 if (strict_overflow_p)
9822 fold_overflow_warning (("assuming signed overflow does not occur when "
9823 "determining that expression is always "
9824 "non-zero"),
9825 WARN_STRICT_OVERFLOW_MISC);
9826 return ret;
9829 /* Fold a binary expression of code CODE and type TYPE with operands
9830 OP0 and OP1. LOC is the location of the resulting expression.
9831 Return the folded expression if folding is successful. Otherwise,
9832 return NULL_TREE. */
9834 tree
9835 fold_binary_loc (location_t loc,
9836 enum tree_code code, tree type, tree op0, tree op1)
9838 enum tree_code_class kind = TREE_CODE_CLASS (code);
9839 tree arg0, arg1, tem;
9840 tree t1 = NULL_TREE;
9841 bool strict_overflow_p;
9842 unsigned int prec;
9844 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9845 && TREE_CODE_LENGTH (code) == 2
9846 && op0 != NULL_TREE
9847 && op1 != NULL_TREE);
9849 arg0 = op0;
9850 arg1 = op1;
9852 /* Strip any conversions that don't change the mode. This is
9853 safe for every expression, except for a comparison expression
9854 because its signedness is derived from its operands. So, in
9855 the latter case, only strip conversions that don't change the
9856 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9857 preserved.
9859 Note that this is done as an internal manipulation within the
9860 constant folder, in order to find the simplest representation
9861 of the arguments so that their form can be studied. In any
9862 cases, the appropriate type conversions should be put back in
9863 the tree that will get out of the constant folder. */
9865 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9867 STRIP_SIGN_NOPS (arg0);
9868 STRIP_SIGN_NOPS (arg1);
9870 else
9872 STRIP_NOPS (arg0);
9873 STRIP_NOPS (arg1);
9876 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9877 constant but we can't do arithmetic on them. */
9878 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9879 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9880 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9881 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9882 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9883 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9884 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9886 if (kind == tcc_binary)
9888 /* Make sure type and arg0 have the same saturating flag. */
9889 gcc_assert (TYPE_SATURATING (type)
9890 == TYPE_SATURATING (TREE_TYPE (arg0)));
9891 tem = const_binop (code, arg0, arg1);
9893 else if (kind == tcc_comparison)
9894 tem = fold_relational_const (code, type, arg0, arg1);
9895 else
9896 tem = NULL_TREE;
9898 if (tem != NULL_TREE)
9900 if (TREE_TYPE (tem) != type)
9901 tem = fold_convert_loc (loc, type, tem);
9902 return tem;
9906 /* If this is a commutative operation, and ARG0 is a constant, move it
9907 to ARG1 to reduce the number of tests below. */
9908 if (commutative_tree_code (code)
9909 && tree_swap_operands_p (arg0, arg1, true))
9910 return fold_build2_loc (loc, code, type, op1, op0);
9912 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9913 to ARG1 to reduce the number of tests below. */
9914 if (kind == tcc_comparison
9915 && tree_swap_operands_p (arg0, arg1, true))
9916 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9918 tem = generic_simplify (loc, code, type, op0, op1);
9919 if (tem)
9920 return tem;
9922 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9924 First check for cases where an arithmetic operation is applied to a
9925 compound, conditional, or comparison operation. Push the arithmetic
9926 operation inside the compound or conditional to see if any folding
9927 can then be done. Convert comparison to conditional for this purpose.
9928 This also optimizes non-constant cases that used to be done in
9929 expand_expr.
9931 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9932 where one of the operands is a comparison and the other is a comparison, a
9933 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9934 code below would make the expression more complex. Change it to a
9935 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9936 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
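/* For instance, (a < b) & (c < d) becomes the equivalent
   TRUTH_AND_EXPR, and (a < b) != (c < d) becomes the equivalent
   TRUTH_XOR_EXPR.  */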
9938 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9939 || code == EQ_EXPR || code == NE_EXPR)
9940 && TREE_CODE (type) != VECTOR_TYPE
9941 && ((truth_value_p (TREE_CODE (arg0))
9942 && (truth_value_p (TREE_CODE (arg1))
9943 || (TREE_CODE (arg1) == BIT_AND_EXPR
9944 && integer_onep (TREE_OPERAND (arg1, 1)))))
9945 || (truth_value_p (TREE_CODE (arg1))
9946 && (truth_value_p (TREE_CODE (arg0))
9947 || (TREE_CODE (arg0) == BIT_AND_EXPR
9948 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9950 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9951 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9952 : TRUTH_XOR_EXPR,
9953 boolean_type_node,
9954 fold_convert_loc (loc, boolean_type_node, arg0),
9955 fold_convert_loc (loc, boolean_type_node, arg1));
9957 if (code == EQ_EXPR)
9958 tem = invert_truthvalue_loc (loc, tem);
9960 return fold_convert_loc (loc, type, tem);
9963 if (TREE_CODE_CLASS (code) == tcc_binary
9964 || TREE_CODE_CLASS (code) == tcc_comparison)
9966 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9968 tem = fold_build2_loc (loc, code, type,
9969 fold_convert_loc (loc, TREE_TYPE (op0),
9970 TREE_OPERAND (arg0, 1)), op1);
9971 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9972 tem);
9974 if (TREE_CODE (arg1) == COMPOUND_EXPR
9975 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9977 tem = fold_build2_loc (loc, code, type, op0,
9978 fold_convert_loc (loc, TREE_TYPE (op1),
9979 TREE_OPERAND (arg1, 1)));
9980 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9981 tem);
9984 if (TREE_CODE (arg0) == COND_EXPR
9985 || TREE_CODE (arg0) == VEC_COND_EXPR
9986 || COMPARISON_CLASS_P (arg0))
9988 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9989 arg0, arg1,
9990 /*cond_first_p=*/1);
9991 if (tem != NULL_TREE)
9992 return tem;
9995 if (TREE_CODE (arg1) == COND_EXPR
9996 || TREE_CODE (arg1) == VEC_COND_EXPR
9997 || COMPARISON_CLASS_P (arg1))
9999 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10000 arg1, arg0,
10001 /*cond_first_p=*/0);
10002 if (tem != NULL_TREE)
10003 return tem;
10007 switch (code)
10009 case MEM_REF:
10010 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10011 if (TREE_CODE (arg0) == ADDR_EXPR
10012 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10014 tree iref = TREE_OPERAND (arg0, 0);
10015 return fold_build2 (MEM_REF, type,
10016 TREE_OPERAND (iref, 0),
10017 int_const_binop (PLUS_EXPR, arg1,
10018 TREE_OPERAND (iref, 1)));
10021 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10022 if (TREE_CODE (arg0) == ADDR_EXPR
10023 && handled_component_p (TREE_OPERAND (arg0, 0)))
10025 tree base;
10026 HOST_WIDE_INT coffset;
10027 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10028 &coffset);
10029 if (!base)
10030 return NULL_TREE;
10031 return fold_build2 (MEM_REF, type,
10032 build_fold_addr_expr (base),
10033 int_const_binop (PLUS_EXPR, arg1,
10034 size_int (coffset)));
10037 return NULL_TREE;
10039 case POINTER_PLUS_EXPR:
10040 /* 0 +p index -> (type)index */
10041 if (integer_zerop (arg0))
10042 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10044 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10045 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10046 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10047 return fold_convert_loc (loc, type,
10048 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10049 fold_convert_loc (loc, sizetype,
10050 arg1),
10051 fold_convert_loc (loc, sizetype,
10052 arg0)));
10054 /* (PTR +p B) +p A -> PTR +p (B + A) */
10055 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10056 && !upc_shared_type_p (TREE_TYPE (type)))
10058 tree inner;
10059 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10060 tree arg00 = TREE_OPERAND (arg0, 0);
10061 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10062 arg01, fold_convert_loc (loc, sizetype, arg1));
10063 return fold_convert_loc (loc, type,
10064 fold_build_pointer_plus_loc (loc,
10065 arg00, inner));
10068 /* PTR_CST +p CST -> CST1 */
10069 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10070 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10071 fold_convert_loc (loc, type, arg1));
10073 return NULL_TREE;
10075 case PLUS_EXPR:
10076 /* A + (-B) -> A - B */
10077 if (TREE_CODE (arg1) == NEGATE_EXPR
10078 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10079 return fold_build2_loc (loc, MINUS_EXPR, type,
10080 fold_convert_loc (loc, type, arg0),
10081 fold_convert_loc (loc, type,
10082 TREE_OPERAND (arg1, 0)));
10083 /* (-A) + B -> B - A */
10084 if (TREE_CODE (arg0) == NEGATE_EXPR
10085 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10086 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10087 return fold_build2_loc (loc, MINUS_EXPR, type,
10088 fold_convert_loc (loc, type, arg1),
10089 fold_convert_loc (loc, type,
10090 TREE_OPERAND (arg0, 0)));
10092 /* Disable further optimizations involving UPC shared pointers,
10093 because integers are not interoperable with shared pointers. */
10094 if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10095 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10096 || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
10097 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
10098 return NULL_TREE;
10100 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10102 /* Convert ~A + 1 to -A. */
10103 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10104 && integer_each_onep (arg1))
10105 return fold_build1_loc (loc, NEGATE_EXPR, type,
10106 fold_convert_loc (loc, type,
10107 TREE_OPERAND (arg0, 0)));
10109 /* ~X + X is -1. */
10110 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10111 && !TYPE_OVERFLOW_TRAPS (type))
10113 tree tem = TREE_OPERAND (arg0, 0);
10115 STRIP_NOPS (tem);
10116 if (operand_equal_p (tem, arg1, 0))
10118 t1 = build_all_ones_cst (type);
10119 return omit_one_operand_loc (loc, type, t1, arg1);
10123 /* X + ~X is -1. */
10124 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10125 && !TYPE_OVERFLOW_TRAPS (type))
10127 tree tem = TREE_OPERAND (arg1, 0);
10129 STRIP_NOPS (tem);
10130 if (operand_equal_p (arg0, tem, 0))
10132 t1 = build_all_ones_cst (type);
10133 return omit_one_operand_loc (loc, type, t1, arg0);
10137 /* X + (X / CST) * -CST is X % CST. */
10138 if (TREE_CODE (arg1) == MULT_EXPR
10139 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10140 && operand_equal_p (arg0,
10141 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10143 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10144 tree cst1 = TREE_OPERAND (arg1, 1);
10145 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10146 cst1, cst0);
10147 if (sum && integer_zerop (sum))
10148 return fold_convert_loc (loc, type,
10149 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10150 TREE_TYPE (arg0), arg0,
10151 cst0));
10155 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10156 one. Make sure the type is not saturating and has the signedness of
10157 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10158 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10159 if ((TREE_CODE (arg0) == MULT_EXPR
10160 || TREE_CODE (arg1) == MULT_EXPR)
10161 && !TYPE_SATURATING (type)
10162 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10163 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10164 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10166 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10167 if (tem)
10168 return tem;
10171 if (! FLOAT_TYPE_P (type))
10173 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10174 with a constant, and the two constants have no bits in common,
10175 we should treat this as a BIT_IOR_EXPR since this may produce more
10176 simplifications. */
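/* For instance, in (x & 0xF0) + (y & 0x0F) the two masks share no
   bits, so no carries can occur and the sum equals
   (x & 0xF0) | (y & 0x0F).  */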
10177 if (TREE_CODE (arg0) == BIT_AND_EXPR
10178 && TREE_CODE (arg1) == BIT_AND_EXPR
10179 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10180 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10181 && wi::bit_and (TREE_OPERAND (arg0, 1),
10182 TREE_OPERAND (arg1, 1)) == 0)
10184 code = BIT_IOR_EXPR;
10185 goto bit_ior;
10188 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10189 (plus (plus (mult) (mult)) (foo)) so that we can
10190 take advantage of the factoring cases below. */
10191 if (TYPE_OVERFLOW_WRAPS (type)
10192 && (((TREE_CODE (arg0) == PLUS_EXPR
10193 || TREE_CODE (arg0) == MINUS_EXPR)
10194 && TREE_CODE (arg1) == MULT_EXPR)
10195 || ((TREE_CODE (arg1) == PLUS_EXPR
10196 || TREE_CODE (arg1) == MINUS_EXPR)
10197 && TREE_CODE (arg0) == MULT_EXPR)))
10199 tree parg0, parg1, parg, marg;
10200 enum tree_code pcode;
10202 if (TREE_CODE (arg1) == MULT_EXPR)
10203 parg = arg0, marg = arg1;
10204 else
10205 parg = arg1, marg = arg0;
10206 pcode = TREE_CODE (parg);
10207 parg0 = TREE_OPERAND (parg, 0);
10208 parg1 = TREE_OPERAND (parg, 1);
10209 STRIP_NOPS (parg0);
10210 STRIP_NOPS (parg1);
10212 if (TREE_CODE (parg0) == MULT_EXPR
10213 && TREE_CODE (parg1) != MULT_EXPR)
10214 return fold_build2_loc (loc, pcode, type,
10215 fold_build2_loc (loc, PLUS_EXPR, type,
10216 fold_convert_loc (loc, type,
10217 parg0),
10218 fold_convert_loc (loc, type,
10219 marg)),
10220 fold_convert_loc (loc, type, parg1));
10221 if (TREE_CODE (parg0) != MULT_EXPR
10222 && TREE_CODE (parg1) == MULT_EXPR)
10223 return
10224 fold_build2_loc (loc, PLUS_EXPR, type,
10225 fold_convert_loc (loc, type, parg0),
10226 fold_build2_loc (loc, pcode, type,
10227 fold_convert_loc (loc, type, marg),
10228 fold_convert_loc (loc, type,
10229 parg1)));
10232 else
10234 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10235 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10236 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10238 /* Likewise if the operands are reversed. */
10239 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10240 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10242 /* Convert X + -C into X - C. */
10243 if (TREE_CODE (arg1) == REAL_CST
10244 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10246 tem = fold_negate_const (arg1, type);
10247 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10248 return fold_build2_loc (loc, MINUS_EXPR, type,
10249 fold_convert_loc (loc, type, arg0),
10250 fold_convert_loc (loc, type, tem));
10253 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10254 to __complex__ ( x, y ). This is not the same for SNaNs or
10255 if signed zeros are involved. */
10256 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10257 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10258 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10260 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10261 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10262 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10263 bool arg0rz = false, arg0iz = false;
10264 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10265 || (arg0i && (arg0iz = real_zerop (arg0i))))
10267 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10268 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10269 if (arg0rz && arg1i && real_zerop (arg1i))
10271 tree rp = arg1r ? arg1r
10272 : build1 (REALPART_EXPR, rtype, arg1);
10273 tree ip = arg0i ? arg0i
10274 : build1 (IMAGPART_EXPR, rtype, arg0);
10275 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10277 else if (arg0iz && arg1r && real_zerop (arg1r))
10279 tree rp = arg0r ? arg0r
10280 : build1 (REALPART_EXPR, rtype, arg0);
10281 tree ip = arg1i ? arg1i
10282 : build1 (IMAGPART_EXPR, rtype, arg1);
10283 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10288 if (flag_unsafe_math_optimizations
10289 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10290 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10291 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10292 return tem;
10294 /* Convert x+x into x*2.0. */
10295 if (operand_equal_p (arg0, arg1, 0)
10296 && SCALAR_FLOAT_TYPE_P (type))
10297 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10298 build_real (type, dconst2));
10300 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10301 We associate floats only if the user has specified
10302 -fassociative-math. */
10303 if (flag_associative_math
10304 && TREE_CODE (arg1) == PLUS_EXPR
10305 && TREE_CODE (arg0) != MULT_EXPR)
10307 tree tree10 = TREE_OPERAND (arg1, 0);
10308 tree tree11 = TREE_OPERAND (arg1, 1);
10309 if (TREE_CODE (tree11) == MULT_EXPR
10310 && TREE_CODE (tree10) == MULT_EXPR)
10312 tree tree0;
10313 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10314 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10317 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10318 We associate floats only if the user has specified
10319 -fassociative-math. */
10320 if (flag_associative_math
10321 && TREE_CODE (arg0) == PLUS_EXPR
10322 && TREE_CODE (arg1) != MULT_EXPR)
10324 tree tree00 = TREE_OPERAND (arg0, 0);
10325 tree tree01 = TREE_OPERAND (arg0, 1);
10326 if (TREE_CODE (tree01) == MULT_EXPR
10327 && TREE_CODE (tree00) == MULT_EXPR)
10329 tree tree0;
10330 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10331 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10336 bit_rotate:
10337 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10338 is a rotate of A by C1 bits. */
10339 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10340 is a rotate of A by B bits. */
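/* For instance, with 32-bit unsigned A, (A << 3) + (A >> 29)
   becomes A rotated left by 3, and (A << B) + (A >> (32 - B))
   becomes A rotated left by B.  */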
10342 enum tree_code code0, code1;
10343 tree rtype;
10344 code0 = TREE_CODE (arg0);
10345 code1 = TREE_CODE (arg1);
10346 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10347 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10348 && operand_equal_p (TREE_OPERAND (arg0, 0),
10349 TREE_OPERAND (arg1, 0), 0)
10350 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10351 TYPE_UNSIGNED (rtype))
10352 /* Only create rotates in complete modes. Other cases are not
10353 expanded properly. */
10354 && (element_precision (rtype)
10355 == element_precision (TYPE_MODE (rtype))))
10357 tree tree01, tree11;
10358 enum tree_code code01, code11;
10360 tree01 = TREE_OPERAND (arg0, 1);
10361 tree11 = TREE_OPERAND (arg1, 1);
10362 STRIP_NOPS (tree01);
10363 STRIP_NOPS (tree11);
10364 code01 = TREE_CODE (tree01);
10365 code11 = TREE_CODE (tree11);
10366 if (code01 == INTEGER_CST
10367 && code11 == INTEGER_CST
10368 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10369 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10371 tem = build2_loc (loc, LROTATE_EXPR,
10372 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10373 TREE_OPERAND (arg0, 0),
10374 code0 == LSHIFT_EXPR ? tree01 : tree11);
10375 return fold_convert_loc (loc, type, tem);
10377 else if (code11 == MINUS_EXPR)
10379 tree tree110, tree111;
10380 tree110 = TREE_OPERAND (tree11, 0);
10381 tree111 = TREE_OPERAND (tree11, 1);
10382 STRIP_NOPS (tree110);
10383 STRIP_NOPS (tree111);
10384 if (TREE_CODE (tree110) == INTEGER_CST
10385 && 0 == compare_tree_int (tree110,
10386 element_precision
10387 (TREE_TYPE (TREE_OPERAND
10388 (arg0, 0))))
10389 && operand_equal_p (tree01, tree111, 0))
10390 return
10391 fold_convert_loc (loc, type,
10392 build2 ((code0 == LSHIFT_EXPR
10393 ? LROTATE_EXPR
10394 : RROTATE_EXPR),
10395 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10396 TREE_OPERAND (arg0, 0), tree01));
10398 else if (code01 == MINUS_EXPR)
10400 tree tree010, tree011;
10401 tree010 = TREE_OPERAND (tree01, 0);
10402 tree011 = TREE_OPERAND (tree01, 1);
10403 STRIP_NOPS (tree010);
10404 STRIP_NOPS (tree011);
10405 if (TREE_CODE (tree010) == INTEGER_CST
10406 && 0 == compare_tree_int (tree010,
10407 element_precision
10408 (TREE_TYPE (TREE_OPERAND
10409 (arg0, 0))))
10410 && operand_equal_p (tree11, tree011, 0))
10411 return fold_convert_loc
10412 (loc, type,
10413 build2 ((code0 != LSHIFT_EXPR
10414 ? LROTATE_EXPR
10415 : RROTATE_EXPR),
10416 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10417 TREE_OPERAND (arg0, 0), tree11));
10422 associate:
10423 /* In most languages, we can't associate operations on floats through
10424 parentheses. Rather than remember where the parentheses were, we
10425 don't associate floats at all, unless the user has specified
10426 -fassociative-math.
10427 And, we need to make sure type is not saturating. */
10429 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10430 && !TYPE_SATURATING (type))
10432 tree var0, con0, lit0, minus_lit0;
10433 tree var1, con1, lit1, minus_lit1;
10434 tree atype = type;
10435 bool ok = true;
10437 /* Split both trees into variables, constants, and literals. Then
10438 associate each group together, the constants with literals,
10439 then the result with variables. This increases the chances of
10440 literals being recombined later and of generating relocatable
10441 expressions for the sum of a constant and literal. */
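/* For instance, in (x + 3) + (y + 5) the variables x and y and the
   literals 3 and 5 are grouped separately; the literals fold to 8
   and the result is (x + y) + 8.  */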
10442 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10443 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10444 code == MINUS_EXPR);
10446 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10447 if (code == MINUS_EXPR)
10448 code = PLUS_EXPR;
10450 /* With undefined overflow prefer doing association in a type
10451 which wraps on overflow, if that is one of the operand types. */
10452 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10453 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10455 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10456 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10457 atype = TREE_TYPE (arg0);
10458 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10459 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10460 atype = TREE_TYPE (arg1);
10461 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10464 /* With undefined overflow we can only associate constants with one
10465 variable, and constants whose association doesn't overflow. */
10466 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10467 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10469 if (var0 && var1)
10471 tree tmp0 = var0;
10472 tree tmp1 = var1;
10474 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10475 tmp0 = TREE_OPERAND (tmp0, 0);
10476 if (CONVERT_EXPR_P (tmp0)
10477 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10478 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10479 <= TYPE_PRECISION (atype)))
10480 tmp0 = TREE_OPERAND (tmp0, 0);
10481 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10482 tmp1 = TREE_OPERAND (tmp1, 0);
10483 if (CONVERT_EXPR_P (tmp1)
10484 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10485 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10486 <= TYPE_PRECISION (atype)))
10487 tmp1 = TREE_OPERAND (tmp1, 0);
10488 /* The only case we can still associate with two variables
10489 is if they are the same, modulo negation and bit-pattern
10490 preserving conversions. */
10491 if (!operand_equal_p (tmp0, tmp1, 0))
10492 ok = false;
10496 /* Only do something if we found more than two objects. Otherwise,
10497 nothing has changed and we risk infinite recursion. */
10498 if (ok
10499 && (2 < ((var0 != 0) + (var1 != 0)
10500 + (con0 != 0) + (con1 != 0)
10501 + (lit0 != 0) + (lit1 != 0)
10502 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10504 bool any_overflows = false;
10505 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10506 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10507 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10508 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10509 var0 = associate_trees (loc, var0, var1, code, atype);
10510 con0 = associate_trees (loc, con0, con1, code, atype);
10511 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10512 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10513 code, atype);
10515 /* Preserve the MINUS_EXPR if the negative part of the literal is
10516 greater than the positive part. Otherwise, the multiplicative
10517 folding code (i.e. extract_muldiv) may be fooled in case
10518 unsigned constants are subtracted, like in the following
10519 example: ((X*2 + 4) - 8U)/2. */
10520 if (minus_lit0 && lit0)
10522 if (TREE_CODE (lit0) == INTEGER_CST
10523 && TREE_CODE (minus_lit0) == INTEGER_CST
10524 && tree_int_cst_lt (lit0, minus_lit0))
10526 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10527 MINUS_EXPR, atype);
10528 lit0 = 0;
10530 else
10532 lit0 = associate_trees (loc, lit0, minus_lit0,
10533 MINUS_EXPR, atype);
10534 minus_lit0 = 0;
10538 /* Don't introduce overflows through reassociation. */
10539 if (!any_overflows
10540 && ((lit0 && TREE_OVERFLOW (lit0))
10541 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10542 return NULL_TREE;
10544 if (minus_lit0)
10546 if (con0 == 0)
10547 return
10548 fold_convert_loc (loc, type,
10549 associate_trees (loc, var0, minus_lit0,
10550 MINUS_EXPR, atype));
10551 else
10553 con0 = associate_trees (loc, con0, minus_lit0,
10554 MINUS_EXPR, atype);
10555 return
10556 fold_convert_loc (loc, type,
10557 associate_trees (loc, var0, con0,
10558 PLUS_EXPR, atype));
10562 con0 = associate_trees (loc, con0, lit0, code, atype);
10563 return
10564 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10565 code, atype));
10569 return NULL_TREE;
10571 case MINUS_EXPR:
10572 /* Pointer simplifications for subtraction, simple reassociations. */
10573 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10575 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10576 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10577 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10579 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10580 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10581 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10582 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10583 return fold_build2_loc (loc, PLUS_EXPR, type,
10584 fold_build2_loc (loc, MINUS_EXPR, type,
10585 arg00, arg10),
10586 fold_build2_loc (loc, MINUS_EXPR, type,
10587 arg01, arg11));
10589 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10590 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10592 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10593 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10594 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10595 fold_convert_loc (loc, type, arg1));
10596 if (tmp)
10597 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10599 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10600 simplifies. */
10601 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10603 tree arg10 = fold_convert_loc (loc, type,
10604 TREE_OPERAND (arg1, 0));
10605 tree arg11 = fold_convert_loc (loc, type,
10606 TREE_OPERAND (arg1, 1));
10607 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10608 fold_convert_loc (loc, type, arg0),
10609 arg10);
10610 if (tmp)
10611 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10614 /* A - (-B) -> A + B */
10615 if (TREE_CODE (arg1) == NEGATE_EXPR)
10616 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10617 fold_convert_loc (loc, type,
10618 TREE_OPERAND (arg1, 0)));
10620 /* Disable further optimizations involving UPC shared pointers,
10621 because integers are not interoperable with shared pointers.
10622 (The test below also detects pointer difference between
10623 shared pointers, which cannot be folded.)  */
10625 if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10626 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10627 return NULL_TREE;
10629 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10630 if (TREE_CODE (arg0) == NEGATE_EXPR
10631 && negate_expr_p (arg1)
10632 && reorder_operands_p (arg0, arg1))
10633 return fold_build2_loc (loc, MINUS_EXPR, type,
10634 fold_convert_loc (loc, type,
10635 negate_expr (arg1)),
10636 fold_convert_loc (loc, type,
10637 TREE_OPERAND (arg0, 0)));
10638 /* Convert -A - 1 to ~A. */
10639 if (TREE_CODE (arg0) == NEGATE_EXPR
10640 && integer_each_onep (arg1)
10641 && !TYPE_OVERFLOW_TRAPS (type))
10642 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10643 fold_convert_loc (loc, type,
10644 TREE_OPERAND (arg0, 0)));
10646 /* Convert -1 - A to ~A. */
10647 if (TREE_CODE (type) != COMPLEX_TYPE
10648 && integer_all_onesp (arg0))
10649 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10652 /* X - (X / Y) * Y is X % Y. */
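/* A worked instance: with X == 7, Y == 3 and truncating division,
   7 - (7 / 3) * 3 == 7 - 6 == 1 == 7 % 3.  */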
10653 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10654 && TREE_CODE (arg1) == MULT_EXPR
10655 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10656 && operand_equal_p (arg0,
10657 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10658 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10659 TREE_OPERAND (arg1, 1), 0))
10660 return
10661 fold_convert_loc (loc, type,
10662 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10663 arg0, TREE_OPERAND (arg1, 1)));
10665 if (! FLOAT_TYPE_P (type))
10667 if (integer_zerop (arg0))
10668 return negate_expr (fold_convert_loc (loc, type, arg1));
10670 /* Fold A - (A & B) into ~B & A. */
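/* For instance, with A == 0b1100 and B == 0b1010:
   A - (A & B) == 12 - 8 == 4, and ~B & A == 0b0101 & 0b1100 == 4.  */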
10671 if (!TREE_SIDE_EFFECTS (arg0)
10672 && TREE_CODE (arg1) == BIT_AND_EXPR)
10674 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10676 tree arg10 = fold_convert_loc (loc, type,
10677 TREE_OPERAND (arg1, 0));
10678 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10679 fold_build1_loc (loc, BIT_NOT_EXPR,
10680 type, arg10),
10681 fold_convert_loc (loc, type, arg0));
10683 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10685 tree arg11 = fold_convert_loc (loc,
10686 type, TREE_OPERAND (arg1, 1));
10687 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10688 fold_build1_loc (loc, BIT_NOT_EXPR,
10689 type, arg11),
10690 fold_convert_loc (loc, type, arg0));
10694 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10695 any power of 2 minus 1. */
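/* E.g. with B == 3 and A == 6: (6 & ~3) - (6 & 3) == 4 - 2 == 2,
   and likewise (6 ^ 3) - 3 == 5 - 3 == 2.  */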
10696 if (TREE_CODE (arg0) == BIT_AND_EXPR
10697 && TREE_CODE (arg1) == BIT_AND_EXPR
10698 && operand_equal_p (TREE_OPERAND (arg0, 0),
10699 TREE_OPERAND (arg1, 0), 0))
10701 tree mask0 = TREE_OPERAND (arg0, 1);
10702 tree mask1 = TREE_OPERAND (arg1, 1);
10703 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10705 if (operand_equal_p (tem, mask1, 0))
10707 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10708 TREE_OPERAND (arg0, 0), mask1);
10709 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10714 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10715 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10716 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10718 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10719 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10720 (-ARG1 + ARG0) reduces to -ARG1. */
10721 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10722 return negate_expr (fold_convert_loc (loc, type, arg1));
10724 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10725 __complex__ ( x, -y ). This is not the same for SNaNs or if
10726 signed zeros are involved. */
10727 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10728 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10729 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10731 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10732 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10733 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10734 bool arg0rz = false, arg0iz = false;
10735 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10736 || (arg0i && (arg0iz = real_zerop (arg0i))))
10738 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10739 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10740 if (arg0rz && arg1i && real_zerop (arg1i))
10742 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10743 arg1r ? arg1r
10744 : build1 (REALPART_EXPR, rtype, arg1));
10745 tree ip = arg0i ? arg0i
10746 : build1 (IMAGPART_EXPR, rtype, arg0);
10747 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10749 else if (arg0iz && arg1r && real_zerop (arg1r))
10751 tree rp = arg0r ? arg0r
10752 : build1 (REALPART_EXPR, rtype, arg0);
10753 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10754 arg1i ? arg1i
10755 : build1 (IMAGPART_EXPR, rtype, arg1));
10756 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10761 /* A - B -> A + (-B) if B is easily negatable. */
10762 if (negate_expr_p (arg1)
10763 && ((FLOAT_TYPE_P (type)
10764 /* Avoid this transformation if B is a positive REAL_CST. */
10765 && (TREE_CODE (arg1) != REAL_CST
10766 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10767 || INTEGRAL_TYPE_P (type)))
10768 return fold_build2_loc (loc, PLUS_EXPR, type,
10769 fold_convert_loc (loc, type, arg0),
10770 fold_convert_loc (loc, type,
10771 negate_expr (arg1)));
10773 /* Try folding difference of addresses. */
10775 HOST_WIDE_INT diff;
10777 if ((TREE_CODE (arg0) == ADDR_EXPR
10778 || TREE_CODE (arg1) == ADDR_EXPR)
10779 && ptr_difference_const (arg0, arg1, &diff))
10780 return build_int_cst_type (type, diff);
10783 /* Fold &a[i] - &a[j] to i-j. */
10784 if (TREE_CODE (arg0) == ADDR_EXPR
10785 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10786 && TREE_CODE (arg1) == ADDR_EXPR
10787 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10789 tree tem = fold_addr_of_array_ref_difference (loc, type,
10790 TREE_OPERAND (arg0, 0),
10791 TREE_OPERAND (arg1, 0));
10792 if (tem)
10793 return tem;
10796 if (FLOAT_TYPE_P (type)
10797 && flag_unsafe_math_optimizations
10798 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10799 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10800 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10801 return tem;
10803 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being equal, or
10804 one of them equal to 1. Make sure the type is not saturating and has the signedness of
10805 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10806 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10807 if ((TREE_CODE (arg0) == MULT_EXPR
10808 || TREE_CODE (arg1) == MULT_EXPR)
10809 && !TYPE_SATURATING (type)
10810 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10811 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10812 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10814 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10815 if (tem)
10816 return tem;
10819 goto associate;
10821 case MULT_EXPR:
10822 /* (-A) * (-B) -> A * B */
10823 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10824 return fold_build2_loc (loc, MULT_EXPR, type,
10825 fold_convert_loc (loc, type,
10826 TREE_OPERAND (arg0, 0)),
10827 fold_convert_loc (loc, type,
10828 negate_expr (arg1)));
10829 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10830 return fold_build2_loc (loc, MULT_EXPR, type,
10831 fold_convert_loc (loc, type,
10832 negate_expr (arg0)),
10833 fold_convert_loc (loc, type,
10834 TREE_OPERAND (arg1, 0)));
10836 if (! FLOAT_TYPE_P (type))
10838 /* Transform x * -1 into -x. Make sure to do the negation
10839 on the original operand with conversions not stripped
10840 because we can only strip non-sign-changing conversions. */
10841 if (integer_minus_onep (arg1))
10842 return fold_convert_loc (loc, type, negate_expr (op0));
10843 /* Transform x * -C into -x * C if x is easily negatable. */
10844 if (TREE_CODE (arg1) == INTEGER_CST
10845 && tree_int_cst_sgn (arg1) == -1
10846 && negate_expr_p (arg0)
10847 && (tem = negate_expr (arg1)) != arg1
10848 && !TREE_OVERFLOW (tem))
10849 return fold_build2_loc (loc, MULT_EXPR, type,
10850 fold_convert_loc (loc, type,
10851 negate_expr (arg0)),
10852 tem);
10854 /* (a * (1 << b)) is (a << b) */
10855 if (TREE_CODE (arg1) == LSHIFT_EXPR
10856 && integer_onep (TREE_OPERAND (arg1, 0)))
10857 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10858 TREE_OPERAND (arg1, 1));
10859 if (TREE_CODE (arg0) == LSHIFT_EXPR
10860 && integer_onep (TREE_OPERAND (arg0, 0)))
10861 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10862 TREE_OPERAND (arg0, 1));
10864 /* (A + A) * C -> A * 2 * C */
10865 if (TREE_CODE (arg0) == PLUS_EXPR
10866 && TREE_CODE (arg1) == INTEGER_CST
10867 && operand_equal_p (TREE_OPERAND (arg0, 0),
10868 TREE_OPERAND (arg0, 1), 0))
10869 return fold_build2_loc (loc, MULT_EXPR, type,
10870 omit_one_operand_loc (loc, type,
10871 TREE_OPERAND (arg0, 0),
10872 TREE_OPERAND (arg0, 1)),
10873 fold_build2_loc (loc, MULT_EXPR, type,
10874 build_int_cst (type, 2), arg1));
10876 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10877 sign-changing only. */
10878 if (TREE_CODE (arg1) == INTEGER_CST
10879 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10880 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10881 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10883 strict_overflow_p = false;
10884 if (TREE_CODE (arg1) == INTEGER_CST
10885 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10886 &strict_overflow_p)))
10888 if (strict_overflow_p)
10889 fold_overflow_warning (("assuming signed overflow does not "
10890 "occur when simplifying "
10891 "multiplication"),
10892 WARN_STRICT_OVERFLOW_MISC);
10893 return fold_convert_loc (loc, type, tem);
10896 /* Optimize z * conj(z) for integer complex numbers. */
10897 if (TREE_CODE (arg0) == CONJ_EXPR
10898 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10899 return fold_mult_zconjz (loc, type, arg1);
10900 if (TREE_CODE (arg1) == CONJ_EXPR
10901 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10902 return fold_mult_zconjz (loc, type, arg0);
10904 else
10906 /* Maybe fold x * 0 to 0. The expressions aren't the same
10907 when x is NaN, since x * 0 is also NaN. Nor are they the
10908 same in modes with signed zeros, since multiplying a
10909 negative value by 0 gives -0, not +0. */
10910 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10911 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10912 && real_zerop (arg1))
10913 return omit_one_operand_loc (loc, type, arg1, arg0);
10914 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10915 Likewise for complex arithmetic with signed zeros. */
10916 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10917 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10918 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10919 && real_onep (arg1))
10920 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10922 /* Transform x * -1.0 into -x. */
10923 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10924 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10925 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10926 && real_minus_onep (arg1))
10927 return fold_convert_loc (loc, type, negate_expr (arg0));
10929 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10930 the result for floating-point types due to rounding, so it is applied
10931 only if -fassociative-math was specified.  */
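/* For instance, (DBL_MAX / X) * 2.0 is finite for X >= 2.0, but the
   folded constant DBL_MAX * 2.0 overflows to +Inf.  */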
10932 if (flag_associative_math
10933 && TREE_CODE (arg0) == RDIV_EXPR
10934 && TREE_CODE (arg1) == REAL_CST
10935 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10937 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10938 arg1);
10939 if (tem)
10940 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10941 TREE_OPERAND (arg0, 1));
10944 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10945 if (operand_equal_p (arg0, arg1, 0))
10947 tree tem = fold_strip_sign_ops (arg0);
10948 if (tem != NULL_TREE)
10950 tem = fold_convert_loc (loc, type, tem);
10951 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10955 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10956 This is not the same for NaNs or if signed zeros are
10957 involved. */
10958 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10959 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10960 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10961 && TREE_CODE (arg1) == COMPLEX_CST
10962 && real_zerop (TREE_REALPART (arg1)))
10964 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10965 if (real_onep (TREE_IMAGPART (arg1)))
10966 return
10967 fold_build2_loc (loc, COMPLEX_EXPR, type,
10968 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10969 rtype, arg0)),
10970 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10971 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10972 return
10973 fold_build2_loc (loc, COMPLEX_EXPR, type,
10974 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10975 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10976 rtype, arg0)));
10979 /* Optimize z * conj(z) for floating point complex numbers.
10980 Guarded by flag_unsafe_math_optimizations as non-finite
10981 imaginary components don't produce scalar results. */
10982 if (flag_unsafe_math_optimizations
10983 && TREE_CODE (arg0) == CONJ_EXPR
10984 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10985 return fold_mult_zconjz (loc, type, arg1);
10986 if (flag_unsafe_math_optimizations
10987 && TREE_CODE (arg1) == CONJ_EXPR
10988 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10989 return fold_mult_zconjz (loc, type, arg0);
10991 if (flag_unsafe_math_optimizations)
10993 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10994 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10996 /* Optimizations of root(...)*root(...). */
10997 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10999 tree rootfn, arg;
11000 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11001 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11003 /* Optimize sqrt(x)*sqrt(x) as x. */
11004 if (BUILTIN_SQRT_P (fcode0)
11005 && operand_equal_p (arg00, arg10, 0)
11006 && ! HONOR_SNANS (TYPE_MODE (type)))
11007 return arg00;
11009 /* Optimize root(x)*root(y) as root(x*y). */
11010 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11011 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11012 return build_call_expr_loc (loc, rootfn, 1, arg);
11015 /* Optimize expN(x)*expN(y) as expN(x+y). */
11016 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11018 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11019 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11020 CALL_EXPR_ARG (arg0, 0),
11021 CALL_EXPR_ARG (arg1, 0));
11022 return build_call_expr_loc (loc, expfn, 1, arg);
11025 /* Optimizations of pow(...)*pow(...). */
11026 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11027 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11028 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11030 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11031 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11032 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11033 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11035 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11036 if (operand_equal_p (arg01, arg11, 0))
11038 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11039 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11040 arg00, arg10);
11041 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11044 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11045 if (operand_equal_p (arg00, arg10, 0))
11047 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11048 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11049 arg01, arg11);
11050 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11054 /* Optimize tan(x)*cos(x) as sin(x). */
11055 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11056 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11057 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11058 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11059 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11060 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11061 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11062 CALL_EXPR_ARG (arg1, 0), 0))
11064 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11066 if (sinfn != NULL_TREE)
11067 return build_call_expr_loc (loc, sinfn, 1,
11068 CALL_EXPR_ARG (arg0, 0));
11071 /* Optimize x*pow(x,c) as pow(x,c+1). */
11072 if (fcode1 == BUILT_IN_POW
11073 || fcode1 == BUILT_IN_POWF
11074 || fcode1 == BUILT_IN_POWL)
11076 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11077 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11078 if (TREE_CODE (arg11) == REAL_CST
11079 && !TREE_OVERFLOW (arg11)
11080 && operand_equal_p (arg0, arg10, 0))
11082 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11083 REAL_VALUE_TYPE c;
11084 tree arg;
11086 c = TREE_REAL_CST (arg11);
11087 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11088 arg = build_real (type, c);
11089 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11093 /* Optimize pow(x,c)*x as pow(x,c+1). */
11094 if (fcode0 == BUILT_IN_POW
11095 || fcode0 == BUILT_IN_POWF
11096 || fcode0 == BUILT_IN_POWL)
11098 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11099 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11100 if (TREE_CODE (arg01) == REAL_CST
11101 && !TREE_OVERFLOW (arg01)
11102 && operand_equal_p (arg1, arg00, 0))
11104 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11105 REAL_VALUE_TYPE c;
11106 tree arg;
11108 c = TREE_REAL_CST (arg01);
11109 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11110 arg = build_real (type, c);
11111 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11115 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11116 if (!in_gimple_form
11117 && optimize
11118 && operand_equal_p (arg0, arg1, 0))
11120 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11122 if (powfn)
11124 tree arg = build_real (type, dconst2);
11125 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11130 goto associate;
11132 case BIT_IOR_EXPR:
11133 bit_ior:
11134 /* ~X | X is -1. */
11135 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11136 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11138 t1 = build_zero_cst (type);
11139 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11140 return omit_one_operand_loc (loc, type, t1, arg1);
11143 /* X | ~X is -1. */
11144 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11145 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11147 t1 = build_zero_cst (type);
11148 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11149 return omit_one_operand_loc (loc, type, t1, arg0);
11152 /* Canonicalize (X & C1) | C2. */
11153 if (TREE_CODE (arg0) == BIT_AND_EXPR
11154 && TREE_CODE (arg1) == INTEGER_CST
11155 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11157 int width = TYPE_PRECISION (type), w;
11158 wide_int c1 = TREE_OPERAND (arg0, 1);
11159 wide_int c2 = arg1;
11161 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11162 if ((c1 & c2) == c1)
11163 return omit_one_operand_loc (loc, type, arg1,
11164 TREE_OPERAND (arg0, 0));
11166 wide_int msk = wi::mask (width, false,
11167 TYPE_PRECISION (TREE_TYPE (arg1)));
11169 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11170 if (msk.and_not (c1 | c2) == 0)
11171 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11172 TREE_OPERAND (arg0, 0), arg1);
11174 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11175 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11176 mode which allows further optimizations. */
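/* For example, (X & 0x0F0F) | 0x00FF becomes (X & 0x0F00) | 0x00FF:
   the bits of C1 that overlap C2 are forced to 1 by the IOR anyway.  */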
11177 c1 &= msk;
11178 c2 &= msk;
11179 wide_int c3 = c1.and_not (c2);
11180 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11182 wide_int mask = wi::mask (w, false,
11183 TYPE_PRECISION (type));
11184 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11186 c3 = mask;
11187 break;
11191 if (c3 != c1)
11192 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11193 fold_build2_loc (loc, BIT_AND_EXPR, type,
11194 TREE_OPERAND (arg0, 0),
11195 wide_int_to_tree (type,
11196 c3)),
11197 arg1);
11200 /* (X & Y) | Y is (X, Y). */
11201 if (TREE_CODE (arg0) == BIT_AND_EXPR
11202 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11203 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11204 /* (X & Y) | X is (Y, X). */
11205 if (TREE_CODE (arg0) == BIT_AND_EXPR
11206 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11207 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11208 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11209 /* X | (X & Y) is (Y, X). */
11210 if (TREE_CODE (arg1) == BIT_AND_EXPR
11211 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11212 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11213 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11214 /* X | (Y & X) is (Y, X). */
11215 if (TREE_CODE (arg1) == BIT_AND_EXPR
11216 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11217 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11218 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11220 /* (X & ~Y) | (~X & Y) is X ^ Y */
11221 if (TREE_CODE (arg0) == BIT_AND_EXPR
11222 && TREE_CODE (arg1) == BIT_AND_EXPR)
11224 tree a0, a1, l0, l1, n0, n1;
11226 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11227 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11229 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11230 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11232 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11233 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11235 if ((operand_equal_p (n0, a0, 0)
11236 && operand_equal_p (n1, a1, 0))
11237 || (operand_equal_p (n0, a1, 0)
11238 && operand_equal_p (n1, a0, 0)))
11239 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11242 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11243 if (t1 != NULL_TREE)
11244 return t1;
11246 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11248 This results in more efficient code for machines without a NAND
11249 instruction. Combine will canonicalize to the first form
11250 which will allow use of NAND instructions provided by the
11251 backend if they exist. */
11252 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11253 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11255 return
11256 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11257 build2 (BIT_AND_EXPR, type,
11258 fold_convert_loc (loc, type,
11259 TREE_OPERAND (arg0, 0)),
11260 fold_convert_loc (loc, type,
11261 TREE_OPERAND (arg1, 0))));
11264 /* See if this can be simplified into a rotate first. If that
11265 is unsuccessful continue in the association code. */
11266 goto bit_rotate;
11268 case BIT_XOR_EXPR:
11269 /* ~X ^ X is -1. */
11270 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11271 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11273 t1 = build_zero_cst (type);
11274 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11275 return omit_one_operand_loc (loc, type, t1, arg1);
11278 /* X ^ ~X is -1. */
11279 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11280 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11282 t1 = build_zero_cst (type);
11283 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11284 return omit_one_operand_loc (loc, type, t1, arg0);
11287 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11288 with a constant, and the two constants have no bits in common,
11289 we should treat this as a BIT_IOR_EXPR since this may produce more
11290 simplifications. */
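/* E.g. (X & 0xF0) ^ (Y & 0x0F) acts exactly like
   (X & 0xF0) | (Y & 0x0F), since the operands share no set bits.  */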
11291 if (TREE_CODE (arg0) == BIT_AND_EXPR
11292 && TREE_CODE (arg1) == BIT_AND_EXPR
11293 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11294 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11295 && wi::bit_and (TREE_OPERAND (arg0, 1),
11296 TREE_OPERAND (arg1, 1)) == 0)
11298 code = BIT_IOR_EXPR;
11299 goto bit_ior;
11302 /* (X | Y) ^ X -> Y & ~X.  */
11303 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11304 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11306 tree t2 = TREE_OPERAND (arg0, 1);
11307 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11308 arg1);
11309 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11310 fold_convert_loc (loc, type, t2),
11311 fold_convert_loc (loc, type, t1));
11312 return t1;
11315 /* (Y | X) ^ X -> Y & ~X.  */
11316 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11317 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11319 tree t2 = TREE_OPERAND (arg0, 0);
11320 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11321 arg1);
11322 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11323 fold_convert_loc (loc, type, t2),
11324 fold_convert_loc (loc, type, t1));
11325 return t1;
11328 /* X ^ (X | Y) -> Y & ~X.  */
11329 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11330 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11332 tree t2 = TREE_OPERAND (arg1, 1);
11333 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11334 arg0);
11335 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11336 fold_convert_loc (loc, type, t2),
11337 fold_convert_loc (loc, type, t1));
11338 return t1;
11341 /* X ^ (Y | X) -> Y & ~X.  */
11342 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11343 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11345 tree t2 = TREE_OPERAND (arg1, 0);
11346 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11347 arg0);
11348 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11349 fold_convert_loc (loc, type, t2),
11350 fold_convert_loc (loc, type, t1));
11351 return t1;
11354 /* Convert ~X ^ ~Y to X ^ Y. */
11355 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11356 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11357 return fold_build2_loc (loc, code, type,
11358 fold_convert_loc (loc, type,
11359 TREE_OPERAND (arg0, 0)),
11360 fold_convert_loc (loc, type,
11361 TREE_OPERAND (arg1, 0)));
11363 /* Convert ~X ^ C to X ^ ~C. */
11364 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11365 && TREE_CODE (arg1) == INTEGER_CST)
11366 return fold_build2_loc (loc, code, type,
11367 fold_convert_loc (loc, type,
11368 TREE_OPERAND (arg0, 0)),
11369 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11371 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11372 if (TREE_CODE (arg0) == BIT_AND_EXPR
11373 && INTEGRAL_TYPE_P (type)
11374 && integer_onep (TREE_OPERAND (arg0, 1))
11375 && integer_onep (arg1))
11376 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11377 build_zero_cst (TREE_TYPE (arg0)));
11379 /* Fold (X & Y) ^ Y as ~X & Y. */
11380 if (TREE_CODE (arg0) == BIT_AND_EXPR
11381 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11383 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11384 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11385 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11386 fold_convert_loc (loc, type, arg1));
11388 /* Fold (X & Y) ^ X as ~Y & X. */
11389 if (TREE_CODE (arg0) == BIT_AND_EXPR
11390 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11391 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11393 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11394 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11395 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11396 fold_convert_loc (loc, type, arg1));
11398 /* Fold X ^ (X & Y) as X & ~Y. */
11399 if (TREE_CODE (arg1) == BIT_AND_EXPR
11400 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11402 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11403 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11404 fold_convert_loc (loc, type, arg0),
11405 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11407 /* Fold X ^ (Y & X) as ~Y & X. */
11408 if (TREE_CODE (arg1) == BIT_AND_EXPR
11409 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11410 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11412 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11413 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11414 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11415 fold_convert_loc (loc, type, arg0));
11418 /* See if this can be simplified into a rotate first. If that
11419 is unsuccessful continue in the association code. */
11420 goto bit_rotate;
11422 case BIT_AND_EXPR:
11423 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11424 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11425 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11426 || (TREE_CODE (arg0) == EQ_EXPR
11427 && integer_zerop (TREE_OPERAND (arg0, 1))))
11428 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11429 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11431 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11432 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11433 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11434 || (TREE_CODE (arg1) == EQ_EXPR
11435 && integer_zerop (TREE_OPERAND (arg1, 1))))
11436 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11437 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11439 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11440 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11441 && TREE_CODE (arg1) == INTEGER_CST
11442 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11444 tree tmp1 = fold_convert_loc (loc, type, arg1);
11445 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11446 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11447 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11448 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11449 return
11450 fold_convert_loc (loc, type,
11451 fold_build2_loc (loc, BIT_IOR_EXPR,
11452 type, tmp2, tmp3));
11455 /* (X | Y) & Y is (X, Y). */
11456 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11457 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11458 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11459 /* (X | Y) & X is (Y, X). */
11460 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11461 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11462 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11463 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11464 /* X & (X | Y) is (Y, X). */
11465 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11466 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11467 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11468 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11469 /* X & (Y | X) is (Y, X). */
11470 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11471 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11472 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11473 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11475 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11476 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11477 && INTEGRAL_TYPE_P (type)
11478 && integer_onep (TREE_OPERAND (arg0, 1))
11479 && integer_onep (arg1))
11481 tree tem2;
11482 tem = TREE_OPERAND (arg0, 0);
11483 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11484 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11485 tem, tem2);
11486 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11487 build_zero_cst (TREE_TYPE (tem)));
11489 /* Fold ~X & 1 as (X & 1) == 0. */
11490 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11491 && INTEGRAL_TYPE_P (type)
11492 && integer_onep (arg1))
11494 tree tem2;
11495 tem = TREE_OPERAND (arg0, 0);
11496 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11497 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11498 tem, tem2);
11499 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11500 build_zero_cst (TREE_TYPE (tem)));
11502 /* Fold !X & 1 as X == 0. */
11503 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11504 && integer_onep (arg1))
11506 tem = TREE_OPERAND (arg0, 0);
11507 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11508 build_zero_cst (TREE_TYPE (tem)));
11511 /* Fold (X ^ Y) & Y as ~X & Y. */
11512 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11513 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11515 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11516 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11517 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11518 fold_convert_loc (loc, type, arg1));
11520 /* Fold (X ^ Y) & X as ~Y & X. */
11521 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11522 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11523 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11525 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11526 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11527 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11528 fold_convert_loc (loc, type, arg1));
11530 /* Fold X & (X ^ Y) as X & ~Y. */
11531 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11532 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11534 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11535 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11536 fold_convert_loc (loc, type, arg0),
11537 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11539 /* Fold X & (Y ^ X) as ~Y & X. */
11540 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11541 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11542 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11544 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11545 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11546 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11547 fold_convert_loc (loc, type, arg0));
11550 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11551 multiple of 1 << CST. */
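/* E.g. (X * 8) & -8 folds to X * 8, since the low three bits of a
   multiple of 8 are already zero.  */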
11552 if (TREE_CODE (arg1) == INTEGER_CST)
11554 wide_int cst1 = arg1;
11555 wide_int ncst1 = -cst1;
11556 if ((cst1 & ncst1) == ncst1
11557 && multiple_of_p (type, arg0,
11558 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11559 return fold_convert_loc (loc, type, arg0);
11562 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11563 bits from CST2. */
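/* E.g. (X * 4) & 3 is 0, and (X * 4) & 0xFF can drop the two
   always-zero low bits to become (X * 4) & 0xFC.  */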
11564 if (TREE_CODE (arg1) == INTEGER_CST
11565 && TREE_CODE (arg0) == MULT_EXPR
11566 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11568 wide_int warg1 = arg1;
11569 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11571 if (masked == 0)
11572 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11573 arg0, arg1);
11574 else if (masked != warg1)
11576 /* Avoid the transform if arg1 is a mask of some
11577 mode which allows further optimizations. */
11578 int pop = wi::popcount (warg1);
11579 if (!(pop >= BITS_PER_UNIT
11580 && exact_log2 (pop) != -1
11581 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11582 return fold_build2_loc (loc, code, type, op0,
11583 wide_int_to_tree (type, masked));
11587 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11588 ((A & N) + B) & M -> (A + B) & M
11589 Similarly if (N & M) == 0,
11590 ((A | N) + B) & M -> (A + B) & M
11591 and for - instead of + (or unary - instead of +)
11592 and/or ^ instead of |.
11593 If B is constant and (B & M) == 0, fold into A & M. */
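/* E.g. with M == 0xFF: ((A & 0x1FF) + B) & 0xFF -> (A + B) & 0xFF
   because N & M == M, and ((A | 0x300) + B) & 0xFF -> (A + B) & 0xFF
   because N & M == 0.  */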
11594 if (TREE_CODE (arg1) == INTEGER_CST)
11596 wide_int cst1 = arg1;
11597 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11598 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11599 && (TREE_CODE (arg0) == PLUS_EXPR
11600 || TREE_CODE (arg0) == MINUS_EXPR
11601 || TREE_CODE (arg0) == NEGATE_EXPR)
11602 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11603 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11605 tree pmop[2];
11606 int which = 0;
11607 wide_int cst0;
11609 /* Now we know that arg0 is (C + D) or (C - D) or
11610 -C and arg1 (M) is == (1LL << cst) - 1.
11611 Store C into PMOP[0] and D into PMOP[1]. */
11612 pmop[0] = TREE_OPERAND (arg0, 0);
11613 pmop[1] = NULL;
11614 if (TREE_CODE (arg0) != NEGATE_EXPR)
11616 pmop[1] = TREE_OPERAND (arg0, 1);
11617 which = 1;
11620 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11621 which = -1;
11623 for (; which >= 0; which--)
11624 switch (TREE_CODE (pmop[which]))
11626 case BIT_AND_EXPR:
11627 case BIT_IOR_EXPR:
11628 case BIT_XOR_EXPR:
11629 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11630 != INTEGER_CST)
11631 break;
11632 cst0 = TREE_OPERAND (pmop[which], 1);
11633 cst0 &= cst1;
11634 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11636 if (cst0 != cst1)
11637 break;
11639 else if (cst0 != 0)
11640 break;
11641 /* If C or D is of the form (A & N) where
11642 (N & M) == M, or of the form (A | N) or
11643 (A ^ N) where (N & M) == 0, replace it with A. */
11644 pmop[which] = TREE_OPERAND (pmop[which], 0);
11645 break;
11646 case INTEGER_CST:
11647 /* If C or D is a N where (N & M) == 0, it can be
11648 omitted (assumed 0). */
11649 if ((TREE_CODE (arg0) == PLUS_EXPR
11650 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11651 && (cst1 & pmop[which]) == 0)
11652 pmop[which] = NULL;
11653 break;
11654 default:
11655 break;
11658 /* Only build anything new if we optimized one or both arguments
11659 above. */
11660 if (pmop[0] != TREE_OPERAND (arg0, 0)
11661 || (TREE_CODE (arg0) != NEGATE_EXPR
11662 && pmop[1] != TREE_OPERAND (arg0, 1)))
11664 tree utype = TREE_TYPE (arg0);
11665 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11667 /* Perform the operations in a type that has defined
11668 overflow behavior. */
11669 utype = unsigned_type_for (TREE_TYPE (arg0));
11670 if (pmop[0] != NULL)
11671 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11672 if (pmop[1] != NULL)
11673 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11676 if (TREE_CODE (arg0) == NEGATE_EXPR)
11677 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11678 else if (TREE_CODE (arg0) == PLUS_EXPR)
11680 if (pmop[0] != NULL && pmop[1] != NULL)
11681 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11682 pmop[0], pmop[1]);
11683 else if (pmop[0] != NULL)
11684 tem = pmop[0];
11685 else if (pmop[1] != NULL)
11686 tem = pmop[1];
11687 else
11688 return build_int_cst (type, 0);
11690 else if (pmop[0] == NULL)
11691 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11692 else
11693 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11694 pmop[0], pmop[1]);
11695 /* TEM is now the new binary +, - or unary - replacement. */
11696 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11697 fold_convert_loc (loc, utype, arg1));
11698 return fold_convert_loc (loc, type, tem);
11703 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11704 if (t1 != NULL_TREE)
11705 return t1;
11706 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11707 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11708 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11710 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11712 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11713 if (mask == -1)
11714 return
11715 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11718 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11720 This results in more efficient code for machines without a NOR
11721 instruction. Combine will canonicalize to the first form
11722 which will allow use of NOR instructions provided by the
11723 backend if they exist. */
11724 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11725 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11727 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11728 build2 (BIT_IOR_EXPR, type,
11729 fold_convert_loc (loc, type,
11730 TREE_OPERAND (arg0, 0)),
11731 fold_convert_loc (loc, type,
11732 TREE_OPERAND (arg1, 0))));
11735 /* If arg0 is derived from the address of an object or function, we may
11736 be able to fold this expression using the object or function's
11737 alignment. */
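/* E.g. if ARG0 is the address of an object with 8-byte alignment,
   ARG0 & 7 folds to the constant 0.  */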
11738 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11740 unsigned HOST_WIDE_INT modulus, residue;
11741 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11743 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11744 integer_onep (arg1));
11746 /* This works because modulus is a power of 2. If this weren't the
11747 case, we'd have to replace it by its greatest power-of-2
11748 divisor: modulus & -modulus. */
11749 if (low < modulus)
11750 return build_int_cst (type, residue & low);
11753 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11754 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11755 if the new mask might be further optimized. */
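/* E.g. for 32-bit X, (X << 8) & 0xFFFFFF00 widens the mask to
   0xFFFFFFFF, an all-ones mask that later folding can drop.  */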
11756 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11757 || TREE_CODE (arg0) == RSHIFT_EXPR)
11758 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11759 && TREE_CODE (arg1) == INTEGER_CST
11760 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11761 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11762 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11763 < TYPE_PRECISION (TREE_TYPE (arg0))))
11765 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11766 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11767 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11768 tree shift_type = TREE_TYPE (arg0);
11770 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11771 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11772 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11773 && TYPE_PRECISION (TREE_TYPE (arg0))
11774 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11776 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11777 tree arg00 = TREE_OPERAND (arg0, 0);
11778 /* See if more bits can be proven as zero because of
11779 zero extension. */
11780 if (TREE_CODE (arg00) == NOP_EXPR
11781 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11783 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11784 if (TYPE_PRECISION (inner_type)
11785 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11786 && TYPE_PRECISION (inner_type) < prec)
11788 prec = TYPE_PRECISION (inner_type);
11789 /* See if we can shorten the right shift. */
11790 if (shiftc < prec)
11791 shift_type = inner_type;
11792 /* Otherwise X >> C1 is all zeros, so we'll optimize
11793 it into (X, 0) later on by making sure zerobits
11794 is all ones. */
11797 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11798 if (shiftc < prec)
11800 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11801 zerobits <<= prec - shiftc;
11803 /* For arithmetic shift if sign bit could be set, zerobits
11804 can contain actually sign bits, so no transformation is
11805 possible, unless MASK masks them all away. In that
11806 case the shift needs to be converted into logical shift. */
11807 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11808 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11810 if ((mask & zerobits) == 0)
11811 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11812 else
11813 zerobits = 0;
11817 /* ((X << 16) & 0xff00) is (X, 0). */
11818 if ((mask & zerobits) == mask)
11819 return omit_one_operand_loc (loc, type,
11820 build_int_cst (type, 0), arg0);
11822 newmask = mask | zerobits;
11823 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11825 /* Only do the transformation if NEWMASK is some integer
11826 mode's mask. */
11827 for (prec = BITS_PER_UNIT;
11828 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11829 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11830 break;
11831 if (prec < HOST_BITS_PER_WIDE_INT
11832 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11834 tree newmaskt;
11836 if (shift_type != TREE_TYPE (arg0))
11838 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11839 fold_convert_loc (loc, shift_type,
11840 TREE_OPERAND (arg0, 0)),
11841 TREE_OPERAND (arg0, 1));
11842 tem = fold_convert_loc (loc, type, tem);
11844 else
11845 tem = op0;
11846 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11847 if (!tree_int_cst_equal (newmaskt, arg1))
11848 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11853 goto associate;
11855 case RDIV_EXPR:
11856 /* Don't touch a floating-point divide by zero unless the mode
11857 of the constant can represent infinity. */
11858 if (TREE_CODE (arg1) == REAL_CST
11859 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11860 && real_zerop (arg1))
11861 return NULL_TREE;
11863 /* Optimize A / A to 1.0 if we don't care about
11864 NaNs or Infinities. Skip the transformation
11865 for non-real operands. */
11866 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11867 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11868 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11869 && operand_equal_p (arg0, arg1, 0))
11871 tree r = build_real (TREE_TYPE (arg0), dconst1);
11873 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11876 /* The complex version of the above A / A optimization. */
11877 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11878 && operand_equal_p (arg0, arg1, 0))
11880 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11881 if (! HONOR_NANS (TYPE_MODE (elem_type))
11882 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11884 tree r = build_real (elem_type, dconst1);
11885 /* omit_two_operands will call fold_convert for us. */
11886 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11890 /* (-A) / (-B) -> A / B */
11891 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11892 return fold_build2_loc (loc, RDIV_EXPR, type,
11893 TREE_OPERAND (arg0, 0),
11894 negate_expr (arg1));
11895 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11896 return fold_build2_loc (loc, RDIV_EXPR, type,
11897 negate_expr (arg0),
11898 TREE_OPERAND (arg1, 0));
11900 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11901 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11902 && real_onep (arg1))
11903 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11905 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11906 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11907 && real_minus_onep (arg1))
11908 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11909 negate_expr (arg0)));
11911 /* If ARG1 is a constant, we can convert this to a multiply by the
11912 reciprocal. This does not have the same rounding properties,
11913 so only do this if -freciprocal-math. We can actually
11914 always safely do it if ARG1 is a power of two, but it's hard to
11915 tell if it is or not in a portable manner. */
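/* E.g. x / 4.0 can always become x * 0.25 (exact, a power of two),
   while x / 3.0 -> x * (1.0 / 3.0) rounds 1/3 and therefore needs
   -freciprocal-math.  */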
11916 if (optimize
11917 && (TREE_CODE (arg1) == REAL_CST
11918 || (TREE_CODE (arg1) == COMPLEX_CST
11919 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11920 || (TREE_CODE (arg1) == VECTOR_CST
11921 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11923 if (flag_reciprocal_math
11924 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11925 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11926 /* Find the reciprocal if optimizing and the result is exact.
11927 TODO: Complex reciprocal not implemented. */
11928 if (TREE_CODE (arg1) != COMPLEX_CST)
11930 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11932 if (inverse)
11933 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11936 /* Convert A/B/C to A/(B*C). */
11937 if (flag_reciprocal_math
11938 && TREE_CODE (arg0) == RDIV_EXPR)
11939 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11940 fold_build2_loc (loc, MULT_EXPR, type,
11941 TREE_OPERAND (arg0, 1), arg1));
11943 /* Convert A/(B/C) to (A/B)*C. */
11944 if (flag_reciprocal_math
11945 && TREE_CODE (arg1) == RDIV_EXPR)
11946 return fold_build2_loc (loc, MULT_EXPR, type,
11947 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11948 TREE_OPERAND (arg1, 0)),
11949 TREE_OPERAND (arg1, 1));
11951 /* Convert C1/(X*C2) into (C1/C2)/X. */
11952 if (flag_reciprocal_math
11953 && TREE_CODE (arg1) == MULT_EXPR
11954 && TREE_CODE (arg0) == REAL_CST
11955 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11957 tree tem = const_binop (RDIV_EXPR, arg0,
11958 TREE_OPERAND (arg1, 1));
11959 if (tem)
11960 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11961 TREE_OPERAND (arg1, 0));
11964 if (flag_unsafe_math_optimizations)
11966 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11967 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11969 /* Optimize sin(x)/cos(x) as tan(x). */
11970 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11971 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11972 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11973 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11974 CALL_EXPR_ARG (arg1, 0), 0))
11976 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11978 if (tanfn != NULL_TREE)
11979 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11982 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11983 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11984 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11985 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11986 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11987 CALL_EXPR_ARG (arg1, 0), 0))
11989 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11991 if (tanfn != NULL_TREE)
11993 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11994 CALL_EXPR_ARG (arg0, 0));
11995 return fold_build2_loc (loc, RDIV_EXPR, type,
11996 build_real (type, dconst1), tmp);
12000 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12001 NaNs or Infinities. */
12002 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12003 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12004 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12006 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12007 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12009 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12010 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12011 && operand_equal_p (arg00, arg01, 0))
12013 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12015 if (cosfn != NULL_TREE)
12016 return build_call_expr_loc (loc, cosfn, 1, arg00);
12020 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12021 NaNs or Infinities. */
12022 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12023 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12024 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12026 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12027 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12029 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12030 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12031 && operand_equal_p (arg00, arg01, 0))
12033 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12035 if (cosfn != NULL_TREE)
12037 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12038 return fold_build2_loc (loc, RDIV_EXPR, type,
12039 build_real (type, dconst1),
12040 tmp);
12045 /* Optimize pow(x,c)/x as pow(x,c-1). */
12046 if (fcode0 == BUILT_IN_POW
12047 || fcode0 == BUILT_IN_POWF
12048 || fcode0 == BUILT_IN_POWL)
12050 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12051 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12052 if (TREE_CODE (arg01) == REAL_CST
12053 && !TREE_OVERFLOW (arg01)
12054 && operand_equal_p (arg1, arg00, 0))
12056 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12057 REAL_VALUE_TYPE c;
12058 tree arg;
12060 c = TREE_REAL_CST (arg01);
12061 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12062 arg = build_real (type, c);
12063 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12067 /* Optimize a/root(b/c) into a*root(c/b). */
12068 if (BUILTIN_ROOT_P (fcode1))
12070 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12072 if (TREE_CODE (rootarg) == RDIV_EXPR)
12074 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12075 tree b = TREE_OPERAND (rootarg, 0);
12076 tree c = TREE_OPERAND (rootarg, 1);
12078 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12080 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12081 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12085 /* Optimize x/expN(y) into x*expN(-y). */
12086 if (BUILTIN_EXPONENT_P (fcode1))
12088 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12089 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12090 arg1 = build_call_expr_loc (loc,
12091 expfn, 1,
12092 fold_convert_loc (loc, type, arg));
12093 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12096 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12097 if (fcode1 == BUILT_IN_POW
12098 || fcode1 == BUILT_IN_POWF
12099 || fcode1 == BUILT_IN_POWL)
12101 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12102 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12103 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12104 tree neg11 = fold_convert_loc (loc, type,
12105 negate_expr (arg11));
12106 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12107 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12110 return NULL_TREE;
12112 case TRUNC_DIV_EXPR:
12113 /* Optimize (X & (-A)) / A where A is a power of 2,
12114 to X >> log2(A).  */
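/* E.g. (X & -16) / 16 becomes X >> 4: the mask makes the dividend an
   exact multiple of 16, so the truncating division is just a shift.  */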
12115 if (TREE_CODE (arg0) == BIT_AND_EXPR
12116 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12117 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12119 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12120 arg1, TREE_OPERAND (arg0, 1));
12121 if (sum && integer_zerop (sum)) {
12122 tree pow2 = build_int_cst (integer_type_node,
12123 wi::exact_log2 (arg1));
12124 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12125 TREE_OPERAND (arg0, 0), pow2);
12129 /* Fall through */
12131 case FLOOR_DIV_EXPR:
12132 /* Simplify A / (B << N) where A and B are positive and B is
12133 a power of 2, to A >> (N + log2(B)). */
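/* E.g. A / (2 << N) becomes A >> (N + 1).  */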
12134 strict_overflow_p = false;
12135 if (TREE_CODE (arg1) == LSHIFT_EXPR
12136 && (TYPE_UNSIGNED (type)
12137 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12139 tree sval = TREE_OPERAND (arg1, 0);
12140 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12142 tree sh_cnt = TREE_OPERAND (arg1, 1);
12143 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12144 wi::exact_log2 (sval));
12146 if (strict_overflow_p)
12147 fold_overflow_warning (("assuming signed overflow does not "
12148 "occur when simplifying A / (B << N)"),
12149 WARN_STRICT_OVERFLOW_MISC);
12151 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12152 sh_cnt, pow2);
12153 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12154 fold_convert_loc (loc, type, arg0), sh_cnt);
12158 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12159 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12160 if (INTEGRAL_TYPE_P (type)
12161 && TYPE_UNSIGNED (type)
12162 && code == FLOOR_DIV_EXPR)
12163 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12165 /* Fall through */
12167 case ROUND_DIV_EXPR:
12168 case CEIL_DIV_EXPR:
12169 case EXACT_DIV_EXPR:
12170 if (integer_zerop (arg1))
12171 return NULL_TREE;
12172 /* X / -1 is -X. */
12173 if (!TYPE_UNSIGNED (type)
12174 && TREE_CODE (arg1) == INTEGER_CST
12175 && wi::eq_p (arg1, -1))
12176 return fold_convert_loc (loc, type, negate_expr (arg0));
12178 /* Convert -A / -B to A / B when the type is signed and overflow is
12179 undefined. */
12180 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12181 && TREE_CODE (arg0) == NEGATE_EXPR
12182 && negate_expr_p (arg1))
12184 if (INTEGRAL_TYPE_P (type))
12185 fold_overflow_warning (("assuming signed overflow does not occur "
12186 "when distributing negation across "
12187 "division"),
12188 WARN_STRICT_OVERFLOW_MISC);
12189 return fold_build2_loc (loc, code, type,
12190 fold_convert_loc (loc, type,
12191 TREE_OPERAND (arg0, 0)),
12192 fold_convert_loc (loc, type,
12193 negate_expr (arg1)));
12195 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12196 && TREE_CODE (arg1) == NEGATE_EXPR
12197 && negate_expr_p (arg0))
12199 if (INTEGRAL_TYPE_P (type))
12200 fold_overflow_warning (("assuming signed overflow does not occur "
12201 "when distributing negation across "
12202 "division"),
12203 WARN_STRICT_OVERFLOW_MISC);
12204 return fold_build2_loc (loc, code, type,
12205 fold_convert_loc (loc, type,
12206 negate_expr (arg0)),
12207 fold_convert_loc (loc, type,
12208 TREE_OPERAND (arg1, 0)));
12211 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12212 operation, EXACT_DIV_EXPR.
12214 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12215 At one time others generated faster code, it's not clear if they do
12216 after the last round of changes to the DIV code in expmed.c. */
12217 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12218 && multiple_of_p (type, arg0, arg1))
12219 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12221 strict_overflow_p = false;
12222 if (TREE_CODE (arg1) == INTEGER_CST
12223 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12224 &strict_overflow_p)))
12226 if (strict_overflow_p)
12227 fold_overflow_warning (("assuming signed overflow does not occur "
12228 "when simplifying division"),
12229 WARN_STRICT_OVERFLOW_MISC);
12230 return fold_convert_loc (loc, type, tem);
12233 return NULL_TREE;
12235 case CEIL_MOD_EXPR:
12236 case FLOOR_MOD_EXPR:
12237 case ROUND_MOD_EXPR:
12238 case TRUNC_MOD_EXPR:
12239 /* X % -1 is zero. */
12240 if (!TYPE_UNSIGNED (type)
12241 && TREE_CODE (arg1) == INTEGER_CST
12242 && wi::eq_p (arg1, -1))
12243 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12245 /* X % -C is the same as X % C. */
12246 if (code == TRUNC_MOD_EXPR
12247 && TYPE_SIGN (type) == SIGNED
12248 && TREE_CODE (arg1) == INTEGER_CST
12249 && !TREE_OVERFLOW (arg1)
12250 && wi::neg_p (arg1)
12251 && !TYPE_OVERFLOW_TRAPS (type)
12252 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12253 && !sign_bit_p (arg1, arg1))
12254 return fold_build2_loc (loc, code, type,
12255 fold_convert_loc (loc, type, arg0),
12256 fold_convert_loc (loc, type,
12257 negate_expr (arg1)));
12259 /* X % -Y is the same as X % Y. */
12260 if (code == TRUNC_MOD_EXPR
12261 && !TYPE_UNSIGNED (type)
12262 && TREE_CODE (arg1) == NEGATE_EXPR
12263 && !TYPE_OVERFLOW_TRAPS (type))
12264 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12265 fold_convert_loc (loc, type,
12266 TREE_OPERAND (arg1, 0)));
12268 strict_overflow_p = false;
12269 if (TREE_CODE (arg1) == INTEGER_CST
12270 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12271 &strict_overflow_p)))
12273 if (strict_overflow_p)
12274 fold_overflow_warning (("assuming signed overflow does not occur "
12275 "when simplifying modulus"),
12276 WARN_STRICT_OVERFLOW_MISC);
12277 return fold_convert_loc (loc, type, tem);
12280 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12281 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
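/* For instance, with unsigned X, X % 8 becomes X & 7. */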
12282 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12283 && (TYPE_UNSIGNED (type)
12284 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12286 tree c = arg1;
12287 /* Also optimize A % (C << N) where C is a power of 2,
12288 to A & ((C << N) - 1). */
12289 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12290 c = TREE_OPERAND (arg1, 0);
12292 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12294 tree mask
12295 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12296 build_int_cst (TREE_TYPE (arg1), 1));
12297 if (strict_overflow_p)
12298 fold_overflow_warning (("assuming signed overflow does not "
12299 "occur when simplifying "
12300 "X % (power of two)"),
12301 WARN_STRICT_OVERFLOW_MISC);
12302 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12303 fold_convert_loc (loc, type, arg0),
12304 fold_convert_loc (loc, type, mask));
12308 return NULL_TREE;
12310 case LROTATE_EXPR:
12311 case RROTATE_EXPR:
12312 if (integer_all_onesp (arg0))
12313 return omit_one_operand_loc (loc, type, arg0, arg1);
12314 goto shift;
12316 case RSHIFT_EXPR:
12317 /* Optimize -1 >> x for arithmetic right shifts. */
12318 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12319 && tree_expr_nonnegative_p (arg1))
12320 return omit_one_operand_loc (loc, type, arg0, arg1);
12321 /* ... fall through ... */
12323 case LSHIFT_EXPR:
12324 shift:
12325 if (integer_zerop (arg1))
12326 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12327 if (integer_zerop (arg0))
12328 return omit_one_operand_loc (loc, type, arg0, arg1);
12330 /* Prefer vector1 << scalar to vector1 << vector2
12331 if vector2 is uniform. */
12332 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12333 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12334 return fold_build2_loc (loc, code, type, op0, tem);
12336 /* Since negative shift count is not well-defined,
12337 don't try to compute it in the compiler. */
12338 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12339 return NULL_TREE;
12341 prec = element_precision (type);
12343 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
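/* For instance, (X << 3) << 4 becomes X << 7, provided the combined
   count stays below the precision of the type. */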
12344 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12345 && tree_to_uhwi (arg1) < prec
12346 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12347 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12349 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12350 + tree_to_uhwi (arg1));
12352 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12353 being well defined. */
12354 if (low >= prec)
12356 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12357 low = low % prec;
12358 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12359 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12360 TREE_OPERAND (arg0, 0));
12361 else
12362 low = prec - 1;
12365 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12366 build_int_cst (TREE_TYPE (arg1), low));
12369 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12370 into x & ((unsigned)-1 >> c) for unsigned types. */
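/* For instance, for a 32-bit unsigned X, (X >> 4) << 4 becomes
   X & 0xfffffff0, and (X << 4) >> 4 becomes X & 0x0fffffff. */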
12371 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12372 || (TYPE_UNSIGNED (type)
12373 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12374 && tree_fits_uhwi_p (arg1)
12375 && tree_to_uhwi (arg1) < prec
12376 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12377 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12379 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12380 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12381 tree lshift;
12382 tree arg00;
12384 if (low0 == low1)
12386 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12388 lshift = build_minus_one_cst (type);
12389 lshift = const_binop (code, lshift, arg1);
12391 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12395 /* Rewrite an LROTATE_EXPR by a constant into an
12396 RROTATE_EXPR by a new constant. */
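/* For instance, rotating a 32-bit value left by 3 is the same as
   rotating it right by 29. */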
12397 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12399 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12400 tem = const_binop (MINUS_EXPR, tem, arg1);
12401 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12404 /* If we have a rotate of a bit operation with the rotate count and
12405 the second operand of the bit operation both constant,
12406 permute the two operations. */
12407 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12408 && (TREE_CODE (arg0) == BIT_AND_EXPR
12409 || TREE_CODE (arg0) == BIT_IOR_EXPR
12410 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12411 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12412 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12413 fold_build2_loc (loc, code, type,
12414 TREE_OPERAND (arg0, 0), arg1),
12415 fold_build2_loc (loc, code, type,
12416 TREE_OPERAND (arg0, 1), arg1));
12418 /* Two consecutive rotates adding up to some integer
12419 multiple of the precision of the type can be ignored. */
12420 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12421 && TREE_CODE (arg0) == RROTATE_EXPR
12422 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12423 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12424 prec) == 0)
12425 return TREE_OPERAND (arg0, 0);
12427 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12428 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12429 if the latter can be further optimized. */
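/* For instance, (X & 0xf0) << 2 becomes (X << 2) & 0x3c0. */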
12430 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12431 && TREE_CODE (arg0) == BIT_AND_EXPR
12432 && TREE_CODE (arg1) == INTEGER_CST
12433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12435 tree mask = fold_build2_loc (loc, code, type,
12436 fold_convert_loc (loc, type,
12437 TREE_OPERAND (arg0, 1)),
12438 arg1);
12439 tree shift = fold_build2_loc (loc, code, type,
12440 fold_convert_loc (loc, type,
12441 TREE_OPERAND (arg0, 0)),
12442 arg1);
12443 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12444 if (tem)
12445 return tem;
12448 return NULL_TREE;
12450 case MIN_EXPR:
12451 if (operand_equal_p (arg0, arg1, 0))
12452 return omit_one_operand_loc (loc, type, arg0, arg1);
12453 if (INTEGRAL_TYPE_P (type)
12454 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12455 return omit_one_operand_loc (loc, type, arg1, arg0);
12456 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12457 if (tem)
12458 return tem;
12459 goto associate;
12461 case MAX_EXPR:
12462 if (operand_equal_p (arg0, arg1, 0))
12463 return omit_one_operand_loc (loc, type, arg0, arg1);
12464 if (INTEGRAL_TYPE_P (type)
12465 && TYPE_MAX_VALUE (type)
12466 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12467 return omit_one_operand_loc (loc, type, arg1, arg0);
12468 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12469 if (tem)
12470 return tem;
12471 goto associate;
12473 case TRUTH_ANDIF_EXPR:
12474 /* Note that the operands of this must be ints
12475 and their values must be 0 or 1.
12476 ("true" is a fixed value perhaps depending on the language.) */
12477 /* If first arg is constant zero, return it. */
12478 if (integer_zerop (arg0))
12479 return fold_convert_loc (loc, type, arg0);
12480 case TRUTH_AND_EXPR:
12481 /* If either arg is constant true, drop it. */
12482 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12483 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12484 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12485 /* Preserve sequence points. */
12486 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12487 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12488 /* If second arg is constant zero, result is zero, but first arg
12489 must be evaluated. */
12490 if (integer_zerop (arg1))
12491 return omit_one_operand_loc (loc, type, arg1, arg0);
12492 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12493 case will be handled here. */
12494 if (integer_zerop (arg0))
12495 return omit_one_operand_loc (loc, type, arg0, arg1);
12497 /* !X && X is always false. */
12498 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12499 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12500 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12501 /* X && !X is always false. */
12502 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12503 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12504 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12506 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12507 means A >= Y && A != MAX, but in this case we know that
12508 A < X <= MAX. */
12510 if (!TREE_SIDE_EFFECTS (arg0)
12511 && !TREE_SIDE_EFFECTS (arg1))
12513 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12514 if (tem && !operand_equal_p (tem, arg0, 0))
12515 return fold_build2_loc (loc, code, type, tem, arg1);
12517 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12518 if (tem && !operand_equal_p (tem, arg1, 0))
12519 return fold_build2_loc (loc, code, type, arg0, tem);
12522 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12523 != NULL_TREE)
12524 return tem;
12526 return NULL_TREE;
12528 case TRUTH_ORIF_EXPR:
12529 /* Note that the operands of this must be ints
12530 and their values must be 0 or true.
12531 ("true" is a fixed value perhaps depending on the language.) */
12532 /* If first arg is constant true, return it. */
12533 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12534 return fold_convert_loc (loc, type, arg0);
12535 case TRUTH_OR_EXPR:
12536 /* If either arg is constant zero, drop it. */
12537 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12538 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12539 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12540 /* Preserve sequence points. */
12541 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12542 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12543 /* If second arg is constant true, result is true, but we must
12544 evaluate first arg. */
12545 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12546 return omit_one_operand_loc (loc, type, arg1, arg0);
12547 /* Likewise for first arg, but note this only occurs here for
12548 TRUTH_OR_EXPR. */
12549 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12550 return omit_one_operand_loc (loc, type, arg0, arg1);
12552 /* !X || X is always true. */
12553 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12554 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12555 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12556 /* X || !X is always true. */
12557 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12558 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12559 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12561 /* (X && !Y) || (!X && Y) is X ^ Y */
12562 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12563 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12565 tree a0, a1, l0, l1, n0, n1;
12567 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12568 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12570 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12571 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12573 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12574 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12576 if ((operand_equal_p (n0, a0, 0)
12577 && operand_equal_p (n1, a1, 0))
12578 || (operand_equal_p (n0, a1, 0)
12579 && operand_equal_p (n1, a0, 0)))
12580 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12583 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12584 != NULL_TREE)
12585 return tem;
12587 return NULL_TREE;
12589 case TRUTH_XOR_EXPR:
12590 /* If the second arg is constant zero, drop it. */
12591 if (integer_zerop (arg1))
12592 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12593 /* If the second arg is constant true, this is a logical inversion. */
12594 if (integer_onep (arg1))
12596 tem = invert_truthvalue_loc (loc, arg0);
12597 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12599 /* Identical arguments cancel to zero. */
12600 if (operand_equal_p (arg0, arg1, 0))
12601 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12603 /* !X ^ X is always true. */
12604 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12605 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12606 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12608 /* X ^ !X is always true. */
12609 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12610 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12611 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12613 return NULL_TREE;
12615 case EQ_EXPR:
12616 case NE_EXPR:
12617 STRIP_NOPS (arg0);
12618 STRIP_NOPS (arg1);
12620 tem = fold_comparison (loc, code, type, op0, op1);
12621 if (tem != NULL_TREE)
12622 return tem;
12624 /* bool_var != 0 becomes bool_var. */
12625 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12626 && code == NE_EXPR)
12627 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12629 /* bool_var == 1 becomes bool_var. */
12630 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12631 && code == EQ_EXPR)
12632 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12634 /* bool_var != 1 becomes !bool_var. */
12635 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12636 && code == NE_EXPR)
12637 return fold_convert_loc (loc, type,
12638 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12639 TREE_TYPE (arg0), arg0));
12641 /* bool_var == 0 becomes !bool_var. */
12642 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12643 && code == EQ_EXPR)
12644 return fold_convert_loc (loc, type,
12645 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12646 TREE_TYPE (arg0), arg0));
12648 /* !exp != 0 becomes !exp */
12649 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12650 && code == NE_EXPR)
12651 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12653 /* If this is an equality comparison of the address of two non-weak,
12654 unaliased symbols neither of which is extern (since we do not
12655 have access to attributes for externs), then we know the result. */
12656 if (TREE_CODE (arg0) == ADDR_EXPR
12657 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12658 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12659 && ! lookup_attribute ("alias",
12660 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12661 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12662 && TREE_CODE (arg1) == ADDR_EXPR
12663 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12664 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12665 && ! lookup_attribute ("alias",
12666 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12667 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12669 /* We know that we're looking at the address of two
12670 non-weak, unaliased, static _DECL nodes.
12672 It is both wasteful and incorrect to call operand_equal_p
12673 to compare the two ADDR_EXPR nodes. It is wasteful in that
12674 all we need to do is test pointer equality for the arguments
12675 to the two ADDR_EXPR nodes. It is incorrect to use
12676 operand_equal_p as that function is NOT equivalent to a
12677 C equality test. It can in fact return false for two
12678 objects which would test as equal using the C equality
12679 operator. */
12680 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12681 return constant_boolean_node (equal
12682 ? code == EQ_EXPR : code != EQ_EXPR,
12683 type);
12686 /* Similarly for a NEGATE_EXPR. */
12687 if (TREE_CODE (arg0) == NEGATE_EXPR
12688 && TREE_CODE (arg1) == INTEGER_CST
12689 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12690 arg1)))
12691 && TREE_CODE (tem) == INTEGER_CST
12692 && !TREE_OVERFLOW (tem))
12693 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12695 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
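/* For instance, X ^ 5 == 3 becomes X == 6, since 5 ^ 3 == 6. */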
12696 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12697 && TREE_CODE (arg1) == INTEGER_CST
12698 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12699 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12700 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12701 fold_convert_loc (loc,
12702 TREE_TYPE (arg0),
12703 arg1),
12704 TREE_OPERAND (arg0, 1)));
12706 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12707 if ((TREE_CODE (arg0) == PLUS_EXPR
12708 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12709 || TREE_CODE (arg0) == MINUS_EXPR)
12710 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12711 0)),
12712 arg1, 0)
12713 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12714 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12716 tree val = TREE_OPERAND (arg0, 1);
12717 return omit_two_operands_loc (loc, type,
12718 fold_build2_loc (loc, code, type,
12719 val,
12720 build_int_cst (TREE_TYPE (val),
12721 0)),
12722 TREE_OPERAND (arg0, 0), arg1);
12725 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
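/* C - X == X would require C == 2*X, which is even even under modular
   arithmetic, so for odd C the equality is always false and the
   inequality always true. */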
12726 if (TREE_CODE (arg0) == MINUS_EXPR
12727 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12728 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12729 1)),
12730 arg1, 0)
12731 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12733 return omit_two_operands_loc (loc, type,
12734 code == NE_EXPR
12735 ? boolean_true_node : boolean_false_node,
12736 TREE_OPERAND (arg0, 1), arg1);
12739 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12740 if (TREE_CODE (arg0) == ABS_EXPR
12741 && (integer_zerop (arg1) || real_zerop (arg1)))
12742 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12744 /* If this is an EQ or NE comparison with zero and ARG0 is
12745 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12746 two operations, but the latter can be done in one less insn
12747 on machines that have only two-operand insns or on which a
12748 constant cannot be the first operand. */
12749 if (TREE_CODE (arg0) == BIT_AND_EXPR
12750 && integer_zerop (arg1))
12752 tree arg00 = TREE_OPERAND (arg0, 0);
12753 tree arg01 = TREE_OPERAND (arg0, 1);
12754 if (TREE_CODE (arg00) == LSHIFT_EXPR
12755 && integer_onep (TREE_OPERAND (arg00, 0)))
12757 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12758 arg01, TREE_OPERAND (arg00, 1));
12759 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12760 build_int_cst (TREE_TYPE (arg0), 1));
12761 return fold_build2_loc (loc, code, type,
12762 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12763 arg1);
12765 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12766 && integer_onep (TREE_OPERAND (arg01, 0)))
12768 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12769 arg00, TREE_OPERAND (arg01, 1));
12770 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12771 build_int_cst (TREE_TYPE (arg0), 1));
12772 return fold_build2_loc (loc, code, type,
12773 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12774 arg1);
12778 /* If this is an NE or EQ comparison of zero against the result of a
12779 signed MOD operation whose second operand is a power of 2, make
12780 the MOD operation unsigned since it is simpler and equivalent. */
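/* For instance, X % 4 == 0 holds exactly when (unsigned) X % 4 == 0,
   because whether the remainder is zero does not depend on its sign. */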
12781 if (integer_zerop (arg1)
12782 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12783 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12784 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12785 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12786 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12787 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12789 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12790 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12791 fold_convert_loc (loc, newtype,
12792 TREE_OPERAND (arg0, 0)),
12793 fold_convert_loc (loc, newtype,
12794 TREE_OPERAND (arg0, 1)));
12796 return fold_build2_loc (loc, code, type, newmod,
12797 fold_convert_loc (loc, newtype, arg1));
12800 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12801 C1 is a valid shift constant, and C2 is a power of two, i.e.
12802 a single bit. */
12803 if (TREE_CODE (arg0) == BIT_AND_EXPR
12804 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12805 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12806 == INTEGER_CST
12807 && integer_pow2p (TREE_OPERAND (arg0, 1))
12808 && integer_zerop (arg1))
12810 tree itype = TREE_TYPE (arg0);
12811 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12812 prec = TYPE_PRECISION (itype);
12814 /* Check for a valid shift count. */
12815 if (wi::ltu_p (arg001, prec))
12817 tree arg01 = TREE_OPERAND (arg0, 1);
12818 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12819 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12820 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12821 can be rewritten as (X & (C2 << C1)) != 0. */
12822 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12824 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12825 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12826 return fold_build2_loc (loc, code, type, tem,
12827 fold_convert_loc (loc, itype, arg1));
12829 /* Otherwise, for signed (arithmetic) shifts,
12830 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12831 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12832 else if (!TYPE_UNSIGNED (itype))
12833 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12834 arg000, build_int_cst (itype, 0));
12835 /* Otherwise, for unsigned (logical) shifts,
12836 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12837 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12838 else
12839 return omit_one_operand_loc (loc, type,
12840 code == EQ_EXPR ? integer_one_node
12841 : integer_zero_node,
12842 arg000);
12846 /* If we have (A & C) == C where C is a power of 2, convert this into
12847 (A & C) != 0. Similarly for NE_EXPR. */
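/* For instance, (A & 8) == 8 holds exactly when (A & 8) != 0,
   since A & 8 can only be 0 or 8. */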
12848 if (TREE_CODE (arg0) == BIT_AND_EXPR
12849 && integer_pow2p (TREE_OPERAND (arg0, 1))
12850 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12851 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12852 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12853 integer_zero_node));
12855 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12856 bit, then fold the expression into A < 0 or A >= 0. */
12857 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12858 if (tem)
12859 return tem;
12861 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12862 Similarly for NE_EXPR. */
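/* For instance, (A & 0x0f) == 0x10 is always false, because bit 4
   of the result is always clear. */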
12863 if (TREE_CODE (arg0) == BIT_AND_EXPR
12864 && TREE_CODE (arg1) == INTEGER_CST
12865 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12867 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12868 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12869 TREE_OPERAND (arg0, 1));
12870 tree dandnotc
12871 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12872 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12873 notc);
12874 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12875 if (integer_nonzerop (dandnotc))
12876 return omit_one_operand_loc (loc, type, rslt, arg0);
12879 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12880 Similarly for NE_EXPR. */
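/* For instance, (A | 4) == 3 is always false, because bit 2 of the
   result is always set. */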
12881 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12882 && TREE_CODE (arg1) == INTEGER_CST
12883 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12885 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12886 tree candnotd
12887 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12888 TREE_OPERAND (arg0, 1),
12889 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12890 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12891 if (integer_nonzerop (candnotd))
12892 return omit_one_operand_loc (loc, type, rslt, arg0);
12895 /* If this is a comparison of a field, we may be able to simplify it. */
12896 if ((TREE_CODE (arg0) == COMPONENT_REF
12897 || TREE_CODE (arg0) == BIT_FIELD_REF)
12898 /* Handle the constant case even without -O
12899 to make sure the warnings are given. */
12900 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12902 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12903 if (t1)
12904 return t1;
12907 /* Optimize comparisons of strlen vs zero to a compare of the
12908 first character of the string vs zero. To wit,
12909 strlen(ptr) == 0 => *ptr == 0
12910 strlen(ptr) != 0 => *ptr != 0
12911 Other cases should reduce to one of these two (or a constant)
12912 due to the return value of strlen being unsigned. */
12913 if (TREE_CODE (arg0) == CALL_EXPR
12914 && integer_zerop (arg1))
12916 tree fndecl = get_callee_fndecl (arg0);
12918 if (fndecl
12919 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12920 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12921 && call_expr_nargs (arg0) == 1
12922 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12924 tree iref = build_fold_indirect_ref_loc (loc,
12925 CALL_EXPR_ARG (arg0, 0));
12926 return fold_build2_loc (loc, code, type, iref,
12927 build_int_cst (TREE_TYPE (iref), 0));
12931 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12932 of X. Similarly fold (X >> C) == 0 into X >= 0. */
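/* For instance, for a 32-bit signed X, (X >> 31) != 0 holds exactly
   when X < 0, since the arithmetic shift yields -1 or 0. */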
12933 if (TREE_CODE (arg0) == RSHIFT_EXPR
12934 && integer_zerop (arg1)
12935 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12937 tree arg00 = TREE_OPERAND (arg0, 0);
12938 tree arg01 = TREE_OPERAND (arg0, 1);
12939 tree itype = TREE_TYPE (arg00);
12940 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
12942 if (TYPE_UNSIGNED (itype))
12944 itype = signed_type_for (itype);
12945 arg00 = fold_convert_loc (loc, itype, arg00);
12947 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12948 type, arg00, build_zero_cst (itype));
12952 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12953 if (integer_zerop (arg1)
12954 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12955 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12956 TREE_OPERAND (arg0, 1));
12958 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12959 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12960 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12961 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12962 build_zero_cst (TREE_TYPE (arg0)));
12963 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12964 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12965 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12966 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12967 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12968 build_zero_cst (TREE_TYPE (arg0)));
12970 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12971 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12972 && TREE_CODE (arg1) == INTEGER_CST
12973 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12974 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12975 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12976 TREE_OPERAND (arg0, 1), arg1));
12978 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12979 (X & C) == 0 when C is a single bit. */
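/* For instance, (~X & 8) == 0 holds exactly when (X & 8) != 0. */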
12980 if (TREE_CODE (arg0) == BIT_AND_EXPR
12981 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12982 && integer_zerop (arg1)
12983 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12985 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12986 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12987 TREE_OPERAND (arg0, 1));
12988 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12989 type, tem,
12990 fold_convert_loc (loc, TREE_TYPE (arg0),
12991 arg1));
12994 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12995 constant C is a power of two, i.e. a single bit. */
12996 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12997 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12998 && integer_zerop (arg1)
12999 && integer_pow2p (TREE_OPERAND (arg0, 1))
13000 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13001 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13003 tree arg00 = TREE_OPERAND (arg0, 0);
13004 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13005 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13008 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13009 when C is a power of two, i.e. a single bit. */
13010 if (TREE_CODE (arg0) == BIT_AND_EXPR
13011 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13012 && integer_zerop (arg1)
13013 && integer_pow2p (TREE_OPERAND (arg0, 1))
13014 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13015 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13017 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13018 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13019 arg000, TREE_OPERAND (arg0, 1));
13020 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13021 tem, build_int_cst (TREE_TYPE (tem), 0));
13024 if (integer_zerop (arg1)
13025 && tree_expr_nonzero_p (arg0))
13027 tree res = constant_boolean_node (code==NE_EXPR, type);
13028 return omit_one_operand_loc (loc, type, res, arg0);
13031 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13032 if (TREE_CODE (arg0) == NEGATE_EXPR
13033 && TREE_CODE (arg1) == NEGATE_EXPR)
13034 return fold_build2_loc (loc, code, type,
13035 TREE_OPERAND (arg0, 0),
13036 fold_convert_loc (loc, TREE_TYPE (arg0),
13037 TREE_OPERAND (arg1, 0)));
13039 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13040 if (TREE_CODE (arg0) == BIT_AND_EXPR
13041 && TREE_CODE (arg1) == BIT_AND_EXPR)
13043 tree arg00 = TREE_OPERAND (arg0, 0);
13044 tree arg01 = TREE_OPERAND (arg0, 1);
13045 tree arg10 = TREE_OPERAND (arg1, 0);
13046 tree arg11 = TREE_OPERAND (arg1, 1);
13047 tree itype = TREE_TYPE (arg0);
13049 if (operand_equal_p (arg01, arg11, 0))
13050 return fold_build2_loc (loc, code, type,
13051 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13052 fold_build2_loc (loc,
13053 BIT_XOR_EXPR, itype,
13054 arg00, arg10),
13055 arg01),
13056 build_zero_cst (itype));
13058 if (operand_equal_p (arg01, arg10, 0))
13059 return fold_build2_loc (loc, code, type,
13060 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13061 fold_build2_loc (loc,
13062 BIT_XOR_EXPR, itype,
13063 arg00, arg11),
13064 arg01),
13065 build_zero_cst (itype));
13067 if (operand_equal_p (arg00, arg11, 0))
13068 return fold_build2_loc (loc, code, type,
13069 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13070 fold_build2_loc (loc,
13071 BIT_XOR_EXPR, itype,
13072 arg01, arg10),
13073 arg00),
13074 build_zero_cst (itype));
13076 if (operand_equal_p (arg00, arg10, 0))
13077 return fold_build2_loc (loc, code, type,
13078 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13079 fold_build2_loc (loc,
13080 BIT_XOR_EXPR, itype,
13081 arg01, arg11),
13082 arg00),
13083 build_zero_cst (itype));
13086 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13087 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13089 tree arg00 = TREE_OPERAND (arg0, 0);
13090 tree arg01 = TREE_OPERAND (arg0, 1);
13091 tree arg10 = TREE_OPERAND (arg1, 0);
13092 tree arg11 = TREE_OPERAND (arg1, 1);
13093 tree itype = TREE_TYPE (arg0);
13095 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13096 operand_equal_p guarantees no side-effects so we don't need
13097 to use omit_one_operand on Z. */
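/* XOR with Z is its own inverse, so (X ^ Z) == (Y ^ Z) holds
   exactly when X == Y. */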
13098 if (operand_equal_p (arg01, arg11, 0))
13099 return fold_build2_loc (loc, code, type, arg00,
13100 fold_convert_loc (loc, TREE_TYPE (arg00),
13101 arg10));
13102 if (operand_equal_p (arg01, arg10, 0))
13103 return fold_build2_loc (loc, code, type, arg00,
13104 fold_convert_loc (loc, TREE_TYPE (arg00),
13105 arg11));
13106 if (operand_equal_p (arg00, arg11, 0))
13107 return fold_build2_loc (loc, code, type, arg01,
13108 fold_convert_loc (loc, TREE_TYPE (arg01),
13109 arg10));
13110 if (operand_equal_p (arg00, arg10, 0))
13111 return fold_build2_loc (loc, code, type, arg01,
13112 fold_convert_loc (loc, TREE_TYPE (arg01),
13113 arg11));
13115 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13116 if (TREE_CODE (arg01) == INTEGER_CST
13117 && TREE_CODE (arg11) == INTEGER_CST)
13119 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13120 fold_convert_loc (loc, itype, arg11));
13121 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13122 return fold_build2_loc (loc, code, type, tem,
13123 fold_convert_loc (loc, itype, arg10));
13127 /* Attempt to simplify equality/inequality comparisons of complex
13128 values. Only lower the comparison if the result is known or
13129 can be simplified to a single scalar comparison. */
13130 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13131 || TREE_CODE (arg0) == COMPLEX_CST)
13132 && (TREE_CODE (arg1) == COMPLEX_EXPR
13133 || TREE_CODE (arg1) == COMPLEX_CST))
13135 tree real0, imag0, real1, imag1;
13136 tree rcond, icond;
13138 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13140 real0 = TREE_OPERAND (arg0, 0);
13141 imag0 = TREE_OPERAND (arg0, 1);
13143 else
13145 real0 = TREE_REALPART (arg0);
13146 imag0 = TREE_IMAGPART (arg0);
13149 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13151 real1 = TREE_OPERAND (arg1, 0);
13152 imag1 = TREE_OPERAND (arg1, 1);
13154 else
13156 real1 = TREE_REALPART (arg1);
13157 imag1 = TREE_IMAGPART (arg1);
13160 rcond = fold_binary_loc (loc, code, type, real0, real1);
13161 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13163 if (integer_zerop (rcond))
13165 if (code == EQ_EXPR)
13166 return omit_two_operands_loc (loc, type, boolean_false_node,
13167 imag0, imag1);
13168 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13170 else
13172 if (code == NE_EXPR)
13173 return omit_two_operands_loc (loc, type, boolean_true_node,
13174 imag0, imag1);
13175 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13179 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13180 if (icond && TREE_CODE (icond) == INTEGER_CST)
13182 if (integer_zerop (icond))
13184 if (code == EQ_EXPR)
13185 return omit_two_operands_loc (loc, type, boolean_false_node,
13186 real0, real1);
13187 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13189 else
13191 if (code == NE_EXPR)
13192 return omit_two_operands_loc (loc, type, boolean_true_node,
13193 real0, real1);
13194 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13199 return NULL_TREE;
13201 case LT_EXPR:
13202 case GT_EXPR:
13203 case LE_EXPR:
13204 case GE_EXPR:
13205 tem = fold_comparison (loc, code, type, op0, op1);
13206 if (tem != NULL_TREE)
13207 return tem;
13209 /* Transform comparisons of the form X +- C CMP X. */
13210 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13211 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13212 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13213 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13214 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13215 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13217 tree arg01 = TREE_OPERAND (arg0, 1);
13218 enum tree_code code0 = TREE_CODE (arg0);
13219 int is_positive;
13221 if (TREE_CODE (arg01) == REAL_CST)
13222 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13223 else
13224 is_positive = tree_int_cst_sgn (arg01);
13226 /* (X - c) > X becomes false. */
13227 if (code == GT_EXPR
13228 && ((code0 == MINUS_EXPR && is_positive >= 0)
13229 || (code0 == PLUS_EXPR && is_positive <= 0)))
13231 if (TREE_CODE (arg01) == INTEGER_CST
13232 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13233 fold_overflow_warning (("assuming signed overflow does not "
13234 "occur when assuming that (X - c) > X "
13235 "is always false"),
13236 WARN_STRICT_OVERFLOW_ALL);
13237 return constant_boolean_node (0, type);
13240 /* Likewise (X + c) < X becomes false. */
13241 if (code == LT_EXPR
13242 && ((code0 == PLUS_EXPR && is_positive >= 0)
13243 || (code0 == MINUS_EXPR && is_positive <= 0)))
13245 if (TREE_CODE (arg01) == INTEGER_CST
13246 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13247 fold_overflow_warning (("assuming signed overflow does not "
13248 "occur when assuming that "
13249 "(X + c) < X is always false"),
13250 WARN_STRICT_OVERFLOW_ALL);
13251 return constant_boolean_node (0, type);
13254 /* Convert (X - c) <= X to true. */
13255 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13256 && code == LE_EXPR
13257 && ((code0 == MINUS_EXPR && is_positive >= 0)
13258 || (code0 == PLUS_EXPR && is_positive <= 0)))
13260 if (TREE_CODE (arg01) == INTEGER_CST
13261 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13262 fold_overflow_warning (("assuming signed overflow does not "
13263 "occur when assuming that "
13264 "(X - c) <= X is always true"),
13265 WARN_STRICT_OVERFLOW_ALL);
13266 return constant_boolean_node (1, type);
13269 /* Convert (X + c) >= X to true. */
13270 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13271 && code == GE_EXPR
13272 && ((code0 == PLUS_EXPR && is_positive >= 0)
13273 || (code0 == MINUS_EXPR && is_positive <= 0)))
13275 if (TREE_CODE (arg01) == INTEGER_CST
13276 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13277 fold_overflow_warning (("assuming signed overflow does not "
13278 "occur when assuming that "
13279 "(X + c) >= X is always true"),
13280 WARN_STRICT_OVERFLOW_ALL);
13281 return constant_boolean_node (1, type);
13284 if (TREE_CODE (arg01) == INTEGER_CST)
13286 /* Convert X + c > X and X - c < X to true for integers. */
13287 if (code == GT_EXPR
13288 && ((code0 == PLUS_EXPR && is_positive > 0)
13289 || (code0 == MINUS_EXPR && is_positive < 0)))
13291 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13292 fold_overflow_warning (("assuming signed overflow does "
13293 "not occur when assuming that "
13294 "(X + c) > X is always true"),
13295 WARN_STRICT_OVERFLOW_ALL);
13296 return constant_boolean_node (1, type);
13299 if (code == LT_EXPR
13300 && ((code0 == MINUS_EXPR && is_positive > 0)
13301 || (code0 == PLUS_EXPR && is_positive < 0)))
13303 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13304 fold_overflow_warning (("assuming signed overflow does "
13305 "not occur when assuming that "
13306 "(X - c) < X is always true"),
13307 WARN_STRICT_OVERFLOW_ALL);
13308 return constant_boolean_node (1, type);
13311 /* Convert X + c <= X and X - c >= X to false for integers. */
13312 if (code == LE_EXPR
13313 && ((code0 == PLUS_EXPR && is_positive > 0)
13314 || (code0 == MINUS_EXPR && is_positive < 0)))
13316 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13317 fold_overflow_warning (("assuming signed overflow does "
13318 "not occur when assuming that "
13319 "(X + c) <= X is always false"),
13320 WARN_STRICT_OVERFLOW_ALL);
13321 return constant_boolean_node (0, type);
13324 if (code == GE_EXPR
13325 && ((code0 == MINUS_EXPR && is_positive > 0)
13326 || (code0 == PLUS_EXPR && is_positive < 0)))
13328 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13329 fold_overflow_warning (("assuming signed overflow does "
13330 "not occur when assuming that "
13331 "(X - c) >= X is always false"),
13332 WARN_STRICT_OVERFLOW_ALL);
13333 return constant_boolean_node (0, type);
13338 /* Comparisons with the highest or lowest possible integer of
13339 the specified precision will have known values. */
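/* For instance, for unsigned X, X <= UINT_MAX is always true and
   X > UINT_MAX is always false. */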
13341 tree arg1_type = TREE_TYPE (arg1);
13342 unsigned int prec = TYPE_PRECISION (arg1_type);
13344 if (TREE_CODE (arg1) == INTEGER_CST
13345 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13347 wide_int max = wi::max_value (arg1_type);
13348 wide_int signed_max = wi::max_value (prec, SIGNED);
13349 wide_int min = wi::min_value (arg1_type);
13351 if (wi::eq_p (arg1, max))
13352 switch (code)
13354 case GT_EXPR:
13355 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13357 case GE_EXPR:
13358 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13360 case LE_EXPR:
13361 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13363 case LT_EXPR:
13364 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13366 /* The GE_EXPR and LT_EXPR cases above are not normally
13367 reached because of previous transformations. */
13369 default:
13370 break;
13372 else if (wi::eq_p (arg1, max - 1))
13373 switch (code)
13375 case GT_EXPR:
13376 arg1 = const_binop (PLUS_EXPR, arg1,
13377 build_int_cst (TREE_TYPE (arg1), 1));
13378 return fold_build2_loc (loc, EQ_EXPR, type,
13379 fold_convert_loc (loc,
13380 TREE_TYPE (arg1), arg0),
13381 arg1);
13382 case LE_EXPR:
13383 arg1 = const_binop (PLUS_EXPR, arg1,
13384 build_int_cst (TREE_TYPE (arg1), 1));
13385 return fold_build2_loc (loc, NE_EXPR, type,
13386 fold_convert_loc (loc, TREE_TYPE (arg1),
13387 arg0),
13388 arg1);
13389 default:
13390 break;
13392 else if (wi::eq_p (arg1, min))
13393 switch (code)
13395 case LT_EXPR:
13396 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13398 case LE_EXPR:
13399 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13401 case GE_EXPR:
13402 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13404 case GT_EXPR:
13405 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13407 default:
13408 break;
13410 else if (wi::eq_p (arg1, min + 1))
13411 switch (code)
13413 case GE_EXPR:
13414 arg1 = const_binop (MINUS_EXPR, arg1,
13415 build_int_cst (TREE_TYPE (arg1), 1));
13416 return fold_build2_loc (loc, NE_EXPR, type,
13417 fold_convert_loc (loc,
13418 TREE_TYPE (arg1), arg0),
13419 arg1);
13420 case LT_EXPR:
13421 arg1 = const_binop (MINUS_EXPR, arg1,
13422 build_int_cst (TREE_TYPE (arg1), 1));
13423 return fold_build2_loc (loc, EQ_EXPR, type,
13424 fold_convert_loc (loc, TREE_TYPE (arg1),
13425 arg0),
13426 arg1);
13427 default:
13428 break;
13431 else if (wi::eq_p (arg1, signed_max)
13432 && TYPE_UNSIGNED (arg1_type)
13433 /* We will flip the signedness of the comparison operator
13434 associated with the mode of arg1, so the sign bit is
13435 specified by this mode. Check that arg1 is the signed
13436 max associated with this sign bit. */
13437 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13438 /* signed_type does not work on pointer types. */
13439 && INTEGRAL_TYPE_P (arg1_type))
13441 /* The following case also applies to X < signed_max+1
13442 and X >= signed_max+1 because of previous transformations. */
13443 if (code == LE_EXPR || code == GT_EXPR)
13445 tree st = signed_type_for (arg1_type);
13446 return fold_build2_loc (loc,
13447 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13448 type, fold_convert_loc (loc, st, arg0),
13449 build_int_cst (st, 0));
13455 /* If we are comparing an ABS_EXPR with a constant, we can
13456 convert all the cases into explicit comparisons, but they may
13457 well not be faster than doing the ABS and one comparison.
13458 But ABS (X) <= C is a range comparison, which becomes a subtraction
13459 and a comparison, and is probably faster. */
13460 if (code == LE_EXPR
13461 && TREE_CODE (arg1) == INTEGER_CST
13462 && TREE_CODE (arg0) == ABS_EXPR
13463 && ! TREE_SIDE_EFFECTS (arg0)
13464 && (0 != (tem = negate_expr (arg1)))
13465 && TREE_CODE (tem) == INTEGER_CST
13466 && !TREE_OVERFLOW (tem))
13467 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13468 build2 (GE_EXPR, type,
13469 TREE_OPERAND (arg0, 0), tem),
13470 build2 (LE_EXPR, type,
13471 TREE_OPERAND (arg0, 0), arg1));
13473 /* Convert ABS_EXPR<x> >= 0 to true. */
13474 strict_overflow_p = false;
13475 if (code == GE_EXPR
13476 && (integer_zerop (arg1)
13477 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13478 && real_zerop (arg1)))
13479 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13481 if (strict_overflow_p)
13482 fold_overflow_warning (("assuming signed overflow does not occur "
13483 "when simplifying comparison of "
13484 "absolute value and zero"),
13485 WARN_STRICT_OVERFLOW_CONDITIONAL);
13486 return omit_one_operand_loc (loc, type,
13487 constant_boolean_node (true, type),
13488 arg0);
13491 /* Convert ABS_EXPR<x> < 0 to false. */
13492 strict_overflow_p = false;
13493 if (code == LT_EXPR
13494 && (integer_zerop (arg1) || real_zerop (arg1))
13495 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13497 if (strict_overflow_p)
13498 fold_overflow_warning (("assuming signed overflow does not occur "
13499 "when simplifying comparison of "
13500 "absolute value and zero"),
13501 WARN_STRICT_OVERFLOW_CONDITIONAL);
13502 return omit_one_operand_loc (loc, type,
13503 constant_boolean_node (false, type),
13504 arg0);
13507 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13508 and similarly for >= into !=. */
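/* For instance, for unsigned X, X < (1 << Y) holds exactly when
   X >> Y == 0 (the shift count is assumed to be valid). */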
13509 if ((code == LT_EXPR || code == GE_EXPR)
13510 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13511 && TREE_CODE (arg1) == LSHIFT_EXPR
13512 && integer_onep (TREE_OPERAND (arg1, 0)))
13513 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13514 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13515 TREE_OPERAND (arg1, 1)),
13516 build_zero_cst (TREE_TYPE (arg0)));
13518 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13519 otherwise Y might be >= # of bits in X's type and thus e.g.
13520 (unsigned char) (1 << Y) for Y 15 might be 0.
13521 If the cast is widening, then 1 << Y should have unsigned type,
13522 otherwise if Y is number of bits in the signed shift type minus 1,
13523 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13524 31 might be 0xffffffff80000000. */
13525 if ((code == LT_EXPR || code == GE_EXPR)
13526 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13527 && CONVERT_EXPR_P (arg1)
13528 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13529 && (TYPE_PRECISION (TREE_TYPE (arg1))
13530 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13531 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13532 || (TYPE_PRECISION (TREE_TYPE (arg1))
13533 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13534 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13536 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13537 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13538 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13539 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13540 build_zero_cst (TREE_TYPE (arg0)));
13543 return NULL_TREE;
13545 case UNORDERED_EXPR:
13546 case ORDERED_EXPR:
13547 case UNLT_EXPR:
13548 case UNLE_EXPR:
13549 case UNGT_EXPR:
13550 case UNGE_EXPR:
13551 case UNEQ_EXPR:
13552 case LTGT_EXPR:
13553 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13555 t1 = fold_relational_const (code, type, arg0, arg1);
13556 if (t1 != NULL_TREE)
13557 return t1;
13560 /* If the first operand is NaN, the result is constant. */
13561 if (TREE_CODE (arg0) == REAL_CST
13562 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13563 && (code != LTGT_EXPR || ! flag_trapping_math))
13565 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13566 ? integer_zero_node
13567 : integer_one_node;
13568 return omit_one_operand_loc (loc, type, t1, arg1);
13571 /* If the second operand is NaN, the result is constant. */
13572 if (TREE_CODE (arg1) == REAL_CST
13573 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13574 && (code != LTGT_EXPR || ! flag_trapping_math))
13576 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13577 ? integer_zero_node
13578 : integer_one_node;
13579 return omit_one_operand_loc (loc, type, t1, arg0);
13582 /* Simplify unordered comparison of something with itself. */
13583 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13584 && operand_equal_p (arg0, arg1, 0))
13585 return constant_boolean_node (1, type);
13587 if (code == LTGT_EXPR
13588 && !flag_trapping_math
13589 && operand_equal_p (arg0, arg1, 0))
13590 return constant_boolean_node (0, type);
13592 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
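/* Widening a float to double is exact and order-preserving, so the
   comparison can be carried out in the narrower type. */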
13594 tree targ0 = strip_float_extensions (arg0);
13595 tree targ1 = strip_float_extensions (arg1);
13596 tree newtype = TREE_TYPE (targ0);
13598 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13599 newtype = TREE_TYPE (targ1);
13601 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13602 return fold_build2_loc (loc, code, type,
13603 fold_convert_loc (loc, newtype, targ0),
13604 fold_convert_loc (loc, newtype, targ1));
13607 return NULL_TREE;
13609 case COMPOUND_EXPR:
13610 /* When pedantic, a compound expression can be neither an lvalue
13611 nor an integer constant expression. */
13612 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13613 return NULL_TREE;
13614 /* Don't let (0, 0) be a null pointer constant. */
13615 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13616 : fold_convert_loc (loc, type, arg1);
13617 return pedantic_non_lvalue_loc (loc, tem);
13619 case COMPLEX_EXPR:
13620 if ((TREE_CODE (arg0) == REAL_CST
13621 && TREE_CODE (arg1) == REAL_CST)
13622 || (TREE_CODE (arg0) == INTEGER_CST
13623 && TREE_CODE (arg1) == INTEGER_CST))
13624 return build_complex (type, arg0, arg1);
13625 if (TREE_CODE (arg0) == REALPART_EXPR
13626 && TREE_CODE (arg1) == IMAGPART_EXPR
13627 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13628 && operand_equal_p (TREE_OPERAND (arg0, 0),
13629 TREE_OPERAND (arg1, 0), 0))
13630 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13631 TREE_OPERAND (arg1, 0));
13632 return NULL_TREE;
13634 case ASSERT_EXPR:
13635 /* An ASSERT_EXPR should never be passed to fold_binary. */
13636 gcc_unreachable ();
13638 case VEC_PACK_TRUNC_EXPR:
13639 case VEC_PACK_FIX_TRUNC_EXPR:
13641 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13642 tree *elts;
13644 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13645 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13646 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13647 return NULL_TREE;
13649 elts = XALLOCAVEC (tree, nelts);
13650 if (!vec_cst_ctor_to_array (arg0, elts)
13651 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13652 return NULL_TREE;
13654 for (i = 0; i < nelts; i++)
13656 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13657 ? NOP_EXPR : FIX_TRUNC_EXPR,
13658 TREE_TYPE (type), elts[i]);
13659 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13660 return NULL_TREE;
13663 return build_vector (type, elts);
13666 case VEC_WIDEN_MULT_LO_EXPR:
13667 case VEC_WIDEN_MULT_HI_EXPR:
13668 case VEC_WIDEN_MULT_EVEN_EXPR:
13669 case VEC_WIDEN_MULT_ODD_EXPR:
13671 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13672 unsigned int out, ofs, scale;
13673 tree *elts;
13675 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13676 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13677 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13678 return NULL_TREE;
13680 elts = XALLOCAVEC (tree, nelts * 4);
13681 if (!vec_cst_ctor_to_array (arg0, elts)
13682 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13683 return NULL_TREE;
13685 if (code == VEC_WIDEN_MULT_LO_EXPR)
13686 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13687 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13688 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13689 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13690 scale = 1, ofs = 0;
13691 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13692 scale = 1, ofs = 1;
13694 for (out = 0; out < nelts; out++)
13696 unsigned int in1 = (out << scale) + ofs;
13697 unsigned int in2 = in1 + nelts * 2;
13698 tree t1, t2;
13700 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13701 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13703 if (t1 == NULL_TREE || t2 == NULL_TREE)
13704 return NULL_TREE;
13705 elts[out] = const_binop (MULT_EXPR, t1, t2);
13706 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13707 return NULL_TREE;
13710 return build_vector (type, elts);
13713 default:
13714 return NULL_TREE;
13715 } /* switch (code) */
13718 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13719 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13720 of GOTO_EXPR. */
13722 static tree
13723 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13725 switch (TREE_CODE (*tp))
13727 case LABEL_EXPR:
13728 return *tp;
13730 case GOTO_EXPR:
13731 *walk_subtrees = 0;
13733 /* ... fall through ... */
13735 default:
13736 return NULL_TREE;
13740 /* Return whether the sub-tree ST contains a label which is accessible from
13741 outside the sub-tree. */
13743 static bool
13744 contains_label_p (tree st)
13746 return
13747 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13750 /* Fold a ternary expression of code CODE and type TYPE with operands
13751 OP0, OP1, and OP2. Return the folded expression if folding is
13752 successful. Otherwise, return NULL_TREE. */
13754 tree
13755 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13756 tree op0, tree op1, tree op2)
13758 tree tem;
13759 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13760 enum tree_code_class kind = TREE_CODE_CLASS (code);
13762 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13763 && TREE_CODE_LENGTH (code) == 3);
13765 /* If this is a commutative operation, and OP0 is a constant, move it
13766 to OP1 to reduce the number of tests below. */
13767 if (commutative_ternary_tree_code (code)
13768 && tree_swap_operands_p (op0, op1, true))
13769 return fold_build3_loc (loc, code, type, op1, op0, op2);
13771 tem = generic_simplify (loc, code, type, op0, op1, op2);
13772 if (tem)
13773 return tem;
13775 /* Strip any conversions that don't change the mode. This is safe
13776 for every expression, except for a comparison expression because
13777 its signedness is derived from its operands. So, in the latter
13778 case, only strip conversions that don't change the signedness.
13780 Note that this is done as an internal manipulation within the
13781 constant folder, in order to find the simplest representation of
13782 the arguments so that their form can be studied. In any case,
13783 the appropriate type conversions should be put back in the tree
13784 that will get out of the constant folder. */
13785 if (op0)
13787 arg0 = op0;
13788 STRIP_NOPS (arg0);
13791 if (op1)
13793 arg1 = op1;
13794 STRIP_NOPS (arg1);
13797 if (op2)
13799 arg2 = op2;
13800 STRIP_NOPS (arg2);
13803 switch (code)
13805 case COMPONENT_REF:
13806 if (TREE_CODE (arg0) == CONSTRUCTOR
13807 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13809 unsigned HOST_WIDE_INT idx;
13810 tree field, value;
13811 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13812 if (field == arg1)
13813 return value;
13815 return NULL_TREE;
13817 case COND_EXPR:
13818 case VEC_COND_EXPR:
13819 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13820 so all simple results must be passed through pedantic_non_lvalue. */
13821 if (TREE_CODE (arg0) == INTEGER_CST)
13823 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13824 tem = integer_zerop (arg0) ? op2 : op1;
13825 /* Only optimize constant conditions when the selected branch
13826 has the same type as the COND_EXPR. This avoids optimizing
13827 away "c ? x : throw", where the throw has a void type.
13828 Avoid throwing away an operand that contains a label. */
13829 if ((!TREE_SIDE_EFFECTS (unused_op)
13830 || !contains_label_p (unused_op))
13831 && (! VOID_TYPE_P (TREE_TYPE (tem))
13832 || VOID_TYPE_P (type)))
13833 return pedantic_non_lvalue_loc (loc, tem);
13834 return NULL_TREE;
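/* For illustration: "1 ? a : b" folds to A and "0 ? a : b" to B;
   but "0 ? a : throw_expr" is left alone, because the selected arm
   has void type while the COND_EXPR itself does not.  */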
13836 else if (TREE_CODE (arg0) == VECTOR_CST)
13838 if (integer_all_onesp (arg0))
13839 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13840 if (integer_zerop (arg0))
13841 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13843 if ((TREE_CODE (arg1) == VECTOR_CST
13844 || TREE_CODE (arg1) == CONSTRUCTOR)
13845 && (TREE_CODE (arg2) == VECTOR_CST
13846 || TREE_CODE (arg2) == CONSTRUCTOR))
13848 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13849 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13850 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13851 for (i = 0; i < nelts; i++)
13853 tree val = VECTOR_CST_ELT (arg0, i);
13854 if (integer_all_onesp (val))
13855 sel[i] = i;
13856 else if (integer_zerop (val))
13857 sel[i] = nelts + i;
13858 else /* Currently unreachable. */
13859 return NULL_TREE;
13861 tree t = fold_vec_perm (type, arg1, arg2, sel);
13862 if (t != NULL_TREE)
13863 return t;
13867 if (operand_equal_p (arg1, op2, 0))
13868 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13870 /* If we have A op B ? A : C, we may be able to convert this to a
13871 simpler expression, depending on the operation and the values
13872 of B and C. Signed zeros prevent all of these transformations,
13873 for reasons given above each one.
13875 Also try swapping the arguments and inverting the conditional. */
13876 if (COMPARISON_CLASS_P (arg0)
13877 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13878 arg1, TREE_OPERAND (arg0, 1))
13879 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13881 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13882 if (tem)
13883 return tem;
13886 if (COMPARISON_CLASS_P (arg0)
13887 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13888 op2,
13889 TREE_OPERAND (arg0, 1))
13890 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13892 location_t loc0 = expr_location_or (arg0, loc);
13893 tem = fold_invert_truthvalue (loc0, arg0);
13894 if (tem && COMPARISON_CLASS_P (tem))
13896 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13897 if (tem)
13898 return tem;
13902 /* If the second operand is simpler than the third, swap them
13903 since that produces better jump optimization results. */
13904 if (truth_value_p (TREE_CODE (arg0))
13905 && tree_swap_operands_p (op1, op2, false))
13907 location_t loc0 = expr_location_or (arg0, loc);
13908 /* See if this can be inverted. If it can't, possibly because
13909 it was a floating-point inequality comparison, don't do
13910 anything. */
13911 tem = fold_invert_truthvalue (loc0, arg0);
13912 if (tem)
13913 return fold_build3_loc (loc, code, type, tem, op2, op1);
13916 /* Convert A ? 1 : 0 to simply A. */
13917 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13918 : (integer_onep (op1)
13919 && !VECTOR_TYPE_P (type)))
13920 && integer_zerop (op2)
13921 /* If we try to convert OP0 to our type, the
13922 call to fold will try to move the conversion inside
13923 a COND, which will recurse. In that case, the COND_EXPR
13924 is probably the best choice, so leave it alone. */
13925 && type == TREE_TYPE (arg0))
13926 return pedantic_non_lvalue_loc (loc, arg0);
13928 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13929 over COND_EXPR in cases such as floating point comparisons. */
13930 if (integer_zerop (op1)
13931 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13932 : (integer_onep (op2)
13933 && !VECTOR_TYPE_P (type)))
13934 && truth_value_p (TREE_CODE (arg0)))
13935 return pedantic_non_lvalue_loc (loc,
13936 fold_convert_loc (loc, type,
13937 invert_truthvalue_loc (loc,
13938 arg0)));
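/* For illustration: with a double D, "d < 1.0 ? 0 : 1" becomes
   "!(d < 1.0)" rather than "d >= 1.0"; keeping the NOT_EXPR matters
   because the two forms differ for unordered (NaN) operands.  */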
13940 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13941 if (TREE_CODE (arg0) == LT_EXPR
13942 && integer_zerop (TREE_OPERAND (arg0, 1))
13943 && integer_zerop (op2)
13944 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13946 /* sign_bit_p looks through both zero and sign extensions,
13947 but for this optimization only sign extensions are
13948 usable. */
13949 tree tem2 = TREE_OPERAND (arg0, 0);
13950 while (tem != tem2)
13952 if (TREE_CODE (tem2) != NOP_EXPR
13953 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13955 tem = NULL_TREE;
13956 break;
13958 tem2 = TREE_OPERAND (tem2, 0);
13960 /* sign_bit_p only checks ARG1 bits within A's precision.
13961 If <sign bit of A> has wider type than A, bits outside
13962 of A's precision in <sign bit of A> need to be checked.
13963 If they are all 0, this optimization needs to be done
13964 in unsigned A's type; if they are all 1, in signed A's type;
13965 otherwise this can't be done. */
13966 if (tem
13967 && TYPE_PRECISION (TREE_TYPE (tem))
13968 < TYPE_PRECISION (TREE_TYPE (arg1))
13969 && TYPE_PRECISION (TREE_TYPE (tem))
13970 < TYPE_PRECISION (type))
13972 int inner_width, outer_width;
13973 tree tem_type;
13975 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13976 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13977 if (outer_width > TYPE_PRECISION (type))
13978 outer_width = TYPE_PRECISION (type);
13980 wide_int mask = wi::shifted_mask
13981 (inner_width, outer_width - inner_width, false,
13982 TYPE_PRECISION (TREE_TYPE (arg1)));
13984 wide_int common = mask & arg1;
13985 if (common == mask)
13987 tem_type = signed_type_for (TREE_TYPE (tem));
13988 tem = fold_convert_loc (loc, tem_type, tem);
13990 else if (common == 0)
13992 tem_type = unsigned_type_for (TREE_TYPE (tem));
13993 tem = fold_convert_loc (loc, tem_type, tem);
13995 else
13996 tem = NULL;
13999 if (tem)
14000 return
14001 fold_convert_loc (loc, type,
14002 fold_build2_loc (loc, BIT_AND_EXPR,
14003 TREE_TYPE (tem), tem,
14004 fold_convert_loc (loc,
14005 TREE_TYPE (tem),
14006 arg1)));
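/* For illustration (assuming 32-bit int): "i < 0 ? INT_MIN : 0"
   becomes "i & INT_MIN", replacing the branch by a single AND with
   the sign bit.  The precision checks above cover the case where
   the selected constant is wider than A itself.  */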
14009 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14010 already handled above. */
14011 if (TREE_CODE (arg0) == BIT_AND_EXPR
14012 && integer_onep (TREE_OPERAND (arg0, 1))
14013 && integer_zerop (op2)
14014 && integer_pow2p (arg1))
14016 tree tem = TREE_OPERAND (arg0, 0);
14017 STRIP_NOPS (tem);
14018 if (TREE_CODE (tem) == RSHIFT_EXPR
14019 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14020 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14021 tree_to_uhwi (TREE_OPERAND (tem, 1)))
14022 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14023 TREE_OPERAND (tem, 0), arg1);
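/* For illustration: "(a >> 3) & 1 ? 8 : 0" becomes "a & 8"; testing
   bit 3 and then materializing 1 << 3 is the same as masking the bit
   directly.  */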
14026 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14027 is probably obsolete because the first operand should be a
14028 truth value (that's why we have the two cases above), but let's
14029 leave it in until we can confirm this for all front-ends. */
14030 if (integer_zerop (op2)
14031 && TREE_CODE (arg0) == NE_EXPR
14032 && integer_zerop (TREE_OPERAND (arg0, 1))
14033 && integer_pow2p (arg1)
14034 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14035 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14036 arg1, OEP_ONLY_CONST))
14037 return pedantic_non_lvalue_loc (loc,
14038 fold_convert_loc (loc, type,
14039 TREE_OPERAND (arg0, 0)));
14041 /* Disable the transformations below for vectors, since
14042 fold_binary_op_with_conditional_arg may undo them immediately,
14043 yielding an infinite loop. */
14044 if (code == VEC_COND_EXPR)
14045 return NULL_TREE;
14047 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14048 if (integer_zerop (op2)
14049 && truth_value_p (TREE_CODE (arg0))
14050 && truth_value_p (TREE_CODE (arg1))
14051 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14052 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14053 : TRUTH_ANDIF_EXPR,
14054 type, fold_convert_loc (loc, type, arg0), arg1);
14056 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14057 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14058 && truth_value_p (TREE_CODE (arg0))
14059 && truth_value_p (TREE_CODE (arg1))
14060 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14062 location_t loc0 = expr_location_or (arg0, loc);
14063 /* Only perform transformation if ARG0 is easily inverted. */
14064 tem = fold_invert_truthvalue (loc0, arg0);
14065 if (tem)
14066 return fold_build2_loc (loc, code == VEC_COND_EXPR
14067 ? BIT_IOR_EXPR
14068 : TRUTH_ORIF_EXPR,
14069 type, fold_convert_loc (loc, type, tem),
14070 arg1);
14073 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14074 if (integer_zerop (arg1)
14075 && truth_value_p (TREE_CODE (arg0))
14076 && truth_value_p (TREE_CODE (op2))
14077 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14079 location_t loc0 = expr_location_or (arg0, loc);
14080 /* Only perform transformation if ARG0 is easily inverted. */
14081 tem = fold_invert_truthvalue (loc0, arg0);
14082 if (tem)
14083 return fold_build2_loc (loc, code == VEC_COND_EXPR
14084 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14085 type, fold_convert_loc (loc, type, tem),
14086 op2);
14089 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14090 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14091 && truth_value_p (TREE_CODE (arg0))
14092 && truth_value_p (TREE_CODE (op2))
14093 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14094 return fold_build2_loc (loc, code == VEC_COND_EXPR
14095 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14096 type, fold_convert_loc (loc, type, arg0), op2);
14098 return NULL_TREE;
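/* For illustration: with truth values A and B the cases above give
   "a ? b : 0" -> "a && b", "a ? b : 1" -> "!a || b",
   "a ? 0 : b" -> "!a && b" and "a ? 1 : b" -> "a || b".  */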
14100 case CALL_EXPR:
14101 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14102 of fold_ternary on them. */
14103 gcc_unreachable ();
14105 case BIT_FIELD_REF:
14106 if ((TREE_CODE (arg0) == VECTOR_CST
14107 || (TREE_CODE (arg0) == CONSTRUCTOR
14108 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14109 && (type == TREE_TYPE (TREE_TYPE (arg0))
14110 || (TREE_CODE (type) == VECTOR_TYPE
14111 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14113 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14114 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14115 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14116 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14118 if (n != 0
14119 && (idx % width) == 0
14120 && (n % width) == 0
14121 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14123 idx = idx / width;
14124 n = n / width;
14126 if (TREE_CODE (arg0) == VECTOR_CST)
14128 if (n == 1)
14129 return VECTOR_CST_ELT (arg0, idx);
14131 tree *vals = XALLOCAVEC (tree, n);
14132 for (unsigned i = 0; i < n; ++i)
14133 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14134 return build_vector (type, vals);
14137 /* Constructor elements can be subvectors. */
14138 unsigned HOST_WIDE_INT k = 1;
14139 if (CONSTRUCTOR_NELTS (arg0) != 0)
14141 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14142 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14143 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14146 /* We keep an exact subset of the constructor elements. */
14147 if ((idx % k) == 0 && (n % k) == 0)
14149 if (CONSTRUCTOR_NELTS (arg0) == 0)
14150 return build_constructor (type, NULL);
14151 idx /= k;
14152 n /= k;
14153 if (n == 1)
14155 if (idx < CONSTRUCTOR_NELTS (arg0))
14156 return CONSTRUCTOR_ELT (arg0, idx)->value;
14157 return build_zero_cst (type);
14160 vec<constructor_elt, va_gc> *vals;
14161 vec_alloc (vals, n);
14162 for (unsigned i = 0;
14163 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14164 ++i)
14165 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14166 CONSTRUCTOR_ELT
14167 (arg0, idx + i)->value);
14168 return build_constructor (type, vals);
14170 /* The bitfield references a single constructor element. */
14171 else if (idx + n <= (idx / k + 1) * k)
14173 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14174 return build_zero_cst (type);
14175 else if (n == k)
14176 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14177 else
14178 return fold_build3_loc (loc, code, type,
14179 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14180 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14185 /* A bit-field-ref that referenced the full argument can be stripped. */
14186 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14187 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14188 && integer_zerop (op2))
14189 return fold_convert_loc (loc, type, arg0);
14191 /* On constants we can use native encode/interpret to constant
14192 fold (nearly) all BIT_FIELD_REFs. */
14193 if (CONSTANT_CLASS_P (arg0)
14194 && can_native_interpret_type_p (type)
14195 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14196 /* This limitation should not be necessary; we just need to
14197 round this up to mode size. */
14198 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14199 /* Need bit-shifting of the buffer to relax the following. */
14200 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14202 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14203 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14204 unsigned HOST_WIDE_INT clen;
14205 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14206 /* ??? We cannot tell native_encode_expr to start at
14207 some random byte only. So limit us to a reasonable amount
14208 of work. */
14209 if (clen <= 4096)
14211 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14212 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14213 if (len > 0
14214 && len * BITS_PER_UNIT >= bitpos + bitsize)
14216 tree v = native_interpret_expr (type,
14217 b + bitpos / BITS_PER_UNIT,
14218 bitsize / BITS_PER_UNIT);
14219 if (v)
14220 return v;
14225 return NULL_TREE;
14227 case FMA_EXPR:
14228 /* For integers we can decompose the FMA if possible. */
14229 if (TREE_CODE (arg0) == INTEGER_CST
14230 && TREE_CODE (arg1) == INTEGER_CST)
14231 return fold_build2_loc (loc, PLUS_EXPR, type,
14232 const_binop (MULT_EXPR, arg0, arg1), arg2);
14233 if (integer_zerop (arg2))
14234 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14236 return fold_fma (loc, type, arg0, arg1, arg2);
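/* For illustration: FMA_EXPR <2, 3, c> decomposes to "c + 6" via
   const_binop, and FMA_EXPR <a, b, 0> to "a * b"; the remaining
   constant cases are left to fold_fma.  */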
14238 case VEC_PERM_EXPR:
14239 if (TREE_CODE (arg2) == VECTOR_CST)
14241 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14242 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14243 bool need_mask_canon = false;
14244 bool all_in_vec0 = true;
14245 bool all_in_vec1 = true;
14246 bool maybe_identity = true;
14247 bool single_arg = (op0 == op1);
14248 bool changed = false;
14250 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14251 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14252 for (i = 0; i < nelts; i++)
14254 tree val = VECTOR_CST_ELT (arg2, i);
14255 if (TREE_CODE (val) != INTEGER_CST)
14256 return NULL_TREE;
14258 /* Make sure that the perm value is in an acceptable
14259 range. */
14260 wide_int t = val;
14261 if (wi::gtu_p (t, mask))
14263 need_mask_canon = true;
14264 sel[i] = t.to_uhwi () & mask;
14266 else
14267 sel[i] = t.to_uhwi ();
14269 if (sel[i] < nelts)
14270 all_in_vec1 = false;
14271 else
14272 all_in_vec0 = false;
14274 if ((sel[i] & (nelts-1)) != i)
14275 maybe_identity = false;
14278 if (maybe_identity)
14280 if (all_in_vec0)
14281 return op0;
14282 if (all_in_vec1)
14283 return op1;
14286 if (all_in_vec0)
14287 op1 = op0;
14288 else if (all_in_vec1)
14290 op0 = op1;
14291 for (i = 0; i < nelts; i++)
14292 sel[i] -= nelts;
14293 need_mask_canon = true;
14296 if ((TREE_CODE (op0) == VECTOR_CST
14297 || TREE_CODE (op0) == CONSTRUCTOR)
14298 && (TREE_CODE (op1) == VECTOR_CST
14299 || TREE_CODE (op1) == CONSTRUCTOR))
14301 tree t = fold_vec_perm (type, op0, op1, sel);
14302 if (t != NULL_TREE)
14303 return t;
14306 if (op0 == op1 && !single_arg)
14307 changed = true;
14309 if (need_mask_canon && arg2 == op2)
14311 tree *tsel = XALLOCAVEC (tree, nelts);
14312 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14313 for (i = 0; i < nelts; i++)
14314 tsel[i] = build_int_cst (eltype, sel[i]);
14315 op2 = build_vector (TREE_TYPE (arg2), tsel);
14316 changed = true;
14319 if (changed)
14320 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14322 return NULL_TREE;
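/* For illustration (V4SI): a constant mask of { 0, 1, 2, 3 } selects
   only from the first vector and is the identity, so OP0 is returned;
   { 4, 5, 6, 7 } likewise yields OP1.  An out-of-range index such as
   9 is canonicalized above to 9 & (2 * nelts - 1), i.e. 1.  */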
14324 default:
14325 return NULL_TREE;
14326 } /* switch (code) */
14329 /* Perform constant folding and related simplification of EXPR.
14330 The related simplifications include x*1 => x, x*0 => 0, etc.,
14331 and application of the associative law.
14332 NOP_EXPR conversions may be removed freely (as long as we
14333 are careful not to change the type of the overall expression).
14334 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14335 but we can constant-fold them if they have constant operands. */
14337 #ifdef ENABLE_FOLD_CHECKING
14338 # define fold(x) fold_1 (x)
14339 static tree fold_1 (tree);
14340 static
14341 #endif
14342 tree
14343 fold (tree expr)
14345 const tree t = expr;
14346 enum tree_code code = TREE_CODE (t);
14347 enum tree_code_class kind = TREE_CODE_CLASS (code);
14348 tree tem;
14349 location_t loc = EXPR_LOCATION (expr);
14351 /* Return right away if a constant. */
14352 if (kind == tcc_constant)
14353 return t;
14355 /* CALL_EXPR-like objects with variable numbers of operands are
14356 treated specially. */
14357 if (kind == tcc_vl_exp)
14359 if (code == CALL_EXPR)
14361 tem = fold_call_expr (loc, expr, false);
14362 return tem ? tem : expr;
14364 return expr;
14367 if (IS_EXPR_CODE_CLASS (kind))
14369 tree type = TREE_TYPE (t);
14370 tree op0, op1, op2;
14372 switch (TREE_CODE_LENGTH (code))
14374 case 1:
14375 op0 = TREE_OPERAND (t, 0);
14376 tem = fold_unary_loc (loc, code, type, op0);
14377 return tem ? tem : expr;
14378 case 2:
14379 op0 = TREE_OPERAND (t, 0);
14380 op1 = TREE_OPERAND (t, 1);
14381 tem = fold_binary_loc (loc, code, type, op0, op1);
14382 return tem ? tem : expr;
14383 case 3:
14384 op0 = TREE_OPERAND (t, 0);
14385 op1 = TREE_OPERAND (t, 1);
14386 op2 = TREE_OPERAND (t, 2);
14387 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14388 return tem ? tem : expr;
14389 default:
14390 break;
14394 switch (code)
14396 case ARRAY_REF:
14398 tree op0 = TREE_OPERAND (t, 0);
14399 tree op1 = TREE_OPERAND (t, 1);
14401 if (TREE_CODE (op1) == INTEGER_CST
14402 && TREE_CODE (op0) == CONSTRUCTOR
14403 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14405 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14406 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14407 unsigned HOST_WIDE_INT begin = 0;
14409 /* Find a matching index by means of a binary search. */
14410 while (begin != end)
14412 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14413 tree index = (*elts)[middle].index;
14415 if (TREE_CODE (index) == INTEGER_CST
14416 && tree_int_cst_lt (index, op1))
14417 begin = middle + 1;
14418 else if (TREE_CODE (index) == INTEGER_CST
14419 && tree_int_cst_lt (op1, index))
14420 end = middle;
14421 else if (TREE_CODE (index) == RANGE_EXPR
14422 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14423 begin = middle + 1;
14424 else if (TREE_CODE (index) == RANGE_EXPR
14425 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14426 end = middle;
14427 else
14428 return (*elts)[middle].value;
14432 return t;
14435 /* Return a VECTOR_CST if possible. */
14436 case CONSTRUCTOR:
14438 tree type = TREE_TYPE (t);
14439 if (TREE_CODE (type) != VECTOR_TYPE)
14440 return t;
14442 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14443 unsigned HOST_WIDE_INT idx, pos = 0;
14444 tree value;
14446 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14448 if (!CONSTANT_CLASS_P (value))
14449 return t;
14450 if (TREE_CODE (value) == VECTOR_CST)
14452 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14453 vec[pos++] = VECTOR_CST_ELT (value, i);
14455 else
14456 vec[pos++] = value;
14458 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14459 vec[pos] = build_zero_cst (TREE_TYPE (type));
14461 return build_vector (type, vec);
14464 case CONST_DECL:
14465 return fold (DECL_INITIAL (t));
14467 default:
14468 return t;
14469 } /* switch (code) */
14472 #ifdef ENABLE_FOLD_CHECKING
14473 #undef fold
14475 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14476 hash_table<pointer_hash<const tree_node> > *);
14477 static void fold_check_failed (const_tree, const_tree);
14478 void print_fold_checksum (const_tree);
14480 /* When --enable-checking=fold, compute a digest of EXPR before
14481 and after the actual fold call, to verify that fold did not
14482 accidentally change the original EXPR. */
14484 tree
14485 fold (tree expr)
14487 tree ret;
14488 struct md5_ctx ctx;
14489 unsigned char checksum_before[16], checksum_after[16];
14490 hash_table<pointer_hash<const tree_node> > ht (32);
14492 md5_init_ctx (&ctx);
14493 fold_checksum_tree (expr, &ctx, &ht);
14494 md5_finish_ctx (&ctx, checksum_before);
14495 ht.empty ();
14497 ret = fold_1 (expr);
14499 md5_init_ctx (&ctx);
14500 fold_checksum_tree (expr, &ctx, &ht);
14501 md5_finish_ctx (&ctx, checksum_after);
14503 if (memcmp (checksum_before, checksum_after, 16))
14504 fold_check_failed (expr, ret);
14506 return ret;
14509 void
14510 print_fold_checksum (const_tree expr)
14512 struct md5_ctx ctx;
14513 unsigned char checksum[16], cnt;
14514 hash_table<pointer_hash<const tree_node> > ht (32);
14516 md5_init_ctx (&ctx);
14517 fold_checksum_tree (expr, &ctx, &ht);
14518 md5_finish_ctx (&ctx, checksum);
14519 for (cnt = 0; cnt < 16; ++cnt)
14520 fprintf (stderr, "%02x", checksum[cnt]);
14521 putc ('\n', stderr);
14524 static void
14525 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14527 internal_error ("fold check: original tree changed by fold");
14530 static void
14531 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14532 hash_table<pointer_hash <const tree_node> > *ht)
14534 const tree_node **slot;
14535 enum tree_code code;
14536 union tree_node buf;
14537 int i, len;
14539 recursive_label:
14540 if (expr == NULL)
14541 return;
14542 slot = ht->find_slot (expr, INSERT);
14543 if (*slot != NULL)
14544 return;
14545 *slot = expr;
14546 code = TREE_CODE (expr);
14547 if (TREE_CODE_CLASS (code) == tcc_declaration
14548 && DECL_ASSEMBLER_NAME_SET_P (expr))
14550 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14551 memcpy ((char *) &buf, expr, tree_size (expr));
14552 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14553 expr = (tree) &buf;
14555 else if (TREE_CODE_CLASS (code) == tcc_type
14556 && (TYPE_POINTER_TO (expr)
14557 || TYPE_REFERENCE_TO (expr)
14558 || TYPE_CACHED_VALUES_P (expr)
14559 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14560 || TYPE_NEXT_VARIANT (expr)))
14562 /* Allow these fields to be modified. */
14563 tree tmp;
14564 memcpy ((char *) &buf, expr, tree_size (expr));
14565 expr = tmp = (tree) &buf;
14566 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14567 TYPE_POINTER_TO (tmp) = NULL;
14568 TYPE_REFERENCE_TO (tmp) = NULL;
14569 TYPE_NEXT_VARIANT (tmp) = NULL;
14570 if (TYPE_CACHED_VALUES_P (tmp))
14572 TYPE_CACHED_VALUES_P (tmp) = 0;
14573 TYPE_CACHED_VALUES (tmp) = NULL;
14576 md5_process_bytes (expr, tree_size (expr), ctx);
14577 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14578 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14579 if (TREE_CODE_CLASS (code) != tcc_type
14580 && TREE_CODE_CLASS (code) != tcc_declaration
14581 && code != TREE_LIST
14582 && code != SSA_NAME
14583 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14584 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14585 switch (TREE_CODE_CLASS (code))
14587 case tcc_constant:
14588 switch (code)
14590 case STRING_CST:
14591 md5_process_bytes (TREE_STRING_POINTER (expr),
14592 TREE_STRING_LENGTH (expr), ctx);
14593 break;
14594 case COMPLEX_CST:
14595 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14596 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14597 break;
14598 case VECTOR_CST:
14599 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14600 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14601 break;
14602 default:
14603 break;
14605 break;
14606 case tcc_exceptional:
14607 switch (code)
14609 case TREE_LIST:
14610 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14611 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14612 expr = TREE_CHAIN (expr);
14613 goto recursive_label;
14614 break;
14615 case TREE_VEC:
14616 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14617 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14618 break;
14619 default:
14620 break;
14622 break;
14623 case tcc_expression:
14624 case tcc_reference:
14625 case tcc_comparison:
14626 case tcc_unary:
14627 case tcc_binary:
14628 case tcc_statement:
14629 case tcc_vl_exp:
14630 len = TREE_OPERAND_LENGTH (expr);
14631 for (i = 0; i < len; ++i)
14632 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14633 break;
14634 case tcc_declaration:
14635 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14636 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14637 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14639 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14640 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14641 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14642 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14643 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14646 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14648 if (TREE_CODE (expr) == FUNCTION_DECL)
14650 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14651 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14653 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14655 break;
14656 case tcc_type:
14657 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14658 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14659 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14660 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14661 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14662 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14663 if (INTEGRAL_TYPE_P (expr)
14664 || SCALAR_FLOAT_TYPE_P (expr))
14666 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14667 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14669 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14670 if (TREE_CODE (expr) == RECORD_TYPE
14671 || TREE_CODE (expr) == UNION_TYPE
14672 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14673 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14674 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14675 break;
14676 default:
14677 break;
14681 /* Helper function for outputting the checksum of a tree T. When
14682 debugging with gdb, you can "define mynext" to be "next" followed
14683 by "call debug_fold_checksum (op0)", then just trace down till the
14684 outputs differ. */
14686 DEBUG_FUNCTION void
14687 debug_fold_checksum (const_tree t)
14689 int i;
14690 unsigned char checksum[16];
14691 struct md5_ctx ctx;
14692 hash_table<pointer_hash<const tree_node> > ht (32);
14694 md5_init_ctx (&ctx);
14695 fold_checksum_tree (t, &ctx, &ht);
14696 md5_finish_ctx (&ctx, checksum);
14697 ht.empty ();
14699 for (i = 0; i < 16; i++)
14700 fprintf (stderr, "%d ", checksum[i]);
14702 fprintf (stderr, "\n");
14705 #endif
14707 /* Fold a unary tree expression with code CODE of type TYPE with an
14708 operand OP0. LOC is the location of the resulting expression.
14709 Return a folded expression if successful. Otherwise, return a tree
14710 expression with code CODE of type TYPE with an operand OP0. */
14712 tree
14713 fold_build1_stat_loc (location_t loc,
14714 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14716 tree tem;
14717 #ifdef ENABLE_FOLD_CHECKING
14718 unsigned char checksum_before[16], checksum_after[16];
14719 struct md5_ctx ctx;
14720 hash_table<pointer_hash<const tree_node> > ht (32);
14722 md5_init_ctx (&ctx);
14723 fold_checksum_tree (op0, &ctx, &ht);
14724 md5_finish_ctx (&ctx, checksum_before);
14725 ht.empty ();
14726 #endif
14728 tem = fold_unary_loc (loc, code, type, op0);
14729 if (!tem)
14730 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14732 #ifdef ENABLE_FOLD_CHECKING
14733 md5_init_ctx (&ctx);
14734 fold_checksum_tree (op0, &ctx, &ht);
14735 md5_finish_ctx (&ctx, checksum_after);
14737 if (memcmp (checksum_before, checksum_after, 16))
14738 fold_check_failed (op0, tem);
14739 #endif
14740 return tem;
14743 /* Fold a binary tree expression with code CODE of type TYPE with
14744 operands OP0 and OP1. LOC is the location of the resulting
14745 expression. Return a folded expression if successful. Otherwise,
14746 return a tree expression with code CODE of type TYPE with operands
14747 OP0 and OP1. */
14749 tree
14750 fold_build2_stat_loc (location_t loc,
14751 enum tree_code code, tree type, tree op0, tree op1
14752 MEM_STAT_DECL)
14754 tree tem;
14755 #ifdef ENABLE_FOLD_CHECKING
14756 unsigned char checksum_before_op0[16],
14757 checksum_before_op1[16],
14758 checksum_after_op0[16],
14759 checksum_after_op1[16];
14760 struct md5_ctx ctx;
14761 hash_table<pointer_hash<const tree_node> > ht (32);
14763 md5_init_ctx (&ctx);
14764 fold_checksum_tree (op0, &ctx, &ht);
14765 md5_finish_ctx (&ctx, checksum_before_op0);
14766 ht.empty ();
14768 md5_init_ctx (&ctx);
14769 fold_checksum_tree (op1, &ctx, &ht);
14770 md5_finish_ctx (&ctx, checksum_before_op1);
14771 ht.empty ();
14772 #endif
14774 tem = fold_binary_loc (loc, code, type, op0, op1);
14775 if (!tem)
14776 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14778 #ifdef ENABLE_FOLD_CHECKING
14779 md5_init_ctx (&ctx);
14780 fold_checksum_tree (op0, &ctx, &ht);
14781 md5_finish_ctx (&ctx, checksum_after_op0);
14782 ht.empty ();
14784 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14785 fold_check_failed (op0, tem);
14787 md5_init_ctx (&ctx);
14788 fold_checksum_tree (op1, &ctx, &ht);
14789 md5_finish_ctx (&ctx, checksum_after_op1);
14791 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14792 fold_check_failed (op1, tem);
14793 #endif
14794 return tem;
14797 /* Fold a ternary tree expression with code CODE of type TYPE with
14798 operands OP0, OP1, and OP2. Return a folded expression if
14799 successful. Otherwise, return a tree expression with code CODE of
14800 type TYPE with operands OP0, OP1, and OP2. */
14802 tree
14803 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14804 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14806 tree tem;
14807 #ifdef ENABLE_FOLD_CHECKING
14808 unsigned char checksum_before_op0[16],
14809 checksum_before_op1[16],
14810 checksum_before_op2[16],
14811 checksum_after_op0[16],
14812 checksum_after_op1[16],
14813 checksum_after_op2[16];
14814 struct md5_ctx ctx;
14815 hash_table<pointer_hash<const tree_node> > ht (32);
14817 md5_init_ctx (&ctx);
14818 fold_checksum_tree (op0, &ctx, &ht);
14819 md5_finish_ctx (&ctx, checksum_before_op0);
14820 ht.empty ();
14822 md5_init_ctx (&ctx);
14823 fold_checksum_tree (op1, &ctx, &ht);
14824 md5_finish_ctx (&ctx, checksum_before_op1);
14825 ht.empty ();
14827 md5_init_ctx (&ctx);
14828 fold_checksum_tree (op2, &ctx, &ht);
14829 md5_finish_ctx (&ctx, checksum_before_op2);
14830 ht.empty ();
14831 #endif
14833 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14834 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14835 if (!tem)
14836 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14838 #ifdef ENABLE_FOLD_CHECKING
14839 md5_init_ctx (&ctx);
14840 fold_checksum_tree (op0, &ctx, &ht);
14841 md5_finish_ctx (&ctx, checksum_after_op0);
14842 ht.empty ();
14844 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14845 fold_check_failed (op0, tem);
14847 md5_init_ctx (&ctx);
14848 fold_checksum_tree (op1, &ctx, &ht);
14849 md5_finish_ctx (&ctx, checksum_after_op1);
14850 ht.empty ();
14852 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14853 fold_check_failed (op1, tem);
14855 md5_init_ctx (&ctx);
14856 fold_checksum_tree (op2, &ctx, &ht);
14857 md5_finish_ctx (&ctx, checksum_after_op2);
14859 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14860 fold_check_failed (op2, tem);
14861 #endif
14862 return tem;
14865 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14866 arguments in ARGARRAY, and a null static chain.
14867 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14868 of type TYPE from the given operands as constructed by build_call_array. */
14870 tree
14871 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14872 int nargs, tree *argarray)
14874 tree tem;
14875 #ifdef ENABLE_FOLD_CHECKING
14876 unsigned char checksum_before_fn[16],
14877 checksum_before_arglist[16],
14878 checksum_after_fn[16],
14879 checksum_after_arglist[16];
14880 struct md5_ctx ctx;
14881 hash_table<pointer_hash<const tree_node> > ht (32);
14882 int i;
14884 md5_init_ctx (&ctx);
14885 fold_checksum_tree (fn, &ctx, &ht);
14886 md5_finish_ctx (&ctx, checksum_before_fn);
14887 ht.empty ();
14889 md5_init_ctx (&ctx);
14890 for (i = 0; i < nargs; i++)
14891 fold_checksum_tree (argarray[i], &ctx, &ht);
14892 md5_finish_ctx (&ctx, checksum_before_arglist);
14893 ht.empty ();
14894 #endif
14896 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14898 #ifdef ENABLE_FOLD_CHECKING
14899 md5_init_ctx (&ctx);
14900 fold_checksum_tree (fn, &ctx, &ht);
14901 md5_finish_ctx (&ctx, checksum_after_fn);
14902 ht.empty ();
14904 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14905 fold_check_failed (fn, tem);
14907 md5_init_ctx (&ctx);
14908 for (i = 0; i < nargs; i++)
14909 fold_checksum_tree (argarray[i], &ctx, &ht);
14910 md5_finish_ctx (&ctx, checksum_after_arglist);
14912 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14913 fold_check_failed (NULL_TREE, tem);
14914 #endif
14915 return tem;
14918 /* Perform constant folding and related simplification of initializer
14919 expression EXPR. These behave identically to "fold_buildN" but ignore
14920 potential run-time traps and exceptions that fold must preserve. */
14922 #define START_FOLD_INIT \
14923 int saved_signaling_nans = flag_signaling_nans;\
14924 int saved_trapping_math = flag_trapping_math;\
14925 int saved_rounding_math = flag_rounding_math;\
14926 int saved_trapv = flag_trapv;\
14927 int saved_folding_initializer = folding_initializer;\
14928 flag_signaling_nans = 0;\
14929 flag_trapping_math = 0;\
14930 flag_rounding_math = 0;\
14931 flag_trapv = 0;\
14932 folding_initializer = 1;
14934 #define END_FOLD_INIT \
14935 flag_signaling_nans = saved_signaling_nans;\
14936 flag_trapping_math = saved_trapping_math;\
14937 flag_rounding_math = saved_rounding_math;\
14938 flag_trapv = saved_trapv;\
14939 folding_initializer = saved_folding_initializer;
14941 tree
14942 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14943 tree type, tree op)
14945 tree result;
14946 START_FOLD_INIT;
14948 result = fold_build1_loc (loc, code, type, op);
14950 END_FOLD_INIT;
14951 return result;
14954 tree
14955 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14956 tree type, tree op0, tree op1)
14958 tree result;
14959 START_FOLD_INIT;
14961 result = fold_build2_loc (loc, code, type, op0, op1);
14963 END_FOLD_INIT;
14964 return result;
14967 tree
14968 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14969 int nargs, tree *argarray)
14971 tree result;
14972 START_FOLD_INIT;
14974 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14976 END_FOLD_INIT;
14977 return result;
14980 #undef START_FOLD_INIT
14981 #undef END_FOLD_INIT
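/* For illustration: a static initializer such as

     static const double d = 1.0 / 3.0;

   is folded through these wrappers, so flags like -ftrapping-math or
   -ftrapv that would normally make fold preserve the runtime
   operation are temporarily suppressed.  */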
14983 /* Determine if first argument is a multiple of second argument. Return 0 if
14984 it is not, or if we cannot easily determine it to be.
14986 An example of the sort of thing we care about (at this point; this routine
14987 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14988 fold cases do now) is discovering that
14990 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14992 is a multiple of
14994 SAVE_EXPR (J * 8)
14996 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14998 This code also handles discovering that
15000 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15002 is a multiple of 8 so we don't have to worry about dealing with a
15003 possible remainder.
15005 Note that we *look* inside a SAVE_EXPR only to determine how it was
15006 calculated; it is not safe for fold to do much of anything else with the
15007 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15008 at run time. For example, the latter example above *cannot* be implemented
15009 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15010 evaluation time of the original SAVE_EXPR is not necessarily the same at
15011 the time the new expression is evaluated. The only optimization of this
15012 sort that would be valid is changing
15014 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15016 divided by 8 to
15018 SAVE_EXPR (I) * SAVE_EXPR (J)
15020 (where the same SAVE_EXPR (J) is used in the original and the
15021 transformed version). */
15023 int
15024 multiple_of_p (tree type, const_tree top, const_tree bottom)
15026 if (operand_equal_p (top, bottom, 0))
15027 return 1;
15029 if (TREE_CODE (type) != INTEGER_TYPE)
15030 return 0;
15032 switch (TREE_CODE (top))
15034 case BIT_AND_EXPR:
15035 /* Bitwise and provides a power of two multiple. If the mask is
15036 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15037 if (!integer_pow2p (bottom))
15038 return 0;
15039 /* FALLTHRU */
15041 case MULT_EXPR:
15042 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15043 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15045 case PLUS_EXPR:
15046 case MINUS_EXPR:
15047 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15048 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15050 case LSHIFT_EXPR:
15051 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15053 tree op1, t1;
15055 op1 = TREE_OPERAND (top, 1);
15056 /* const_binop may not detect overflow correctly,
15057 so check for it explicitly here. */
15058 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15059 && 0 != (t1 = fold_convert (type,
15060 const_binop (LSHIFT_EXPR,
15061 size_one_node,
15062 op1)))
15063 && !TREE_OVERFLOW (t1))
15064 return multiple_of_p (type, t1, bottom);
15066 return 0;
15068 case NOP_EXPR:
15069 /* Can't handle conversions from non-integral or wider integral type. */
15070 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15071 || (TYPE_PRECISION (type)
15072 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15073 return 0;
15075 /* ... fall through ... */
15077 case SAVE_EXPR:
15078 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15080 case COND_EXPR:
15081 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15082 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15084 case INTEGER_CST:
15085 if (TREE_CODE (bottom) != INTEGER_CST
15086 || integer_zerop (bottom)
15087 || (TYPE_UNSIGNED (type)
15088 && (tree_int_cst_sgn (top) < 0
15089 || tree_int_cst_sgn (bottom) < 0)))
15090 return 0;
15091 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15092 SIGNED);
15094 default:
15095 return 0;
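/* For illustration: multiple_of_p (sizetype, I * 4 + 8, 4) returns 1
   through the PLUS_EXPR, MULT_EXPR and INTEGER_CST cases, whereas
   multiple_of_p (sizetype, I * 4 + 2, 4) returns 0 because the
   constant 2 is not a multiple of 4.  */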
15099 /* Return true if CODE or TYPE is known to be non-negative. */
15101 static bool
15102 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15104 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15105 && truth_value_p (code))
15106 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15107 have a signed:1 type (where the values are -1 and 0). */
15108 return true;
15109 return false;
15112 /* Return true if (CODE OP0) is known to be non-negative. If the return
15113 value is based on the assumption that signed overflow is undefined,
15114 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15115 *STRICT_OVERFLOW_P. */
15117 bool
15118 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15119 bool *strict_overflow_p)
15121 if (TYPE_UNSIGNED (type))
15122 return true;
15124 switch (code)
15126 case ABS_EXPR:
15127 /* We can't return 1 if flag_wrapv is set because
15128 ABS_EXPR<INT_MIN> = INT_MIN. */
15129 if (!INTEGRAL_TYPE_P (type))
15130 return true;
15131 if (TYPE_OVERFLOW_UNDEFINED (type))
15133 *strict_overflow_p = true;
15134 return true;
15136 break;
15138 case NON_LVALUE_EXPR:
15139 case FLOAT_EXPR:
15140 case FIX_TRUNC_EXPR:
15141 return tree_expr_nonnegative_warnv_p (op0,
15142 strict_overflow_p);
15144 case NOP_EXPR:
15146 tree inner_type = TREE_TYPE (op0);
15147 tree outer_type = type;
15149 if (TREE_CODE (outer_type) == REAL_TYPE)
15151 if (TREE_CODE (inner_type) == REAL_TYPE)
15152 return tree_expr_nonnegative_warnv_p (op0,
15153 strict_overflow_p);
15154 if (INTEGRAL_TYPE_P (inner_type))
15156 if (TYPE_UNSIGNED (inner_type))
15157 return true;
15158 return tree_expr_nonnegative_warnv_p (op0,
15159 strict_overflow_p);
15162 else if (INTEGRAL_TYPE_P (outer_type))
15164 if (TREE_CODE (inner_type) == REAL_TYPE)
15165 return tree_expr_nonnegative_warnv_p (op0,
15166 strict_overflow_p);
15167 if (INTEGRAL_TYPE_P (inner_type))
15168 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15169 && TYPE_UNSIGNED (inner_type);
15172 break;
15174 default:
15175 return tree_simple_nonnegative_warnv_p (code, type);
15178 /* We don't know sign of `t', so be conservative and return false. */
15179 return false;
15182 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15183 value is based on the assumption that signed overflow is undefined,
15184 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15185 *STRICT_OVERFLOW_P. */
15187 bool
15188 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15189 tree op1, bool *strict_overflow_p)
15191 if (TYPE_UNSIGNED (type))
15192 return true;
15194 switch (code)
15196 case POINTER_PLUS_EXPR:
15197 case PLUS_EXPR:
15198 if (FLOAT_TYPE_P (type))
15199 return (tree_expr_nonnegative_warnv_p (op0,
15200 strict_overflow_p)
15201 && tree_expr_nonnegative_warnv_p (op1,
15202 strict_overflow_p));
15204 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15205 both unsigned and at least 2 bits shorter than the result. */
15206 if (TREE_CODE (type) == INTEGER_TYPE
15207 && TREE_CODE (op0) == NOP_EXPR
15208 && TREE_CODE (op1) == NOP_EXPR)
15210 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15211 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15212 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15213 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15215 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15216 TYPE_PRECISION (inner2)) + 1;
15217 return prec < TYPE_PRECISION (type);
15220 break;
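/* For illustration (assuming 32-bit int): with unsigned short X and Y,
   "(int) x + (int) y" needs at most 17 bits and 17 < 32, so it is
   known non-negative; with unsigned int operands the test fails,
   since 33 < 32 does not hold.  */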
15222 case MULT_EXPR:
15223 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15225 /* x * x is always non-negative for floating point x
15226 or without overflow. */
15227 if (operand_equal_p (op0, op1, 0)
15228 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15229 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15231 if (TYPE_OVERFLOW_UNDEFINED (type))
15232 *strict_overflow_p = true;
15233 return true;
15237 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15238 both unsigned and the sum of their precisions is smaller than the result's. */
15239 if (TREE_CODE (type) == INTEGER_TYPE
15240 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15241 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15243 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15244 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15245 : TREE_TYPE (op0);
15246 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15247 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15248 : TREE_TYPE (op1);
15250 bool unsigned0 = TYPE_UNSIGNED (inner0);
15251 bool unsigned1 = TYPE_UNSIGNED (inner1);
15253 if (TREE_CODE (op0) == INTEGER_CST)
15254 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15256 if (TREE_CODE (op1) == INTEGER_CST)
15257 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15259 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15260 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15262 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15263 ? tree_int_cst_min_precision (op0, UNSIGNED)
15264 : TYPE_PRECISION (inner0);
15266 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15267 ? tree_int_cst_min_precision (op1, UNSIGNED)
15268 : TYPE_PRECISION (inner1);
15270 return precision0 + precision1 < TYPE_PRECISION (type);
15273 return false;
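/* For illustration (assuming 32-bit int): with unsigned char A and B,
   "(int) a * (int) b" is non-negative since 8 + 8 < 32; with unsigned
   short operands 16 + 16 < 32 fails, and indeed 65535 * 65535 wraps
   to a negative int.  */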
15275 case BIT_AND_EXPR:
15276 case MAX_EXPR:
15277 return (tree_expr_nonnegative_warnv_p (op0,
15278 strict_overflow_p)
15279 || tree_expr_nonnegative_warnv_p (op1,
15280 strict_overflow_p));
15282 case BIT_IOR_EXPR:
15283 case BIT_XOR_EXPR:
15284 case MIN_EXPR:
15285 case RDIV_EXPR:
15286 case TRUNC_DIV_EXPR:
15287 case CEIL_DIV_EXPR:
15288 case FLOOR_DIV_EXPR:
15289 case ROUND_DIV_EXPR:
15290 return (tree_expr_nonnegative_warnv_p (op0,
15291 strict_overflow_p)
15292 && tree_expr_nonnegative_warnv_p (op1,
15293 strict_overflow_p));
15295 case TRUNC_MOD_EXPR:
15296 case CEIL_MOD_EXPR:
15297 case FLOOR_MOD_EXPR:
15298 case ROUND_MOD_EXPR:
15299 return tree_expr_nonnegative_warnv_p (op0,
15300 strict_overflow_p);
15301 default:
15302 return tree_simple_nonnegative_warnv_p (code, type);
15305 /* We don't know sign of `t', so be conservative and return false. */
15306 return false;
15309 /* Return true if T is known to be non-negative. If the return
15310 value is based on the assumption that signed overflow is undefined,
15311 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15312 *STRICT_OVERFLOW_P. */
15314 bool
15315 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15317 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15318 return true;
15320 switch (TREE_CODE (t))
15322 case INTEGER_CST:
15323 return tree_int_cst_sgn (t) >= 0;
15325 case REAL_CST:
15326 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15328 case FIXED_CST:
15329 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15331 case COND_EXPR:
15332 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15333 strict_overflow_p)
15334 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15335 strict_overflow_p));
15336 default:
15337 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15338 TREE_TYPE (t));
15340 /* We don't know sign of `t', so be conservative and return false. */
15341 return false;
15344 /* Return true if T is known to be non-negative. If the return
15345 value is based on the assumption that signed overflow is undefined,
15346 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15347 *STRICT_OVERFLOW_P. */
15349 bool
15350 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15351 tree arg0, tree arg1, bool *strict_overflow_p)
15353 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15354 switch (DECL_FUNCTION_CODE (fndecl))
15356 CASE_FLT_FN (BUILT_IN_ACOS):
15357 CASE_FLT_FN (BUILT_IN_ACOSH):
15358 CASE_FLT_FN (BUILT_IN_CABS):
15359 CASE_FLT_FN (BUILT_IN_COSH):
15360 CASE_FLT_FN (BUILT_IN_ERFC):
15361 CASE_FLT_FN (BUILT_IN_EXP):
15362 CASE_FLT_FN (BUILT_IN_EXP10):
15363 CASE_FLT_FN (BUILT_IN_EXP2):
15364 CASE_FLT_FN (BUILT_IN_FABS):
15365 CASE_FLT_FN (BUILT_IN_FDIM):
15366 CASE_FLT_FN (BUILT_IN_HYPOT):
15367 CASE_FLT_FN (BUILT_IN_POW10):
15368 CASE_INT_FN (BUILT_IN_FFS):
15369 CASE_INT_FN (BUILT_IN_PARITY):
15370 CASE_INT_FN (BUILT_IN_POPCOUNT):
15371 CASE_INT_FN (BUILT_IN_CLZ):
15372 CASE_INT_FN (BUILT_IN_CLRSB):
15373 case BUILT_IN_BSWAP32:
15374 case BUILT_IN_BSWAP64:
15375 /* Always true. */
15376 return true;
15378 CASE_FLT_FN (BUILT_IN_SQRT):
15379 /* sqrt(-0.0) is -0.0. */
15380 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15381 return true;
15382 return tree_expr_nonnegative_warnv_p (arg0,
15383 strict_overflow_p);
15385 CASE_FLT_FN (BUILT_IN_ASINH):
15386 CASE_FLT_FN (BUILT_IN_ATAN):
15387 CASE_FLT_FN (BUILT_IN_ATANH):
15388 CASE_FLT_FN (BUILT_IN_CBRT):
15389 CASE_FLT_FN (BUILT_IN_CEIL):
15390 CASE_FLT_FN (BUILT_IN_ERF):
15391 CASE_FLT_FN (BUILT_IN_EXPM1):
15392 CASE_FLT_FN (BUILT_IN_FLOOR):
15393 CASE_FLT_FN (BUILT_IN_FMOD):
15394 CASE_FLT_FN (BUILT_IN_FREXP):
15395 CASE_FLT_FN (BUILT_IN_ICEIL):
15396 CASE_FLT_FN (BUILT_IN_IFLOOR):
15397 CASE_FLT_FN (BUILT_IN_IRINT):
15398 CASE_FLT_FN (BUILT_IN_IROUND):
15399 CASE_FLT_FN (BUILT_IN_LCEIL):
15400 CASE_FLT_FN (BUILT_IN_LDEXP):
15401 CASE_FLT_FN (BUILT_IN_LFLOOR):
15402 CASE_FLT_FN (BUILT_IN_LLCEIL):
15403 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15404 CASE_FLT_FN (BUILT_IN_LLRINT):
15405 CASE_FLT_FN (BUILT_IN_LLROUND):
15406 CASE_FLT_FN (BUILT_IN_LRINT):
15407 CASE_FLT_FN (BUILT_IN_LROUND):
15408 CASE_FLT_FN (BUILT_IN_MODF):
15409 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15410 CASE_FLT_FN (BUILT_IN_RINT):
15411 CASE_FLT_FN (BUILT_IN_ROUND):
15412 CASE_FLT_FN (BUILT_IN_SCALB):
15413 CASE_FLT_FN (BUILT_IN_SCALBLN):
15414 CASE_FLT_FN (BUILT_IN_SCALBN):
15415 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15416 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15417 CASE_FLT_FN (BUILT_IN_SINH):
15418 CASE_FLT_FN (BUILT_IN_TANH):
15419 CASE_FLT_FN (BUILT_IN_TRUNC):
15420 /* True if the 1st argument is nonnegative. */
15421 return tree_expr_nonnegative_warnv_p (arg0,
15422 strict_overflow_p);
15424 CASE_FLT_FN (BUILT_IN_FMAX):
15425 /* True if the 1st OR the 2nd argument is nonnegative. */
15426 return (tree_expr_nonnegative_warnv_p (arg0,
15427 strict_overflow_p)
15428 || (tree_expr_nonnegative_warnv_p (arg1,
15429 strict_overflow_p)));
15431 CASE_FLT_FN (BUILT_IN_FMIN):
15432 /* True if the 1st AND 2nd arguments are nonnegative. */
15433 return (tree_expr_nonnegative_warnv_p (arg0,
15434 strict_overflow_p)
15435 && (tree_expr_nonnegative_warnv_p (arg1,
15436 strict_overflow_p)));
15438 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15439 /* True if the 2nd argument is nonnegative. */
15440 return tree_expr_nonnegative_warnv_p (arg1,
15441 strict_overflow_p);
15443 CASE_FLT_FN (BUILT_IN_POWI):
15444 /* True if the 1st argument is nonnegative or the second
15445 argument is an even integer. */
15446 if (TREE_CODE (arg1) == INTEGER_CST
15447 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15448 return true;
15449 return tree_expr_nonnegative_warnv_p (arg0,
15450 strict_overflow_p);
15452 CASE_FLT_FN (BUILT_IN_POW):
15453 /* True if the 1st argument is nonnegative or the second
15454 argument is an even integer valued real. */
15455 if (TREE_CODE (arg1) == REAL_CST)
15457 REAL_VALUE_TYPE c;
15458 HOST_WIDE_INT n;
15460 c = TREE_REAL_CST (arg1);
15461 n = real_to_integer (&c);
15462 if ((n & 1) == 0)
15464 REAL_VALUE_TYPE cint;
15465 real_from_integer (&cint, VOIDmode, n, SIGNED);
15466 if (real_identical (&c, &cint))
15467 return true;
15470 return tree_expr_nonnegative_warnv_p (arg0,
15471 strict_overflow_p);
15473 default:
15474 break;
15476 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15477 type);
15480 /* Return true if T is known to be non-negative. If the return
15481 value is based on the assumption that signed overflow is undefined,
15482 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15483 *STRICT_OVERFLOW_P. */
15485 static bool
15486 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15488 enum tree_code code = TREE_CODE (t);
15489 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15490 return true;
15492 switch (code)
15494 case TARGET_EXPR:
15496 tree temp = TARGET_EXPR_SLOT (t);
15497 t = TARGET_EXPR_INITIAL (t);
15499 /* If the initializer is non-void, then it's a normal expression
15500 that will be assigned to the slot. */
15501 if (!VOID_TYPE_P (t))
15502 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15504 /* Otherwise, the initializer sets the slot in some way. One common
15505 way is an assignment statement at the end of the initializer. */
15506 while (1)
15508 if (TREE_CODE (t) == BIND_EXPR)
15509 t = expr_last (BIND_EXPR_BODY (t));
15510 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15511 || TREE_CODE (t) == TRY_CATCH_EXPR)
15512 t = expr_last (TREE_OPERAND (t, 0));
15513 else if (TREE_CODE (t) == STATEMENT_LIST)
15514 t = expr_last (t);
15515 else
15516 break;
15518 if (TREE_CODE (t) == MODIFY_EXPR
15519 && TREE_OPERAND (t, 0) == temp)
15520 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15521 strict_overflow_p);
15523 return false;
15526 case CALL_EXPR:
15528 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15529 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15531 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15532 get_callee_fndecl (t),
15533 arg0,
15534 arg1,
15535 strict_overflow_p);
15537 case COMPOUND_EXPR:
15538 case MODIFY_EXPR:
15539 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15540 strict_overflow_p);
15541 case BIND_EXPR:
15542 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15543 strict_overflow_p);
15544 case SAVE_EXPR:
15545 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15546 strict_overflow_p);
15548 default:
15549 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15550 TREE_TYPE (t));
15553 /* We don't know sign of `t', so be conservative and return false. */
15554 return false;
15557 /* Return true if T is known to be non-negative. If the return
15558 value is based on the assumption that signed overflow is undefined,
15559 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15560 *STRICT_OVERFLOW_P. */
15562 bool
15563 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15565 enum tree_code code;
15566 if (t == error_mark_node)
15567 return false;
15569 code = TREE_CODE (t);
15570 switch (TREE_CODE_CLASS (code))
15572 case tcc_binary:
15573 case tcc_comparison:
15574 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15575 TREE_TYPE (t),
15576 TREE_OPERAND (t, 0),
15577 TREE_OPERAND (t, 1),
15578 strict_overflow_p);
15580 case tcc_unary:
15581 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15582 TREE_TYPE (t),
15583 TREE_OPERAND (t, 0),
15584 strict_overflow_p);
15586 case tcc_constant:
15587 case tcc_declaration:
15588 case tcc_reference:
15589 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15591 default:
15592 break;
15595 switch (code)
15597 case TRUTH_AND_EXPR:
15598 case TRUTH_OR_EXPR:
15599 case TRUTH_XOR_EXPR:
15600 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15601 TREE_TYPE (t),
15602 TREE_OPERAND (t, 0),
15603 TREE_OPERAND (t, 1),
15604 strict_overflow_p);
15605 case TRUTH_NOT_EXPR:
15606 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15607 TREE_TYPE (t),
15608 TREE_OPERAND (t, 0),
15609 strict_overflow_p);
15611 case COND_EXPR:
15612 case CONSTRUCTOR:
15613 case OBJ_TYPE_REF:
15614 case ASSERT_EXPR:
15615 case ADDR_EXPR:
15616 case WITH_SIZE_EXPR:
15617 case SSA_NAME:
15618 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15620 default:
15621 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15625 /* Return true if `t' is known to be non-negative. Handle warnings
15626 about undefined signed overflow. */
15628 bool
15629 tree_expr_nonnegative_p (tree t)
15631 bool ret, strict_overflow_p;
15633 strict_overflow_p = false;
15634 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15635 if (strict_overflow_p)
15636 fold_overflow_warning (("assuming signed overflow does not occur when "
15637 "determining that expression is always "
15638 "non-negative"),
15639 WARN_STRICT_OVERFLOW_MISC);
15640 return ret;
15644 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15645 For floating point we further ensure that T is not denormal.
15646 Similar logic is present in nonzero_address in rtlanal.h.
15648 If the return value is based on the assumption that signed overflow
15649 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15650 change *STRICT_OVERFLOW_P. */
15652 bool
15653 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15654 bool *strict_overflow_p)
15656 switch (code)
15658 case ABS_EXPR:
15659 return tree_expr_nonzero_warnv_p (op0,
15660 strict_overflow_p);
15662 case NOP_EXPR:
15664 tree inner_type = TREE_TYPE (op0);
15665 tree outer_type = type;
15667 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15668 && tree_expr_nonzero_warnv_p (op0,
15669 strict_overflow_p));
15671 break;
15673 case NON_LVALUE_EXPR:
15674 return tree_expr_nonzero_warnv_p (op0,
15675 strict_overflow_p);
15677 default:
15678 break;
15681 return false;

/* Return true when the binary expression (CODE OP0 OP1) is known to be
   nonzero.  For floating point we further ensure that the result is
   not denormal.  Similar logic is present in nonzero_address in
   rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* In the presence of negative values it is hard
             to say anything.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of the operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
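
/* Editorial usage sketch (not in the original source, compiled out):
   the PLUS_EXPR case above, assuming signed int overflow is undefined
   (the default, i.e. no -fwrapv).  Both operands are nonnegative and
   at least one is nonzero, so the sum is proven nonzero without even
   setting *STRICT_OVERFLOW_P.  */
#if 0
static bool
example_sum_nonzero (void)
{
  bool strict_overflow_p = false;
  tree one = build_int_cst (integer_type_node, 1);
  tree two = build_int_cst (integer_type_node, 2);
  return tree_binary_nonzero_warnv_p (PLUS_EXPR, integer_type_node,
                                      one, two, &strict_overflow_p);
}
#endif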

/* Return true when T is known to be nonzero; most importantly, when T
   is an address.  For floating point we further ensure that T is not
   denormal.  Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);

        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* For objects in symbol table check if we know they are non-zero.
           Don't do anything for variables and functions before symtab is built;
           it is quite possible that they will be declared weak later.  */
        if (DECL_P (base) && decl_in_symtab_p (base))
          {
            struct symtab_node *symbol;

            symbol = symtab_node::get_create (base);
            if (symbol)
              return symbol->nonzero_address ();
            else
              return false;
          }

        /* Function local objects are never NULL.  */
        if (DECL_P (base)
            && (DECL_CONTEXT (base)
                && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
          return true;

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
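
/* Editorial usage sketch (not in the original source, compiled out):
   the "function local objects are never NULL" case above.  The
   VAR_DECL built here is hypothetical and assumes we are inside a
   function, i.e. current_function_decl is non-NULL.  */
#if 0
static bool
example_local_address_nonzero (void)
{
  bool strict_overflow_p = false;
  tree x = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                       get_identifier ("x"), integer_type_node);
  DECL_CONTEXT (x) = current_function_decl;
  /* &x is nonzero: an auto variable always has a valid address.  */
  return tree_single_nonzero_warnv_p (build_fold_addr_expr (x),
                                      &strict_overflow_p);
}
#endif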

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
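
/* Editorial usage sketch (not in the original source, compiled out):
   both helpers succeed only when folding yields a TREE_CONSTANT
   result; any non-constant operand makes them return NULL_TREE.  */
#if 0
static bool
example_fold_to_constant (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* 2 + 3 folds to the INTEGER_CST 5, -(2) to the INTEGER_CST -2.  */
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                      two, three);
  tree neg = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, two);
  return sum != NULL_TREE && neg != NULL_TREE;
}
#endif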

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
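
/* Editorial usage sketch (not in the original source, compiled out):
   reading "abc"[1].  The STRING_CST, array type and ARRAY_REF are
   built by hand here to mimic what a front end would produce; the
   call then folds the access to the INTEGER_CST 'b'.  */
#if 0
static tree
example_read_from_string (void)
{
  tree str = build_string (4, "abc");          /* includes the NUL  */
  tree domain = build_index_type (size_int (3));
  TREE_TYPE (str) = build_array_type (char_type_node, domain);
  tree ref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
                     NULL_TREE, NULL_TREE);
  return fold_read_from_constant_string (ref); /* INTEGER_CST 'b'  */
}
#endif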

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        bool overflow;
        wide_int val = wi::neg (arg0, &overflow);
        t = force_fit_type (type, val, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
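
/* Editorial usage sketch (not in the original source, compiled out):
   negating the most negative int overflows, and force_fit_type then
   marks the resulting INTEGER_CST with TREE_OVERFLOW.  */
#if 0
static tree
example_negate_int_min (void)
{
  tree m = TYPE_MIN_VALUE (integer_type_node);
  tree n = fold_negate_const (m, integer_type_node);
  /* TREE_OVERFLOW (n) is set; the value itself wraps back to INT_MIN.  */
  return n;
}
#endif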

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (!wi::neg_p (arg0, TYPE_SIGN (type)))
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            wide_int val = wi::neg (arg0, &overflow);
            t = force_fit_type (type, val, -1,
                                overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
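
/* Editorial usage sketch (not in the original source, compiled out):
   abs (-5) folds to 5; only negating the minimum value of the type
   can set TREE_OVERFLOW on the result.  */
#if 0
static tree
example_abs_const (void)
{
  return fold_abs_const (build_int_cst (integer_type_node, -5),
                         integer_type_node);
}
#endif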

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else
        result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
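
/* Editorial usage sketch (not in the original source, compiled out):
   1.0 < 2.0 folds to boolean true via real_compare.  Had either
   operand been a NaN, LT_EXPR would have folded to false only with
   -fno-trapping-math and otherwise stayed unfolded (NULL_TREE).  */
#if 0
static tree
example_real_relational (void)
{
  tree one = build_real (double_type_node, dconst1);
  tree two = build_real (double_type_node, dconst2);
  return fold_relational_const (LT_EXPR, boolean_type_node, one, two);
}
#endif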

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the MODIFY_EXPR inside the
     return, has side effects.  If either of them doesn't, we don't need
     to wrap the expression in a cleanup point expression.  Note we don't
     check the left-hand side of the modify because it should always be
     a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_to_shwi (op01);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_to_shwi (part_width) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
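
/* Editorial usage sketch (not in the original source, compiled out):
   *&x collapses straight back to x through the ADDR_EXPR case of
   fold_indirect_ref_1; the VAR_DECL here is hypothetical.  */
#if 0
static tree
example_deref_of_addr (void)
{
  tree x = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                       get_identifier ("x"), integer_type_node);
  return build_fold_indirect_ref_loc (UNKNOWN_LOCATION,
                                      build_fold_addr_expr (x));
}
#endif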

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
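
/* Editorial usage sketch (not in the original source, compiled out):
   with its value unused, "(f (), 42)" reduces to just the call; the
   side-effect-free constant is dropped.  FNDECL is a hypothetical
   function declaration supplied by the caller.  */
#if 0
static tree
example_ignored_result (tree fndecl)
{
  tree call = build_call_expr (fndecl, 0);
  tree pair = build2 (COMPOUND_EXPR, integer_type_node, call,
                      build_int_cst (integer_type_node, 42));
  return fold_ignored_result (pair);  /* Returns just CALL.  */
}
#endif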

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          wide_int val = value;
          bool overflow_p;

          if ((val & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val &= ~(divisor - 1);
          val += divisor;
          if (val == 0)
            overflow_p = true;

          return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
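
/* Editorial usage sketch (not in the original source, compiled out):
   for a power-of-two divisor both routines reduce to bit twiddling:
   rounding 37 up to a multiple of 8 gives (37 + 7) & -8 == 40, and
   rounding it down gives 37 & -8 == 32.  */
#if 0
static bool
example_rounding (void)
{
  tree v = size_int (37);
  tree up = round_up_loc (UNKNOWN_LOCATION, v, 8);
  tree down = round_down_loc (UNKNOWN_LOCATION, v, 8);
  return tree_to_shwi (up) == 40 && tree_to_shwi (down) == 32;
}
#endif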

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
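
/* Editorial usage sketch (not in the original source, compiled out):
   for hypothetical arguments E1 == &arr[3] and E2 == &arr[1] over a
   4-byte element type, the cores compare equal and the byte
   difference 8 is stored in *DIFF.  */
#if 0
static bool
example_ptr_difference (tree e1, tree e2)
{
  HOST_WIDE_INT diff;
  if (!ptr_difference_const (e1, e2, &diff))
    return false;
  return diff == 8;
}
#endif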

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
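
/* Editorial usage sketch (not in the original source, compiled out):
   under fabs () the sign of -x * y is irrelevant, so the NEGATE_EXPR
   is peeled off and x * y is rebuilt.  X and Y are hypothetical
   double operands; NULL_TREE comes back if sign-dependent rounding
   must be honored or nothing can be stripped.  */
#if 0
static tree
example_strip_sign_ops (tree x, tree y)
{
  tree neg = build1 (NEGATE_EXPR, double_type_node, x);
  tree mul = build2 (MULT_EXPR, double_type_node, neg, y);
  return fold_strip_sign_ops (mul);  /* x * y  */
}
#endif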