/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
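
/* Illustrative only (hypothetical caller, not part of this file): a
   middle-end consumer computing the sizetype byte offset `base + 4 * idx'
   could write

     tree off = size_binop (MULT_EXPR, size_int (4), idx);
     tree sum = size_binop (PLUS_EXPR, base, off);

   where `base' and `idx' are assumed to be pre-existing sizetype
   trees; with constant operands the result folds to a constant.  */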
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
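
/* For example, the encoding above is bitwise: bit 0 is "less", bit 1
   "equal", bit 2 "greater" and bit 3 "unordered", so
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD).
   This is what lets ANDs and ORs of relational comparisons be
   combined by simple bit operations on the codes.  */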
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
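
/* Worked example: for ARG1 == 12 and ARG2 == 4 this returns the
   INTEGER_CST 3; for ARG1 == 13 and ARG2 == 4 the remainder is
   nonzero, so NULL_TREE is returned.  */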
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
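
/* A minimal sketch of how the deferral machinery above is meant to be
   used (hypothetical caller):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res_is_used, stmt, 0);

   where `res_is_used' stands for whatever condition tells the caller
   that the folded result is actually used; only then is a deferred
   -Wstrict-overflow warning issued.  */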
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
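
/* Example: for a 32-bit signed type the only value rejected is
   INT_MIN (0x80000000, the lone sign bit), since -INT_MIN is not
   representable; every other INTEGER_CST may be negated safely.  */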
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
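
/* Example: for a signed int with undefined overflow, negate_expr_p
   returns true for `a - b' (it becomes `b - a'), for `x >> 31'
   (sign-bit extraction) and for any INTEGER_CST other than INT_MIN;
   it returns false for `a + b' when neither operand is cheaply
   negatable.  */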
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
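
/* Worked example: splitting IN == `x + 3' with CODE == PLUS_EXPR
   stores 3 in *LITP, leaves *CONP null and returns `x'; for
   IN == `x - 3' the literal was subtracted, so 3 is stored in
   *MINUS_LITP instead.  */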
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
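
/* Example: int_const_binop (PLUS_EXPR, build_int_cst (integer_type_node, 2),
   build_int_cst (integer_type_node, 3)) yields the INTEGER_CST 5, with
   the type taken from the first operand; a division by zero, e.g.
   TRUNC_DIV_EXPR with a zero ARG2, yields NULL_TREE.  */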
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ...  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;
	  /* The direction of VEC_RSHIFT_EXPR is endian dependent.
	     For reductions, if !BYTES_BIG_ENDIAN the compiler picks the first
	     vector element, but the last element if BYTES_BIG_ENDIAN.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = -offset;
	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
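
/* Example: size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds
   directly to the sizetype constant 12 via int_const_binop_1, with
   overflow tracked even though sizetype is unsigned.  */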
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
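
/* Example: size_diffop on the sizetype constants 2 and 5 takes the
   last branch above and produces the ssizetype constant -3, which an
   unsigned subtraction could not represent.  */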
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
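
/* Example of the saturating behavior described above: converting the
   REAL_CST 1e30 to a 32-bit signed type yields INT_MAX with
   TREE_OVERFLOW set, and converting a NaN yields 0, likewise marked
   as overflowed.  */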
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
2078 tem = fold_ignored_result (arg);
2079 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2081 default:
2082 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2083 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2084 gcc_unreachable ();
2086 fold_convert_exit:
2087 protected_set_expr_location_unshare (tem, loc);
2088 return tem;
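/* Behaviour sketch for fold_convert_loc (illustrative): converting a
   COMPLEX_TYPE value to a REAL_TYPE first takes REALPART_EXPR and then
   converts the real part, matching C's complex-to-real conversion,
   and converting integer zero to a VECTOR_TYPE produces a zero vector
   via build_zero_vector rather than a VIEW_CONVERT_EXPR.  */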
2091 /* Return false if expr can be assumed not to be an lvalue, true
2092 otherwise. */
2094 static bool
2095 maybe_lvalue_p (const_tree x)
2097 /* We only need to wrap lvalue tree codes. */
2098 switch (TREE_CODE (x))
2100 case VAR_DECL:
2101 case PARM_DECL:
2102 case RESULT_DECL:
2103 case LABEL_DECL:
2104 case FUNCTION_DECL:
2105 case SSA_NAME:
2107 case COMPONENT_REF:
2108 case MEM_REF:
2109 case INDIRECT_REF:
2110 case ARRAY_REF:
2111 case ARRAY_RANGE_REF:
2112 case BIT_FIELD_REF:
2113 case OBJ_TYPE_REF:
2115 case REALPART_EXPR:
2116 case IMAGPART_EXPR:
2117 case PREINCREMENT_EXPR:
2118 case PREDECREMENT_EXPR:
2119 case SAVE_EXPR:
2120 case TRY_CATCH_EXPR:
2121 case WITH_CLEANUP_EXPR:
2122 case COMPOUND_EXPR:
2123 case MODIFY_EXPR:
2124 case TARGET_EXPR:
2125 case COND_EXPR:
2126 case BIND_EXPR:
2127 break;
2129 default:
2130 /* Assume the worst for front-end tree codes. */
2131 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2132 break;
2133 return false;
2136 return true;
2139 /* Return an expr equal to X but certainly not valid as an lvalue. */
2141 tree
2142 non_lvalue_loc (location_t loc, tree x)
2144 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2145 us. */
2146 if (in_gimple_form)
2147 return x;
2149 if (! maybe_lvalue_p (x))
2150 return x;
2151 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2154 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2155 Zero means allow extended lvalues. */
2157 int pedantic_lvalues;
2159 /* When pedantic, return an expr equal to X but certainly not valid as a
2160 pedantic lvalue. Otherwise, return X. */
2162 static tree
2163 pedantic_non_lvalue_loc (location_t loc, tree x)
2165 if (pedantic_lvalues)
2166 return non_lvalue_loc (loc, x);
2168 return protected_set_expr_location_unshare (x, loc);
2171 /* Given a tree comparison code, return the code that is the logical inverse.
2172 It is generally not safe to do this for floating-point comparisons, except
2173 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2174 ERROR_MARK in this case. */
2176 enum tree_code
2177 invert_tree_comparison (enum tree_code code, bool honor_nans)
2179 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2180 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2181 return ERROR_MARK;
2183 switch (code)
2185 case EQ_EXPR:
2186 return NE_EXPR;
2187 case NE_EXPR:
2188 return EQ_EXPR;
2189 case GT_EXPR:
2190 return honor_nans ? UNLE_EXPR : LE_EXPR;
2191 case GE_EXPR:
2192 return honor_nans ? UNLT_EXPR : LT_EXPR;
2193 case LT_EXPR:
2194 return honor_nans ? UNGE_EXPR : GE_EXPR;
2195 case LE_EXPR:
2196 return honor_nans ? UNGT_EXPR : GT_EXPR;
2197 case LTGT_EXPR:
2198 return UNEQ_EXPR;
2199 case UNEQ_EXPR:
2200 return LTGT_EXPR;
2201 case UNGT_EXPR:
2202 return LE_EXPR;
2203 case UNGE_EXPR:
2204 return LT_EXPR;
2205 case UNLT_EXPR:
2206 return GE_EXPR;
2207 case UNLE_EXPR:
2208 return GT_EXPR;
2209 case ORDERED_EXPR:
2210 return UNORDERED_EXPR;
2211 case UNORDERED_EXPR:
2212 return ORDERED_EXPR;
2213 default:
2214 gcc_unreachable ();
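/* Example of why NaNs matter above (illustrative): with a = NaN,
   a < b is false and so is a >= b, so GE_EXPR would be a wrong inverse
   of LT_EXPR; the correct inverse is UNGE_EXPR ("unordered or >=").
   Under -ftrapping-math we additionally return ERROR_MARK for most
   floating-point codes, because LT_EXPR raises the invalid exception
   on unordered operands while its UN* counterpart must stay quiet.  */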
2218 /* Similar, but return the comparison that results if the operands are
2219 swapped. This is safe for floating-point. */
2221 enum tree_code
2222 swap_tree_comparison (enum tree_code code)
2224 switch (code)
2226 case EQ_EXPR:
2227 case NE_EXPR:
2228 case ORDERED_EXPR:
2229 case UNORDERED_EXPR:
2230 case LTGT_EXPR:
2231 case UNEQ_EXPR:
2232 return code;
2233 case GT_EXPR:
2234 return LT_EXPR;
2235 case GE_EXPR:
2236 return LE_EXPR;
2237 case LT_EXPR:
2238 return GT_EXPR;
2239 case LE_EXPR:
2240 return GE_EXPR;
2241 case UNGT_EXPR:
2242 return UNLT_EXPR;
2243 case UNGE_EXPR:
2244 return UNLE_EXPR;
2245 case UNLT_EXPR:
2246 return UNGT_EXPR;
2247 case UNLE_EXPR:
2248 return UNGE_EXPR;
2249 default:
2250 gcc_unreachable ();
2255 /* Convert a comparison tree code from an enum tree_code representation
2256 into a compcode bit-based encoding. This function is the inverse of
2257 compcode_to_comparison. */
2259 static enum comparison_code
2260 comparison_to_compcode (enum tree_code code)
2262 switch (code)
2264 case LT_EXPR:
2265 return COMPCODE_LT;
2266 case EQ_EXPR:
2267 return COMPCODE_EQ;
2268 case LE_EXPR:
2269 return COMPCODE_LE;
2270 case GT_EXPR:
2271 return COMPCODE_GT;
2272 case NE_EXPR:
2273 return COMPCODE_NE;
2274 case GE_EXPR:
2275 return COMPCODE_GE;
2276 case ORDERED_EXPR:
2277 return COMPCODE_ORD;
2278 case UNORDERED_EXPR:
2279 return COMPCODE_UNORD;
2280 case UNLT_EXPR:
2281 return COMPCODE_UNLT;
2282 case UNEQ_EXPR:
2283 return COMPCODE_UNEQ;
2284 case UNLE_EXPR:
2285 return COMPCODE_UNLE;
2286 case UNGT_EXPR:
2287 return COMPCODE_UNGT;
2288 case LTGT_EXPR:
2289 return COMPCODE_LTGT;
2290 case UNGE_EXPR:
2291 return COMPCODE_UNGE;
2292 default:
2293 gcc_unreachable ();
2297 /* Convert a compcode bit-based encoding of a comparison operator back
2298 to GCC's enum tree_code representation. This function is the
2299 inverse of comparison_to_compcode. */
2301 static enum tree_code
2302 compcode_to_comparison (enum comparison_code code)
2304 switch (code)
2306 case COMPCODE_LT:
2307 return LT_EXPR;
2308 case COMPCODE_EQ:
2309 return EQ_EXPR;
2310 case COMPCODE_LE:
2311 return LE_EXPR;
2312 case COMPCODE_GT:
2313 return GT_EXPR;
2314 case COMPCODE_NE:
2315 return NE_EXPR;
2316 case COMPCODE_GE:
2317 return GE_EXPR;
2318 case COMPCODE_ORD:
2319 return ORDERED_EXPR;
2320 case COMPCODE_UNORD:
2321 return UNORDERED_EXPR;
2322 case COMPCODE_UNLT:
2323 return UNLT_EXPR;
2324 case COMPCODE_UNEQ:
2325 return UNEQ_EXPR;
2326 case COMPCODE_UNLE:
2327 return UNLE_EXPR;
2328 case COMPCODE_UNGT:
2329 return UNGT_EXPR;
2330 case COMPCODE_LTGT:
2331 return LTGT_EXPR;
2332 case COMPCODE_UNGE:
2333 return UNGE_EXPR;
2334 default:
2335 gcc_unreachable ();
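/* Note on the encoding handled by the two converters above
   (illustrative): LT, EQ and GT each occupy one bit (values 1, 2 and
   4) and UNORD has value 8, so compound predicates are unions of
   their cases, e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ and
   COMPCODE_ORD == COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT.  This is
   what lets combine_comparisons below merge two comparisons with a
   plain bitwise AND or OR.  */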
2339 /* Return a tree for the comparison which is the combination of
2340 doing the AND or OR (depending on CODE) of the two operations LCODE
2341 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2342 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2343 if this makes the transformation invalid. */
2345 tree
2346 combine_comparisons (location_t loc,
2347 enum tree_code code, enum tree_code lcode,
2348 enum tree_code rcode, tree truth_type,
2349 tree ll_arg, tree lr_arg)
2351 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2352 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2353 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2354 int compcode;
2356 switch (code)
2358 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2359 compcode = lcompcode & rcompcode;
2360 break;
2362 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2363 compcode = lcompcode | rcompcode;
2364 break;
2366 default:
2367 return NULL_TREE;
2370 if (!honor_nans)
2372 /* Eliminate unordered comparisons, as well as LTGT and ORD
2373 which are not used unless the mode has NaNs. */
2374 compcode &= ~COMPCODE_UNORD;
2375 if (compcode == COMPCODE_LTGT)
2376 compcode = COMPCODE_NE;
2377 else if (compcode == COMPCODE_ORD)
2378 compcode = COMPCODE_TRUE;
2380 else if (flag_trapping_math)
2382 /* Check that the original operation and the optimized ones will trap
2383 under the same condition. */
2384 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2385 && (lcompcode != COMPCODE_EQ)
2386 && (lcompcode != COMPCODE_ORD);
2387 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2388 && (rcompcode != COMPCODE_EQ)
2389 && (rcompcode != COMPCODE_ORD);
2390 bool trap = (compcode & COMPCODE_UNORD) == 0
2391 && (compcode != COMPCODE_EQ)
2392 && (compcode != COMPCODE_ORD);
2394 /* In a short-circuited boolean expression the LHS might be
2395 such that the RHS, if evaluated, will never trap. For
2396 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2397 if neither x nor y is NaN. (This is a mixed blessing: for
2398 example, the expression above will never trap, hence
2399 optimizing it to x < y would be invalid). */
2400 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2401 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2402 rtrap = false;
2404 /* If the comparison was short-circuited, and only the RHS
2405 trapped, we may now generate a spurious trap. */
2406 if (rtrap && !ltrap
2407 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2408 return NULL_TREE;
2410 /* If we changed the conditions that cause a trap, we lose. */
2411 if ((ltrap || rtrap) != trap)
2412 return NULL_TREE;
2415 if (compcode == COMPCODE_TRUE)
2416 return constant_boolean_node (true, truth_type);
2417 else if (compcode == COMPCODE_FALSE)
2418 return constant_boolean_node (false, truth_type);
2419 else
2421 enum tree_code tcode;
2423 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2424 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
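/* Worked example for combine_comparisons (illustrative, integer
   operands so NaNs are irrelevant): for (a < b) || (a == b) we OR the
   encodings, COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, and emit
   a <= b; for (a < b) && (a > b) we AND them, getting COMPCODE_FALSE,
   and emit constant false.  */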
2428 /* Return nonzero if two operands (typically of the same tree node)
2429 are necessarily equal. If either argument has side-effects this
2430 function returns zero. FLAGS modifies behavior as follows:
2432 If OEP_ONLY_CONST is set, only return nonzero for constants.
2433 This function tests whether the operands are indistinguishable;
2434 it does not test whether they are equal using C's == operation.
2435 The distinction is important for IEEE floating point, because
2436 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2437 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2439 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2440 even though it may hold multiple values during a function.
2441 This is because a GCC tree node guarantees that nothing else is
2442 executed between the evaluation of its "operands" (which may often
2443 be evaluated in arbitrary order). Hence if the operands themselves
2444 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2445 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2446 unset means assuming isochronic (or instantaneous) tree equivalence.
2447 Unless comparing arbitrary expression trees, such as from different
2448 statements, this flag can usually be left unset.
2450 If OEP_PURE_SAME is set, then pure functions with identical arguments
2451 are considered the same. It is used when the caller has other ways
2452 to ensure that global memory is unchanged in between. */
2454 int
2455 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2457 /* If either is ERROR_MARK, they aren't equal. */
2458 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2459 || TREE_TYPE (arg0) == error_mark_node
2460 || TREE_TYPE (arg1) == error_mark_node)
2461 return 0;
2463 /* Similar, if either does not have a type (like a released SSA name),
2464 they aren't equal. */
2465 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2466 return 0;
2468 /* Check equality of integer constants before bailing out due to
2469 precision differences. */
2470 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2471 return tree_int_cst_equal (arg0, arg1);
2473 /* If both types don't have the same signedness, then we can't consider
2474 them equal. We must check this before the STRIP_NOPS calls
2475 because they may change the signedness of the arguments. As pointers
2476 strictly don't have a signedness, require either two pointers or
2477 two non-pointers as well. */
2478 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2479 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2480 return 0;
2482 /* We cannot consider pointers to different address spaces equal. */
2483 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2484 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2485 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2486 return 0;
2488 /* If both types don't have the same precision, then it is not safe
2489 to strip NOPs. */
2490 if (element_precision (TREE_TYPE (arg0))
2491 != element_precision (TREE_TYPE (arg1)))
2492 return 0;
2494 STRIP_NOPS (arg0);
2495 STRIP_NOPS (arg1);
2497 /* In case both args are comparisons but with different comparison
2498 code, try to swap the comparison operands of one arg to produce
2499 a match and compare that variant. */
2500 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2501 && COMPARISON_CLASS_P (arg0)
2502 && COMPARISON_CLASS_P (arg1))
2504 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2506 if (TREE_CODE (arg0) == swap_code)
2507 return operand_equal_p (TREE_OPERAND (arg0, 0),
2508 TREE_OPERAND (arg1, 1), flags)
2509 && operand_equal_p (TREE_OPERAND (arg0, 1),
2510 TREE_OPERAND (arg1, 0), flags);
2513 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2514 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2515 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2516 return 0;
2518 /* This is needed for conversions and for COMPONENT_REF.
2519 Might as well play it safe and always test this. */
2520 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2521 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2522 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2523 return 0;
2525 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2526 We don't care about side effects in that case because the SAVE_EXPR
2527 takes care of that for us. In all other cases, two expressions are
2528 equal if they have no side effects. If we have two identical
2529 expressions with side effects that should be treated the same due
2530 to the only side effects being identical SAVE_EXPR's, that will
2531 be detected in the recursive calls below.
2532 If we are taking an invariant address of two identical objects
2533 they are necessarily equal as well. */
2534 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2535 && (TREE_CODE (arg0) == SAVE_EXPR
2536 || (flags & OEP_CONSTANT_ADDRESS_OF)
2537 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2538 return 1;
2540 /* Next handle constant cases, those for which we can return 1 even
2541 if ONLY_CONST is set. */
2542 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2543 switch (TREE_CODE (arg0))
2545 case INTEGER_CST:
2546 return tree_int_cst_equal (arg0, arg1);
2548 case FIXED_CST:
2549 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2550 TREE_FIXED_CST (arg1));
2552 case REAL_CST:
2553 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2554 TREE_REAL_CST (arg1)))
2555 return 1;
2558 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2560 /* If we do not distinguish between signed and unsigned zero,
2561 consider them equal. */
2562 if (real_zerop (arg0) && real_zerop (arg1))
2563 return 1;
2565 return 0;
2567 case VECTOR_CST:
2569 unsigned i;
2571 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2572 return 0;
2574 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2576 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2577 VECTOR_CST_ELT (arg1, i), flags))
2578 return 0;
2580 return 1;
2583 case COMPLEX_CST:
2584 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2585 flags)
2586 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2587 flags));
2589 case STRING_CST:
2590 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2591 && ! memcmp (TREE_STRING_POINTER (arg0),
2592 TREE_STRING_POINTER (arg1),
2593 TREE_STRING_LENGTH (arg0)));
2595 case ADDR_EXPR:
2596 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2597 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2598 ? OEP_CONSTANT_ADDRESS_OF : 0);
2599 default:
2600 break;
2603 if (flags & OEP_ONLY_CONST)
2604 return 0;
2606 /* Define macros to test an operand from arg0 and arg1 for equality and a
2607 variant that allows null and views null as being different from any
2608 non-null value. In the latter case, if either is null, they both
2609 must be; otherwise, do the normal comparison. */
2610 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2611 TREE_OPERAND (arg1, N), flags)
2613 #define OP_SAME_WITH_NULL(N) \
2614 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2615 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2617 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2619 case tcc_unary:
2620 /* Two conversions are equal only if signedness and modes match. */
2621 switch (TREE_CODE (arg0))
2623 CASE_CONVERT:
2624 case FIX_TRUNC_EXPR:
2625 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2626 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2627 return 0;
2628 break;
2629 default:
2630 break;
2633 return OP_SAME (0);
2636 case tcc_comparison:
2637 case tcc_binary:
2638 if (OP_SAME (0) && OP_SAME (1))
2639 return 1;
2641 /* For commutative ops, allow the other order. */
2642 return (commutative_tree_code (TREE_CODE (arg0))
2643 && operand_equal_p (TREE_OPERAND (arg0, 0),
2644 TREE_OPERAND (arg1, 1), flags)
2645 && operand_equal_p (TREE_OPERAND (arg0, 1),
2646 TREE_OPERAND (arg1, 0), flags));
2648 case tcc_reference:
2649 /* If either of the pointer (or reference) expressions we are
2650 dereferencing contain a side effect, these cannot be equal,
2651 but their addresses can be. */
2652 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2653 && (TREE_SIDE_EFFECTS (arg0)
2654 || TREE_SIDE_EFFECTS (arg1)))
2655 return 0;
2657 switch (TREE_CODE (arg0))
2659 case INDIRECT_REF:
2660 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2661 return OP_SAME (0);
2663 case REALPART_EXPR:
2664 case IMAGPART_EXPR:
2665 return OP_SAME (0);
2667 case TARGET_MEM_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 /* Require equal extra operands and then fall through to MEM_REF
2670 handling of the two common operands. */
2671 if (!OP_SAME_WITH_NULL (2)
2672 || !OP_SAME_WITH_NULL (3)
2673 || !OP_SAME_WITH_NULL (4))
2674 return 0;
2675 /* Fallthru. */
2676 case MEM_REF:
2677 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2678 /* Require equal access sizes, and similar pointer types.
2679 We can have incomplete types for array references of
2680 variable-sized arrays from the Fortran frontend
2681 though. Also verify the types are compatible. */
2682 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2683 || (TYPE_SIZE (TREE_TYPE (arg0))
2684 && TYPE_SIZE (TREE_TYPE (arg1))
2685 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2686 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2687 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2688 && alias_ptr_types_compatible_p
2689 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2690 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2691 && OP_SAME (0) && OP_SAME (1));
2693 case ARRAY_REF:
2694 case ARRAY_RANGE_REF:
2695 /* Operands 2 and 3 may be null.
2696 Compare the array index by value if it is constant first as we
2697 may have different types but same value here. */
2698 if (!OP_SAME (0))
2699 return 0;
2700 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2701 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2702 TREE_OPERAND (arg1, 1))
2703 || OP_SAME (1))
2704 && OP_SAME_WITH_NULL (2)
2705 && OP_SAME_WITH_NULL (3));
2707 case COMPONENT_REF:
2708 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2709 may be NULL when we're called to compare MEM_EXPRs. */
2710 if (!OP_SAME_WITH_NULL (0)
2711 || !OP_SAME (1))
2712 return 0;
2713 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2714 return OP_SAME_WITH_NULL (2);
2716 case BIT_FIELD_REF:
2717 if (!OP_SAME (0))
2718 return 0;
2719 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2720 return OP_SAME (1) && OP_SAME (2);
2722 default:
2723 return 0;
2726 case tcc_expression:
2727 switch (TREE_CODE (arg0))
2729 case ADDR_EXPR:
2730 case TRUTH_NOT_EXPR:
2731 return OP_SAME (0);
2733 case TRUTH_ANDIF_EXPR:
2734 case TRUTH_ORIF_EXPR:
2735 return OP_SAME (0) && OP_SAME (1);
2737 case FMA_EXPR:
2738 case WIDEN_MULT_PLUS_EXPR:
2739 case WIDEN_MULT_MINUS_EXPR:
2740 if (!OP_SAME (2))
2741 return 0;
2742 /* The multiplication operands are commutative. */
2743 /* FALLTHRU */
2745 case TRUTH_AND_EXPR:
2746 case TRUTH_OR_EXPR:
2747 case TRUTH_XOR_EXPR:
2748 if (OP_SAME (0) && OP_SAME (1))
2749 return 1;
2751 /* Otherwise take into account this is a commutative operation. */
2752 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2753 TREE_OPERAND (arg1, 1), flags)
2754 && operand_equal_p (TREE_OPERAND (arg0, 1),
2755 TREE_OPERAND (arg1, 0), flags));
2757 case COND_EXPR:
2758 case VEC_COND_EXPR:
2759 case DOT_PROD_EXPR:
2760 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2762 default:
2763 return 0;
2766 case tcc_vl_exp:
2767 switch (TREE_CODE (arg0))
2769 case CALL_EXPR:
2770 /* If the CALL_EXPRs call different functions, then they
2771 clearly can not be equal. */
2772 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2773 flags))
2774 return 0;
2777 unsigned int cef = call_expr_flags (arg0);
2778 if (flags & OEP_PURE_SAME)
2779 cef &= ECF_CONST | ECF_PURE;
2780 else
2781 cef &= ECF_CONST;
2782 if (!cef)
2783 return 0;
2786 /* Now see if all the arguments are the same. */
2788 const_call_expr_arg_iterator iter0, iter1;
2789 const_tree a0, a1;
2790 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2791 a1 = first_const_call_expr_arg (arg1, &iter1);
2792 a0 && a1;
2793 a0 = next_const_call_expr_arg (&iter0),
2794 a1 = next_const_call_expr_arg (&iter1))
2795 if (! operand_equal_p (a0, a1, flags))
2796 return 0;
2798 /* If we get here and both argument lists are exhausted
2799 then the CALL_EXPRs are equal. */
2800 return ! (a0 || a1);
2802 default:
2803 return 0;
2806 case tcc_declaration:
2807 /* Consider __builtin_sqrt equal to sqrt. */
2808 return (TREE_CODE (arg0) == FUNCTION_DECL
2809 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2810 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2811 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2813 default:
2814 return 0;
2817 #undef OP_SAME
2818 #undef OP_SAME_WITH_NULL
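/* Usage sketch for operand_equal_p (illustrative): most callers pass
   zero for FLAGS, e.g. operand_equal_p (op0, op1, 0).  With
   OEP_ONLY_CONST the answer is restricted to constants: for a
   VAR_DECL x, operand_equal_p (x, x, 0) is 1 but
   operand_equal_p (x, x, OEP_ONLY_CONST) is 0, since x is only known
   to equal itself under the isochronic reading described before the
   function.  */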
2821 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2822 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2824 When in doubt, return 0. */
2826 static int
2827 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2829 int unsignedp1, unsignedpo;
2830 tree primarg0, primarg1, primother;
2831 unsigned int correct_width;
2833 if (operand_equal_p (arg0, arg1, 0))
2834 return 1;
2836 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2837 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2838 return 0;
2840 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2841 and see if the inner values are the same. This removes any
2842 signedness comparison, which doesn't matter here. */
2843 primarg0 = arg0, primarg1 = arg1;
2844 STRIP_NOPS (primarg0);
2845 STRIP_NOPS (primarg1);
2846 if (operand_equal_p (primarg0, primarg1, 0))
2847 return 1;
2849 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2850 actual comparison operand, ARG0.
2852 First throw away any conversions to wider types
2853 already present in the operands. */
2855 primarg1 = get_narrower (arg1, &unsignedp1);
2856 primother = get_narrower (other, &unsignedpo);
2858 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2859 if (unsignedp1 == unsignedpo
2860 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2861 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2863 tree type = TREE_TYPE (arg0);
2865 /* Make sure shorter operand is extended the right way
2866 to match the longer operand. */
2867 primarg1 = fold_convert (signed_or_unsigned_type_for
2868 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2870 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2871 return 1;
2874 return 0;
2877 /* See if ARG is an expression that is either a comparison or is performing
2878 arithmetic on comparisons. The comparisons must only be comparing
2879 two different values, which will be stored in *CVAL1 and *CVAL2; if
2880 they are nonzero it means that some operands have already been found.
2881 No variables may be used anywhere else in the expression except in the
2882 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2883 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2885 If this is true, return 1. Otherwise, return zero. */
2887 static int
2888 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2890 enum tree_code code = TREE_CODE (arg);
2891 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2893 /* We can handle some of the tcc_expression cases here. */
2894 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2895 tclass = tcc_unary;
2896 else if (tclass == tcc_expression
2897 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2898 || code == COMPOUND_EXPR))
2899 tclass = tcc_binary;
2901 else if (tclass == tcc_expression && code == SAVE_EXPR
2902 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2904 /* If we've already found a CVAL1 or CVAL2, this expression is
2905 too complex to handle. */
2906 if (*cval1 || *cval2)
2907 return 0;
2909 tclass = tcc_unary;
2910 *save_p = 1;
2913 switch (tclass)
2915 case tcc_unary:
2916 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2918 case tcc_binary:
2919 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2920 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2921 cval1, cval2, save_p));
2923 case tcc_constant:
2924 return 1;
2926 case tcc_expression:
2927 if (code == COND_EXPR)
2928 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2929 cval1, cval2, save_p)
2930 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2931 cval1, cval2, save_p)
2932 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2933 cval1, cval2, save_p));
2934 return 0;
2936 case tcc_comparison:
2937 /* First see if we can handle the first operand, then the second. For
2938 the second operand, we know *CVAL1 can't be zero. It must be that
2939 one side of the comparison is each of the values; test for the
2940 case where this isn't true by failing if the two operands
2941 are the same. */
2943 if (operand_equal_p (TREE_OPERAND (arg, 0),
2944 TREE_OPERAND (arg, 1), 0))
2945 return 0;
2947 if (*cval1 == 0)
2948 *cval1 = TREE_OPERAND (arg, 0);
2949 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2951 else if (*cval2 == 0)
2952 *cval2 = TREE_OPERAND (arg, 0);
2953 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2955 else
2956 return 0;
2958 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2960 else if (*cval2 == 0)
2961 *cval2 = TREE_OPERAND (arg, 1);
2962 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2964 else
2965 return 0;
2967 return 1;
2969 default:
2970 return 0;
2974 /* ARG is a tree that is known to contain just arithmetic operations and
2975 comparisons. Evaluate the operations in the tree substituting NEW0 for
2976 any occurrence of OLD0 as an operand of a comparison and likewise for
2977 NEW1 and OLD1. */
2979 static tree
2980 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2981 tree old1, tree new1)
2983 tree type = TREE_TYPE (arg);
2984 enum tree_code code = TREE_CODE (arg);
2985 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2987 /* We can handle some of the tcc_expression cases here. */
2988 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2989 tclass = tcc_unary;
2990 else if (tclass == tcc_expression
2991 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2992 tclass = tcc_binary;
2994 switch (tclass)
2996 case tcc_unary:
2997 return fold_build1_loc (loc, code, type,
2998 eval_subst (loc, TREE_OPERAND (arg, 0),
2999 old0, new0, old1, new1));
3001 case tcc_binary:
3002 return fold_build2_loc (loc, code, type,
3003 eval_subst (loc, TREE_OPERAND (arg, 0),
3004 old0, new0, old1, new1),
3005 eval_subst (loc, TREE_OPERAND (arg, 1),
3006 old0, new0, old1, new1));
3008 case tcc_expression:
3009 switch (code)
3011 case SAVE_EXPR:
3012 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3013 old1, new1);
3015 case COMPOUND_EXPR:
3016 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3017 old1, new1);
3019 case COND_EXPR:
3020 return fold_build3_loc (loc, code, type,
3021 eval_subst (loc, TREE_OPERAND (arg, 0),
3022 old0, new0, old1, new1),
3023 eval_subst (loc, TREE_OPERAND (arg, 1),
3024 old0, new0, old1, new1),
3025 eval_subst (loc, TREE_OPERAND (arg, 2),
3026 old0, new0, old1, new1));
3027 default:
3028 break;
3030 /* Fall through - ??? */
3032 case tcc_comparison:
3034 tree arg0 = TREE_OPERAND (arg, 0);
3035 tree arg1 = TREE_OPERAND (arg, 1);
3037 /* We need to check both for exact equality and tree equality. The
3038 former will be true if the operand has a side-effect. In that
3039 case, we know the operand occurred exactly once. */
3041 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3042 arg0 = new0;
3043 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3044 arg0 = new1;
3046 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3047 arg1 = new0;
3048 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3049 arg1 = new1;
3051 return fold_build2_loc (loc, code, type, arg0, arg1);
3054 default:
3055 return arg;
3059 /* Return a tree for the case when the result of an expression is RESULT
3060 converted to TYPE and OMITTED was previously an operand of the expression
3061 but is now not needed (e.g., we folded OMITTED * 0).
3063 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3064 the conversion of RESULT to TYPE. */
3066 tree
3067 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3069 tree t = fold_convert_loc (loc, type, result);
3071 /* If the resulting operand is an empty statement, just return the omitted
3072 statement cast to void. */
3073 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3074 return build1_loc (loc, NOP_EXPR, void_type_node,
3075 fold_ignored_result (omitted));
3077 if (TREE_SIDE_EFFECTS (omitted))
3078 return build2_loc (loc, COMPOUND_EXPR, type,
3079 fold_ignored_result (omitted), t);
3081 return non_lvalue_loc (loc, t);
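/* Example (illustrative): when folding f () * 0 the call still has to
   be evaluated, so omit_one_operand_loc is used with RESULT = 0 and
   OMITTED = f (), producing the COMPOUND_EXPR (f (), 0); if OMITTED
   has no side effects, only the conversion of RESULT to TYPE
   remains.  */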
3084 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3086 static tree
3087 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3088 tree omitted)
3090 tree t = fold_convert_loc (loc, type, result);
3092 /* If the resulting operand is an empty statement, just return the omitted
3093 statement cast to void. */
3094 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3095 return build1_loc (loc, NOP_EXPR, void_type_node,
3096 fold_ignored_result (omitted));
3098 if (TREE_SIDE_EFFECTS (omitted))
3099 return build2_loc (loc, COMPOUND_EXPR, type,
3100 fold_ignored_result (omitted), t);
3102 return pedantic_non_lvalue_loc (loc, t);
3105 /* Return a tree for the case when the result of an expression is RESULT
3106 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3107 of the expression but are now not needed.
3109 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3110 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3111 evaluated before OMITTED2. Otherwise, if neither has side effects,
3112 just do the conversion of RESULT to TYPE. */
3114 tree
3115 omit_two_operands_loc (location_t loc, tree type, tree result,
3116 tree omitted1, tree omitted2)
3118 tree t = fold_convert_loc (loc, type, result);
3120 if (TREE_SIDE_EFFECTS (omitted2))
3121 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3122 if (TREE_SIDE_EFFECTS (omitted1))
3123 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3125 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3129 /* Return a simplified tree node for the truth-negation of ARG. This
3130 never alters ARG itself. We assume that ARG is an operation that
3131 returns a truth value (0 or 1).
3133 FIXME: one would think we would fold the result, but it causes
3134 problems with the dominator optimizer. */
3136 static tree
3137 fold_truth_not_expr (location_t loc, tree arg)
3139 tree type = TREE_TYPE (arg);
3140 enum tree_code code = TREE_CODE (arg);
3141 location_t loc1, loc2;
3143 /* If this is a comparison, we can simply invert it, except for
3144 floating-point non-equality comparisons, in which case we just
3145 enclose a TRUTH_NOT_EXPR around what we have. */
3147 if (TREE_CODE_CLASS (code) == tcc_comparison)
3149 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3150 if (FLOAT_TYPE_P (op_type)
3151 && flag_trapping_math
3152 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3153 && code != NE_EXPR && code != EQ_EXPR)
3154 return NULL_TREE;
3156 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3157 if (code == ERROR_MARK)
3158 return NULL_TREE;
3160 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3161 TREE_OPERAND (arg, 1));
3164 switch (code)
3166 case INTEGER_CST:
3167 return constant_boolean_node (integer_zerop (arg), type);
3169 case TRUTH_AND_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3172 return build2_loc (loc, TRUTH_OR_EXPR, type,
3173 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3174 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3176 case TRUTH_OR_EXPR:
3177 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3178 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3179 return build2_loc (loc, TRUTH_AND_EXPR, type,
3180 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3181 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3183 case TRUTH_XOR_EXPR:
3184 /* Here we can invert either operand. We invert the first operand
3185 unless the second operand is a TRUTH_NOT_EXPR in which case our
3186 result is the XOR of the first operand with the inside of the
3187 negation of the second operand. */
3189 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3190 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3191 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3192 else
3193 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3194 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3195 TREE_OPERAND (arg, 1));
3197 case TRUTH_ANDIF_EXPR:
3198 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3199 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3200 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3201 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3202 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3204 case TRUTH_ORIF_EXPR:
3205 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3206 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3207 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3208 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3209 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3211 case TRUTH_NOT_EXPR:
3212 return TREE_OPERAND (arg, 0);
3214 case COND_EXPR:
3216 tree arg1 = TREE_OPERAND (arg, 1);
3217 tree arg2 = TREE_OPERAND (arg, 2);
3219 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3220 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3222 /* A COND_EXPR may have a throw as one operand, which
3223 then has void type. Just leave void operands
3224 as they are. */
3225 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3226 VOID_TYPE_P (TREE_TYPE (arg1))
3227 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3228 VOID_TYPE_P (TREE_TYPE (arg2))
3229 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3232 case COMPOUND_EXPR:
3233 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3234 return build2_loc (loc, COMPOUND_EXPR, type,
3235 TREE_OPERAND (arg, 0),
3236 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3238 case NON_LVALUE_EXPR:
3239 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3240 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3242 CASE_CONVERT:
3243 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3244 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3246 /* ... fall through ... */
3248 case FLOAT_EXPR:
3249 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3250 return build1_loc (loc, TREE_CODE (arg), type,
3251 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3253 case BIT_AND_EXPR:
3254 if (!integer_onep (TREE_OPERAND (arg, 1)))
3255 return NULL_TREE;
3256 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3258 case SAVE_EXPR:
3259 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3261 case CLEANUP_POINT_EXPR:
3262 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3263 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3264 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3266 default:
3267 return NULL_TREE;
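/* Example of the recursion above (illustrative): for integral
   operands, !((a < b) && c) becomes (a >= b) || !c via the TRUTH_AND
   case plus comparison inversion.  When ARG itself is a
   floating-point inequality and -ftrapping-math is in effect, we
   return NULL_TREE instead, so the caller must keep an explicit
   TRUTH_NOT_EXPR.  */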
3271 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3272 assume that ARG is an operation that returns a truth value (0 or 1
3273 for scalars, 0 or -1 for vectors). Return the folded expression if
3274 folding is successful. Otherwise, return NULL_TREE. */
3276 static tree
3277 fold_invert_truthvalue (location_t loc, tree arg)
3279 tree type = TREE_TYPE (arg);
3280 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3281 ? BIT_NOT_EXPR
3282 : TRUTH_NOT_EXPR,
3283 type, arg);
3286 /* Return a simplified tree node for the truth-negation of ARG. This
3287 never alters ARG itself. We assume that ARG is an operation that
3288 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3290 tree
3291 invert_truthvalue_loc (location_t loc, tree arg)
3293 if (TREE_CODE (arg) == ERROR_MARK)
3294 return arg;
3296 tree type = TREE_TYPE (arg);
3297 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3298 ? BIT_NOT_EXPR
3299 : TRUTH_NOT_EXPR,
3300 type, arg);
3303 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3304 operands are another bit-wise operation with a common input. If so,
3305 distribute the bit operations to save an operation and possibly two if
3306 constants are involved. For example, convert
3307 (A | B) & (A | C) into A | (B & C)
3308 Further simplification will occur if B and C are constants.
3310 If this optimization cannot be done, 0 will be returned. */
3312 static tree
3313 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3314 tree arg0, tree arg1)
3316 tree common;
3317 tree left, right;
3319 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3320 || TREE_CODE (arg0) == code
3321 || (TREE_CODE (arg0) != BIT_AND_EXPR
3322 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3323 return 0;
3325 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3327 common = TREE_OPERAND (arg0, 0);
3328 left = TREE_OPERAND (arg0, 1);
3329 right = TREE_OPERAND (arg1, 1);
3331 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3333 common = TREE_OPERAND (arg0, 0);
3334 left = TREE_OPERAND (arg0, 1);
3335 right = TREE_OPERAND (arg1, 0);
3337 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3339 common = TREE_OPERAND (arg0, 1);
3340 left = TREE_OPERAND (arg0, 0);
3341 right = TREE_OPERAND (arg1, 1);
3343 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3345 common = TREE_OPERAND (arg0, 1);
3346 left = TREE_OPERAND (arg0, 0);
3347 right = TREE_OPERAND (arg1, 0);
3349 else
3350 return 0;
3352 common = fold_convert_loc (loc, type, common);
3353 left = fold_convert_loc (loc, type, left);
3354 right = fold_convert_loc (loc, type, right);
3355 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3356 fold_build2_loc (loc, code, type, left, right));
3359 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3360 with code CODE. This optimization is unsafe: it can change the
3361 rounding of floating-point results. */
3361 static tree
3362 distribute_real_division (location_t loc, enum tree_code code, tree type,
3363 tree arg0, tree arg1)
3365 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3366 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3368 /* (A / C) +- (B / C) -> (A +- B) / C. */
3369 if (mul0 == mul1
3370 && operand_equal_p (TREE_OPERAND (arg0, 1),
3371 TREE_OPERAND (arg1, 1), 0))
3372 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3373 fold_build2_loc (loc, code, type,
3374 TREE_OPERAND (arg0, 0),
3375 TREE_OPERAND (arg1, 0)),
3376 TREE_OPERAND (arg0, 1));
3378 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3379 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3380 TREE_OPERAND (arg1, 0), 0)
3381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3382 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3384 REAL_VALUE_TYPE r0, r1;
3385 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3386 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3387 if (!mul0)
3388 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3389 if (!mul1)
3390 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3391 real_arithmetic (&r0, code, &r0, &r1);
3392 return fold_build2_loc (loc, MULT_EXPR, type,
3393 TREE_OPERAND (arg0, 0),
3394 build_real (type, r0));
3397 return NULL_TREE;
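/* Why the transformation above is unsafe (illustrative): in IEEE
   arithmetic (A / C) + (B / C) rounds each quotient separately while
   (A + B) / C rounds once, so e.g. in single precision
   1.0f / 3.0f + 2.0f / 3.0f need not equal (1.0f + 2.0f) / 3.0f
   exactly; callers are expected to guard this with the unsafe-math
   flags.  */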
3400 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3401 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3403 static tree
3404 make_bit_field_ref (location_t loc, tree inner, tree type,
3405 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3407 tree result, bftype;
3409 if (bitpos == 0)
3411 tree size = TYPE_SIZE (TREE_TYPE (inner));
3412 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3413 || POINTER_TYPE_P (TREE_TYPE (inner)))
3414 && tree_fits_shwi_p (size)
3415 && tree_to_shwi (size) == bitsize)
3416 return fold_convert_loc (loc, type, inner);
3419 bftype = type;
3420 if (TYPE_PRECISION (bftype) != bitsize
3421 || TYPE_UNSIGNED (bftype) == !unsignedp)
3422 bftype = build_nonstandard_integer_type (bitsize, 0);
3424 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3425 size_int (bitsize), bitsize_int (bitpos));
3427 if (bftype != type)
3428 result = fold_convert_loc (loc, type, result);
3430 return result;
3433 /* Optimize a bit-field compare.
3435 There are two cases: First is a compare against a constant and the
3436 second is a comparison of two items where the fields are at the same
3437 bit position relative to the start of a chunk (byte, halfword, word)
3438 large enough to contain it. In these cases we can avoid the shift
3439 implicit in bitfield extractions.
3441 For constants, we emit a compare of the shifted constant with the
3442 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3443 compared. For two fields at the same position, we do the ANDs with the
3444 similar mask and compare the result of the ANDs.
3446 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3447 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3448 are the left and right operands of the comparison, respectively.
3450 If the optimization described above can be done, we return the resulting
3451 tree. Otherwise we return zero. */
3453 static tree
3454 optimize_bit_field_compare (location_t loc, enum tree_code code,
3455 tree compare_type, tree lhs, tree rhs)
3457 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3458 tree type = TREE_TYPE (lhs);
3459 tree unsigned_type;
3460 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3461 machine_mode lmode, rmode, nmode;
3462 int lunsignedp, runsignedp;
3463 int lvolatilep = 0, rvolatilep = 0;
3464 tree linner, rinner = NULL_TREE;
3465 tree mask;
3466 tree offset;
3468 /* Get all the information about the extractions being done. If the bit size
3469 is the same as the size of the underlying object, we aren't doing an
3470 extraction at all and so can do nothing. We also don't want to
3471 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3472 then will no longer be able to replace it. */
3473 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3474 &lunsignedp, &lvolatilep, false);
3475 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3476 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3477 return 0;
3479 if (!const_p)
3481 /* If this is not a constant, we can only do something if bit positions,
3482 sizes, and signedness are the same. */
3483 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3484 &runsignedp, &rvolatilep, false);
3486 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3487 || lunsignedp != runsignedp || offset != 0
3488 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3489 return 0;
3492 /* See if we can find a mode to refer to this field. We should be able to,
3493 but fail if we can't. */
3494 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3495 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3496 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3497 TYPE_ALIGN (TREE_TYPE (rinner))),
3498 word_mode, false);
3499 if (nmode == VOIDmode)
3500 return 0;
3502 /* Set signed and unsigned types of the precision of this mode for the
3503 shifts below. */
3504 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3506 /* Compute the bit position and size for the new reference and our offset
3507 within it. If the new reference is the same size as the original, we
3508 won't optimize anything, so return zero. */
3509 nbitsize = GET_MODE_BITSIZE (nmode);
3510 nbitpos = lbitpos & ~ (nbitsize - 1);
3511 lbitpos -= nbitpos;
3512 if (nbitsize == lbitsize)
3513 return 0;
3515 if (BYTES_BIG_ENDIAN)
3516 lbitpos = nbitsize - lbitsize - lbitpos;
3518 /* Make the mask to be used against the extracted field. */
3519 mask = build_int_cst_type (unsigned_type, -1);
3520 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3521 mask = const_binop (RSHIFT_EXPR, mask,
3522 size_int (nbitsize - lbitsize - lbitpos));
3524 if (! const_p)
3525 /* If not comparing with constant, just rework the comparison
3526 and return. */
3527 return fold_build2_loc (loc, code, compare_type,
3528 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3529 make_bit_field_ref (loc, linner,
3530 unsigned_type,
3531 nbitsize, nbitpos,
3532 1),
3533 mask),
3534 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3535 make_bit_field_ref (loc, rinner,
3536 unsigned_type,
3537 nbitsize, nbitpos,
3538 1),
3539 mask));
3541 /* Otherwise, we are handling the constant case. See if the constant is too
3542 big for the field. Warn and return a tree for 0 (false) if so. We do
3543 this not only for its own sake, but to avoid having to test for this
3544 error case below. If we didn't, we might generate wrong code.
3546 For unsigned fields, the constant shifted right by the field length should
3547 be all zero. For signed fields, the high-order bits should agree with
3548 the sign bit. */
3550 if (lunsignedp)
3552 if (wi::lrshift (rhs, lbitsize) != 0)
3554 warning (0, "comparison is always %d due to width of bit-field",
3555 code == NE_EXPR);
3556 return constant_boolean_node (code == NE_EXPR, compare_type);
3559 else
3561 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3562 if (tem != 0 && tem != -1)
3564 warning (0, "comparison is always %d due to width of bit-field",
3565 code == NE_EXPR);
3566 return constant_boolean_node (code == NE_EXPR, compare_type);
3570 /* Single-bit compares should always be against zero. */
3571 if (lbitsize == 1 && ! integer_zerop (rhs))
3573 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3574 rhs = build_int_cst (type, 0);
3577 /* Make a new bitfield reference, shift the constant over the
3578 appropriate number of bits and mask it with the computed mask
3579 (in case this was a signed field). If we changed it, make a new one. */
3580 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3582 rhs = const_binop (BIT_AND_EXPR,
3583 const_binop (LSHIFT_EXPR,
3584 fold_convert_loc (loc, unsigned_type, rhs),
3585 size_int (lbitpos)),
3586 mask);
3588 lhs = build2_loc (loc, code, compare_type,
3589 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3590 return lhs;
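/* Worked example for optimize_bit_field_compare (illustrative,
   little-endian bit numbering): for

       struct S { unsigned int f : 3; };

   with F placed at bit 2 of an 8-bit unit, the mask computed above is
   ((0xff << 5) >> 3) == 0x1c, and s.f == 5 is rewritten as

       (BIT_FIELD_REF <s, 8, 0> & 0x1c) == 0x14

   i.e. the containing byte is masked in place and compared against
   the constant shifted left by lbitpos (5 << 2 == 0x14), avoiding the
   shift a generic bit-field extraction would need.  */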
3593 /* Subroutine for fold_truth_andor_1: decode a field reference.
3595 If EXP is a comparison reference, we return the innermost reference.
3597 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3598 set to the starting bit number.
3600 If the innermost field can be completely contained in a mode-sized
3601 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3603 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3604 otherwise it is not changed.
3606 *PUNSIGNEDP is set to the signedness of the field.
3608 *PMASK is set to the mask used. This is either contained in a
3609 BIT_AND_EXPR or derived from the width of the field.
3611 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3613 Return 0 if this is not a component reference or is one that we can't
3614 do anything with. */
3616 static tree
3617 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3618 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3619 int *punsignedp, int *pvolatilep,
3620 tree *pmask, tree *pand_mask)
3622 tree outer_type = 0;
3623 tree and_mask = 0;
3624 tree mask, inner, offset;
3625 tree unsigned_type;
3626 unsigned int precision;
3628 /* All the optimizations using this function assume integer fields.
3629 There are problems with FP fields since the type_for_size call
3630 below can fail for, e.g., XFmode. */
3631 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3632 return 0;
3634 /* We are interested in the bare arrangement of bits, so strip everything
3635 that doesn't affect the machine mode. However, record the type of the
3636 outermost expression if it may matter below. */
3637 if (CONVERT_EXPR_P (exp)
3638 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3639 outer_type = TREE_TYPE (exp);
3640 STRIP_NOPS (exp);
3642 if (TREE_CODE (exp) == BIT_AND_EXPR)
3644 and_mask = TREE_OPERAND (exp, 1);
3645 exp = TREE_OPERAND (exp, 0);
3646 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3647 if (TREE_CODE (and_mask) != INTEGER_CST)
3648 return 0;
3651 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3652 punsignedp, pvolatilep, false);
3653 if ((inner == exp && and_mask == 0)
3654 || *pbitsize < 0 || offset != 0
3655 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3656 return 0;
3658 /* If the number of bits in the reference is the same as the bitsize of
3659 the outer type, then the outer type gives the signedness. Otherwise
3660 (in case of a small bitfield) the signedness is unchanged. */
3661 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3662 *punsignedp = TYPE_UNSIGNED (outer_type);
3664 /* Compute the mask to access the bitfield. */
3665 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3666 precision = TYPE_PRECISION (unsigned_type);
3668 mask = build_int_cst_type (unsigned_type, -1);
3670 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3671 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3673 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3674 if (and_mask != 0)
3675 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3676 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3678 *pmask = mask;
3679 *pand_mask = and_mask;
3680 return inner;
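/* Mask construction above, by example (illustrative): for a field of
   *PBITSIZE == 3 bits in a 32-bit unsigned type, mask starts as
   0xffffffff, the left shift by 29 gives 0xe0000000 and the logical
   right shift by 29 gives 0x00000007 -- exactly three low-order ones,
   which is then ANDed with any mask found in an enclosing
   BIT_AND_EXPR.  */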
3683 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3684 bit positions and MASK has a signed type. */
3686 static int
3687 all_ones_mask_p (const_tree mask, unsigned int size)
3689 tree type = TREE_TYPE (mask);
3690 unsigned int precision = TYPE_PRECISION (type);
3692 /* If this function returns true when the type of the mask is
3693 UNSIGNED, then there will be errors. In particular see
3694 gcc.c-torture/execute/990326-1.c. There does not appear to be
3695 any documentation paper trail as to why this is so. But the pre
3696 wide-int worked with that restriction and it has been preserved
3697 here. */
3698 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3699 return false;
3701 return wi::mask (size, false, precision) == mask;
3704 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3705 represents the sign bit of EXP's type. If EXP represents a sign
3706 or zero extension, also test VAL against the unextended type.
3707 The return value is the (sub)expression whose sign bit is VAL,
3708 or NULL_TREE otherwise. */
3710 static tree
3711 sign_bit_p (tree exp, const_tree val)
3713 int width;
3714 tree t;
3716 /* Tree EXP must have an integral type. */
3717 t = TREE_TYPE (exp);
3718 if (! INTEGRAL_TYPE_P (t))
3719 return NULL_TREE;
3721 /* Tree VAL must be an integer constant. */
3722 if (TREE_CODE (val) != INTEGER_CST
3723 || TREE_OVERFLOW (val))
3724 return NULL_TREE;
3726 width = TYPE_PRECISION (t);
3727 if (wi::only_sign_bit_p (val, width))
3728 return exp;
3730 /* Handle extension from a narrower type. */
3731 if (TREE_CODE (exp) == NOP_EXPR
3732 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3733 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3735 return NULL_TREE;
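/* Example (illustrative): for a signed 8-bit EXP, sign_bit_p returns
   EXP when VAL is 0x80 (only the sign bit set) and NULL_TREE
   otherwise; this backs folds such as turning (x & 0x80) != 0 into
   x < 0 for an 8-bit x.  */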
3738 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3739 to be evaluated unconditionally. */
3741 static int
3742 simple_operand_p (const_tree exp)
3744 /* Strip any conversions that don't change the machine mode. */
3745 STRIP_NOPS (exp);
3747 return (CONSTANT_CLASS_P (exp)
3748 || TREE_CODE (exp) == SSA_NAME
3749 || (DECL_P (exp)
3750 && ! TREE_ADDRESSABLE (exp)
3751 && ! TREE_THIS_VOLATILE (exp)
3752 && ! DECL_NONLOCAL (exp)
3753 /* Don't regard global variables as simple. They may be
3754 allocated in ways unknown to the compiler (shared memory,
3755 #pragma weak, etc). */
3756 && ! TREE_PUBLIC (exp)
3757 && ! DECL_EXTERNAL (exp)
3758 /* Weakrefs are not safe to be read, since they can be NULL.
3759 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3760 have DECL_WEAK flag set. */
3761 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3762 /* Loading a static variable is unduly expensive, but global
3763 registers aren't expensive. */
3764 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3767 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3768 to be evaluated unconditionally.
3769 In addition to simple_operand_p, we assume that comparisons, conversions,
3770 and logic-not operations are simple, if their operands are simple, too. */
3772 static bool
3773 simple_operand_p_2 (tree exp)
3775 enum tree_code code;
3777 if (TREE_SIDE_EFFECTS (exp)
3778 || tree_could_trap_p (exp))
3779 return false;
3781 while (CONVERT_EXPR_P (exp))
3782 exp = TREE_OPERAND (exp, 0);
3784 code = TREE_CODE (exp);
3786 if (TREE_CODE_CLASS (code) == tcc_comparison)
3787 return (simple_operand_p (TREE_OPERAND (exp, 0))
3788 && simple_operand_p (TREE_OPERAND (exp, 1)));
3790 if (code == TRUTH_NOT_EXPR)
3791 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3793 return simple_operand_p (exp);
3797 /* The following functions are subroutines to fold_range_test and allow it to
3798 try to change a logical combination of comparisons into a range test.
3800 For example, both
3801 X == 2 || X == 3 || X == 4 || X == 5
3802 and
3803 X >= 2 && X <= 5
3804 are converted to
3805 (unsigned) (X - 2) <= 3
3807 We describe each set of comparisons as being either inside or outside
3808 a range, using a variable named like IN_P, and then describe the
3809 range with a lower and upper bound. If one of the bounds is omitted,
3810 it represents either the highest or lowest value of the type.
3812 In the comments below, we represent a range by two numbers in brackets
3813 preceded by a "+" to designate being inside that range, or a "-" to
3814 designate being outside that range, so the condition can be inverted by
3815 flipping the prefix. An omitted bound is represented by a "-". For
3816 example, "- [-, 10]" means being outside the range starting at the lowest
3817 possible value and ending at 10, in other words, being greater than 10.
3818 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3819 always false.
3821 We set up things so that the missing bounds are handled in a consistent
3822 manner so neither a missing bound nor "true" and "false" need to be
3823 handled using a special case. */
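
/* Worked example (added for exposition; hypothetical helper): the rewrite
   quoted above can be checked with plain C arithmetic.  Doing the
   subtraction in unsigned arithmetic sends every X below 2 to a huge
   value, so one unsigned comparison covers the whole chain:  */

static int
range_rewrite_sketch (int x)
{
  int chain = (x == 2 || x == 3 || x == 4 || x == 5);
  int range = ((unsigned) x - 2u <= 3u);
  return chain == range;        /* 1 for every int X */
}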
3825 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3826 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3827 and UPPER1_P are nonzero if the respective argument is an upper bound
3828 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3829 must be specified for a comparison. ARG1 will be converted to ARG0's
3830 type if both are specified. */
3832 static tree
3833 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3834 tree arg1, int upper1_p)
3836 tree tem;
3837 int result;
3838 int sgn0, sgn1;
3840 /* If neither arg represents infinity, do the normal operation.
3841 Else, if not a comparison, return infinity. Else handle the special
3842 comparison rules. Note that most of the cases below won't occur, but
3843 are handled for consistency. */
3845 if (arg0 != 0 && arg1 != 0)
3847 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3848 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3849 STRIP_NOPS (tem);
3850 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3853 if (TREE_CODE_CLASS (code) != tcc_comparison)
3854 return 0;
3856 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3857 for neither. In real maths, we cannot assume open ended ranges are
3858 the same. But, this is computer arithmetic, where numbers are finite.
3859 We can therefore make the transformation of any unbounded range with
3860 the value Z, Z being greater than any representable number. This permits
3861 us to treat unbounded ranges as equal. */
3862 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3863 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3864 switch (code)
3866 case EQ_EXPR:
3867 result = sgn0 == sgn1;
3868 break;
3869 case NE_EXPR:
3870 result = sgn0 != sgn1;
3871 break;
3872 case LT_EXPR:
3873 result = sgn0 < sgn1;
3874 break;
3875 case LE_EXPR:
3876 result = sgn0 <= sgn1;
3877 break;
3878 case GT_EXPR:
3879 result = sgn0 > sgn1;
3880 break;
3881 case GE_EXPR:
3882 result = sgn0 >= sgn1;
3883 break;
3884 default:
3885 gcc_unreachable ();
3888 return constant_boolean_node (result, type);
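
/* Illustrative sketch (added for exposition; hypothetical helper): a
   missing bound enters the switch above as a sentinel sign: 0 for a real
   value, -1 for an omitted lower bound (below every finite value), +1
   for an omitted upper bound (above every finite value).  The LT_EXPR arm
   then reduces to an ordinary comparison of sentinels:  */

static int
range_binop_lt_sketch (int sgn0, int sgn1)
{
  /* -1 < +1 -> 1 (any lower bound is below any upper bound);
     +1 < +1 -> 0 (two omitted upper bounds compare equal).  */
  return sgn0 < sgn1;
}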
3891 /* Helper routine for make_range. Perform one step for it, return
3892 new expression if the loop should continue or NULL_TREE if it should
3893 stop. */
3895 tree
3896 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3897 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3898 bool *strict_overflow_p)
3900 tree arg0_type = TREE_TYPE (arg0);
3901 tree n_low, n_high, low = *p_low, high = *p_high;
3902 int in_p = *p_in_p, n_in_p;
3904 switch (code)
3906 case TRUTH_NOT_EXPR:
3907 /* We can only do something if the range is testing for zero. */
3908 if (low == NULL_TREE || high == NULL_TREE
3909 || ! integer_zerop (low) || ! integer_zerop (high))
3910 return NULL_TREE;
3911 *p_in_p = ! in_p;
3912 return arg0;
3914 case EQ_EXPR: case NE_EXPR:
3915 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3916 /* We can only do something if the range is testing for zero
3917 and if the second operand is an integer constant. Note that
3918 saying something is "in" the range we make is done by
3919 complementing IN_P since it will be set in the initial case of
3920 being not equal to zero; "out" is leaving it alone. */
3921 if (low == NULL_TREE || high == NULL_TREE
3922 || ! integer_zerop (low) || ! integer_zerop (high)
3923 || TREE_CODE (arg1) != INTEGER_CST)
3924 return NULL_TREE;
3926 switch (code)
3928 case NE_EXPR: /* - [c, c] */
3929 low = high = arg1;
3930 break;
3931 case EQ_EXPR: /* + [c, c] */
3932 in_p = ! in_p, low = high = arg1;
3933 break;
3934 case GT_EXPR: /* - [-, c] */
3935 low = 0, high = arg1;
3936 break;
3937 case GE_EXPR: /* + [c, -] */
3938 in_p = ! in_p, low = arg1, high = 0;
3939 break;
3940 case LT_EXPR: /* - [c, -] */
3941 low = arg1, high = 0;
3942 break;
3943 case LE_EXPR: /* + [-, c] */
3944 in_p = ! in_p, low = 0, high = arg1;
3945 break;
3946 default:
3947 gcc_unreachable ();
3950 /* If this is an unsigned comparison, we also know that EXP is
3951 greater than or equal to zero. We base the range tests we make
3952 on that fact, so we record it here so we can parse existing
3953 range tests. We test arg0_type since often the return type
3954 of, e.g. EQ_EXPR, is boolean. */
3955 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3957 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3958 in_p, low, high, 1,
3959 build_int_cst (arg0_type, 0),
3960 NULL_TREE))
3961 return NULL_TREE;
3963 in_p = n_in_p, low = n_low, high = n_high;
3965 /* If the high bound is missing, but we have a nonzero low
3966 bound, reverse the range so it goes from zero to the low bound
3967 minus 1. */
3968 if (high == 0 && low && ! integer_zerop (low))
3970 in_p = ! in_p;
3971 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3972 build_int_cst (TREE_TYPE (low), 1), 0);
3973 low = build_int_cst (arg0_type, 0);
3977 *p_low = low;
3978 *p_high = high;
3979 *p_in_p = in_p;
3980 return arg0;
3982 case NEGATE_EXPR:
3983 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3984 low and high are non-NULL, then normalize will DTRT. */
3985 if (!TYPE_UNSIGNED (arg0_type)
3986 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3988 if (low == NULL_TREE)
3989 low = TYPE_MIN_VALUE (arg0_type);
3990 if (high == NULL_TREE)
3991 high = TYPE_MAX_VALUE (arg0_type);
3994 /* (-x) IN [a,b] -> x in [-b, -a] */
3995 n_low = range_binop (MINUS_EXPR, exp_type,
3996 build_int_cst (exp_type, 0),
3997 0, high, 1);
3998 n_high = range_binop (MINUS_EXPR, exp_type,
3999 build_int_cst (exp_type, 0),
4000 0, low, 0);
4001 if (n_high != 0 && TREE_OVERFLOW (n_high))
4002 return NULL_TREE;
4003 goto normalize;
4005 case BIT_NOT_EXPR:
4006 /* ~ X -> -X - 1 */
4007 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4008 build_int_cst (exp_type, 1));
4010 case PLUS_EXPR:
4011 case MINUS_EXPR:
4012 if (TREE_CODE (arg1) != INTEGER_CST)
4013 return NULL_TREE;
4015 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4016 move a constant to the other side. */
4017 if (!TYPE_UNSIGNED (arg0_type)
4018 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4019 return NULL_TREE;
4021 /* If EXP is signed, any overflow in the computation is undefined,
4022 so we don't worry about it so long as our computations on
4023 the bounds don't overflow. For unsigned, overflow is defined
4024 and this is exactly the right thing. */
4025 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4026 arg0_type, low, 0, arg1, 0);
4027 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4028 arg0_type, high, 1, arg1, 0);
4029 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4030 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4031 return NULL_TREE;
4033 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4034 *strict_overflow_p = true;
4036 normalize:
4037 /* Check for an unsigned range which has wrapped around the maximum
4038 value thus making n_high < n_low, and normalize it. */
4039 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4041 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4042 build_int_cst (TREE_TYPE (n_high), 1), 0);
4043 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4044 build_int_cst (TREE_TYPE (n_low), 1), 0);
4046 /* If the range is of the form +/- [ x+1, x ], we won't
4047 be able to normalize it. But then, it represents the
4048 whole range or the empty set, so make it
4049 +/- [ -, - ]. */
4050 if (tree_int_cst_equal (n_low, low)
4051 && tree_int_cst_equal (n_high, high))
4052 low = high = 0;
4053 else
4054 in_p = ! in_p;
4056 else
4057 low = n_low, high = n_high;
4059 *p_low = low;
4060 *p_high = high;
4061 *p_in_p = in_p;
4062 return arg0;
4064 CASE_CONVERT:
4065 case NON_LVALUE_EXPR:
4066 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4067 return NULL_TREE;
4069 if (! INTEGRAL_TYPE_P (arg0_type)
4070 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4071 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4072 return NULL_TREE;
4074 n_low = low, n_high = high;
4076 if (n_low != 0)
4077 n_low = fold_convert_loc (loc, arg0_type, n_low);
4079 if (n_high != 0)
4080 n_high = fold_convert_loc (loc, arg0_type, n_high);
4082 /* If we're converting arg0 from an unsigned type, to exp,
4083 a signed type, we will be doing the comparison as unsigned.
4084 The tests above have already verified that LOW and HIGH
4085 are both positive.
4087 So we have to ensure that we will handle large unsigned
4088 values the same way that the current signed bounds treat
4089 negative values. */
4091 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4093 tree high_positive;
4094 tree equiv_type;
4095 /* For fixed-point modes, we need to pass the saturating flag
4096 as the 2nd parameter. */
4097 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4098 equiv_type
4099 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4100 TYPE_SATURATING (arg0_type));
4101 else
4102 equiv_type
4103 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4105 /* A range without an upper bound is, naturally, unbounded.
4106 Since convert would have cropped a very large value, use
4107 the max value for the destination type. */
4108 high_positive
4109 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4110 : TYPE_MAX_VALUE (arg0_type);
4112 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4113 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4114 fold_convert_loc (loc, arg0_type,
4115 high_positive),
4116 build_int_cst (arg0_type, 1));
4118 /* If the low bound is specified, "and" the range with the
4119 range for which the original unsigned value will be
4120 positive. */
4121 if (low != 0)
4123 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4124 1, fold_convert_loc (loc, arg0_type,
4125 integer_zero_node),
4126 high_positive))
4127 return NULL_TREE;
4129 in_p = (n_in_p == in_p);
4131 else
4133 /* Otherwise, "or" the range with the range of the input
4134 that will be interpreted as negative. */
4135 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4136 1, fold_convert_loc (loc, arg0_type,
4137 integer_zero_node),
4138 high_positive))
4139 return NULL_TREE;
4141 in_p = (in_p != n_in_p);
4145 *p_low = n_low;
4146 *p_high = n_high;
4147 *p_in_p = in_p;
4148 return arg0;
4150 default:
4151 return NULL_TREE;
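
/* Worked example (added for exposition; hypothetical helper): one
   PLUS_EXPR step of the walk above moves the constant onto the bounds,
   e.g. "x + 10 in + [15, 20]" becomes "x in + [5, 10]".  Checked directly
   (away from the type's extremes, where the folder instead relies on
   signed overflow being undefined and sets *STRICT_OVERFLOW_P):  */

static int
plus_step_sketch (int x)
{
  return ((x + 10 >= 15 && x + 10 <= 20) == (x >= 5 && x <= 10));
}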
4155 /* Given EXP, a logical expression, set the range it is testing into
4156 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4157 actually being tested. *PLOW and *PHIGH will be made of the same
4158 type as the returned expression. If EXP is not a comparison, we
4159 will most likely not be returning a useful value and range. Set
4160 *STRICT_OVERFLOW_P to true if the return value is only valid
4161 because signed overflow is undefined; otherwise, do not change
4162 *STRICT_OVERFLOW_P. */
4164 tree
4165 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4166 bool *strict_overflow_p)
4168 enum tree_code code;
4169 tree arg0, arg1 = NULL_TREE;
4170 tree exp_type, nexp;
4171 int in_p;
4172 tree low, high;
4173 location_t loc = EXPR_LOCATION (exp);
4175 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4176 and see if we can refine the range. Some of the cases below may not
4177 happen, but it doesn't seem worth worrying about this. We "continue"
4178 the outer loop when we've changed something; otherwise we "break"
4179 the switch, which will "break" the while. */
4181 in_p = 0;
4182 low = high = build_int_cst (TREE_TYPE (exp), 0);
4184 while (1)
4186 code = TREE_CODE (exp);
4187 exp_type = TREE_TYPE (exp);
4188 arg0 = NULL_TREE;
4190 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4192 if (TREE_OPERAND_LENGTH (exp) > 0)
4193 arg0 = TREE_OPERAND (exp, 0);
4194 if (TREE_CODE_CLASS (code) == tcc_binary
4195 || TREE_CODE_CLASS (code) == tcc_comparison
4196 || (TREE_CODE_CLASS (code) == tcc_expression
4197 && TREE_OPERAND_LENGTH (exp) > 1))
4198 arg1 = TREE_OPERAND (exp, 1);
4200 if (arg0 == NULL_TREE)
4201 break;
4203 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4204 &high, &in_p, strict_overflow_p);
4205 if (nexp == NULL_TREE)
4206 break;
4207 exp = nexp;
4210 /* If EXP is a constant, we can evaluate whether this is true or false. */
4211 if (TREE_CODE (exp) == INTEGER_CST)
4213 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4214 exp, 0, low, 0))
4215 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4216 exp, 1, high, 1)));
4217 low = high = 0;
4218 exp = 0;
4221 *pin_p = in_p, *plow = low, *phigh = high;
4222 return exp;
4225 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4226 type, TYPE, return an expression to test if EXP is in (or out of, depending
4227 on IN_P) the range. Return 0 if the test couldn't be created. */
4229 tree
4230 build_range_check (location_t loc, tree type, tree exp, int in_p,
4231 tree low, tree high)
4233 tree etype = TREE_TYPE (exp), value;
4235 #ifdef HAVE_canonicalize_funcptr_for_compare
4236 /* Disable this optimization for function pointer expressions
4237 on targets that require function pointer canonicalization. */
4238 if (HAVE_canonicalize_funcptr_for_compare
4239 && TREE_CODE (etype) == POINTER_TYPE
4240 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4241 return NULL_TREE;
4242 #endif
4244 if (! in_p)
4246 value = build_range_check (loc, type, exp, 1, low, high);
4247 if (value != 0)
4248 return invert_truthvalue_loc (loc, value);
4250 return 0;
4253 if (low == 0 && high == 0)
4254 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4256 if (low == 0)
4257 return fold_build2_loc (loc, LE_EXPR, type, exp,
4258 fold_convert_loc (loc, etype, high));
4260 if (high == 0)
4261 return fold_build2_loc (loc, GE_EXPR, type, exp,
4262 fold_convert_loc (loc, etype, low));
4264 if (operand_equal_p (low, high, 0))
4265 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4266 fold_convert_loc (loc, etype, low));
4268 if (integer_zerop (low))
4270 if (! TYPE_UNSIGNED (etype))
4272 etype = unsigned_type_for (etype);
4273 high = fold_convert_loc (loc, etype, high);
4274 exp = fold_convert_loc (loc, etype, exp);
4276 return build_range_check (loc, type, exp, 1, 0, high);
4279 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4280 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4282 int prec = TYPE_PRECISION (etype);
4284 if (wi::mask (prec - 1, false, prec) == high)
4286 if (TYPE_UNSIGNED (etype))
4288 tree signed_etype = signed_type_for (etype);
4289 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4290 etype
4291 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4292 else
4293 etype = signed_etype;
4294 exp = fold_convert_loc (loc, etype, exp);
4296 return fold_build2_loc (loc, GT_EXPR, type, exp,
4297 build_int_cst (etype, 0));
4301 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4302 This requires wrap-around arithmetic for the type of the expression.
4303 First make sure that arithmetic in this type is valid, then make sure
4304 that it wraps around. */
4305 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4306 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4307 TYPE_UNSIGNED (etype));
4309 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4311 tree utype, minv, maxv;
4313 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4314 for the type in question, as we rely on this here. */
4315 utype = unsigned_type_for (etype);
4316 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4317 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4318 build_int_cst (TREE_TYPE (maxv), 1), 1);
4319 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4321 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4322 minv, 1, maxv, 1)))
4323 etype = utype;
4324 else
4325 return 0;
4328 high = fold_convert_loc (loc, etype, high);
4329 low = fold_convert_loc (loc, etype, low);
4330 exp = fold_convert_loc (loc, etype, exp);
4332 value = const_binop (MINUS_EXPR, high, low);
4335 if (POINTER_TYPE_P (etype))
4337 if (value != 0 && !TREE_OVERFLOW (value))
4339 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4340 return build_range_check (loc, type,
4341 fold_build_pointer_plus_loc (loc, exp, low),
4342 1, build_int_cst (etype, 0), value);
4344 return 0;
4347 if (value != 0 && !TREE_OVERFLOW (value))
4348 return build_range_check (loc, type,
4349 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4350 1, build_int_cst (etype, 0), value);
4352 return 0;
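
/* Worked example (added for exposition; hypothetical helper): the
   unsigned-subtraction form built above, for the classic digit test with
   low = '0' and high = '9':  */

static int
digit_range_sketch (int c)
{
  int direct    = (c >= '0' && c <= '9');
  int rewritten = ((unsigned) c - '0' <= (unsigned) ('9' - '0'));
  return direct == rewritten;   /* 1 for every int C */
}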
4355 /* Return the predecessor of VAL in its type, handling the infinite case. */
4357 static tree
4358 range_predecessor (tree val)
4360 tree type = TREE_TYPE (val);
4362 if (INTEGRAL_TYPE_P (type)
4363 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4364 return 0;
4365 else
4366 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4367 build_int_cst (TREE_TYPE (val), 1), 0);
4370 /* Return the successor of VAL in its type, handling the infinite case. */
4372 static tree
4373 range_successor (tree val)
4375 tree type = TREE_TYPE (val);
4377 if (INTEGRAL_TYPE_P (type)
4378 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4379 return 0;
4380 else
4381 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4382 build_int_cst (TREE_TYPE (val), 1), 0);
4385 /* Given two ranges, see if we can merge them into one. Return 1 if we
4386 can, 0 if we can't. Set the output range into the specified parameters. */
4388 bool
4389 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4390 tree high0, int in1_p, tree low1, tree high1)
4392 int no_overlap;
4393 int subset;
4394 int temp;
4395 tree tem;
4396 int in_p;
4397 tree low, high;
4398 int lowequal = ((low0 == 0 && low1 == 0)
4399 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4400 low0, 0, low1, 0)));
4401 int highequal = ((high0 == 0 && high1 == 0)
4402 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4403 high0, 1, high1, 1)));
4405 /* Make range 0 be the range that starts first, or ends last if they
4406 start at the same value. Swap them if it isn't. */
4407 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4408 low0, 0, low1, 0))
4409 || (lowequal
4410 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4411 high1, 1, high0, 1))))
4413 temp = in0_p, in0_p = in1_p, in1_p = temp;
4414 tem = low0, low0 = low1, low1 = tem;
4415 tem = high0, high0 = high1, high1 = tem;
4418 /* Now flag two cases, whether the ranges are disjoint or whether the
4419 second range is totally subsumed in the first. Note that the tests
4420 below are simplified by the ones above. */
4421 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4422 high0, 1, low1, 0));
4423 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4424 high1, 1, high0, 1));
4426 /* We now have four cases, depending on whether we are including or
4427 excluding the two ranges. */
4428 if (in0_p && in1_p)
4430 /* If they don't overlap, the result is false. If the second range
4431 is a subset it is the result. Otherwise, the range is from the start
4432 of the second to the end of the first. */
4433 if (no_overlap)
4434 in_p = 0, low = high = 0;
4435 else if (subset)
4436 in_p = 1, low = low1, high = high1;
4437 else
4438 in_p = 1, low = low1, high = high0;
4441 else if (in0_p && ! in1_p)
4443 /* If they don't overlap, the result is the first range. If they are
4444 equal, the result is false. If the second range is a subset of the
4445 first, and the ranges begin at the same place, we go from just after
4446 the end of the second range to the end of the first. If the second
4447 range is not a subset of the first, or if it is a subset and both
4448 ranges end at the same place, the range starts at the start of the
4449 first range and ends just before the second range.
4450 Otherwise, we can't describe this as a single range. */
4451 if (no_overlap)
4452 in_p = 1, low = low0, high = high0;
4453 else if (lowequal && highequal)
4454 in_p = 0, low = high = 0;
4455 else if (subset && lowequal)
4457 low = range_successor (high1);
4458 high = high0;
4459 in_p = 1;
4460 if (low == 0)
4462 /* We are in the weird situation where high0 > high1 but
4463 high1 has no successor. Punt. */
4464 return 0;
4467 else if (! subset || highequal)
4469 low = low0;
4470 high = range_predecessor (low1);
4471 in_p = 1;
4472 if (high == 0)
4474 /* low0 < low1 but low1 has no predecessor. Punt. */
4475 return 0;
4478 else
4479 return 0;
4482 else if (! in0_p && in1_p)
4484 /* If they don't overlap, the result is the second range. If the second
4485 is a subset of the first, the result is false. Otherwise,
4486 the range starts just after the first range and ends at the
4487 end of the second. */
4488 if (no_overlap)
4489 in_p = 1, low = low1, high = high1;
4490 else if (subset || highequal)
4491 in_p = 0, low = high = 0;
4492 else
4494 low = range_successor (high0);
4495 high = high1;
4496 in_p = 1;
4497 if (low == 0)
4499 /* high1 > high0 but high0 has no successor. Punt. */
4500 return 0;
4505 else
4507 /* The case where we are excluding both ranges. Here the complex case
4508 is if they don't overlap. In that case, the only time we have a
4509 range is if they are adjacent. If the second is a subset of the
4510 first, the result is the first. Otherwise, the range to exclude
4511 starts at the beginning of the first range and ends at the end of the
4512 second. */
4513 if (no_overlap)
4515 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4516 range_successor (high0),
4517 1, low1, 0)))
4518 in_p = 0, low = low0, high = high1;
4519 else
4521 /* Canonicalize - [min, x] into - [-, x]. */
4522 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4523 switch (TREE_CODE (TREE_TYPE (low0)))
4525 case ENUMERAL_TYPE:
4526 if (TYPE_PRECISION (TREE_TYPE (low0))
4527 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4528 break;
4529 /* FALLTHROUGH */
4530 case INTEGER_TYPE:
4531 if (tree_int_cst_equal (low0,
4532 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4533 low0 = 0;
4534 break;
4535 case POINTER_TYPE:
4536 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4537 && integer_zerop (low0))
4538 low0 = 0;
4539 break;
4540 default:
4541 break;
4544 /* Canonicalize - [x, max] into - [x, -]. */
4545 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4546 switch (TREE_CODE (TREE_TYPE (high1)))
4548 case ENUMERAL_TYPE:
4549 if (TYPE_PRECISION (TREE_TYPE (high1))
4550 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4551 break;
4552 /* FALLTHROUGH */
4553 case INTEGER_TYPE:
4554 if (tree_int_cst_equal (high1,
4555 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4556 high1 = 0;
4557 break;
4558 case POINTER_TYPE:
4559 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4560 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4561 high1, 1,
4562 build_int_cst (TREE_TYPE (high1), 1),
4563 1)))
4564 high1 = 0;
4565 break;
4566 default:
4567 break;
4570 /* The ranges might be also adjacent between the maximum and
4571 minimum values of the given type. For
4572 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4573 return + [x + 1, y - 1]. */
4574 if (low0 == 0 && high1 == 0)
4576 low = range_successor (high0);
4577 high = range_predecessor (low1);
4578 if (low == 0 || high == 0)
4579 return 0;
4581 in_p = 1;
4583 else
4584 return 0;
4587 else if (subset)
4588 in_p = 0, low = low0, high = high0;
4589 else
4590 in_p = 0, low = low0, high = high1;
4593 *pin_p = in_p, *plow = low, *phigh = high;
4594 return 1;
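
/* Worked example (added for exposition; hypothetical helper): the
   in0_p && in1_p branch above on overlapping ranges.  + [2, 10] combined
   with + [5, 20] is neither disjoint nor a subset, so the result runs
   from the second range's start to the first range's end, + [5, 10]:  */

static int
merge_in_in_sketch (int x)
{
  return (((x >= 2 && x <= 10) && (x >= 5 && x <= 20))
	  == (x >= 5 && x <= 10));
}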
4598 /* Subroutine of fold, looking inside expressions of the form
4599 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4600 of the COND_EXPR. This function is also used to optimize
4601 A op B ? C : A, by reversing the comparison first.
4603 Return a folded expression whose code is not a COND_EXPR
4604 anymore, or NULL_TREE if no folding opportunity is found. */
4606 static tree
4607 fold_cond_expr_with_comparison (location_t loc, tree type,
4608 tree arg0, tree arg1, tree arg2)
4610 enum tree_code comp_code = TREE_CODE (arg0);
4611 tree arg00 = TREE_OPERAND (arg0, 0);
4612 tree arg01 = TREE_OPERAND (arg0, 1);
4613 tree arg1_type = TREE_TYPE (arg1);
4614 tree tem;
4616 STRIP_NOPS (arg1);
4617 STRIP_NOPS (arg2);
4619 /* If we have A op 0 ? A : -A, consider applying the following
4620 transformations:
4622 A == 0? A : -A same as -A
4623 A != 0? A : -A same as A
4624 A >= 0? A : -A same as abs (A)
4625 A > 0? A : -A same as abs (A)
4626 A <= 0? A : -A same as -abs (A)
4627 A < 0? A : -A same as -abs (A)
4629 None of these transformations work for modes with signed
4630 zeros. If A is +/-0, the first two transformations will
4631 change the sign of the result (from +0 to -0, or vice
4632 versa). The last four will fix the sign of the result,
4633 even though the original expressions could be positive or
4634 negative, depending on the sign of A.
4636 Note that all these transformations are correct if A is
4637 NaN, since the two alternatives (A and -A) are also NaNs. */
4638 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4639 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4640 ? real_zerop (arg01)
4641 : integer_zerop (arg01))
4642 && ((TREE_CODE (arg2) == NEGATE_EXPR
4643 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4644 /* In the case that A is of the form X-Y, '-A' (arg2) may
4645 have already been folded to Y-X, check for that. */
4646 || (TREE_CODE (arg1) == MINUS_EXPR
4647 && TREE_CODE (arg2) == MINUS_EXPR
4648 && operand_equal_p (TREE_OPERAND (arg1, 0),
4649 TREE_OPERAND (arg2, 1), 0)
4650 && operand_equal_p (TREE_OPERAND (arg1, 1),
4651 TREE_OPERAND (arg2, 0), 0))))
4652 switch (comp_code)
4654 case EQ_EXPR:
4655 case UNEQ_EXPR:
4656 tem = fold_convert_loc (loc, arg1_type, arg1);
4657 return pedantic_non_lvalue_loc (loc,
4658 fold_convert_loc (loc, type,
4659 negate_expr (tem)));
4660 case NE_EXPR:
4661 case LTGT_EXPR:
4662 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4663 case UNGE_EXPR:
4664 case UNGT_EXPR:
4665 if (flag_trapping_math)
4666 break;
4667 /* Fall through. */
4668 case GE_EXPR:
4669 case GT_EXPR:
4670 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4671 arg1 = fold_convert_loc (loc, signed_type_for
4672 (TREE_TYPE (arg1)), arg1);
4673 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4674 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4675 case UNLE_EXPR:
4676 case UNLT_EXPR:
4677 if (flag_trapping_math)
4678 break;
4679 case LE_EXPR:
4680 case LT_EXPR:
4681 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4682 arg1 = fold_convert_loc (loc, signed_type_for
4683 (TREE_TYPE (arg1)), arg1);
4684 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4685 return negate_expr (fold_convert_loc (loc, type, tem));
4686 default:
4687 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4688 break;
4691 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4692 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4693 both transformations are correct when A is NaN: A != 0
4694 is then true, and A == 0 is false. */
4696 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4697 && integer_zerop (arg01) && integer_zerop (arg2))
4699 if (comp_code == NE_EXPR)
4700 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4701 else if (comp_code == EQ_EXPR)
4702 return build_zero_cst (type);
4705 /* Try some transformations of A op B ? A : B.
4707 A == B? A : B same as B
4708 A != B? A : B same as A
4709 A >= B? A : B same as max (A, B)
4710 A > B? A : B same as max (B, A)
4711 A <= B? A : B same as min (A, B)
4712 A < B? A : B same as min (B, A)
4714 As above, these transformations don't work in the presence
4715 of signed zeros. For example, if A and B are zeros of
4716 opposite sign, the first two transformations will change
4717 the sign of the result. In the last four, the original
4718 expressions give different results for (A=+0, B=-0) and
4719 (A=-0, B=+0), but the transformed expressions do not.
4721 The first two transformations are correct if either A or B
4722 is a NaN. In the first transformation, the condition will
4723 be false, and B will indeed be chosen. In the case of the
4724 second transformation, the condition A != B will be true,
4725 and A will be chosen.
4727 The conversions to max() and min() are not correct if B is
4728 a number and A is not. The conditions in the original
4729 expressions will be false, so all four give B. The min()
4730 and max() versions would give a NaN instead. */
4731 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4732 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4733 /* Avoid these transformations if the COND_EXPR may be used
4734 as an lvalue in the C++ front-end. PR c++/19199. */
4735 && (in_gimple_form
4736 || VECTOR_TYPE_P (type)
4737 || (strcmp (lang_hooks.name, "GNU C++") != 0
4738 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4739 || ! maybe_lvalue_p (arg1)
4740 || ! maybe_lvalue_p (arg2)))
4742 tree comp_op0 = arg00;
4743 tree comp_op1 = arg01;
4744 tree comp_type = TREE_TYPE (comp_op0);
4746 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4747 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4749 comp_type = type;
4750 comp_op0 = arg1;
4751 comp_op1 = arg2;
4754 switch (comp_code)
4756 case EQ_EXPR:
4757 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4758 case NE_EXPR:
4759 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4760 case LE_EXPR:
4761 case LT_EXPR:
4762 case UNLE_EXPR:
4763 case UNLT_EXPR:
4764 /* In C++ a ?: expression can be an lvalue, so put the
4765 operand which will be used if they are equal first
4766 so that we can convert this back to the
4767 corresponding COND_EXPR. */
4768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4770 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4771 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4772 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4773 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4774 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4775 comp_op1, comp_op0);
4776 return pedantic_non_lvalue_loc (loc,
4777 fold_convert_loc (loc, type, tem));
4779 break;
4780 case GE_EXPR:
4781 case GT_EXPR:
4782 case UNGE_EXPR:
4783 case UNGT_EXPR:
4784 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4786 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4787 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4788 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4789 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4790 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4791 comp_op1, comp_op0);
4792 return pedantic_non_lvalue_loc (loc,
4793 fold_convert_loc (loc, type, tem));
4795 break;
4796 case UNEQ_EXPR:
4797 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4798 return pedantic_non_lvalue_loc (loc,
4799 fold_convert_loc (loc, type, arg2));
4800 break;
4801 case LTGT_EXPR:
4802 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4803 return pedantic_non_lvalue_loc (loc,
4804 fold_convert_loc (loc, type, arg1));
4805 break;
4806 default:
4807 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4808 break;
4812 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4813 we might still be able to simplify this. For example,
4814 if C1 is one less or one more than C2, this might have started
4815 out as a MIN or MAX and been transformed by this function.
4816 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4818 if (INTEGRAL_TYPE_P (type)
4819 && TREE_CODE (arg01) == INTEGER_CST
4820 && TREE_CODE (arg2) == INTEGER_CST)
4821 switch (comp_code)
4823 case EQ_EXPR:
4824 if (TREE_CODE (arg1) == INTEGER_CST)
4825 break;
4826 /* We can replace A with C1 in this case. */
4827 arg1 = fold_convert_loc (loc, type, arg01);
4828 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4830 case LT_EXPR:
4831 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4832 MIN_EXPR, to preserve the signedness of the comparison. */
4833 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4834 OEP_ONLY_CONST)
4835 && operand_equal_p (arg01,
4836 const_binop (PLUS_EXPR, arg2,
4837 build_int_cst (type, 1)),
4838 OEP_ONLY_CONST))
4840 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4841 fold_convert_loc (loc, TREE_TYPE (arg00),
4842 arg2));
4843 return pedantic_non_lvalue_loc (loc,
4844 fold_convert_loc (loc, type, tem));
4846 break;
4848 case LE_EXPR:
4849 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4850 as above. */
4851 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4852 OEP_ONLY_CONST)
4853 && operand_equal_p (arg01,
4854 const_binop (MINUS_EXPR, arg2,
4855 build_int_cst (type, 1)),
4856 OEP_ONLY_CONST))
4858 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4859 fold_convert_loc (loc, TREE_TYPE (arg00),
4860 arg2));
4861 return pedantic_non_lvalue_loc (loc,
4862 fold_convert_loc (loc, type, tem));
4864 break;
4866 case GT_EXPR:
4867 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4868 MAX_EXPR, to preserve the signedness of the comparison. */
4869 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4870 OEP_ONLY_CONST)
4871 && operand_equal_p (arg01,
4872 const_binop (MINUS_EXPR, arg2,
4873 build_int_cst (type, 1)),
4874 OEP_ONLY_CONST))
4876 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4877 fold_convert_loc (loc, TREE_TYPE (arg00),
4878 arg2));
4879 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4881 break;
4883 case GE_EXPR:
4884 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4885 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4886 OEP_ONLY_CONST)
4887 && operand_equal_p (arg01,
4888 const_binop (PLUS_EXPR, arg2,
4889 build_int_cst (type, 1)),
4890 OEP_ONLY_CONST))
4892 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4893 fold_convert_loc (loc, TREE_TYPE (arg00),
4894 arg2));
4895 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4897 break;
4898 case NE_EXPR:
4899 break;
4900 default:
4901 gcc_unreachable ();
4904 return NULL_TREE;
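
/* Worked example (added for exposition; hypothetical helper): the
   "A op 0 ? A : -A" table at the top of this function, for plain ints,
   which have no signed zeros (with the usual two's-complement caveat at
   INT_MIN):  */

static int
cond_abs_sketch (int a)
{
  int abs_a = (a < 0 ? -a : a);             /* abs (A) */
  return ((a >= 0 ? a : -a) == abs_a)       /* A >= 0 ? A : -A == abs (A) */
	 && ((a <= 0 ? a : -a) == -abs_a);  /* A <= 0 ? A : -A == -abs (A) */
}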
4909 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4910 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4911 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4912 false) >= 2)
4913 #endif
4915 /* EXP is some logical combination of boolean tests. See if we can
4916 merge it into some range test. Return the new tree if so. */
4918 static tree
4919 fold_range_test (location_t loc, enum tree_code code, tree type,
4920 tree op0, tree op1)
4922 int or_op = (code == TRUTH_ORIF_EXPR
4923 || code == TRUTH_OR_EXPR);
4924 int in0_p, in1_p, in_p;
4925 tree low0, low1, low, high0, high1, high;
4926 bool strict_overflow_p = false;
4927 tree tem, lhs, rhs;
4928 const char * const warnmsg = G_("assuming signed overflow does not occur "
4929 "when simplifying range test");
4931 if (!INTEGRAL_TYPE_P (type))
4932 return 0;
4934 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4935 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4937 /* If this is an OR operation, invert both sides; we will invert
4938 again at the end. */
4939 if (or_op)
4940 in0_p = ! in0_p, in1_p = ! in1_p;
4942 /* If both expressions are the same, if we can merge the ranges, and we
4943 can build the range test, return it or it inverted. If one of the
4944 ranges is always true or always false, consider it to be the same
4945 expression as the other. */
4946 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4947 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4948 in1_p, low1, high1)
4949 && 0 != (tem = (build_range_check (loc, type,
4950 lhs != 0 ? lhs
4951 : rhs != 0 ? rhs : integer_zero_node,
4952 in_p, low, high))))
4954 if (strict_overflow_p)
4955 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4956 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4959 /* On machines where the branch cost is expensive, if this is a
4960 short-circuited branch and the underlying object on both sides
4961 is the same, make a non-short-circuit operation. */
4962 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4963 && lhs != 0 && rhs != 0
4964 && (code == TRUTH_ANDIF_EXPR
4965 || code == TRUTH_ORIF_EXPR)
4966 && operand_equal_p (lhs, rhs, 0))
4968 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4969 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4970 which cases we can't do this. */
4971 if (simple_operand_p (lhs))
4972 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4973 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4974 type, op0, op1);
4976 else if (!lang_hooks.decls.global_bindings_p ()
4977 && !CONTAINS_PLACEHOLDER_P (lhs))
4979 tree common = save_expr (lhs);
4981 if (0 != (lhs = build_range_check (loc, type, common,
4982 or_op ? ! in0_p : in0_p,
4983 low0, high0))
4984 && (0 != (rhs = build_range_check (loc, type, common,
4985 or_op ? ! in1_p : in1_p,
4986 low1, high1))))
4988 if (strict_overflow_p)
4989 fold_overflow_warning (warnmsg,
4990 WARN_STRICT_OVERFLOW_COMPARISON);
4991 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4992 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4993 type, lhs, rhs);
4998 return 0;
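
/* Worked example (added for exposition; hypothetical helper): when
   LOGICAL_OP_NON_SHORT_CIRCUIT holds and both operands are simple and
   side-effect free, the rewrite above trades the branchy TRUTH_ANDIF
   form for a branch-free one; the two agree on all truth values:  */

static int
non_short_circuit_sketch (int p, int q)
{
  return ((p != 0 && q != 0) == ((p != 0) & (q != 0)));
}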
5001 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5002 bit value. Arrange things so the extra bits will be set to zero if and
5003 only if C is sign-extended to its full width. If MASK is nonzero,
5004 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5006 static tree
5007 unextend (tree c, int p, int unsignedp, tree mask)
5009 tree type = TREE_TYPE (c);
5010 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5011 tree temp;
5013 if (p == modesize || unsignedp)
5014 return c;
5016 /* We work by getting just the sign bit into the low-order bit, then
5017 into the high-order bit, then sign-extend. We then XOR that value
5018 with C. */
5019 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5021 /* We must use a signed type in order to get an arithmetic right shift.
5022 However, we must also avoid introducing accidental overflows, so that
5023 a subsequent call to integer_zerop will work. Hence we must
5024 do the type conversion here. At this point, the constant is either
5025 zero or one, and the conversion to a signed type can never overflow.
5026 We could get an overflow if this conversion is done anywhere else. */
5027 if (TYPE_UNSIGNED (type))
5028 temp = fold_convert (signed_type_for (type), temp);
5030 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5031 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5032 if (mask != 0)
5033 temp = const_binop (BIT_AND_EXPR, temp,
5034 fold_convert (TREE_TYPE (c), mask));
5035 /* If necessary, convert the type back to match the type of C. */
5036 if (TYPE_UNSIGNED (type))
5037 temp = fold_convert (type, temp);
5039 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
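
/* Illustrative sketch (added for exposition; hypothetical helper): the
   same shift/XOR dance on a plain 32-bit unsigned int, assuming
   1 <= P < 32.  TEMP replicates the P-bit sign bit over bits P..31, so
   C ^ TEMP has zero high bits exactly when C was already sign-extended
   from P bits:  */

static int
unextend_sketch (unsigned int c, int p)
{
  unsigned int temp = ((c >> (p - 1)) & 1) ? (~0u << p) : 0u;
  return ((c ^ temp) >> p) == 0;
}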
5042 /* For an expression that has the form
5043 (A && B) || ~B
5045 (A || B) && ~B,
5046 we can drop one of the inner expressions and simplify to
5047 A || ~B
5049 A && ~B
5050 LOC is the location of the resulting expression. OP is the inner
5051 logical operation; the left-hand side in the examples above, while CMPOP
5052 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5053 removing a condition that guards another, as in
5054 (A != NULL && A->...) || A == NULL
5055 which we must not transform. If RHS_ONLY is true, only eliminate the
5056 right-most operand of the inner logical operation. */
5058 static tree
5059 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5060 bool rhs_only)
5062 tree type = TREE_TYPE (cmpop);
5063 enum tree_code code = TREE_CODE (cmpop);
5064 enum tree_code truthop_code = TREE_CODE (op);
5065 tree lhs = TREE_OPERAND (op, 0);
5066 tree rhs = TREE_OPERAND (op, 1);
5067 tree orig_lhs = lhs, orig_rhs = rhs;
5068 enum tree_code rhs_code = TREE_CODE (rhs);
5069 enum tree_code lhs_code = TREE_CODE (lhs);
5070 enum tree_code inv_code;
5072 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5073 return NULL_TREE;
5075 if (TREE_CODE_CLASS (code) != tcc_comparison)
5076 return NULL_TREE;
5078 if (rhs_code == truthop_code)
5080 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5081 if (newrhs != NULL_TREE)
5083 rhs = newrhs;
5084 rhs_code = TREE_CODE (rhs);
5087 if (lhs_code == truthop_code && !rhs_only)
5089 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5090 if (newlhs != NULL_TREE)
5092 lhs = newlhs;
5093 lhs_code = TREE_CODE (lhs);
5097 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5098 if (inv_code == rhs_code
5099 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5100 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5101 return lhs;
5102 if (!rhs_only && inv_code == lhs_code
5103 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5104 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5105 return rhs;
5106 if (rhs != orig_rhs || lhs != orig_lhs)
5107 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5108 lhs, rhs);
5109 return NULL_TREE;
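
/* Worked example (added for exposition; hypothetical helper): both shapes
   handled above, checked over all truth-value combinations of A and B:  */

static int
opposite_arm_sketch (int a, int b)
{
  return (((a && b) || !b) == (a || !b))      /* (A && B) || ~B -> A || ~B */
	 && (((a || b) && !b) == (a && !b));  /* (A || B) && ~B -> A && ~B */
}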
5112 /* Find ways of folding logical expressions of LHS and RHS:
5113 Try to merge two comparisons to the same innermost item.
5114 Look for range tests like "ch >= '0' && ch <= '9'".
5115 Look for combinations of simple terms on machines with expensive branches
5116 and evaluate the RHS unconditionally.
5118 For example, if we have p->a == 2 && p->b == 4 and we can make an
5119 object large enough to span both A and B, we can do this with a comparison
5120 against the object ANDed with a mask.
5122 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5123 operations to do this with one comparison.
5125 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5126 function and the one above.
5128 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5129 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5131 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5132 two operands.
5134 We return the simplified tree or 0 if no optimization is possible. */
5136 static tree
5137 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5138 tree lhs, tree rhs)
5140 /* If this is the "or" of two comparisons, we can do something if
5141 the comparisons are NE_EXPR. If this is the "and", we can do something
5142 if the comparisons are EQ_EXPR. I.e.,
5143 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5145 WANTED_CODE is this operation code. For single bit fields, we can
5146 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5147 comparison for one-bit fields. */
5149 enum tree_code wanted_code;
5150 enum tree_code lcode, rcode;
5151 tree ll_arg, lr_arg, rl_arg, rr_arg;
5152 tree ll_inner, lr_inner, rl_inner, rr_inner;
5153 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5154 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5155 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5156 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5157 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5158 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5159 machine_mode lnmode, rnmode;
5160 tree ll_mask, lr_mask, rl_mask, rr_mask;
5161 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5162 tree l_const, r_const;
5163 tree lntype, rntype, result;
5164 HOST_WIDE_INT first_bit, end_bit;
5165 int volatilep;
5167 /* Start by getting the comparison codes. Fail if anything is volatile.
5168 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5169 it were surrounded with a NE_EXPR. */
5171 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5172 return 0;
5174 lcode = TREE_CODE (lhs);
5175 rcode = TREE_CODE (rhs);
5177 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5179 lhs = build2 (NE_EXPR, truth_type, lhs,
5180 build_int_cst (TREE_TYPE (lhs), 0));
5181 lcode = NE_EXPR;
5184 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5186 rhs = build2 (NE_EXPR, truth_type, rhs,
5187 build_int_cst (TREE_TYPE (rhs), 0));
5188 rcode = NE_EXPR;
5191 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5192 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5193 return 0;
5195 ll_arg = TREE_OPERAND (lhs, 0);
5196 lr_arg = TREE_OPERAND (lhs, 1);
5197 rl_arg = TREE_OPERAND (rhs, 0);
5198 rr_arg = TREE_OPERAND (rhs, 1);
5200 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5201 if (simple_operand_p (ll_arg)
5202 && simple_operand_p (lr_arg))
5204 if (operand_equal_p (ll_arg, rl_arg, 0)
5205 && operand_equal_p (lr_arg, rr_arg, 0))
5207 result = combine_comparisons (loc, code, lcode, rcode,
5208 truth_type, ll_arg, lr_arg);
5209 if (result)
5210 return result;
5212 else if (operand_equal_p (ll_arg, rr_arg, 0)
5213 && operand_equal_p (lr_arg, rl_arg, 0))
5215 result = combine_comparisons (loc, code, lcode,
5216 swap_tree_comparison (rcode),
5217 truth_type, ll_arg, lr_arg);
5218 if (result)
5219 return result;
5223 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5224 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5226 /* If the RHS can be evaluated unconditionally and its operands are
5227 simple, it wins to evaluate the RHS unconditionally on machines
5228 with expensive branches. In this case, this isn't a comparison
5229 that can be merged. */
5231 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5232 false) >= 2
5233 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5234 && simple_operand_p (rl_arg)
5235 && simple_operand_p (rr_arg))
5237 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5238 if (code == TRUTH_OR_EXPR
5239 && lcode == NE_EXPR && integer_zerop (lr_arg)
5240 && rcode == NE_EXPR && integer_zerop (rr_arg)
5241 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5242 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5243 return build2_loc (loc, NE_EXPR, truth_type,
5244 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5245 ll_arg, rl_arg),
5246 build_int_cst (TREE_TYPE (ll_arg), 0));
5248 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5249 if (code == TRUTH_AND_EXPR
5250 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5251 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5252 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5253 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5254 return build2_loc (loc, EQ_EXPR, truth_type,
5255 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5256 ll_arg, rl_arg),
5257 build_int_cst (TREE_TYPE (ll_arg), 0));
5260 /* See if the comparisons can be merged. Then get all the parameters for
5261 each side. */
5263 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5264 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5265 return 0;
5267 volatilep = 0;
5268 ll_inner = decode_field_reference (loc, ll_arg,
5269 &ll_bitsize, &ll_bitpos, &ll_mode,
5270 &ll_unsignedp, &volatilep, &ll_mask,
5271 &ll_and_mask);
5272 lr_inner = decode_field_reference (loc, lr_arg,
5273 &lr_bitsize, &lr_bitpos, &lr_mode,
5274 &lr_unsignedp, &volatilep, &lr_mask,
5275 &lr_and_mask);
5276 rl_inner = decode_field_reference (loc, rl_arg,
5277 &rl_bitsize, &rl_bitpos, &rl_mode,
5278 &rl_unsignedp, &volatilep, &rl_mask,
5279 &rl_and_mask);
5280 rr_inner = decode_field_reference (loc, rr_arg,
5281 &rr_bitsize, &rr_bitpos, &rr_mode,
5282 &rr_unsignedp, &volatilep, &rr_mask,
5283 &rr_and_mask);
5285 /* It must be true that the inner operation on the lhs of each
5286 comparison must be the same if we are to be able to do anything.
5287 Then see if we have constants. If not, the same must be true for
5288 the rhs's. */
5289 if (volatilep || ll_inner == 0 || rl_inner == 0
5290 || ! operand_equal_p (ll_inner, rl_inner, 0))
5291 return 0;
5293 if (TREE_CODE (lr_arg) == INTEGER_CST
5294 && TREE_CODE (rr_arg) == INTEGER_CST)
5295 l_const = lr_arg, r_const = rr_arg;
5296 else if (lr_inner == 0 || rr_inner == 0
5297 || ! operand_equal_p (lr_inner, rr_inner, 0))
5298 return 0;
5299 else
5300 l_const = r_const = 0;
5302 /* If either comparison code is not correct for our logical operation,
5303 fail. However, we can convert a one-bit comparison against zero into
5304 the opposite comparison against that bit being set in the field. */
5306 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5307 if (lcode != wanted_code)
5309 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5311 /* Make the left operand unsigned, since we are only interested
5312 in the value of one bit. Otherwise we are doing the wrong
5313 thing below. */
5314 ll_unsignedp = 1;
5315 l_const = ll_mask;
5317 else
5318 return 0;
5321 /* This is analogous to the code for l_const above. */
5322 if (rcode != wanted_code)
5324 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5326 rl_unsignedp = 1;
5327 r_const = rl_mask;
5329 else
5330 return 0;
5333 /* See if we can find a mode that contains both fields being compared on
5334 the left. If we can't, fail. Otherwise, update all constants and masks
5335 to be relative to a field of that size. */
5336 first_bit = MIN (ll_bitpos, rl_bitpos);
5337 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5338 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5339 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5340 volatilep);
5341 if (lnmode == VOIDmode)
5342 return 0;
5344 lnbitsize = GET_MODE_BITSIZE (lnmode);
5345 lnbitpos = first_bit & ~ (lnbitsize - 1);
5346 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5347 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5349 if (BYTES_BIG_ENDIAN)
5351 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5352 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5355 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5356 size_int (xll_bitpos));
5357 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5358 size_int (xrl_bitpos));
5360 if (l_const)
5362 l_const = fold_convert_loc (loc, lntype, l_const);
5363 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5364 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5365 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5366 fold_build1_loc (loc, BIT_NOT_EXPR,
5367 lntype, ll_mask))))
5369 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5371 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5374 if (r_const)
5376 r_const = fold_convert_loc (loc, lntype, r_const);
5377 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5378 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5379 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5380 fold_build1_loc (loc, BIT_NOT_EXPR,
5381 lntype, rl_mask))))
5383 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5385 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5389 /* If the right sides are not constant, do the same for it. Also,
5390 disallow this optimization if a size or signedness mismatch occurs
5391 between the left and right sides. */
5392 if (l_const == 0)
5394 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5395 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5396 /* Make sure the two fields on the right
5397 correspond to the left without being swapped. */
5398 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5399 return 0;
5401 first_bit = MIN (lr_bitpos, rr_bitpos);
5402 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5403 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5404 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5405 volatilep);
5406 if (rnmode == VOIDmode)
5407 return 0;
5409 rnbitsize = GET_MODE_BITSIZE (rnmode);
5410 rnbitpos = first_bit & ~ (rnbitsize - 1);
5411 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5412 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5414 if (BYTES_BIG_ENDIAN)
5416 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5417 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5420 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5421 rntype, lr_mask),
5422 size_int (xlr_bitpos));
5423 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5424 rntype, rr_mask),
5425 size_int (xrr_bitpos));
5427 /* Make a mask that corresponds to both fields being compared.
5428 Do this for both items being compared. If the operands are the
5429 same size and the bits being compared are in the same position
5430 then we can do this by masking both and comparing the masked
5431 results. */
5432 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5433 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5434 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5436 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5437 ll_unsignedp || rl_unsignedp);
5438 if (! all_ones_mask_p (ll_mask, lnbitsize))
5439 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5441 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5442 lr_unsignedp || rr_unsignedp);
5443 if (! all_ones_mask_p (lr_mask, rnbitsize))
5444 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5446 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5449 /* There is still another way we can do something: If both pairs of
5450 fields being compared are adjacent, we may be able to make a wider
5451 field containing them both.
5453 Note that we still must mask the lhs/rhs expressions. Furthermore,
5454 the mask must be shifted to account for the shift done by
5455 make_bit_field_ref. */
5456 if ((ll_bitsize + ll_bitpos == rl_bitpos
5457 && lr_bitsize + lr_bitpos == rr_bitpos)
5458 || (ll_bitpos == rl_bitpos + rl_bitsize
5459 && lr_bitpos == rr_bitpos + rr_bitsize))
5461 tree type;
5463 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5464 ll_bitsize + rl_bitsize,
5465 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5466 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5467 lr_bitsize + rr_bitsize,
5468 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5470 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5471 size_int (MIN (xll_bitpos, xrl_bitpos)));
5472 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5473 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5475 /* Convert to the smaller type before masking out unwanted bits. */
5476 type = lntype;
5477 if (lntype != rntype)
5479 if (lnbitsize > rnbitsize)
5481 lhs = fold_convert_loc (loc, rntype, lhs);
5482 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5483 type = rntype;
5485 else if (lnbitsize < rnbitsize)
5487 rhs = fold_convert_loc (loc, lntype, rhs);
5488 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5489 type = lntype;
5493 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5494 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5496 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5497 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5499 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5502 return 0;
5505 /* Handle the case of comparisons with constants. If there is something in
5506 common between the masks, those bits of the constants must be the same.
5507 If not, the condition is always false. Test for this to avoid generating
5508 incorrect code below. */
5509 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5510 if (! integer_zerop (result)
5511 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5512 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5514 if (wanted_code == NE_EXPR)
5516 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5517 return constant_boolean_node (true, truth_type);
5519 else
5521 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5522 return constant_boolean_node (false, truth_type);
5526 /* Construct the expression we will return. First get the component
5527 reference we will make. Unless the mask is all ones the width of
5528 that field, perform the mask operation. Then compare with the
5529 merged constant. */
5530 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5531 ll_unsignedp || rl_unsignedp);
5533 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5534 if (! all_ones_mask_p (ll_mask, lnbitsize))
5535 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5537 return build2_loc (loc, wanted_code, truth_type, result,
5538 const_binop (BIT_IOR_EXPR, l_const, r_const));
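/* As a rough sketch of the merged-constant case above: for adjacent
   bit-fields a and b of one word, "x.a == 1 && x.b == 2" becomes a
   single load of the containing word, one BIT_AND_EXPR with the IOR
   of both masks, and one comparison against the IOR of both shifted
   constants (the exact bit positions depend on target endianness).  */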
5541 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5542 constant. */
5544 static tree
5545 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5546 tree op0, tree op1)
5548 tree arg0 = op0;
5549 enum tree_code op_code;
5550 tree comp_const;
5551 tree minmax_const;
5552 int consts_equal, consts_lt;
5553 tree inner;
5555 STRIP_SIGN_NOPS (arg0);
5557 op_code = TREE_CODE (arg0);
5558 minmax_const = TREE_OPERAND (arg0, 1);
5559 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5560 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5561 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5562 inner = TREE_OPERAND (arg0, 0);
5564 /* If something does not permit us to optimize, return NULL_TREE. */
5565 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5566 || TREE_CODE (comp_const) != INTEGER_CST
5567 || TREE_OVERFLOW (comp_const)
5568 || TREE_CODE (minmax_const) != INTEGER_CST
5569 || TREE_OVERFLOW (minmax_const))
5570 return NULL_TREE;
5572 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5573 and GT_EXPR, doing the rest with recursive calls using logical
5574 simplifications. */
5575 switch (code)
5577 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5579 tree tem
5580 = optimize_minmax_comparison (loc,
5581 invert_tree_comparison (code, false),
5582 type, op0, op1);
5583 if (tem)
5584 return invert_truthvalue_loc (loc, tem);
5585 return NULL_TREE;
5588 case GE_EXPR:
5589 return
5590 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5591 optimize_minmax_comparison
5592 (loc, EQ_EXPR, type, arg0, comp_const),
5593 optimize_minmax_comparison
5594 (loc, GT_EXPR, type, arg0, comp_const));
5596 case EQ_EXPR:
5597 if (op_code == MAX_EXPR && consts_equal)
5598 /* MAX (X, 0) == 0 -> X <= 0 */
5599 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5601 else if (op_code == MAX_EXPR && consts_lt)
5602 /* MAX (X, 0) == 5 -> X == 5 */
5603 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5605 else if (op_code == MAX_EXPR)
5606 /* MAX (X, 0) == -1 -> false */
5607 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5609 else if (consts_equal)
5610 /* MIN (X, 0) == 0 -> X >= 0 */
5611 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5613 else if (consts_lt)
5614 /* MIN (X, 0) == 5 -> false */
5615 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5617 else
5618 /* MIN (X, 0) == -1 -> X == -1 */
5619 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5621 case GT_EXPR:
5622 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5623 /* MAX (X, 0) > 0 -> X > 0
5624 MAX (X, 0) > 5 -> X > 5 */
5625 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5627 else if (op_code == MAX_EXPR)
5628 /* MAX (X, 0) > -1 -> true */
5629 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5631 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5632 /* MIN (X, 0) > 0 -> false
5633 MIN (X, 0) > 5 -> false */
5634 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5636 else
5637 /* MIN (X, 0) > -1 -> X > -1 */
5638 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5640 default:
5641 return NULL_TREE;
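/* Concrete instances of the folds above, for a signed int x:

     MAX (x, 3) == 3  ->  x <= 3      MIN (x, 3) > 3   ->  false
     MAX (x, 3) == 7  ->  x == 7      MIN (x, 3) > -1  ->  x > -1
     MAX (x, 3) == 1  ->  false

   i.e. the ordering of the MIN/MAX constant against the comparison
   constant decides whether the inner operand still matters.  */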
5645 /* T is an integer expression that is being multiplied, divided, or taken a
5646 modulus (CODE says which and what kind of divide or modulus) by a
5647 constant C. See if we can eliminate that operation by folding it with
5648 other operations already in T. WIDE_TYPE, if non-null, is a type that
5649 should be used for the computation if wider than our type.
5651 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5652 (X * 2) + (Y * 4). We must, however, be assured that either the original
5653 expression would not overflow or that overflow is undefined for the type
5654 in the language in question.
5656 If we return a non-null expression, it is an equivalent form of the
5657 original computation, but need not be in the original type.
5659 We set *STRICT_OVERFLOW_P to true if the return value depends on
5660 signed overflow being undefined. Otherwise we do not change
5661 *STRICT_OVERFLOW_P. */
5663 static tree
5664 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5665 bool *strict_overflow_p)
5667 /* To avoid exponential search depth, refuse to allow recursion past
5668 three levels. Beyond that (1) it's highly unlikely that we'll find
5669 something interesting and (2) we've probably processed it before
5670 when we built the inner expression. */
5672 static int depth;
5673 tree ret;
5675 if (depth > 3)
5676 return NULL;
5678 depth++;
5679 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5680 depth--;
5682 return ret;
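/* E.g. folding (x * 4) / 2 calls extract_muldiv on "x * 4" with C == 2
   and CODE == TRUNC_DIV_EXPR; for a signed x (undefined overflow) the
   MULT_EXPR cancellation below rewrites it as x * 2 and records the
   dependence on undefined overflow in *STRICT_OVERFLOW_P.  */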
5685 static tree
5686 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5687 bool *strict_overflow_p)
5689 tree type = TREE_TYPE (t);
5690 enum tree_code tcode = TREE_CODE (t);
5691 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5692 > GET_MODE_SIZE (TYPE_MODE (type)))
5693 ? wide_type : type);
5694 tree t1, t2;
5695 int same_p = tcode == code;
5696 tree op0 = NULL_TREE, op1 = NULL_TREE;
5697 bool sub_strict_overflow_p;
5699 /* Don't deal with constants of zero here; they confuse the code below. */
5700 if (integer_zerop (c))
5701 return NULL_TREE;
5703 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5704 op0 = TREE_OPERAND (t, 0);
5706 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5707 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5709 /* Note that we need not handle conditional operations here since fold
5710 already handles those cases. So just do arithmetic here. */
5711 switch (tcode)
5713 case INTEGER_CST:
5714 /* For a constant, we can always simplify if we are a multiply
5715 or (for divide and modulus) if it is a multiple of our constant. */
5716 if (code == MULT_EXPR
5717 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5718 return const_binop (code, fold_convert (ctype, t),
5719 fold_convert (ctype, c));
5720 break;
5722 CASE_CONVERT: case NON_LVALUE_EXPR:
5723 /* If op0 is an expression ... */
5724 if ((COMPARISON_CLASS_P (op0)
5725 || UNARY_CLASS_P (op0)
5726 || BINARY_CLASS_P (op0)
5727 || VL_EXP_CLASS_P (op0)
5728 || EXPRESSION_CLASS_P (op0))
5729 /* ... and has wrapping overflow, and its type is smaller
5730 than ctype, then we cannot pass through as widening. */
5731 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5732 && (TYPE_PRECISION (ctype)
5733 > TYPE_PRECISION (TREE_TYPE (op0))))
5734 /* ... or this is a truncation (t is narrower than op0),
5735 then we cannot pass through this narrowing. */
5736 || (TYPE_PRECISION (type)
5737 < TYPE_PRECISION (TREE_TYPE (op0)))
5738 /* ... or signedness changes for division or modulus,
5739 then we cannot pass through this conversion. */
5740 || (code != MULT_EXPR
5741 && (TYPE_UNSIGNED (ctype)
5742 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5743 /* ... or has undefined overflow while the converted to
5744 type has not, we cannot do the operation in the inner type
5745 as that would introduce undefined overflow. */
5746 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5747 && !TYPE_OVERFLOW_UNDEFINED (type))))
5748 break;
5750 /* Pass the constant down and see if we can make a simplification. If
5751 we can, replace this expression with the inner simplification for
5752 possible later conversion to our or some other type. */
5753 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5754 && TREE_CODE (t2) == INTEGER_CST
5755 && !TREE_OVERFLOW (t2)
5756 && (0 != (t1 = extract_muldiv (op0, t2, code,
5757 code == MULT_EXPR
5758 ? ctype : NULL_TREE,
5759 strict_overflow_p))))
5760 return t1;
5761 break;
5763 case ABS_EXPR:
5764 /* If widening the type changes it from signed to unsigned, then we
5765 must avoid building ABS_EXPR itself as unsigned. */
5766 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5768 tree cstype = (*signed_type_for) (ctype);
5769 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5770 != 0)
5772 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5773 return fold_convert (ctype, t1);
5775 break;
5777 /* If the constant is negative, we cannot simplify this. */
5778 if (tree_int_cst_sgn (c) == -1)
5779 break;
5780 /* FALLTHROUGH */
5781 case NEGATE_EXPR:
5782 /* For division and modulus, type can't be unsigned, as e.g.
5783 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5784 For signed types, even with wrapping overflow, this is fine. */
5785 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5786 break;
5787 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5788 != 0)
5789 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5790 break;
5792 case MIN_EXPR: case MAX_EXPR:
5793 /* If widening the type changes the signedness, then we can't perform
5794 this optimization as that changes the result. */
5795 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5796 break;
5798 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5799 sub_strict_overflow_p = false;
5800 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5801 &sub_strict_overflow_p)) != 0
5802 && (t2 = extract_muldiv (op1, c, code, wide_type,
5803 &sub_strict_overflow_p)) != 0)
5805 if (tree_int_cst_sgn (c) < 0)
5806 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5807 if (sub_strict_overflow_p)
5808 *strict_overflow_p = true;
5809 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5810 fold_convert (ctype, t2));
5812 break;
5814 case LSHIFT_EXPR: case RSHIFT_EXPR:
5815 /* If the second operand is constant, this is a multiplication
5816 or floor division by a power of two, so we can treat it that
5817 way unless the multiplier or divisor overflows. Signed
5818 left-shift overflow is implementation-defined rather than
5819 undefined in C90, so do not convert signed left shift into
5820 multiplication. */
5821 if (TREE_CODE (op1) == INTEGER_CST
5822 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5823 /* const_binop may not detect overflow correctly,
5824 so check for it explicitly here. */
5825 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5826 && 0 != (t1 = fold_convert (ctype,
5827 const_binop (LSHIFT_EXPR,
5828 size_one_node,
5829 op1)))
5830 && !TREE_OVERFLOW (t1))
5831 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5832 ? MULT_EXPR : FLOOR_DIV_EXPR,
5833 ctype,
5834 fold_convert (ctype, op0),
5835 t1),
5836 c, code, wide_type, strict_overflow_p);
5837 break;
5839 case PLUS_EXPR: case MINUS_EXPR:
5840 /* See if we can eliminate the operation on both sides. If we can, we
5841 can return a new PLUS or MINUS. If we can't, the only remaining
5842 cases where we can do anything are if the second operand is a
5843 constant. */
5844 sub_strict_overflow_p = false;
5845 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5846 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5847 if (t1 != 0 && t2 != 0
5848 && (code == MULT_EXPR
5849 /* If not multiplication, we can only do this if both operands
5850 are divisible by c. */
5851 || (multiple_of_p (ctype, op0, c)
5852 && multiple_of_p (ctype, op1, c))))
5854 if (sub_strict_overflow_p)
5855 *strict_overflow_p = true;
5856 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5857 fold_convert (ctype, t2));
5860 /* If this was a subtraction, negate OP1 and set it to be an addition.
5861 This simplifies the logic below. */
5862 if (tcode == MINUS_EXPR)
5864 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5865 /* If OP1 was not easily negatable, the constant may be OP0. */
5866 if (TREE_CODE (op0) == INTEGER_CST)
5868 tree tem = op0;
5869 op0 = op1;
5870 op1 = tem;
5871 tem = t1;
5872 t1 = t2;
5873 t2 = tem;
5877 if (TREE_CODE (op1) != INTEGER_CST)
5878 break;
5880 /* If either OP1 or C are negative, this optimization is not safe for
5881 some of the division and remainder types while for others we need
5882 to change the code. */
5883 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5885 if (code == CEIL_DIV_EXPR)
5886 code = FLOOR_DIV_EXPR;
5887 else if (code == FLOOR_DIV_EXPR)
5888 code = CEIL_DIV_EXPR;
5889 else if (code != MULT_EXPR
5890 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5891 break;
5894 /* If it's a multiply or a division/modulus operation of a multiple
5895 of our constant, do the operation and verify it doesn't overflow. */
5896 if (code == MULT_EXPR
5897 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5899 op1 = const_binop (code, fold_convert (ctype, op1),
5900 fold_convert (ctype, c));
5901 /* We allow the constant to overflow with wrapping semantics. */
5902 if (op1 == 0
5903 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5904 break;
5906 else
5907 break;
5909 /* If we have an unsigned type, we cannot widen the operation since it
5910 will change the result if the original computation overflowed. */
5911 if (TYPE_UNSIGNED (ctype) && ctype != type)
5912 break;
5914 /* If we were able to eliminate our operation from the first side,
5915 apply our operation to the second side and reform the PLUS. */
5916 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5917 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5919 /* The last case is if we are a multiply. In that case, we can
5920 apply the distributive law to commute the multiply and addition
5921 if the multiplication of the constants doesn't overflow
5922 and overflow is defined. With undefined overflow
5923 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5924 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5925 return fold_build2 (tcode, ctype,
5926 fold_build2 (code, ctype,
5927 fold_convert (ctype, op0),
5928 fold_convert (ctype, c)),
5929 op1);
5931 break;
5933 case MULT_EXPR:
5934 /* We have a special case here if we are doing something like
5935 (C * 8) % 4 since we know that's zero. */
5936 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5937 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5938 /* If the multiplication can overflow we cannot optimize this. */
5939 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5940 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5941 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5943 *strict_overflow_p = true;
5944 return omit_one_operand (type, integer_zero_node, op0);
5947 /* ... fall through ... */
5949 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5950 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5951 /* If we can extract our operation from the LHS, do so and return a
5952 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5953 do something only if the second operand is a constant. */
5954 if (same_p
5955 && (t1 = extract_muldiv (op0, c, code, wide_type,
5956 strict_overflow_p)) != 0)
5957 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5958 fold_convert (ctype, op1));
5959 else if (tcode == MULT_EXPR && code == MULT_EXPR
5960 && (t1 = extract_muldiv (op1, c, code, wide_type,
5961 strict_overflow_p)) != 0)
5962 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5963 fold_convert (ctype, t1));
5964 else if (TREE_CODE (op1) != INTEGER_CST)
5965 return 0;
5967 /* If these are the same operation types, we can associate them
5968 assuming no overflow. */
5969 if (tcode == code)
5971 bool overflow_p = false;
5972 bool overflow_mul_p;
5973 signop sign = TYPE_SIGN (ctype);
5974 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5975 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5976 if (overflow_mul_p
5977 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5978 overflow_p = true;
5979 if (!overflow_p)
5980 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5981 wide_int_to_tree (ctype, mul));
5984 /* If these operations "cancel" each other, we have the main
5985 optimizations of this pass, which occur when either constant is a
5986 multiple of the other, in which case we replace this with an
5987 operation of either CODE or TCODE.
5989 If we have an unsigned type, we cannot do this since it will change
5990 the result if the original computation overflowed. */
5991 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5992 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5993 || (tcode == MULT_EXPR
5994 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5995 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5996 && code != MULT_EXPR)))
5998 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6000 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6001 *strict_overflow_p = true;
6002 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6003 fold_convert (ctype,
6004 const_binop (TRUNC_DIV_EXPR,
6005 op1, c)));
6007 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6009 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6010 *strict_overflow_p = true;
6011 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6012 fold_convert (ctype,
6013 const_binop (TRUNC_DIV_EXPR,
6014 c, op1)));
6017 break;
6019 default:
6020 break;
6023 return 0;
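/* Sketches of the cancellation above, assuming signed types with
   undefined overflow: (x * 12) / 4 becomes x * 3 (OP1 == 12 is a
   multiple of C == 4), and (x /[ex] 3) * 6 becomes x * 2 (C == 6 is
   a multiple of OP1 == 3).  */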
6026 /* Return a node which has the indicated constant VALUE (either 0 or
6027 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6028 and is of the indicated TYPE. */
6030 tree
6031 constant_boolean_node (bool value, tree type)
6033 if (type == integer_type_node)
6034 return value ? integer_one_node : integer_zero_node;
6035 else if (type == boolean_type_node)
6036 return value ? boolean_true_node : boolean_false_node;
6037 else if (TREE_CODE (type) == VECTOR_TYPE)
6038 return build_vector_from_val (type,
6039 build_int_cst (TREE_TYPE (type),
6040 value ? -1 : 0));
6041 else
6042 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6046 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6047 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6048 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6049 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6050 COND is the first argument to CODE; otherwise (as in the example
6051 given here), it is the second argument. TYPE is the type of the
6052 original expression. Return NULL_TREE if no simplification is
6053 possible. */
6055 static tree
6056 fold_binary_op_with_conditional_arg (location_t loc,
6057 enum tree_code code,
6058 tree type, tree op0, tree op1,
6059 tree cond, tree arg, int cond_first_p)
6061 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6062 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6063 tree test, true_value, false_value;
6064 tree lhs = NULL_TREE;
6065 tree rhs = NULL_TREE;
6066 enum tree_code cond_code = COND_EXPR;
6068 if (TREE_CODE (cond) == COND_EXPR
6069 || TREE_CODE (cond) == VEC_COND_EXPR)
6071 test = TREE_OPERAND (cond, 0);
6072 true_value = TREE_OPERAND (cond, 1);
6073 false_value = TREE_OPERAND (cond, 2);
6074 /* If this arm has void type (e.g. it is an expression that throws),
6075 then it does not make sense to try to perform a logical or
6076 arithmetic operation involving it. */
6077 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6078 lhs = true_value;
6079 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6080 rhs = false_value;
6082 else
6084 tree testtype = TREE_TYPE (cond);
6085 test = cond;
6086 true_value = constant_boolean_node (true, testtype);
6087 false_value = constant_boolean_node (false, testtype);
6090 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6091 cond_code = VEC_COND_EXPR;
6093 /* This transformation is only worthwhile if we don't have to wrap ARG
6094 in a SAVE_EXPR and the operation can be simplified without recursing
6095 on at least one of the branches once it's pushed inside the COND_EXPR. */
6096 if (!TREE_CONSTANT (arg)
6097 && (TREE_SIDE_EFFECTS (arg)
6098 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6099 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6100 return NULL_TREE;
6102 arg = fold_convert_loc (loc, arg_type, arg);
6103 if (lhs == 0)
6105 true_value = fold_convert_loc (loc, cond_type, true_value);
6106 if (cond_first_p)
6107 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6108 else
6109 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6111 if (rhs == 0)
6113 false_value = fold_convert_loc (loc, cond_type, false_value);
6114 if (cond_first_p)
6115 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6116 else
6117 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6120 /* Check that we have simplified at least one of the branches. */
6121 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6122 return NULL_TREE;
6124 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
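/* E.g. with a constant ARG, "5 + (b ? x : 3)" becomes "b ? 5 + x : 8":
   ARG is pushed into both arms and the constant arm folds away, which
   satisfies the "simplified at least one branch" test above.  */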
6128 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6130 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6131 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6132 ADDEND is the same as X.
6134 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6135 and finite. The problematic cases are when X is zero, and its mode
6136 has signed zeros. In the case of rounding towards -infinity,
6137 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6138 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6140 bool
6141 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6143 if (!real_zerop (addend))
6144 return false;
6146 /* Don't allow the fold with -fsignaling-nans. */
6147 if (HONOR_SNANS (TYPE_MODE (type)))
6148 return false;
6150 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6151 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6152 return true;
6154 /* In a vector or complex, we would need to check the sign of all zeros. */
6155 if (TREE_CODE (addend) != REAL_CST)
6156 return false;
6158 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6159 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6160 negate = !negate;
6162 /* The mode has signed zeros, and we have to honor their sign.
6163 In this situation, there is only one case we can return true for.
6164 X - 0 is the same as X unless rounding towards -infinity is
6165 supported. */
6166 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
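/* Concretely: "x + 0.0" and "x - 0.0" both fold to "x" when signed
   zeros need not be honored; when they must be, only "x - 0.0" (or
   the equivalent "x + -0.0") folds, and then only if sign-dependent
   rounding is not in effect.  */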
6169 /* Subroutine of fold() that checks comparisons of built-in math
6170 functions against real constants.
6172 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6173 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6174 is the type of the result and ARG0 and ARG1 are the operands of the
6175 comparison. ARG1 must be a TREE_REAL_CST.
6177 The function returns the constant folded tree if a simplification
6178 can be made, and NULL_TREE otherwise. */
6180 static tree
6181 fold_mathfn_compare (location_t loc,
6182 enum built_in_function fcode, enum tree_code code,
6183 tree type, tree arg0, tree arg1)
6185 REAL_VALUE_TYPE c;
6187 if (BUILTIN_SQRT_P (fcode))
6189 tree arg = CALL_EXPR_ARG (arg0, 0);
6190 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6192 c = TREE_REAL_CST (arg1);
6193 if (REAL_VALUE_NEGATIVE (c))
6195 /* sqrt(x) < y is always false, if y is negative. */
6196 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6197 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6199 /* sqrt(x) > y is always true, if y is negative and we
6200 don't care about NaNs, i.e. negative values of x. */
6201 if (code == NE_EXPR || !HONOR_NANS (mode))
6202 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6204 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6205 return fold_build2_loc (loc, GE_EXPR, type, arg,
6206 build_real (TREE_TYPE (arg), dconst0));
6208 else if (code == GT_EXPR || code == GE_EXPR)
6210 REAL_VALUE_TYPE c2;
6212 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6213 real_convert (&c2, mode, &c2);
6215 if (REAL_VALUE_ISINF (c2))
6217 /* sqrt(x) > y is x == +Inf, when y is very large. */
6218 if (HONOR_INFINITIES (mode))
6219 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6220 build_real (TREE_TYPE (arg), c2));
6222 /* sqrt(x) > y is always false, when y is very large
6223 and we don't care about infinities. */
6224 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6227 /* sqrt(x) > c is the same as x > c*c. */
6228 return fold_build2_loc (loc, code, type, arg,
6229 build_real (TREE_TYPE (arg), c2));
6231 else if (code == LT_EXPR || code == LE_EXPR)
6233 REAL_VALUE_TYPE c2;
6235 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6236 real_convert (&c2, mode, &c2);
6238 if (REAL_VALUE_ISINF (c2))
6240 /* sqrt(x) < y is always true, when y is a very large
6241 value and we don't care about NaNs or Infinities. */
6242 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6243 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6245 /* sqrt(x) < y is x != +Inf when y is very large and we
6246 don't care about NaNs. */
6247 if (! HONOR_NANS (mode))
6248 return fold_build2_loc (loc, NE_EXPR, type, arg,
6249 build_real (TREE_TYPE (arg), c2));
6251 /* sqrt(x) < y is x >= 0 when y is very large and we
6252 don't care about Infinities. */
6253 if (! HONOR_INFINITIES (mode))
6254 return fold_build2_loc (loc, GE_EXPR, type, arg,
6255 build_real (TREE_TYPE (arg), dconst0));
6257 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6258 arg = save_expr (arg);
6259 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6260 fold_build2_loc (loc, GE_EXPR, type, arg,
6261 build_real (TREE_TYPE (arg),
6262 dconst0)),
6263 fold_build2_loc (loc, NE_EXPR, type, arg,
6264 build_real (TREE_TYPE (arg),
6265 c2)));
6268 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6269 if (! HONOR_NANS (mode))
6270 return fold_build2_loc (loc, code, type, arg,
6271 build_real (TREE_TYPE (arg), c2));
6273 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6274 arg = save_expr (arg);
6275 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6276 fold_build2_loc (loc, GE_EXPR, type, arg,
6277 build_real (TREE_TYPE (arg),
6278 dconst0)),
6279 fold_build2_loc (loc, code, type, arg,
6280 build_real (TREE_TYPE (arg),
6281 c2)));
6285 return NULL_TREE;
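/* E.g. "sqrt (x) > 2.0" folds to "x > 4.0" unconditionally, while
   "sqrt (x) < 2.0" folds to "x < 4.0" only when NaNs need not be
   honored; otherwise it becomes "x >= 0.0 && x < 4.0" via the
   save_expr path above.  */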
6288 /* Subroutine of fold() that optimizes comparisons against Infinities,
6289 either +Inf or -Inf.
6291 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6292 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6293 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6295 The function returns the constant folded tree if a simplification
6296 can be made, and NULL_TREE otherwise. */
6298 static tree
6299 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6300 tree arg0, tree arg1)
6302 machine_mode mode;
6303 REAL_VALUE_TYPE max;
6304 tree temp;
6305 bool neg;
6307 mode = TYPE_MODE (TREE_TYPE (arg0));
6309 /* For negative infinity swap the sense of the comparison. */
6310 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6311 if (neg)
6312 code = swap_tree_comparison (code);
6314 switch (code)
6316 case GT_EXPR:
6317 /* x > +Inf is always false, if we ignore sNaNs. */
6318 if (HONOR_SNANS (mode))
6319 return NULL_TREE;
6320 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6322 case LE_EXPR:
6323 /* x <= +Inf is always true, if we don't care about NaNs. */
6324 if (! HONOR_NANS (mode))
6325 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6327 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6328 arg0 = save_expr (arg0);
6329 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6331 case EQ_EXPR:
6332 case GE_EXPR:
6333 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6334 real_maxval (&max, neg, mode);
6335 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6336 arg0, build_real (TREE_TYPE (arg0), max));
6338 case LT_EXPR:
6339 /* x < +Inf is always equal to x <= DBL_MAX. */
6340 real_maxval (&max, neg, mode);
6341 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6344 case NE_EXPR:
6345 /* x != +Inf is always equal to !(x > DBL_MAX). */
6346 real_maxval (&max, neg, mode);
6347 if (! HONOR_NANS (mode))
6348 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6349 arg0, build_real (TREE_TYPE (arg0), max));
6351 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6352 arg0, build_real (TREE_TYPE (arg0), max));
6353 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6355 default:
6356 break;
6359 return NULL_TREE;
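/* E.g. for double, "x < HUGE_VAL" becomes "x <= DBL_MAX" and
   "x >= HUGE_VAL" becomes "x > DBL_MAX"; comparisons against
   -HUGE_VAL take the same cases with the comparison sense swapped.  */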
6362 /* Subroutine of fold() that optimizes comparisons of a division by
6363 a nonzero integer constant against an integer constant, i.e.
6364 X/C1 op C2.
6366 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6367 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6368 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6370 The function returns the constant folded tree if a simplification
6371 can be made, and NULL_TREE otherwise. */
6373 static tree
6374 fold_div_compare (location_t loc,
6375 enum tree_code code, tree type, tree arg0, tree arg1)
6377 tree prod, tmp, hi, lo;
6378 tree arg00 = TREE_OPERAND (arg0, 0);
6379 tree arg01 = TREE_OPERAND (arg0, 1);
6380 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6381 bool neg_overflow = false;
6382 bool overflow;
6384 /* We have to do this the hard way to detect unsigned overflow.
6385 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6386 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6387 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6388 neg_overflow = false;
6390 if (sign == UNSIGNED)
6392 tmp = int_const_binop (MINUS_EXPR, arg01,
6393 build_int_cst (TREE_TYPE (arg01), 1));
6394 lo = prod;
6396 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6397 val = wi::add (prod, tmp, sign, &overflow);
6398 hi = force_fit_type (TREE_TYPE (arg00), val,
6399 -1, overflow | TREE_OVERFLOW (prod));
6401 else if (tree_int_cst_sgn (arg01) >= 0)
6403 tmp = int_const_binop (MINUS_EXPR, arg01,
6404 build_int_cst (TREE_TYPE (arg01), 1));
6405 switch (tree_int_cst_sgn (arg1))
6407 case -1:
6408 neg_overflow = true;
6409 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6410 hi = prod;
6411 break;
6413 case 0:
6414 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6415 hi = tmp;
6416 break;
6418 case 1:
6419 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6420 lo = prod;
6421 break;
6423 default:
6424 gcc_unreachable ();
6427 else
6429 /* A negative divisor reverses the relational operators. */
6430 code = swap_tree_comparison (code);
6432 tmp = int_const_binop (PLUS_EXPR, arg01,
6433 build_int_cst (TREE_TYPE (arg01), 1));
6434 switch (tree_int_cst_sgn (arg1))
6436 case -1:
6437 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6438 lo = prod;
6439 break;
6441 case 0:
6442 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6443 lo = tmp;
6444 break;
6446 case 1:
6447 neg_overflow = true;
6448 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6449 hi = prod;
6450 break;
6452 default:
6453 gcc_unreachable ();
6457 switch (code)
6459 case EQ_EXPR:
6460 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6461 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6462 if (TREE_OVERFLOW (hi))
6463 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6464 if (TREE_OVERFLOW (lo))
6465 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6466 return build_range_check (loc, type, arg00, 1, lo, hi);
6468 case NE_EXPR:
6469 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6470 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6471 if (TREE_OVERFLOW (hi))
6472 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6473 if (TREE_OVERFLOW (lo))
6474 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6475 return build_range_check (loc, type, arg00, 0, lo, hi);
6477 case LT_EXPR:
6478 if (TREE_OVERFLOW (lo))
6480 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6481 return omit_one_operand_loc (loc, type, tmp, arg00);
6483 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6485 case LE_EXPR:
6486 if (TREE_OVERFLOW (hi))
6488 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6489 return omit_one_operand_loc (loc, type, tmp, arg00);
6491 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6493 case GT_EXPR:
6494 if (TREE_OVERFLOW (hi))
6496 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6497 return omit_one_operand_loc (loc, type, tmp, arg00);
6499 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6501 case GE_EXPR:
6502 if (TREE_OVERFLOW (lo))
6504 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6505 return omit_one_operand_loc (loc, type, tmp, arg00);
6507 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6509 default:
6510 break;
6513 return NULL_TREE;
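/* Worked instance of the bounds above: for unsigned x, "x / 3 == 2"
   has prod == 6, tmp == 2, so lo == 6 and hi == 8, and the result is
   the range check 6 <= x && x <= 8; "x / 3 > 2" likewise becomes
   "x > 8".  */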
6517 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6518 equality/inequality test, then return a simplified form of the test
6519 using a sign test. Otherwise return NULL. TYPE is the desired
6520 result type. */
6522 static tree
6523 fold_single_bit_test_into_sign_test (location_t loc,
6524 enum tree_code code, tree arg0, tree arg1,
6525 tree result_type)
6527 /* If this is testing a single bit, we can optimize the test. */
6528 if ((code == NE_EXPR || code == EQ_EXPR)
6529 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6530 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6532 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6533 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6534 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6536 if (arg00 != NULL_TREE
6537 /* This is only a win if casting to a signed type is cheap,
6538 i.e. when arg00's type is not a partial mode. */
6539 && TYPE_PRECISION (TREE_TYPE (arg00))
6540 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6542 tree stype = signed_type_for (TREE_TYPE (arg00));
6543 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6544 result_type,
6545 fold_convert_loc (loc, stype, arg00),
6546 build_int_cst (stype, 0));
6550 return NULL_TREE;
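/* E.g. for a 32-bit int a, "(a & 0x80000000) != 0" isolates the sign
   bit and becomes "a < 0", and "(a & 0x80000000) == 0" becomes
   "a >= 0" (going through the corresponding signed type when A is
   unsigned).  */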
6553 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6554 equality/inequality test, then return a simplified form of
6555 the test using shifts and logical operations. Otherwise return
6556 NULL. TYPE is the desired result type. */
6558 tree
6559 fold_single_bit_test (location_t loc, enum tree_code code,
6560 tree arg0, tree arg1, tree result_type)
6562 /* If this is testing a single bit, we can optimize the test. */
6563 if ((code == NE_EXPR || code == EQ_EXPR)
6564 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6565 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6567 tree inner = TREE_OPERAND (arg0, 0);
6568 tree type = TREE_TYPE (arg0);
6569 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6570 machine_mode operand_mode = TYPE_MODE (type);
6571 int ops_unsigned;
6572 tree signed_type, unsigned_type, intermediate_type;
6573 tree tem, one;
6575 /* First, see if we can fold the single bit test into a sign-bit
6576 test. */
6577 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6578 result_type);
6579 if (tem)
6580 return tem;
6582 /* Otherwise we have (A & C) != 0 where C is a single bit,
6583 convert that into ((A >> C2) & 1), where C2 = log2(C).
6584 Similarly for (A & C) == 0. */
6586 /* If INNER is a right shift of a constant and it plus BITNUM does
6587 not overflow, adjust BITNUM and INNER. */
6588 if (TREE_CODE (inner) == RSHIFT_EXPR
6589 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6590 && bitnum < TYPE_PRECISION (type)
6591 && wi::ltu_p (TREE_OPERAND (inner, 1),
6592 TYPE_PRECISION (type) - bitnum))
6594 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6595 inner = TREE_OPERAND (inner, 0);
6598 /* If we are going to be able to omit the AND below, we must do our
6599 operations as unsigned. If we must use the AND, we have a choice.
6600 Normally unsigned is faster, but for some machines signed is. */
6601 #ifdef LOAD_EXTEND_OP
6602 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6603 && !flag_syntax_only) ? 0 : 1;
6604 #else
6605 ops_unsigned = 1;
6606 #endif
6608 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6609 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6610 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6611 inner = fold_convert_loc (loc, intermediate_type, inner);
6613 if (bitnum != 0)
6614 inner = build2 (RSHIFT_EXPR, intermediate_type,
6615 inner, size_int (bitnum));
6617 one = build_int_cst (intermediate_type, 1);
6619 if (code == EQ_EXPR)
6620 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6622 /* Put the AND last so it can combine with more things. */
6623 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6625 /* Make sure to return the proper type. */
6626 inner = fold_convert_loc (loc, result_type, inner);
6628 return inner;
6630 return NULL_TREE;
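/* E.g. "(a & 8) != 0" becomes roughly "((unsigned) a >> 3) & 1",
   and "(a & 8) == 0" the same with a BIT_XOR_EXPR of 1 inserted
   before the final AND.  */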
6633 /* Check whether we are allowed to reorder operands arg0 and arg1,
6634 such that the evaluation of arg1 occurs before arg0. */
6636 static bool
6637 reorder_operands_p (const_tree arg0, const_tree arg1)
6639 if (! flag_evaluation_order)
6640 return true;
6641 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6642 return true;
6643 return ! TREE_SIDE_EFFECTS (arg0)
6644 && ! TREE_SIDE_EFFECTS (arg1);
6647 /* Test whether it is preferable to swap two operands, ARG0 and
6648 ARG1, for example because ARG0 is an integer constant and ARG1
6649 isn't. If REORDER is true, only recommend swapping if we can
6650 evaluate the operands in reverse order. */
6652 bool
6653 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6655 if (CONSTANT_CLASS_P (arg1))
6656 return 0;
6657 if (CONSTANT_CLASS_P (arg0))
6658 return 1;
6660 STRIP_SIGN_NOPS (arg0);
6661 STRIP_SIGN_NOPS (arg1);
6663 if (TREE_CONSTANT (arg1))
6664 return 0;
6665 if (TREE_CONSTANT (arg0))
6666 return 1;
6668 if (reorder && flag_evaluation_order
6669 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6670 return 0;
6672 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6673 for commutative and comparison operators. Ensuring a canonical
6674 form allows the optimizers to find additional redundancies without
6675 having to explicitly check for both orderings. */
6676 if (TREE_CODE (arg0) == SSA_NAME
6677 && TREE_CODE (arg1) == SSA_NAME
6678 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6679 return 1;
6681 /* Put SSA_NAMEs last. */
6682 if (TREE_CODE (arg1) == SSA_NAME)
6683 return 0;
6684 if (TREE_CODE (arg0) == SSA_NAME)
6685 return 1;
6687 /* Put variables last. */
6688 if (DECL_P (arg1))
6689 return 0;
6690 if (DECL_P (arg0))
6691 return 1;
6693 return 0;
6696 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6697 ARG0 is extended to a wider type. */
6699 static tree
6700 fold_widened_comparison (location_t loc, enum tree_code code,
6701 tree type, tree arg0, tree arg1)
6703 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6704 tree arg1_unw;
6705 tree shorter_type, outer_type;
6706 tree min, max;
6707 bool above, below;
6709 if (arg0_unw == arg0)
6710 return NULL_TREE;
6711 shorter_type = TREE_TYPE (arg0_unw);
6713 #ifdef HAVE_canonicalize_funcptr_for_compare
6714 /* Disable this optimization if we're casting a function pointer
6715 type on targets that require function pointer canonicalization. */
6716 if (HAVE_canonicalize_funcptr_for_compare
6717 && TREE_CODE (shorter_type) == POINTER_TYPE
6718 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6719 return NULL_TREE;
6720 #endif
6722 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6723 return NULL_TREE;
6725 arg1_unw = get_unwidened (arg1, NULL_TREE);
6727 /* If possible, express the comparison in the shorter mode. */
6728 if ((code == EQ_EXPR || code == NE_EXPR
6729 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6730 && (TREE_TYPE (arg1_unw) == shorter_type
6731 || ((TYPE_PRECISION (shorter_type)
6732 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6733 && (TYPE_UNSIGNED (shorter_type)
6734 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6735 || (TREE_CODE (arg1_unw) == INTEGER_CST
6736 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6737 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6738 && int_fits_type_p (arg1_unw, shorter_type))))
6739 return fold_build2_loc (loc, code, type, arg0_unw,
6740 fold_convert_loc (loc, shorter_type, arg1_unw));
6742 if (TREE_CODE (arg1_unw) != INTEGER_CST
6743 || TREE_CODE (shorter_type) != INTEGER_TYPE
6744 || int_fits_type_p (arg1_unw, shorter_type))
6745 return NULL_TREE;
6747 /* If we are comparing with an integer that does not fit into the range
6748 of the shorter type, the result is known. */
6749 outer_type = TREE_TYPE (arg1_unw);
6750 min = lower_bound_in_type (outer_type, shorter_type);
6751 max = upper_bound_in_type (outer_type, shorter_type);
6753 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6754 max, arg1_unw));
6755 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6756 arg1_unw, min));
6758 switch (code)
6760 case EQ_EXPR:
6761 if (above || below)
6762 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6763 break;
6765 case NE_EXPR:
6766 if (above || below)
6767 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6768 break;
6770 case LT_EXPR:
6771 case LE_EXPR:
6772 if (above)
6773 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6774 else if (below)
6775 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6777 case GT_EXPR:
6778 case GE_EXPR:
6779 if (above)
6780 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6781 else if (below)
6782 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6784 default:
6785 break;
6788 return NULL_TREE;
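/* E.g. for "unsigned short u", "(int) u == 70000" is known false here
   (70000 exceeds the bound 65535 of the shorter type), whereas
   "(int) u == 7" is already folded above to "u == 7" in the shorter
   type.  */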
6791 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6792 ARG0 just the signedness is changed. */
6794 static tree
6795 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6796 tree arg0, tree arg1)
6798 tree arg0_inner;
6799 tree inner_type, outer_type;
6801 if (!CONVERT_EXPR_P (arg0))
6802 return NULL_TREE;
6804 outer_type = TREE_TYPE (arg0);
6805 arg0_inner = TREE_OPERAND (arg0, 0);
6806 inner_type = TREE_TYPE (arg0_inner);
6808 #ifdef HAVE_canonicalize_funcptr_for_compare
6809 /* Disable this optimization if we're casting a function pointer
6810 type on targets that require function pointer canonicalization. */
6811 if (HAVE_canonicalize_funcptr_for_compare
6812 && TREE_CODE (inner_type) == POINTER_TYPE
6813 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6814 return NULL_TREE;
6815 #endif
6817 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6818 return NULL_TREE;
6820 if (TREE_CODE (arg1) != INTEGER_CST
6821 && !(CONVERT_EXPR_P (arg1)
6822 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6823 return NULL_TREE;
6825 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6826 && code != NE_EXPR
6827 && code != EQ_EXPR)
6828 return NULL_TREE;
6830 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6831 return NULL_TREE;
6833 if (TREE_CODE (arg1) == INTEGER_CST)
6834 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6835 TREE_OVERFLOW (arg1));
6836 else
6837 arg1 = fold_convert_loc (loc, inner_type, arg1);
6839 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6843 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6844 means A >= Y && A != MAX, but in this case we know that
6845 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6847 static tree
6848 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6850 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6852 if (TREE_CODE (bound) == LT_EXPR)
6853 a = TREE_OPERAND (bound, 0);
6854 else if (TREE_CODE (bound) == GT_EXPR)
6855 a = TREE_OPERAND (bound, 1);
6856 else
6857 return NULL_TREE;
6859 typea = TREE_TYPE (a);
6860 if (!INTEGRAL_TYPE_P (typea)
6861 && !POINTER_TYPE_P (typea))
6862 return NULL_TREE;
6864 if (TREE_CODE (ineq) == LT_EXPR)
6866 a1 = TREE_OPERAND (ineq, 1);
6867 y = TREE_OPERAND (ineq, 0);
6869 else if (TREE_CODE (ineq) == GT_EXPR)
6871 a1 = TREE_OPERAND (ineq, 0);
6872 y = TREE_OPERAND (ineq, 1);
6874 else
6875 return NULL_TREE;
6877 if (TREE_TYPE (a1) != typea)
6878 return NULL_TREE;
6880 if (POINTER_TYPE_P (typea))
6882 /* Convert the pointers to integers before taking the difference. */
6883 tree ta = fold_convert_loc (loc, ssizetype, a);
6884 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6885 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6887 else
6888 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6890 if (!diff || !integer_onep (diff))
6891 return NULL_TREE;
6893 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6896 /* Fold a sum or difference of at least one multiplication.
6897 Returns the folded tree or NULL if no simplification could be made. */
6899 static tree
6900 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6901 tree arg0, tree arg1)
6903 tree arg00, arg01, arg10, arg11;
6904 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6906 /* (A * C) +- (B * C) -> (A+-B) * C.
6907 (A * C) +- A -> A * (C+-1).
6908 We are most concerned about the case where C is a constant,
6909 but other combinations show up during loop reduction. Since
6910 it is not difficult, try all four possibilities. */
6912 if (TREE_CODE (arg0) == MULT_EXPR)
6914 arg00 = TREE_OPERAND (arg0, 0);
6915 arg01 = TREE_OPERAND (arg0, 1);
6917 else if (TREE_CODE (arg0) == INTEGER_CST)
6919 arg00 = build_one_cst (type);
6920 arg01 = arg0;
6922 else
6924 /* We cannot generate constant 1 for fract. */
6925 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6926 return NULL_TREE;
6927 arg00 = arg0;
6928 arg01 = build_one_cst (type);
6930 if (TREE_CODE (arg1) == MULT_EXPR)
6932 arg10 = TREE_OPERAND (arg1, 0);
6933 arg11 = TREE_OPERAND (arg1, 1);
6935 else if (TREE_CODE (arg1) == INTEGER_CST)
6937 arg10 = build_one_cst (type);
6938 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6939 the purpose of this canonicalization. */
6940 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6941 && negate_expr_p (arg1)
6942 && code == PLUS_EXPR)
6944 arg11 = negate_expr (arg1);
6945 code = MINUS_EXPR;
6947 else
6948 arg11 = arg1;
6950 else
6952 /* We cannot generate constant 1 for fract. */
6953 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6954 return NULL_TREE;
6955 arg10 = arg1;
6956 arg11 = build_one_cst (type);
6958 same = NULL_TREE;
6960 if (operand_equal_p (arg01, arg11, 0))
6961 same = arg01, alt0 = arg00, alt1 = arg10;
6962 else if (operand_equal_p (arg00, arg10, 0))
6963 same = arg00, alt0 = arg01, alt1 = arg11;
6964 else if (operand_equal_p (arg00, arg11, 0))
6965 same = arg00, alt0 = arg01, alt1 = arg10;
6966 else if (operand_equal_p (arg01, arg10, 0))
6967 same = arg01, alt0 = arg00, alt1 = arg11;
6969 /* No identical multiplicands; see if we can find a common
6970 power-of-two factor in non-power-of-two multiplies. This
6971 can help in multi-dimensional array access. */
6972 else if (tree_fits_shwi_p (arg01)
6973 && tree_fits_shwi_p (arg11))
6975 HOST_WIDE_INT int01, int11, tmp;
6976 bool swap = false;
6977 tree maybe_same;
6978 int01 = tree_to_shwi (arg01);
6979 int11 = tree_to_shwi (arg11);
6981 /* Move min of absolute values to int11. */
6982 if (absu_hwi (int01) < absu_hwi (int11))
6984 tmp = int01, int01 = int11, int11 = tmp;
6985 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6986 maybe_same = arg01;
6987 swap = true;
6989 else
6990 maybe_same = arg11;
6992 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6993 /* The remainder should not be a constant, otherwise we
6994 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
6995 increases the number of multiplications necessary. */
6996 && TREE_CODE (arg10) != INTEGER_CST)
6998 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6999 build_int_cst (TREE_TYPE (arg00),
7000 int01 / int11));
7001 alt1 = arg10;
7002 same = maybe_same;
7003 if (swap)
7004 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7008 if (same)
7009 return fold_build2_loc (loc, MULT_EXPR, type,
7010 fold_build2_loc (loc, code, type,
7011 fold_convert_loc (loc, type, alt0),
7012 fold_convert_loc (loc, type, alt1)),
7013 fold_convert_loc (loc, type, same));
7015 return NULL_TREE;
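/* E.g. "a * c + b * c" becomes "(a + b) * c", and the power-of-two
   factoring above turns "i * 12 + j * 4" into "(i * 3 + j) * 4"
   (int01 == 12, int11 == 4, quotient 3).  */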
7018 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7019 specified by EXPR into the buffer PTR of length LEN bytes.
7020 Return the number of bytes placed in the buffer, or zero
7021 upon failure. */
7023 static int
7024 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7026 tree type = TREE_TYPE (expr);
7027 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7028 int byte, offset, word, words;
7029 unsigned char value;
7031 if ((off == -1 && total_bytes > len)
7032 || off >= total_bytes)
7033 return 0;
7034 if (off == -1)
7035 off = 0;
7036 words = total_bytes / UNITS_PER_WORD;
7038 for (byte = 0; byte < total_bytes; byte++)
7040 int bitpos = byte * BITS_PER_UNIT;
7041 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7042 number of bytes. */
7043 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7045 if (total_bytes > UNITS_PER_WORD)
7047 word = byte / UNITS_PER_WORD;
7048 if (WORDS_BIG_ENDIAN)
7049 word = (words - 1) - word;
7050 offset = word * UNITS_PER_WORD;
7051 if (BYTES_BIG_ENDIAN)
7052 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7053 else
7054 offset += byte % UNITS_PER_WORD;
7056 else
7057 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7058 if (offset >= off
7059 && offset - off < len)
7060 ptr[offset - off] = value;
7062 return MIN (len, total_bytes - off);
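/* Byte-order sketch: encoding the 32-bit INTEGER_CST 0x01020304 with
   OFF == -1 stores { 0x04, 0x03, 0x02, 0x01 } on a little-endian
   target and { 0x01, 0x02, 0x03, 0x04 } on a big-endian one; the
   word shuffle only matters when total_bytes > UNITS_PER_WORD.  */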
7066 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7067 specified by EXPR into the buffer PTR of length LEN bytes.
7068 Return the number of bytes placed in the buffer, or zero
7069 upon failure. */
7071 static int
7072 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7074 tree type = TREE_TYPE (expr);
7075 machine_mode mode = TYPE_MODE (type);
7076 int total_bytes = GET_MODE_SIZE (mode);
7077 FIXED_VALUE_TYPE value;
7078 tree i_value, i_type;
7080 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7081 return 0;
7083 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7085 if (NULL_TREE == i_type
7086 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7087 return 0;
7089 value = TREE_FIXED_CST (expr);
7090 i_value = double_int_to_tree (i_type, value.data);
7092 return native_encode_int (i_value, ptr, len, off);
7096 /* Subroutine of native_encode_expr. Encode the REAL_CST
7097 specified by EXPR into the buffer PTR of length LEN bytes.
7098 Return the number of bytes placed in the buffer, or zero
7099 upon failure. */
7101 static int
7102 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7104 tree type = TREE_TYPE (expr);
7105 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7106 int byte, offset, word, words, bitpos;
7107 unsigned char value;
7109 /* There are always 32 bits in each long, no matter the size of
7110 the host's long. We handle floating point representations with
7111 up to 192 bits. */
7112 long tmp[6];
7114 if ((off == -1 && total_bytes > len)
7115 || off >= total_bytes)
7116 return 0;
7117 if (off == -1)
7118 off = 0;
7119 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7121 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7123 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7124 bitpos += BITS_PER_UNIT)
7126 byte = (bitpos / BITS_PER_UNIT) & 3;
7127 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7129 if (UNITS_PER_WORD < 4)
7131 word = byte / UNITS_PER_WORD;
7132 if (WORDS_BIG_ENDIAN)
7133 word = (words - 1) - word;
7134 offset = word * UNITS_PER_WORD;
7135 if (BYTES_BIG_ENDIAN)
7136 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7137 else
7138 offset += byte % UNITS_PER_WORD;
7140 else
7141 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7142 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7143 if (offset >= off
7144 && offset - off < len)
7145 ptr[offset - off] = value;
7147 return MIN (len, total_bytes - off);
7150 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7151 specified by EXPR into the buffer PTR of length LEN bytes.
7152 Return the number of bytes placed in the buffer, or zero
7153 upon failure. */
7155 static int
7156 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7158 int rsize, isize;
7159 tree part;
7161 part = TREE_REALPART (expr);
7162 rsize = native_encode_expr (part, ptr, len, off);
7163 if (off == -1
7164 && rsize == 0)
7165 return 0;
7166 part = TREE_IMAGPART (expr);
7167 if (off != -1)
7168 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7169 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7170 if (off == -1
7171 && isize != rsize)
7172 return 0;
7173 return rsize + isize;
7177 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7178 specified by EXPR into the buffer PTR of length LEN bytes.
7179 Return the number of bytes placed in the buffer, or zero
7180 upon failure. */
7182 static int
7183 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7185 unsigned i, count;
7186 int size, offset;
7187 tree itype, elem;
7189 offset = 0;
7190 count = VECTOR_CST_NELTS (expr);
7191 itype = TREE_TYPE (TREE_TYPE (expr));
7192 size = GET_MODE_SIZE (TYPE_MODE (itype));
7193 for (i = 0; i < count; i++)
7195 if (off >= size)
7197 off -= size;
7198 continue;
7200 elem = VECTOR_CST_ELT (expr, i);
7201 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7202 if ((off == -1 && res != size)
7203 || res == 0)
7204 return 0;
7205 offset += res;
7206 if (offset >= len)
7207 return offset;
7208 if (off != -1)
7209 off = 0;
7211 return offset;
7215 /* Subroutine of native_encode_expr. Encode the STRING_CST
7216 specified by EXPR into the buffer PTR of length LEN bytes.
7217 Return the number of bytes placed in the buffer, or zero
7218 upon failure. */
7220 static int
7221 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7223 tree type = TREE_TYPE (expr);
7224 HOST_WIDE_INT total_bytes;
7226 if (TREE_CODE (type) != ARRAY_TYPE
7227 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7228 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7229 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7230 return 0;
7231 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7232 if ((off == -1 && total_bytes > len)
7233 || off >= total_bytes)
7234 return 0;
7235 if (off == -1)
7236 off = 0;
7237 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7239 int written = 0;
7240 if (off < TREE_STRING_LENGTH (expr))
7242 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7243 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7245 memset (ptr + written, 0,
7246 MIN (total_bytes - written, len - written));
7248 else
7249 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7250 return MIN (total_bytes - off, len);
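/* Worked example (expository, not part of the original source): for a
   declaration like

     static const char a[8] = "hi";

   TYPE_SIZE_UNIT is 8 while TREE_STRING_LENGTH is typically 3 ('h',
   'i' and the terminating NUL), so with OFF == -1 and LEN >= 8 the
   first three bytes are copied from the STRING_CST, the remaining
   five are zero-filled, and the function returns 8.  */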
7254 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7255 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7256 buffer PTR of length LEN bytes. If OFF is not -1 then start
7257 the encoding at byte offset OFF and encode at most LEN bytes.
7258 Return the number of bytes placed in the buffer, or zero upon failure. */
7260 int
7261 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7263 switch (TREE_CODE (expr))
7265 case INTEGER_CST:
7266 return native_encode_int (expr, ptr, len, off);
7268 case REAL_CST:
7269 return native_encode_real (expr, ptr, len, off);
7271 case FIXED_CST:
7272 return native_encode_fixed (expr, ptr, len, off);
7274 case COMPLEX_CST:
7275 return native_encode_complex (expr, ptr, len, off);
7277 case VECTOR_CST:
7278 return native_encode_vector (expr, ptr, len, off);
7280 case STRING_CST:
7281 return native_encode_string (expr, ptr, len, off);
7283 default:
7284 return 0;
7289 /* Subroutine of native_interpret_expr. Interpret the contents of
7290 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7291 If the buffer cannot be interpreted, return NULL_TREE. */
7293 static tree
7294 native_interpret_int (tree type, const unsigned char *ptr, int len)
7296 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7298 if (total_bytes > len
7299 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7300 return NULL_TREE;
7302 wide_int result = wi::from_buffer (ptr, total_bytes);
7304 return wide_int_to_tree (type, result);
7308 /* Subroutine of native_interpret_expr. Interpret the contents of
7309 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7310 If the buffer cannot be interpreted, return NULL_TREE. */
7312 static tree
7313 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7315 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7316 double_int result;
7317 FIXED_VALUE_TYPE fixed_value;
7319 if (total_bytes > len
7320 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7321 return NULL_TREE;
7323 result = double_int::from_buffer (ptr, total_bytes);
7324 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7326 return build_fixed (type, fixed_value);
7330 /* Subroutine of native_interpret_expr. Interpret the contents of
7331 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7332 If the buffer cannot be interpreted, return NULL_TREE. */
7334 static tree
7335 native_interpret_real (tree type, const unsigned char *ptr, int len)
7337 machine_mode mode = TYPE_MODE (type);
7338 int total_bytes = GET_MODE_SIZE (mode);
7339 int byte, offset, word, words, bitpos;
7340 unsigned char value;
7341 /* There are always 32 bits in each long, no matter the size of
7342 the host's long. We handle floating point representations with
7343 up to 192 bits. */
7344 REAL_VALUE_TYPE r;
7345 long tmp[6];
7347 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7348 if (total_bytes > len || total_bytes > 24)
7349 return NULL_TREE;
7350 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7352 memset (tmp, 0, sizeof (tmp));
7353 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7354 bitpos += BITS_PER_UNIT)
7356 byte = (bitpos / BITS_PER_UNIT) & 3;
7357 if (UNITS_PER_WORD < 4)
7359 word = byte / UNITS_PER_WORD;
7360 if (WORDS_BIG_ENDIAN)
7361 word = (words - 1) - word;
7362 offset = word * UNITS_PER_WORD;
7363 if (BYTES_BIG_ENDIAN)
7364 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7365 else
7366 offset += byte % UNITS_PER_WORD;
7368 else
7369 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7370 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7372 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7375 real_from_target (&r, tmp, mode);
7376 return build_real (type, r);
7380 /* Subroutine of native_interpret_expr. Interpret the contents of
7381 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7382 If the buffer cannot be interpreted, return NULL_TREE. */
7384 static tree
7385 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7387 tree etype, rpart, ipart;
7388 int size;
7390 etype = TREE_TYPE (type);
7391 size = GET_MODE_SIZE (TYPE_MODE (etype));
7392 if (size * 2 > len)
7393 return NULL_TREE;
7394 rpart = native_interpret_expr (etype, ptr, size);
7395 if (!rpart)
7396 return NULL_TREE;
7397 ipart = native_interpret_expr (etype, ptr+size, size);
7398 if (!ipart)
7399 return NULL_TREE;
7400 return build_complex (type, rpart, ipart);
7404 /* Subroutine of native_interpret_expr. Interpret the contents of
7405 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7406 If the buffer cannot be interpreted, return NULL_TREE. */
7408 static tree
7409 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7411 tree etype, elem;
7412 int i, size, count;
7413 tree *elements;
7415 etype = TREE_TYPE (type);
7416 size = GET_MODE_SIZE (TYPE_MODE (etype));
7417 count = TYPE_VECTOR_SUBPARTS (type);
7418 if (size * count > len)
7419 return NULL_TREE;
7421 elements = XALLOCAVEC (tree, count);
7422 for (i = count - 1; i >= 0; i--)
7424 elem = native_interpret_expr (etype, ptr+(i*size), size);
7425 if (!elem)
7426 return NULL_TREE;
7427 elements[i] = elem;
7429 return build_vector (type, elements);
7433 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a constant of type TYPE. For
7435 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7436 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7437 return NULL_TREE. */
7439 tree
7440 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7442 switch (TREE_CODE (type))
7444 case INTEGER_TYPE:
7445 case ENUMERAL_TYPE:
7446 case BOOLEAN_TYPE:
7447 case POINTER_TYPE:
7448 case REFERENCE_TYPE:
7449 return native_interpret_int (type, ptr, len);
7451 case REAL_TYPE:
7452 return native_interpret_real (type, ptr, len);
7454 case FIXED_POINT_TYPE:
7455 return native_interpret_fixed (type, ptr, len);
7457 case COMPLEX_TYPE:
7458 return native_interpret_complex (type, ptr, len);
7460 case VECTOR_TYPE:
7461 return native_interpret_vector (type, ptr, len);
7463 default:
7464 return NULL_TREE;
7468 /* Returns true if we can interpret the contents of a native encoding
7469 as TYPE. */
7471 static bool
7472 can_native_interpret_type_p (tree type)
7474 switch (TREE_CODE (type))
7476 case INTEGER_TYPE:
7477 case ENUMERAL_TYPE:
7478 case BOOLEAN_TYPE:
7479 case POINTER_TYPE:
7480 case REFERENCE_TYPE:
7481 case FIXED_POINT_TYPE:
7482 case REAL_TYPE:
7483 case COMPLEX_TYPE:
7484 case VECTOR_TYPE:
7485 return true;
7486 default:
7487 return false;
7491 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7492 TYPE at compile-time. If we're unable to perform the conversion
7493 return NULL_TREE. */
7495 static tree
7496 fold_view_convert_expr (tree type, tree expr)
7498 /* We support up to 512-bit values (for V8DFmode). */
7499 unsigned char buffer[64];
7500 int len;
7502 /* Check that the host and target are sane. */
7503 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7504 return NULL_TREE;
7506 len = native_encode_expr (expr, buffer, sizeof (buffer));
7507 if (len == 0)
7508 return NULL_TREE;
7510 return native_interpret_expr (type, buffer, len);
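/* Hypothetical usage sketch (expository, not part of the original
   source; assumes a target with 32-bit int and IEEE single-precision
   float):

     tree f = build_real (float_type_node, dconst1);
     tree i = fold_view_convert_expr (integer_type_node, f);

   The float 1.0 is encoded into the buffer in target byte order and
   then reinterpreted as an integer, so I becomes the INTEGER_CST
   0x3f800000.  */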
7513 /* Build an expression for the address of T. Folds away INDIRECT_REF
7514 to avoid confusing the gimplify process. */
7516 tree
7517 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7519 /* The size of the object is not relevant when talking about its address. */
7520 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7521 t = TREE_OPERAND (t, 0);
7523 if (TREE_CODE (t) == INDIRECT_REF)
7525 t = TREE_OPERAND (t, 0);
7527 if (TREE_TYPE (t) != ptrtype)
7528 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7530 else if (TREE_CODE (t) == MEM_REF
7531 && integer_zerop (TREE_OPERAND (t, 1)))
7532 return TREE_OPERAND (t, 0);
7533 else if (TREE_CODE (t) == MEM_REF
7534 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7535 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7536 TREE_OPERAND (t, 0),
7537 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7538 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7540 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7542 if (TREE_TYPE (t) != ptrtype)
7543 t = fold_convert_loc (loc, ptrtype, t);
7545 else
7546 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7548 return t;
7551 /* Build an expression for the address of T. */
7553 tree
7554 build_fold_addr_expr_loc (location_t loc, tree t)
7556 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7558 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7561 static bool vec_cst_ctor_to_array (tree, tree *);
7563 /* Fold a unary expression of code CODE and type TYPE with operand
7564 OP0. Return the folded expression if folding is successful.
7565 Otherwise, return NULL_TREE. */
7567 tree
7568 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7570 tree tem;
7571 tree arg0;
7572 enum tree_code_class kind = TREE_CODE_CLASS (code);
7574 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7575 && TREE_CODE_LENGTH (code) == 1);
7577 tem = generic_simplify (loc, code, type, op0);
7578 if (tem)
7579 return tem;
7581 arg0 = op0;
7582 if (arg0)
7584 if (CONVERT_EXPR_CODE_P (code)
7585 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7587 /* Don't use STRIP_NOPS, because signedness of argument type
7588 matters. */
7589 STRIP_SIGN_NOPS (arg0);
7591 else
7593 /* Strip any conversions that don't change the mode. This
7594 is safe for every expression, except for a comparison
7595 expression because its signedness is derived from its
7596 operands.
7598 Note that this is done as an internal manipulation within
7599 the constant folder, in order to find the simplest
7600 representation of the arguments so that their form can be
7601 studied. In any case, the appropriate type conversions
7602 should be put back in the tree that will get out of the
7603 constant folder. */
7604 STRIP_NOPS (arg0);
7608 if (TREE_CODE_CLASS (code) == tcc_unary)
7610 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7611 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7612 fold_build1_loc (loc, code, type,
7613 fold_convert_loc (loc, TREE_TYPE (op0),
7614 TREE_OPERAND (arg0, 1))));
7615 else if (TREE_CODE (arg0) == COND_EXPR)
7617 tree arg01 = TREE_OPERAND (arg0, 1);
7618 tree arg02 = TREE_OPERAND (arg0, 2);
7619 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7620 arg01 = fold_build1_loc (loc, code, type,
7621 fold_convert_loc (loc,
7622 TREE_TYPE (op0), arg01));
7623 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7624 arg02 = fold_build1_loc (loc, code, type,
7625 fold_convert_loc (loc,
7626 TREE_TYPE (op0), arg02));
7627 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7628 arg01, arg02);
7630 /* If this was a conversion, and all we did was to move it
7631 inside the COND_EXPR, bring it back out. But leave it if
7632 it is a conversion from integer to integer and the
7633 result precision is no wider than a word since such a
7634 conversion is cheap and may be optimized away by combine,
7635 while it couldn't if it were outside the COND_EXPR. Then return
7636 so we don't get into an infinite recursion loop taking the
7637 conversion out and then back in. */
7639 if ((CONVERT_EXPR_CODE_P (code)
7640 || code == NON_LVALUE_EXPR)
7641 && TREE_CODE (tem) == COND_EXPR
7642 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7643 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7644 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7645 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7646 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7647 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7648 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7649 && (INTEGRAL_TYPE_P
7650 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7651 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7652 || flag_syntax_only))
7653 tem = build1_loc (loc, code, type,
7654 build3 (COND_EXPR,
7655 TREE_TYPE (TREE_OPERAND
7656 (TREE_OPERAND (tem, 1), 0)),
7657 TREE_OPERAND (tem, 0),
7658 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7659 TREE_OPERAND (TREE_OPERAND (tem, 2),
7660 0)));
7661 return tem;
7665 switch (code)
7667 case NON_LVALUE_EXPR:
7668 if (!maybe_lvalue_p (op0))
7669 return fold_convert_loc (loc, type, op0);
7670 return NULL_TREE;
7672 CASE_CONVERT:
7673 case FLOAT_EXPR:
7674 case FIX_TRUNC_EXPR:
7675 if (COMPARISON_CLASS_P (op0))
7677 /* If we have (type) (a CMP b) and type is an integral type, return
7678 new expression involving the new type. Canonicalize
7679 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7680 non-integral type.
7681 Do not fold the result as that would not simplify further, also
7682 folding again results in recursions. */
7683 if (TREE_CODE (type) == BOOLEAN_TYPE)
7684 return build2_loc (loc, TREE_CODE (op0), type,
7685 TREE_OPERAND (op0, 0),
7686 TREE_OPERAND (op0, 1));
7687 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7688 && TREE_CODE (type) != VECTOR_TYPE)
7689 return build3_loc (loc, COND_EXPR, type, op0,
7690 constant_boolean_node (true, type),
7691 constant_boolean_node (false, type));
7694 /* Handle cases of two conversions in a row. */
7695 if (CONVERT_EXPR_P (op0))
7697 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7698 tree inter_type = TREE_TYPE (op0);
7699 int inside_int = INTEGRAL_TYPE_P (inside_type);
7700 int inside_ptr = POINTER_TYPE_P (inside_type);
7701 int inside_float = FLOAT_TYPE_P (inside_type);
7702 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7703 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7704 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7705 int inter_int = INTEGRAL_TYPE_P (inter_type);
7706 int inter_ptr = POINTER_TYPE_P (inter_type);
7707 int inter_float = FLOAT_TYPE_P (inter_type);
7708 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7709 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7710 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7711 int final_int = INTEGRAL_TYPE_P (type);
7712 int final_ptr = POINTER_TYPE_P (type);
7713 int final_float = FLOAT_TYPE_P (type);
7714 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7715 unsigned int final_prec = TYPE_PRECISION (type);
7716 int final_unsignedp = TYPE_UNSIGNED (type);
7718 /* In addition to the cases of two conversions in a row
7719 handled below, if we are converting something to its own
7720 type via an object of identical or wider precision, neither
7721 conversion is needed. */
7722 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7723 && (((inter_int || inter_ptr) && final_int)
7724 || (inter_float && final_float))
7725 && inter_prec >= final_prec)
7726 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7728 /* Likewise, if the intermediate and initial types are either both
7729 float or both integer, we don't need the middle conversion if the
7730 former is wider than the latter and doesn't change the signedness
7731 (for integers). Avoid this if the final type is a pointer since
7732 then we sometimes need the middle conversion. Likewise if the
7733 final type has a precision not equal to the size of its mode. */
7734 if (((inter_int && inside_int)
7735 || (inter_float && inside_float)
7736 || (inter_vec && inside_vec))
7737 && inter_prec >= inside_prec
7738 && (inter_float || inter_vec
7739 || inter_unsignedp == inside_unsignedp)
7740 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7741 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7742 && ! final_ptr
7743 && (! final_vec || inter_prec == inside_prec))
7744 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7746 /* If we have a sign-extension of a zero-extended value, we can
7747 replace that by a single zero-extension. Likewise if the
7748 final conversion does not change precision we can drop the
7749 intermediate conversion. */
7750 if (inside_int && inter_int && final_int
7751 && ((inside_prec < inter_prec && inter_prec < final_prec
7752 && inside_unsignedp && !inter_unsignedp)
7753 || final_prec == inter_prec))
7754 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7756 /* Two conversions in a row are not needed unless:
7757 - some conversion is floating-point (overstrict for now), or
7758 - some conversion is a vector (overstrict for now), or
7759 - the intermediate type is narrower than both initial and
7760 final, or
7761 - the intermediate type and innermost type differ in signedness,
7762 and the outermost type is wider than the intermediate, or
7763 - the initial type is a pointer type and the precisions of the
7764 intermediate and final types differ, or
7765 - the final type is a pointer type and the precisions of the
7766 initial and intermediate types differ. */
7767 if (! inside_float && ! inter_float && ! final_float
7768 && ! inside_vec && ! inter_vec && ! final_vec
7769 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7770 && ! (inside_int && inter_int
7771 && inter_unsignedp != inside_unsignedp
7772 && inter_prec < final_prec)
7773 && ((inter_unsignedp && inter_prec > inside_prec)
7774 == (final_unsignedp && final_prec > inter_prec))
7775 && ! (inside_ptr && inter_prec != final_prec)
7776 && ! (final_ptr && inside_prec != inter_prec)
7777 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7778 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7779 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
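/* Expository examples for the rules above (not part of the original
   source), assuming 16-bit short, 32-bit int and 64-bit long:
   (int) (long) i for an int I needs neither conversion, because I is
   converted back to its own type through a wider intermediate; and
   (long) (int) us for an unsigned short US is a sign-extension of a
   zero-extended value, so it becomes the single zero-extension
   (long) us.  */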
7782 /* Handle (T *)&A.B.C for A being of type T and B and C
7783 living at offset zero. This occurs frequently in
7784 C++ upcasting and then accessing the base. */
7785 if (TREE_CODE (op0) == ADDR_EXPR
7786 && POINTER_TYPE_P (type)
7787 && handled_component_p (TREE_OPERAND (op0, 0)))
7789 HOST_WIDE_INT bitsize, bitpos;
7790 tree offset;
7791 machine_mode mode;
7792 int unsignedp, volatilep;
7793 tree base = TREE_OPERAND (op0, 0);
7794 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7795 &mode, &unsignedp, &volatilep, false);
7796 /* If the reference was to a (constant) zero offset, we can use
7797 the address of the base if it has the same base type
7798 as the result type and the pointer type is unqualified. */
7799 if (! offset && bitpos == 0
7800 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7801 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7802 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7803 return fold_convert_loc (loc, type,
7804 build_fold_addr_expr_loc (loc, base));
7807 if (TREE_CODE (op0) == MODIFY_EXPR
7808 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7809 /* Detect assigning a bitfield. */
7810 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7811 && DECL_BIT_FIELD
7812 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7814 /* Don't leave an assignment inside a conversion
7815 unless assigning a bitfield. */
7816 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7817 /* First do the assignment, then return converted constant. */
7818 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7819 TREE_NO_WARNING (tem) = 1;
7820 TREE_USED (tem) = 1;
7821 return tem;
7824 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7825 constants (if x has signed type, the sign bit cannot be set
7826 in c). This folds extension into the BIT_AND_EXPR.
7827 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7828 very likely don't have maximal range for their precision and this
7829 transformation effectively doesn't preserve non-maximal ranges. */
7830 if (TREE_CODE (type) == INTEGER_TYPE
7831 && TREE_CODE (op0) == BIT_AND_EXPR
7832 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7834 tree and_expr = op0;
7835 tree and0 = TREE_OPERAND (and_expr, 0);
7836 tree and1 = TREE_OPERAND (and_expr, 1);
7837 int change = 0;
7839 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7840 || (TYPE_PRECISION (type)
7841 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7842 change = 1;
7843 else if (TYPE_PRECISION (TREE_TYPE (and1))
7844 <= HOST_BITS_PER_WIDE_INT
7845 && tree_fits_uhwi_p (and1))
7847 unsigned HOST_WIDE_INT cst;
7849 cst = tree_to_uhwi (and1);
7850 cst &= HOST_WIDE_INT_M1U
7851 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7852 change = (cst == 0);
7853 #ifdef LOAD_EXTEND_OP
7854 if (change
7855 && !flag_syntax_only
7856 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7857 == ZERO_EXTEND))
7859 tree uns = unsigned_type_for (TREE_TYPE (and0));
7860 and0 = fold_convert_loc (loc, uns, and0);
7861 and1 = fold_convert_loc (loc, uns, and1);
7863 #endif
7865 if (change)
7867 tem = force_fit_type (type, wi::to_widest (and1), 0,
7868 TREE_OVERFLOW (and1));
7869 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7870 fold_convert_loc (loc, type, and0), tem);
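/* Worked example for the transformation above (expository, not part
   of the original source): with 32-bit int and 64-bit long,
   (long) (x & 0xff) for an int X becomes (long) x & 0xff.  The mask
   guarantees that the sign bit of the narrow result is clear, so
   masking after the extension yields the same value.  */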
7874 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7875 when one of the new casts will fold away. Conservatively we assume
7876 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7877 if (POINTER_TYPE_P (type)
7878 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7879 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7880 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7881 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7882 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7884 tree arg00 = TREE_OPERAND (arg0, 0);
7885 tree arg01 = TREE_OPERAND (arg0, 1);
7887 return fold_build_pointer_plus_loc
7888 (loc, fold_convert_loc (loc, type, arg00), arg01);
7891 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7892 of the same precision, and X is an integer type not narrower than
7893 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7894 if (INTEGRAL_TYPE_P (type)
7895 && TREE_CODE (op0) == BIT_NOT_EXPR
7896 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7897 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7898 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7900 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7901 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7902 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7903 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7904 fold_convert_loc (loc, type, tem));
7907 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7908 type of X and Y (integer types only). */
7909 if (INTEGRAL_TYPE_P (type)
7910 && TREE_CODE (op0) == MULT_EXPR
7911 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7912 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7914 /* Be careful not to introduce new overflows. */
7915 tree mult_type;
7916 if (TYPE_OVERFLOW_WRAPS (type))
7917 mult_type = type;
7918 else
7919 mult_type = unsigned_type_for (type);
7921 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7923 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7924 fold_convert_loc (loc, mult_type,
7925 TREE_OPERAND (op0, 0)),
7926 fold_convert_loc (loc, mult_type,
7927 TREE_OPERAND (op0, 1)));
7928 return fold_convert_loc (loc, type, tem);
7932 tem = fold_convert_const (code, type, arg0);
7933 return tem ? tem : NULL_TREE;
7935 case ADDR_SPACE_CONVERT_EXPR:
7936 if (integer_zerop (arg0))
7937 return fold_convert_const (code, type, arg0);
7938 return NULL_TREE;
7940 case FIXED_CONVERT_EXPR:
7941 tem = fold_convert_const (code, type, arg0);
7942 return tem ? tem : NULL_TREE;
7944 case VIEW_CONVERT_EXPR:
7945 if (TREE_CODE (op0) == MEM_REF)
7946 return fold_build2_loc (loc, MEM_REF, type,
7947 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7949 return fold_view_convert_expr (type, op0);
7951 case NEGATE_EXPR:
7952 tem = fold_negate_expr (loc, arg0);
7953 if (tem)
7954 return fold_convert_loc (loc, type, tem);
7955 return NULL_TREE;
7957 case ABS_EXPR:
7958 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7959 return fold_abs_const (arg0, type);
7960 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7961 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7962 /* Convert fabs((double)float) into (double)fabsf(float). */
7963 else if (TREE_CODE (arg0) == NOP_EXPR
7964 && TREE_CODE (type) == REAL_TYPE)
7966 tree targ0 = strip_float_extensions (arg0);
7967 if (targ0 != arg0)
7968 return fold_convert_loc (loc, type,
7969 fold_build1_loc (loc, ABS_EXPR,
7970 TREE_TYPE (targ0),
7971 targ0));
7973 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7974 else if (TREE_CODE (arg0) == ABS_EXPR)
7975 return arg0;
7976 else if (tree_expr_nonnegative_p (arg0))
7977 return arg0;
7979 /* Strip sign ops from argument. */
7980 if (TREE_CODE (type) == REAL_TYPE)
7982 tem = fold_strip_sign_ops (arg0);
7983 if (tem)
7984 return fold_build1_loc (loc, ABS_EXPR, type,
7985 fold_convert_loc (loc, type, tem));
7987 return NULL_TREE;
7989 case CONJ_EXPR:
7990 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7991 return fold_convert_loc (loc, type, arg0);
7992 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7994 tree itype = TREE_TYPE (type);
7995 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7996 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7997 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7998 negate_expr (ipart));
8000 if (TREE_CODE (arg0) == COMPLEX_CST)
8002 tree itype = TREE_TYPE (type);
8003 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8004 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8005 return build_complex (type, rpart, negate_expr (ipart));
8007 if (TREE_CODE (arg0) == CONJ_EXPR)
8008 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8009 return NULL_TREE;
8011 case BIT_NOT_EXPR:
8012 if (TREE_CODE (arg0) == INTEGER_CST)
8013 return fold_not_const (arg0, type);
8014 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8015 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8016 /* Convert ~ (-A) to A - 1. */
8017 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8018 return fold_build2_loc (loc, MINUS_EXPR, type,
8019 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8020 build_int_cst (type, 1));
8021 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8022 else if (INTEGRAL_TYPE_P (type)
8023 && ((TREE_CODE (arg0) == MINUS_EXPR
8024 && integer_onep (TREE_OPERAND (arg0, 1)))
8025 || (TREE_CODE (arg0) == PLUS_EXPR
8026 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8027 return fold_build1_loc (loc, NEGATE_EXPR, type,
8028 fold_convert_loc (loc, type,
8029 TREE_OPERAND (arg0, 0)));
8030 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8031 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8032 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8033 fold_convert_loc (loc, type,
8034 TREE_OPERAND (arg0, 0)))))
8035 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8036 fold_convert_loc (loc, type,
8037 TREE_OPERAND (arg0, 1)));
8038 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8039 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8040 fold_convert_loc (loc, type,
8041 TREE_OPERAND (arg0, 1)))))
8042 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8043 fold_convert_loc (loc, type,
8044 TREE_OPERAND (arg0, 0)), tem);
8045 /* Perform BIT_NOT_EXPR on each element individually. */
8046 else if (TREE_CODE (arg0) == VECTOR_CST)
8048 tree *elements;
8049 tree elem;
8050 unsigned count = VECTOR_CST_NELTS (arg0), i;
8052 elements = XALLOCAVEC (tree, count);
8053 for (i = 0; i < count; i++)
8055 elem = VECTOR_CST_ELT (arg0, i);
8056 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8057 if (elem == NULL_TREE)
8058 break;
8059 elements[i] = elem;
8061 if (i == count)
8062 return build_vector (type, elements);
8064 else if (COMPARISON_CLASS_P (arg0)
8065 && (VECTOR_TYPE_P (type)
8066 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8068 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8069 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8070 HONOR_NANS (TYPE_MODE (op_type)));
8071 if (subcode != ERROR_MARK)
8072 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8073 TREE_OPERAND (arg0, 1));
8077 return NULL_TREE;
8079 case TRUTH_NOT_EXPR:
8080 /* Note that the operand of this must be an int
8081 and its values must be 0 or 1.
8082 ("true" is a fixed value perhaps depending on the language,
8083 but we don't handle values other than 1 correctly yet.) */
8084 tem = fold_truth_not_expr (loc, arg0);
8085 if (!tem)
8086 return NULL_TREE;
8087 return fold_convert_loc (loc, type, tem);
8089 case REALPART_EXPR:
8090 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8091 return fold_convert_loc (loc, type, arg0);
8092 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8093 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8094 TREE_OPERAND (arg0, 1));
8095 if (TREE_CODE (arg0) == COMPLEX_CST)
8096 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8097 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8099 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8100 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8101 fold_build1_loc (loc, REALPART_EXPR, itype,
8102 TREE_OPERAND (arg0, 0)),
8103 fold_build1_loc (loc, REALPART_EXPR, itype,
8104 TREE_OPERAND (arg0, 1)));
8105 return fold_convert_loc (loc, type, tem);
8107 if (TREE_CODE (arg0) == CONJ_EXPR)
8109 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8110 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8111 TREE_OPERAND (arg0, 0));
8112 return fold_convert_loc (loc, type, tem);
8114 if (TREE_CODE (arg0) == CALL_EXPR)
8116 tree fn = get_callee_fndecl (arg0);
8117 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8118 switch (DECL_FUNCTION_CODE (fn))
8120 CASE_FLT_FN (BUILT_IN_CEXPI):
8121 fn = mathfn_built_in (type, BUILT_IN_COS);
8122 if (fn)
8123 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8124 break;
8126 default:
8127 break;
8130 return NULL_TREE;
8132 case IMAGPART_EXPR:
8133 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8134 return build_zero_cst (type);
8135 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8136 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8137 TREE_OPERAND (arg0, 0));
8138 if (TREE_CODE (arg0) == COMPLEX_CST)
8139 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8140 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8142 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8143 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8144 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8145 TREE_OPERAND (arg0, 0)),
8146 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8147 TREE_OPERAND (arg0, 1)));
8148 return fold_convert_loc (loc, type, tem);
8150 if (TREE_CODE (arg0) == CONJ_EXPR)
8152 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8153 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8154 return fold_convert_loc (loc, type, negate_expr (tem));
8156 if (TREE_CODE (arg0) == CALL_EXPR)
8158 tree fn = get_callee_fndecl (arg0);
8159 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8160 switch (DECL_FUNCTION_CODE (fn))
8162 CASE_FLT_FN (BUILT_IN_CEXPI):
8163 fn = mathfn_built_in (type, BUILT_IN_SIN);
8164 if (fn)
8165 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8166 break;
8168 default:
8169 break;
8172 return NULL_TREE;
8174 case INDIRECT_REF:
8175 /* Fold *&X to X if X is an lvalue. */
8176 if (TREE_CODE (op0) == ADDR_EXPR)
8178 tree op00 = TREE_OPERAND (op0, 0);
8179 if ((TREE_CODE (op00) == VAR_DECL
8180 || TREE_CODE (op00) == PARM_DECL
8181 || TREE_CODE (op00) == RESULT_DECL)
8182 && !TREE_READONLY (op00))
8183 return op00;
8185 return NULL_TREE;
8187 case VEC_UNPACK_LO_EXPR:
8188 case VEC_UNPACK_HI_EXPR:
8189 case VEC_UNPACK_FLOAT_LO_EXPR:
8190 case VEC_UNPACK_FLOAT_HI_EXPR:
8192 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8193 tree *elts;
8194 enum tree_code subcode;
8196 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8197 if (TREE_CODE (arg0) != VECTOR_CST)
8198 return NULL_TREE;
8200 elts = XALLOCAVEC (tree, nelts * 2);
8201 if (!vec_cst_ctor_to_array (arg0, elts))
8202 return NULL_TREE;
8204 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8205 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8206 elts += nelts;
8208 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8209 subcode = NOP_EXPR;
8210 else
8211 subcode = FLOAT_EXPR;
8213 for (i = 0; i < nelts; i++)
8215 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8216 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8217 return NULL_TREE;
8220 return build_vector (type, elts);
8223 case REDUC_MIN_EXPR:
8224 case REDUC_MAX_EXPR:
8225 case REDUC_PLUS_EXPR:
8227 unsigned int nelts, i;
8228 tree *elts;
8229 enum tree_code subcode;
8231 if (TREE_CODE (op0) != VECTOR_CST)
8232 return NULL_TREE;
8233 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8235 elts = XALLOCAVEC (tree, nelts);
8236 if (!vec_cst_ctor_to_array (op0, elts))
8237 return NULL_TREE;
8239 switch (code)
8241 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8242 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8243 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8244 default: gcc_unreachable ();
8247 for (i = 1; i < nelts; i++)
8249 elts[0] = const_binop (subcode, elts[0], elts[i]);
8250 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8251 return NULL_TREE;
8254 return elts[0];
8257 default:
8258 return NULL_TREE;
8259 } /* switch (code) */
8263 /* If the operation was a conversion do _not_ mark a resulting constant
8264 with TREE_OVERFLOW if the original constant was not. These conversions
8265 have implementation-defined behavior and retaining the TREE_OVERFLOW
8266 flag here would confuse later passes such as VRP. */
8267 tree
8268 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8269 tree type, tree op0)
8271 tree res = fold_unary_loc (loc, code, type, op0);
8272 if (res
8273 && TREE_CODE (res) == INTEGER_CST
8274 && TREE_CODE (op0) == INTEGER_CST
8275 && CONVERT_EXPR_CODE_P (code))
8276 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8278 return res;
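/* Expository example (not part of the original source): narrowing the
   INTEGER_CST 200 to signed char yields -56, which fold_convert_const
   may mark with TREE_OVERFLOW even though the conversion is merely
   implementation-defined.  Because the original constant was not
   marked, this wrapper clears the flag so that passes such as VRP do
   not treat the result as an overflowed constant.  */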
8281 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8282 operands OP0 and OP1. LOC is the location of the resulting expression.
8283 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8284 Return the folded expression if folding is successful. Otherwise,
8285 return NULL_TREE. */
8286 static tree
8287 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8288 tree arg0, tree arg1, tree op0, tree op1)
8290 tree tem;
8292 /* We only do these simplifications if we are optimizing. */
8293 if (!optimize)
8294 return NULL_TREE;
8296 /* Check for things like (A || B) && (A || C). We can convert this
8297 to A || (B && C). Note that either operator can be any of the four
8298 truth and/or operations and the transformation will still be
8299 valid. Also note that we only care about order for the
8300 ANDIF and ORIF operators. If B contains side effects, this
8301 might change the truth-value of A. */
8302 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8303 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8304 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8305 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8306 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8307 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8309 tree a00 = TREE_OPERAND (arg0, 0);
8310 tree a01 = TREE_OPERAND (arg0, 1);
8311 tree a10 = TREE_OPERAND (arg1, 0);
8312 tree a11 = TREE_OPERAND (arg1, 1);
8313 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8314 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8315 && (code == TRUTH_AND_EXPR
8316 || code == TRUTH_OR_EXPR));
8318 if (operand_equal_p (a00, a10, 0))
8319 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8320 fold_build2_loc (loc, code, type, a01, a11));
8321 else if (commutative && operand_equal_p (a00, a11, 0))
8322 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8323 fold_build2_loc (loc, code, type, a01, a10));
8324 else if (commutative && operand_equal_p (a01, a10, 0))
8325 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8326 fold_build2_loc (loc, code, type, a00, a11));
8328 /* This case is tricky because we must either have commutative
8329 operators or else A10 must not have side-effects. */
8331 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8332 && operand_equal_p (a01, a11, 0))
8333 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8334 fold_build2_loc (loc, code, type, a00, a10),
8335 a01);
8338 /* See if we can build a range comparison. */
8339 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8340 return tem;
8342 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8343 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8345 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8346 if (tem)
8347 return fold_build2_loc (loc, code, type, tem, arg1);
8350 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8351 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8353 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8354 if (tem)
8355 return fold_build2_loc (loc, code, type, arg0, tem);
8358 /* Check for the possibility of merging component references. If our
8359 lhs is another similar operation, try to merge its rhs with our
8360 rhs. Then try to merge our lhs and rhs. */
8361 if (TREE_CODE (arg0) == code
8362 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8363 TREE_OPERAND (arg0, 1), arg1)))
8364 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8366 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8367 return tem;
8369 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8370 && (code == TRUTH_AND_EXPR
8371 || code == TRUTH_ANDIF_EXPR
8372 || code == TRUTH_OR_EXPR
8373 || code == TRUTH_ORIF_EXPR))
8375 enum tree_code ncode, icode;
8377 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8378 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8379 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8381 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8382 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8383 We don't want to pack more than two leaves into a non-IF AND/OR
8384 expression.
8385 If the tree code of the left-hand operand isn't an AND/OR-IF code
8386 and isn't equal to IF-CODE, then we don't want to add the right-hand
8387 operand. If the inner right-hand side of the left-hand operand has
8388 side-effects, or isn't simple, then we can't add to it, as otherwise
8389 we might destroy the if-sequence. */
8390 if (TREE_CODE (arg0) == icode
8391 && simple_operand_p_2 (arg1)
8392 /* Needed for sequence points to handle trapping operations and
8393 side-effects. */
8394 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8396 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8397 arg1);
8398 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8399 tem);
8401 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8402 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8403 else if (TREE_CODE (arg1) == icode
8404 && simple_operand_p_2 (arg0)
8405 /* Needed for sequence points to handle trapping operations and
8406 side-effects. */
8407 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8409 tem = fold_build2_loc (loc, ncode, type,
8410 arg0, TREE_OPERAND (arg1, 0));
8411 return fold_build2_loc (loc, icode, type, tem,
8412 TREE_OPERAND (arg1, 1));
8414 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8415 into (A OR B).
8416 For sequence point consistency, we need to check for trapping,
8417 and side-effects. */
8418 else if (code == icode && simple_operand_p_2 (arg0)
8419 && simple_operand_p_2 (arg1))
8420 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8423 return NULL_TREE;
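/* Expository example for the LOGICAL_OP_NON_SHORT_CIRCUIT block above
   (not part of the original source): when b and c are simple and free
   of side-effects, (a && b) && c is rewritten as a && (b AND c) with
   a non-short-circuit inner TRUTH_AND_EXPR, leaving only a single
   branch for the outer short-circuit evaluation.  */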
8426 /* Fold a binary expression of code CODE and type TYPE with operands
8427 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8428 Return the folded expression if folding is successful. Otherwise,
8429 return NULL_TREE. */
8431 static tree
8432 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8434 enum tree_code compl_code;
8436 if (code == MIN_EXPR)
8437 compl_code = MAX_EXPR;
8438 else if (code == MAX_EXPR)
8439 compl_code = MIN_EXPR;
8440 else
8441 gcc_unreachable ();
8443 /* MIN (MAX (a, b), b) == b. */
8444 if (TREE_CODE (op0) == compl_code
8445 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8446 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8448 /* MIN (MAX (b, a), b) == b. */
8449 if (TREE_CODE (op0) == compl_code
8450 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8451 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8452 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8454 /* MIN (a, MAX (a, b)) == a. */
8455 if (TREE_CODE (op1) == compl_code
8456 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8457 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8458 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8460 /* MIN (a, MAX (b, a)) == a. */
8461 if (TREE_CODE (op1) == compl_code
8462 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8463 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8464 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8466 return NULL_TREE;
8469 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8470 by changing CODE to reduce the magnitude of constants involved in
8471 ARG0 of the comparison.
8472 Returns a canonicalized comparison tree if a simplification was
8473 possible, otherwise returns NULL_TREE.
8474 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8475 valid if signed overflow is undefined. */
8477 static tree
8478 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8479 tree arg0, tree arg1,
8480 bool *strict_overflow_p)
8482 enum tree_code code0 = TREE_CODE (arg0);
8483 tree t, cst0 = NULL_TREE;
8484 int sgn0;
8485 bool swap = false;
8487 /* Match A +- CST code arg1 and CST code arg1. We can change the
8488 first form only if overflow is undefined. */
8489 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8490 /* In principle pointers also have undefined overflow behavior,
8491 but that causes problems elsewhere. */
8492 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8493 && (code0 == MINUS_EXPR
8494 || code0 == PLUS_EXPR)
8495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8496 || code0 == INTEGER_CST))
8497 return NULL_TREE;
8499 /* Identify the constant in arg0 and its sign. */
8500 if (code0 == INTEGER_CST)
8501 cst0 = arg0;
8502 else
8503 cst0 = TREE_OPERAND (arg0, 1);
8504 sgn0 = tree_int_cst_sgn (cst0);
8506 /* Overflowed constants and zero will cause problems. */
8507 if (integer_zerop (cst0)
8508 || TREE_OVERFLOW (cst0))
8509 return NULL_TREE;
8511 /* See if we can reduce the magnitude of the constant in
8512 arg0 by changing the comparison code. */
8513 if (code0 == INTEGER_CST)
8515 /* CST <= arg1 -> CST-1 < arg1. */
8516 if (code == LE_EXPR && sgn0 == 1)
8517 code = LT_EXPR;
8518 /* -CST < arg1 -> -CST-1 <= arg1. */
8519 else if (code == LT_EXPR && sgn0 == -1)
8520 code = LE_EXPR;
8521 /* CST > arg1 -> CST-1 >= arg1. */
8522 else if (code == GT_EXPR && sgn0 == 1)
8523 code = GE_EXPR;
8524 /* -CST >= arg1 -> -CST-1 > arg1. */
8525 else if (code == GE_EXPR && sgn0 == -1)
8526 code = GT_EXPR;
8527 else
8528 return NULL_TREE;
8529 /* arg1 code' CST' might be more canonical. */
8530 swap = true;
8532 else
8534 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8535 if (code == LT_EXPR
8536 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8537 code = LE_EXPR;
8538 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8539 else if (code == GT_EXPR
8540 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8541 code = GE_EXPR;
8542 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8543 else if (code == LE_EXPR
8544 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8545 code = LT_EXPR;
8546 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8547 else if (code == GE_EXPR
8548 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8549 code = GT_EXPR;
8550 else
8551 return NULL_TREE;
8552 *strict_overflow_p = true;
8555 /* Now build the constant reduced in magnitude. But not if that
8556 would produce one outside of its type's range. */
8557 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8558 && ((sgn0 == 1
8559 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8560 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8561 || (sgn0 == -1
8562 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8563 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8564 /* We cannot swap the comparison here as that would cause us to
8565 endlessly recurse. */
8566 return NULL_TREE;
8568 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8569 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8570 if (code0 != INTEGER_CST)
8571 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8572 t = fold_convert (TREE_TYPE (arg1), t);
8574 /* If swapping might yield a more canonical form, do so. */
8575 if (swap)
8576 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8577 else
8578 return fold_build2_loc (loc, code, type, t, arg1);
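/* Worked example (expository, not part of the original source): for a
   signed int X, the comparison X + 2 > 3 matches the
   "A + CST > arg1" rule above, so the code becomes GE_EXPR and the
   constant is reduced, yielding X + 1 >= 3.  This is only valid when
   signed overflow is undefined, which is why *STRICT_OVERFLOW_P is
   set for this form.  */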
8581 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8582 overflow further. Try to decrease the magnitude of constants involved
8583 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8584 and put sole constants at the second argument position.
8585 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8587 static tree
8588 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8589 tree arg0, tree arg1)
8591 tree t;
8592 bool strict_overflow_p;
8593 const char * const warnmsg = G_("assuming signed overflow does not occur "
8594 "when reducing constant in comparison");
8596 /* Try canonicalization by simplifying arg0. */
8597 strict_overflow_p = false;
8598 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8599 &strict_overflow_p);
8600 if (t)
8602 if (strict_overflow_p)
8603 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8604 return t;
8607 /* Try canonicalization by simplifying arg1 using the swapped
8608 comparison. */
8609 code = swap_tree_comparison (code);
8610 strict_overflow_p = false;
8611 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8612 &strict_overflow_p);
8613 if (t && strict_overflow_p)
8614 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8615 return t;
8618 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8619 space. This is used to avoid issuing overflow warnings for
8620 expressions like &p->x which cannot wrap. */
8622 static bool
8623 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8625 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8626 return true;
8628 if (bitpos < 0)
8629 return true;
8631 wide_int wi_offset;
8632 int precision = TYPE_PRECISION (TREE_TYPE (base));
8633 if (offset == NULL_TREE)
8634 wi_offset = wi::zero (precision);
8635 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8636 return true;
8637 else
8638 wi_offset = offset;
8640 bool overflow;
8641 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8642 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8643 if (overflow)
8644 return true;
8646 if (!wi::fits_uhwi_p (total))
8647 return true;
8649 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8650 if (size <= 0)
8651 return true;
8653 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8654 array. */
8655 if (TREE_CODE (base) == ADDR_EXPR)
8657 HOST_WIDE_INT base_size;
8659 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8660 if (base_size > 0 && size < base_size)
8661 size = base_size;
8664 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
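/* Expository example (not part of the original source): for the
   &p->x case mentioned above, BASE is the pointer P, BITPOS is the
   byte offset of the field X scaled to bits, and OFFSET is NULL.
   Assuming the field lives 4 bytes into a 16-byte structure, the
   total byte offset is 4 and SIZE is 16, so the function returns
   false and no spurious wraparound warning is issued.  */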
8667 /* Return the HOST_WIDE_INT least significant bits of T, an
8668 INTEGER_CST of sizetype kind. This makes sure to properly sign-extend the
8669 constant. */
8671 static HOST_WIDE_INT
8672 size_low_cst (const_tree t)
8674 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8675 int prec = TYPE_PRECISION (TREE_TYPE (t));
8676 if (prec < HOST_BITS_PER_WIDE_INT)
8677 return sext_hwi (w, prec);
8678 return w;
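/* Expository example (not part of the original source): if T has a
   16-bit sizetype-kind type and its low element holds the bit pattern
   0xffff, the value is sign-extended from bit 15 and size_low_cst
   returns -1 rather than 65535.  */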
8681 /* Subroutine of fold_binary. This routine performs all of the
8682 transformations that are common to the equality/inequality
8683 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8684 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8685 fold_binary should call fold_binary. Fold a comparison with
8686 tree code CODE and type TYPE with operands OP0 and OP1. Return
8687 the folded comparison or NULL_TREE. */
8689 static tree
8690 fold_comparison (location_t loc, enum tree_code code, tree type,
8691 tree op0, tree op1)
8693 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8694 tree arg0, arg1, tem;
8696 arg0 = op0;
8697 arg1 = op1;
8699 STRIP_SIGN_NOPS (arg0);
8700 STRIP_SIGN_NOPS (arg1);
8702 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8703 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8704 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8705 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8706 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8707 && TREE_CODE (arg1) == INTEGER_CST
8708 && !TREE_OVERFLOW (arg1))
8710 const enum tree_code
8711 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8712 tree const1 = TREE_OPERAND (arg0, 1);
8713 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8714 tree variable = TREE_OPERAND (arg0, 0);
8715 tree new_const = int_const_binop (reverse_op, const2, const1);
8717 /* If the constant operation overflowed this can be
8718 simplified as a comparison against INT_MAX/INT_MIN. */
8719 if (TREE_OVERFLOW (new_const)
8720 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8722 int const1_sgn = tree_int_cst_sgn (const1);
8723 enum tree_code code2 = code;
8725 /* Get the sign of the constant on the lhs if the
8726 operation were VARIABLE + CONST1. */
8727 if (TREE_CODE (arg0) == MINUS_EXPR)
8728 const1_sgn = -const1_sgn;
8730 /* The sign of the constant determines if we overflowed
8731 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8732 Canonicalize to the INT_MIN overflow by swapping the comparison
8733 if necessary. */
8734 if (const1_sgn == -1)
8735 code2 = swap_tree_comparison (code);
8737 /* We now can look at the canonicalized case
8738 VARIABLE + 1 CODE2 INT_MIN
8739 and decide on the result. */
8740 switch (code2)
8742 case EQ_EXPR:
8743 case LT_EXPR:
8744 case LE_EXPR:
8745 return
8746 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8748 case NE_EXPR:
8749 case GE_EXPR:
8750 case GT_EXPR:
8751 return
8752 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8754 default:
8755 gcc_unreachable ();
8758 else
8760 if (!equality_code)
8761 fold_overflow_warning ("assuming signed overflow does not occur "
8762 "when changing X +- C1 cmp C2 to "
8763 "X cmp C2 -+ C1",
8764 WARN_STRICT_OVERFLOW_COMPARISON);
8765 return fold_build2_loc (loc, code, type, variable, new_const);
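/* Worked examples for the transformation above (expository, not part
   of the original source): for a signed int X, X + 1 < 5 becomes
   X < 4.  If the adjusted constant overflows, the comparison folds to
   a constant instead: for X - 1 > INT_MAX the new constant INT_MAX + 1
   overflows, the canonicalized case decides the result is false, and
   omit_one_operand_loc keeps X only for its side-effects.  */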
8769 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8770 if (TREE_CODE (arg0) == MINUS_EXPR
8771 && equality_code
8772 && integer_zerop (arg1))
8774 /* ??? The transformation is valid for the other operators if overflow
8775 is undefined for the type, but performing it here badly interacts
8776 with the transformation in fold_cond_expr_with_comparison which
8777 attempts to synthesize ABS_EXPR. */
8778 if (!equality_code)
8779 fold_overflow_warning ("assuming signed overflow does not occur "
8780 "when changing X - Y cmp 0 to X cmp Y",
8781 WARN_STRICT_OVERFLOW_COMPARISON);
8782 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8783 TREE_OPERAND (arg0, 1));
8786 /* For comparisons of pointers we can decompose it to a compile time
8787 comparison of the base objects and the offsets into the object.
8788 This requires at least one operand being an ADDR_EXPR or a
8789 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8790 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8791 && (TREE_CODE (arg0) == ADDR_EXPR
8792 || TREE_CODE (arg1) == ADDR_EXPR
8793 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8794 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8796 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8797 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8798 machine_mode mode;
8799 int volatilep, unsignedp;
8800 bool indirect_base0 = false, indirect_base1 = false;
8802 /* Get base and offset for the access. Strip ADDR_EXPR for
8803 get_inner_reference, but put it back by stripping INDIRECT_REF
8804 off the base object if possible. indirect_baseN will be true
8805 if baseN is not an address but refers to the object itself. */
8806 base0 = arg0;
8807 if (TREE_CODE (arg0) == ADDR_EXPR)
8809 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8810 &bitsize, &bitpos0, &offset0, &mode,
8811 &unsignedp, &volatilep, false);
8812 if (TREE_CODE (base0) == INDIRECT_REF)
8813 base0 = TREE_OPERAND (base0, 0);
8814 else
8815 indirect_base0 = true;
8817 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8819 base0 = TREE_OPERAND (arg0, 0);
8820 STRIP_SIGN_NOPS (base0);
8821 if (TREE_CODE (base0) == ADDR_EXPR)
8823 base0 = TREE_OPERAND (base0, 0);
8824 indirect_base0 = true;
8826 offset0 = TREE_OPERAND (arg0, 1);
8827 if (tree_fits_shwi_p (offset0))
8829 HOST_WIDE_INT off = size_low_cst (offset0);
8830 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8831 * BITS_PER_UNIT)
8832 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8834 bitpos0 = off * BITS_PER_UNIT;
8835 offset0 = NULL_TREE;
8840 base1 = arg1;
8841 if (TREE_CODE (arg1) == ADDR_EXPR)
8843 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8844 &bitsize, &bitpos1, &offset1, &mode,
8845 &unsignedp, &volatilep, false);
8846 if (TREE_CODE (base1) == INDIRECT_REF)
8847 base1 = TREE_OPERAND (base1, 0);
8848 else
8849 indirect_base1 = true;
8851 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8853 base1 = TREE_OPERAND (arg1, 0);
8854 STRIP_SIGN_NOPS (base1);
8855 if (TREE_CODE (base1) == ADDR_EXPR)
8857 base1 = TREE_OPERAND (base1, 0);
8858 indirect_base1 = true;
8860 offset1 = TREE_OPERAND (arg1, 1);
8861 if (tree_fits_shwi_p (offset1))
8863 HOST_WIDE_INT off = size_low_cst (offset1);
8864 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8865 * BITS_PER_UNIT)
8866 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8868 bitpos1 = off * BITS_PER_UNIT;
8869 offset1 = NULL_TREE;
8874 /* A local variable can never be pointed to by
8875 the default SSA name of an incoming parameter. */
8876 if ((TREE_CODE (arg0) == ADDR_EXPR
8877 && indirect_base0
8878 && TREE_CODE (base0) == VAR_DECL
8879 && auto_var_in_fn_p (base0, current_function_decl)
8880 && !indirect_base1
8881 && TREE_CODE (base1) == SSA_NAME
8882 && SSA_NAME_IS_DEFAULT_DEF (base1)
8883 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8884 || (TREE_CODE (arg1) == ADDR_EXPR
8885 && indirect_base1
8886 && TREE_CODE (base1) == VAR_DECL
8887 && auto_var_in_fn_p (base1, current_function_decl)
8888 && !indirect_base0
8889 && TREE_CODE (base0) == SSA_NAME
8890 && SSA_NAME_IS_DEFAULT_DEF (base0)
8891 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8893 if (code == NE_EXPR)
8894 return constant_boolean_node (1, type);
8895 else if (code == EQ_EXPR)
8896 return constant_boolean_node (0, type);
8898 /* If we have equivalent bases we might be able to simplify. */
8899 else if (indirect_base0 == indirect_base1
8900 && operand_equal_p (base0, base1, 0))
8902 /* We can fold this expression to a constant if the non-constant
8903 offset parts are equal. */
8904 if ((offset0 == offset1
8905 || (offset0 && offset1
8906 && operand_equal_p (offset0, offset1, 0)))
8907 && (code == EQ_EXPR
8908 || code == NE_EXPR
8909 || (indirect_base0 && DECL_P (base0))
8910 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8913 if (!equality_code
8914 && bitpos0 != bitpos1
8915 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8916 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8917 fold_overflow_warning (("assuming pointer wraparound does not "
8918 "occur when comparing P +- C1 with "
8919 "P +- C2"),
8920 WARN_STRICT_OVERFLOW_CONDITIONAL);
8922 switch (code)
8924 case EQ_EXPR:
8925 return constant_boolean_node (bitpos0 == bitpos1, type);
8926 case NE_EXPR:
8927 return constant_boolean_node (bitpos0 != bitpos1, type);
8928 case LT_EXPR:
8929 return constant_boolean_node (bitpos0 < bitpos1, type);
8930 case LE_EXPR:
8931 return constant_boolean_node (bitpos0 <= bitpos1, type);
8932 case GE_EXPR:
8933 return constant_boolean_node (bitpos0 >= bitpos1, type);
8934 case GT_EXPR:
8935 return constant_boolean_node (bitpos0 > bitpos1, type);
8936 default:;
8939 /* We can simplify the comparison to a comparison of the variable
8940 offset parts if the constant offset parts are equal.
8941 Be careful to use signed sizetype here because otherwise we
8942 mess with array offsets in the wrong way. This is possible
8943 because pointer arithmetic is restricted to remain within an
8944 object and overflow on pointer differences is undefined as of
8945 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8946 else if (bitpos0 == bitpos1
8947 && (equality_code
8948 || (indirect_base0 && DECL_P (base0))
8949 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8951 /* By converting to signed sizetype we cover middle-end pointer
8952 arithmetic which operates on unsigned pointer types of size
8953 type size and ARRAY_REF offsets which are properly sign or
8954 zero extended from their type in case it is narrower than
8955 sizetype. */
8956 if (offset0 == NULL_TREE)
8957 offset0 = build_int_cst (ssizetype, 0);
8958 else
8959 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8960 if (offset1 == NULL_TREE)
8961 offset1 = build_int_cst (ssizetype, 0);
8962 else
8963 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8965 if (!equality_code
8966 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8967 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8968 fold_overflow_warning (("assuming pointer wraparound does not "
8969 "occur when comparing P +- C1 with "
8970 "P +- C2"),
8971 WARN_STRICT_OVERFLOW_COMPARISON);
8973 return fold_build2_loc (loc, code, type, offset0, offset1);
8976 /* For non-equal bases we can simplify if they are addresses
8977 of local binding decls or constants. */
8978 else if (indirect_base0 && indirect_base1
8979 /* We know that !operand_equal_p (base0, base1, 0)
8980 because the if condition was false. But make
8981 sure two decls are not the same. */
8982 && base0 != base1
8983 && TREE_CODE (arg0) == ADDR_EXPR
8984 && TREE_CODE (arg1) == ADDR_EXPR
8985 && (((TREE_CODE (base0) == VAR_DECL
8986 || TREE_CODE (base0) == PARM_DECL)
8987 && (targetm.binds_local_p (base0)
8988 || CONSTANT_CLASS_P (base1)))
8989 || CONSTANT_CLASS_P (base0))
8990 && (((TREE_CODE (base1) == VAR_DECL
8991 || TREE_CODE (base1) == PARM_DECL)
8992 && (targetm.binds_local_p (base1)
8993 || CONSTANT_CLASS_P (base0)))
8994 || CONSTANT_CLASS_P (base1)))
8996 if (code == EQ_EXPR)
8997 return omit_two_operands_loc (loc, type, boolean_false_node,
8998 arg0, arg1);
8999 else if (code == NE_EXPR)
9000 return omit_two_operands_loc (loc, type, boolean_true_node,
9001 arg0, arg1);
9003 /* For equal offsets we can simplify to a comparison of the
9004 base addresses. */
9005 else if (bitpos0 == bitpos1
9006 && (indirect_base0
9007 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9008 && (indirect_base1
9009 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9010 && ((offset0 == offset1)
9011 || (offset0 && offset1
9012 && operand_equal_p (offset0, offset1, 0))))
9014 if (indirect_base0)
9015 base0 = build_fold_addr_expr_loc (loc, base0);
9016 if (indirect_base1)
9017 base1 = build_fold_addr_expr_loc (loc, base1);
9018 return fold_build2_loc (loc, code, type, base0, base1);
9022 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9023 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9024 the resulting offset is smaller in absolute value than the
9025 original one and has the same sign. */
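/* A concrete instance, assuming signed ints x and y with undefined
   overflow: "x + 7 < y + 5" becomes "x + 2 < y", keeping the combined
   constant on the side where it is smaller in magnitude (|2| < |7|)
   and of the same sign as before. */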
9026 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9027 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9028 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9029 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9030 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9031 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9032 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9034 tree const1 = TREE_OPERAND (arg0, 1);
9035 tree const2 = TREE_OPERAND (arg1, 1);
9036 tree variable1 = TREE_OPERAND (arg0, 0);
9037 tree variable2 = TREE_OPERAND (arg1, 0);
9038 tree cst;
9039 const char * const warnmsg = G_("assuming signed overflow does not "
9040 "occur when combining constants around "
9041 "a comparison");
9043 /* Put the constant on the side where it doesn't overflow and is
9044 of smaller absolute value and of the same sign as before. */
9045 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9046 ? MINUS_EXPR : PLUS_EXPR,
9047 const2, const1);
9048 if (!TREE_OVERFLOW (cst)
9049 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9050 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9052 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9053 return fold_build2_loc (loc, code, type,
9054 variable1,
9055 fold_build2_loc (loc, TREE_CODE (arg1),
9056 TREE_TYPE (arg1),
9057 variable2, cst));
9060 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9061 ? MINUS_EXPR : PLUS_EXPR,
9062 const1, const2);
9063 if (!TREE_OVERFLOW (cst)
9064 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9065 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9067 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9068 return fold_build2_loc (loc, code, type,
9069 fold_build2_loc (loc, TREE_CODE (arg0),
9070 TREE_TYPE (arg0),
9071 variable1, cst),
9072 variable2);
9076 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9077 signed arithmetic case. That form is created by the compiler
9078 often enough for folding it to be of value. One example is in
9079 computing loop trip counts after Operator Strength Reduction. */
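/* For example, with signed x: "x * 4 > 0" folds to "x > 0", while
   "x * -4 > 0" folds to "x < 0" because a negative multiplier swaps
   the comparison sense. Both rely on signed overflow being
   undefined. */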
9080 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9081 && TREE_CODE (arg0) == MULT_EXPR
9082 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9083 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9084 && integer_zerop (arg1))
9086 tree const1 = TREE_OPERAND (arg0, 1);
9087 tree const2 = arg1; /* zero */
9088 tree variable1 = TREE_OPERAND (arg0, 0);
9089 enum tree_code cmp_code = code;
9091 /* Handle unfolded multiplication by zero. */
9092 if (integer_zerop (const1))
9093 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9095 fold_overflow_warning (("assuming signed overflow does not occur when "
9096 "eliminating multiplication in comparison "
9097 "with zero"),
9098 WARN_STRICT_OVERFLOW_COMPARISON);
9100 /* If const1 is negative we swap the sense of the comparison. */
9101 if (tree_int_cst_sgn (const1) < 0)
9102 cmp_code = swap_tree_comparison (cmp_code);
9104 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9107 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9108 if (tem)
9109 return tem;
9111 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9113 tree targ0 = strip_float_extensions (arg0);
9114 tree targ1 = strip_float_extensions (arg1);
9115 tree newtype = TREE_TYPE (targ0);
9117 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9118 newtype = TREE_TYPE (targ1);
9120 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9121 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9122 return fold_build2_loc (loc, code, type,
9123 fold_convert_loc (loc, newtype, targ0),
9124 fold_convert_loc (loc, newtype, targ1));
9126 /* (-a) CMP (-b) -> b CMP a */
9127 if (TREE_CODE (arg0) == NEGATE_EXPR
9128 && TREE_CODE (arg1) == NEGATE_EXPR)
9129 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9130 TREE_OPERAND (arg0, 0));
9132 if (TREE_CODE (arg1) == REAL_CST)
9134 REAL_VALUE_TYPE cst;
9135 cst = TREE_REAL_CST (arg1);
9137 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9138 if (TREE_CODE (arg0) == NEGATE_EXPR)
9139 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9140 TREE_OPERAND (arg0, 0),
9141 build_real (TREE_TYPE (arg1),
9142 real_value_negate (&cst)));
9144 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9145 /* a CMP (-0) -> a CMP 0 */
9146 if (REAL_VALUE_MINUS_ZERO (cst))
9147 return fold_build2_loc (loc, code, type, arg0,
9148 build_real (TREE_TYPE (arg1), dconst0));
9150 /* x != NaN is always true, other ops are always false. */
9151 if (REAL_VALUE_ISNAN (cst)
9152 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9154 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9155 return omit_one_operand_loc (loc, type, tem, arg0);
9158 /* Fold comparisons against infinity. */
9159 if (REAL_VALUE_ISINF (cst)
9160 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9162 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9163 if (tem != NULL_TREE)
9164 return tem;
9168 /* If this is a comparison of a real constant with a PLUS_EXPR
9169 or a MINUS_EXPR of a real constant, we can convert it into a
9170 comparison with a revised real constant as long as no overflow
9171 occurs when unsafe_math_optimizations are enabled. */
9172 if (flag_unsafe_math_optimizations
9173 && TREE_CODE (arg1) == REAL_CST
9174 && (TREE_CODE (arg0) == PLUS_EXPR
9175 || TREE_CODE (arg0) == MINUS_EXPR)
9176 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9177 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9178 ? MINUS_EXPR : PLUS_EXPR,
9179 arg1, TREE_OPERAND (arg0, 1)))
9180 && !TREE_OVERFLOW (tem))
9181 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9183 /* Likewise, we can simplify a comparison of a real constant with
9184 a MINUS_EXPR whose first operand is also a real constant, i.e.
9185 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9186 floating-point types only if -fassociative-math is set. */
9187 if (flag_associative_math
9188 && TREE_CODE (arg1) == REAL_CST
9189 && TREE_CODE (arg0) == MINUS_EXPR
9190 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9191 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9192 arg1))
9193 && !TREE_OVERFLOW (tem))
9194 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9195 TREE_OPERAND (arg0, 1), tem);
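/* For example, with -fassociative-math, "10.0 - x < 4.0" becomes
   "x > 6.0". */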
9197 /* Fold comparisons against built-in math functions. */
9198 if (TREE_CODE (arg1) == REAL_CST
9199 && flag_unsafe_math_optimizations
9200 && ! flag_errno_math)
9202 enum built_in_function fcode = builtin_mathfn_code (arg0);
9204 if (fcode != END_BUILTINS)
9206 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9207 if (tem != NULL_TREE)
9208 return tem;
9213 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9214 && CONVERT_EXPR_P (arg0))
9216 /* If we are widening one operand of an integer comparison,
9217 see if the other operand is similarly being widened. Perhaps we
9218 can do the comparison in the narrower type. */
9219 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9220 if (tem)
9221 return tem;
9223 /* Or if we are changing signedness. */
9224 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9225 if (tem)
9226 return tem;
9229 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9230 constant, we can simplify it. */
9231 if (TREE_CODE (arg1) == INTEGER_CST
9232 && (TREE_CODE (arg0) == MIN_EXPR
9233 || TREE_CODE (arg0) == MAX_EXPR)
9234 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9236 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9237 if (tem)
9238 return tem;
9241 /* Simplify comparison of something with itself. (For IEEE
9242 floating-point, we can only do some of these simplifications.) */
9243 if (operand_equal_p (arg0, arg1, 0))
9245 switch (code)
9247 case EQ_EXPR:
9248 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9249 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9250 return constant_boolean_node (1, type);
9251 break;
9253 case GE_EXPR:
9254 case LE_EXPR:
9255 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9256 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9257 return constant_boolean_node (1, type);
9258 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9260 case NE_EXPR:
9261 /* For NE, we can only do this simplification if the type is
9262 integer or we don't honor IEEE floating-point NaNs. */
9263 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9264 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9265 break;
9266 /* ... fall through ... */
9267 case GT_EXPR:
9268 case LT_EXPR:
9269 return constant_boolean_node (0, type);
9270 default:
9271 gcc_unreachable ();
9275 /* If we are comparing an expression that just has comparisons
9276 of two integer values, arithmetic expressions of those comparisons,
9277 and constants, we can simplify it. There are only three cases
9278 to check: the two values can either be equal, the first can be
9279 greater, or the second can be greater. Fold the expression for
9280 those three values. Since each value must be 0 or 1, we have
9281 eight possibilities, each of which corresponds to the constant 0
9282 or 1 or one of the six possible comparisons.
9284 This handles common cases like (a > b) == 0 but also handles
9285 expressions like ((x > y) - (y > x)) > 0, which supposedly
9286 occur in macroized code. */
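/* For example, in "((x > y) - (y > x)) > 0" the three orderings of x
   and y evaluate to 1, 0 and 0, giving mask 4 below, so the whole
   expression folds to "x > y". */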
9288 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9290 tree cval1 = 0, cval2 = 0;
9291 int save_p = 0;
9293 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9294 /* Don't handle degenerate cases here; they should already
9295 have been handled anyway. */
9296 && cval1 != 0 && cval2 != 0
9297 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9298 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9299 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9300 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9301 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9302 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9303 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9305 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9306 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9308 /* We can't just pass T to eval_subst in case cval1 or cval2
9309 was the same as ARG1. */
9311 tree high_result
9312 = fold_build2_loc (loc, code, type,
9313 eval_subst (loc, arg0, cval1, maxval,
9314 cval2, minval),
9315 arg1);
9316 tree equal_result
9317 = fold_build2_loc (loc, code, type,
9318 eval_subst (loc, arg0, cval1, maxval,
9319 cval2, maxval),
9320 arg1);
9321 tree low_result
9322 = fold_build2_loc (loc, code, type,
9323 eval_subst (loc, arg0, cval1, minval,
9324 cval2, maxval),
9325 arg1);
9327 /* All three of these results should be 0 or 1. Confirm they are.
9328 Then use those values to select the proper code to use. */
9330 if (TREE_CODE (high_result) == INTEGER_CST
9331 && TREE_CODE (equal_result) == INTEGER_CST
9332 && TREE_CODE (low_result) == INTEGER_CST)
9334 /* Make a 3-bit mask with the high-order bit being the
9335 value for `>', the next for `=', and the low for `<'. */
9336 switch ((integer_onep (high_result) * 4)
9337 + (integer_onep (equal_result) * 2)
9338 + integer_onep (low_result))
9340 case 0:
9341 /* Always false. */
9342 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9343 case 1:
9344 code = LT_EXPR;
9345 break;
9346 case 2:
9347 code = EQ_EXPR;
9348 break;
9349 case 3:
9350 code = LE_EXPR;
9351 break;
9352 case 4:
9353 code = GT_EXPR;
9354 break;
9355 case 5:
9356 code = NE_EXPR;
9357 break;
9358 case 6:
9359 code = GE_EXPR;
9360 break;
9361 case 7:
9362 /* Always true. */
9363 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9366 if (save_p)
9368 tem = save_expr (build2 (code, type, cval1, cval2));
9369 SET_EXPR_LOCATION (tem, loc);
9370 return tem;
9372 return fold_build2_loc (loc, code, type, cval1, cval2);
9377 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9378 into a single range test. */
9379 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9380 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9381 && TREE_CODE (arg1) == INTEGER_CST
9382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9383 && !integer_zerop (TREE_OPERAND (arg0, 1))
9384 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9385 && !TREE_OVERFLOW (arg1))
9387 tem = fold_div_compare (loc, code, type, arg0, arg1);
9388 if (tem != NULL_TREE)
9389 return tem;
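/* For example, "x / 4 == 3" with truncating division holds exactly
   for 12 <= x <= 15, so fold_div_compare can produce that range
   test. */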
9392 /* Fold ~X op ~Y as Y op X. */
9393 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9394 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9396 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9397 return fold_build2_loc (loc, code, type,
9398 fold_convert_loc (loc, cmp_type,
9399 TREE_OPERAND (arg1, 0)),
9400 TREE_OPERAND (arg0, 0));
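/* E.g. "~x < ~y" becomes "y < x", since bitwise NOT reverses the
   order of the operands. */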
9403 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9404 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9405 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9407 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9408 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9409 TREE_OPERAND (arg0, 0),
9410 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9411 fold_convert_loc (loc, cmp_type, arg1)));
9414 return NULL_TREE;
9418 /* Subroutine of fold_binary. Optimize complex multiplications of the
9419 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9420 argument EXPR represents the expression "z" of type TYPE. */
9422 static tree
9423 fold_mult_zconjz (location_t loc, tree type, tree expr)
9425 tree itype = TREE_TYPE (type);
9426 tree rpart, ipart, tem;
9428 if (TREE_CODE (expr) == COMPLEX_EXPR)
9430 rpart = TREE_OPERAND (expr, 0);
9431 ipart = TREE_OPERAND (expr, 1);
9433 else if (TREE_CODE (expr) == COMPLEX_CST)
9435 rpart = TREE_REALPART (expr);
9436 ipart = TREE_IMAGPART (expr);
9438 else
9440 expr = save_expr (expr);
9441 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9442 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9445 rpart = save_expr (rpart);
9446 ipart = save_expr (ipart);
9447 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9448 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9449 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9450 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9451 build_zero_cst (itype));
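/* A sketch of the identity this implements, assuming C99 <complex.h>
   semantics: for "double complex z = 3.0 + 4.0 * I;", z * conj (z)
   equals creal (z) * creal (z) + cimag (z) * cimag (z) + 0.0 * I,
   here 25.0 + 0.0 * I, which is exactly the COMPLEX_EXPR built
   above. */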
9455 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9456 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9457 guarantees that P and N have the same least significant log2(M) bits.
9458 N is not otherwise constrained. In particular, N is not normalized to
9459 0 <= N < M as is common. In general, the precise value of P is unknown.
9460 M is chosen as large as possible such that constant N can be determined.
9462 Returns M and sets *RESIDUE to N.
9464 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9465 account. This is not always possible due to PR 35705.
9468 static unsigned HOST_WIDE_INT
9469 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9470 bool allow_func_align)
9472 enum tree_code code;
9474 *residue = 0;
9476 code = TREE_CODE (expr);
9477 if (code == ADDR_EXPR)
9479 unsigned int bitalign;
9480 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9481 *residue /= BITS_PER_UNIT;
9482 return bitalign / BITS_PER_UNIT;
9484 else if (code == POINTER_PLUS_EXPR)
9486 tree op0, op1;
9487 unsigned HOST_WIDE_INT modulus;
9488 enum tree_code inner_code;
9490 op0 = TREE_OPERAND (expr, 0);
9491 STRIP_NOPS (op0);
9492 modulus = get_pointer_modulus_and_residue (op0, residue,
9493 allow_func_align);
9495 op1 = TREE_OPERAND (expr, 1);
9496 STRIP_NOPS (op1);
9497 inner_code = TREE_CODE (op1);
9498 if (inner_code == INTEGER_CST)
9500 *residue += TREE_INT_CST_LOW (op1);
9501 return modulus;
9503 else if (inner_code == MULT_EXPR)
9505 op1 = TREE_OPERAND (op1, 1);
9506 if (TREE_CODE (op1) == INTEGER_CST)
9508 unsigned HOST_WIDE_INT align;
9510 /* Compute the greatest power-of-2 divisor of op1. */
9511 align = TREE_INT_CST_LOW (op1);
9512 align &= -align;
9514 /* If align is non-zero and less than modulus, replace
9515 modulus with align. If align is 0, then either op1 is 0
9516 or the greatest power-of-2 divisor of op1 doesn't fit in an
9517 unsigned HOST_WIDE_INT. In either case, no additional
9518 constraint is imposed. */
9519 if (align)
9520 modulus = MIN (modulus, align);
9522 return modulus;
9527 /* If we get here, we were unable to determine anything useful about the
9528 expression. */
9529 return 1;
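/* For example, for &buf[5] where buf has known 16-byte alignment,
   this returns modulus 16 with *residue set to 5: the pointer value
   is congruent to 5 modulo 16. A return value of 1 constrains
   nothing. */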
9532 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9533 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9535 static bool
9536 vec_cst_ctor_to_array (tree arg, tree *elts)
9538 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9540 if (TREE_CODE (arg) == VECTOR_CST)
9542 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9543 elts[i] = VECTOR_CST_ELT (arg, i);
9545 else if (TREE_CODE (arg) == CONSTRUCTOR)
9547 constructor_elt *elt;
9549 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9550 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9551 return false;
9552 else
9553 elts[i] = elt->value;
9555 else
9556 return false;
9557 for (; i < nelts; i++)
9558 elts[i]
9559 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9560 return true;
9563 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9564 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9565 NULL_TREE otherwise. */
9567 static tree
9568 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9570 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9571 tree *elts;
9572 bool need_ctor = false;
9574 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9575 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9576 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9577 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9578 return NULL_TREE;
9580 elts = XALLOCAVEC (tree, nelts * 3);
9581 if (!vec_cst_ctor_to_array (arg0, elts)
9582 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9583 return NULL_TREE;
9585 for (i = 0; i < nelts; i++)
9587 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9588 need_ctor = true;
9589 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9592 if (need_ctor)
9594 vec<constructor_elt, va_gc> *v;
9595 vec_alloc (v, nelts);
9596 for (i = 0; i < nelts; i++)
9597 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9598 return build_constructor (type, v);
9600 else
9601 return build_vector (type, &elts[2 * nelts]);
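/* For example, with four-element vectors, sel = {0, 4, 1, 5}
   interleaves the low halves of the two inputs, yielding
   {a0, b0, a1, b1}: indices below nelts select from arg0, the rest
   from arg1. */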
9604 /* Try to fold a pointer difference of type TYPE between two address
9605 expressions of array references AREF0 and AREF1 using location LOC.
9606 Return a simplified expression for the difference or NULL_TREE. */
9608 static tree
9609 fold_addr_of_array_ref_difference (location_t loc, tree type,
9610 tree aref0, tree aref1)
9612 tree base0 = TREE_OPERAND (aref0, 0);
9613 tree base1 = TREE_OPERAND (aref1, 0);
9614 tree base_offset = build_int_cst (type, 0);
9616 /* If the bases are array references as well, recurse. If the bases
9617 are pointer indirections compute the difference of the pointers.
9618 If the bases are equal, we are set. */
9619 if ((TREE_CODE (base0) == ARRAY_REF
9620 && TREE_CODE (base1) == ARRAY_REF
9621 && (base_offset
9622 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9623 || (INDIRECT_REF_P (base0)
9624 && INDIRECT_REF_P (base1)
9625 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9626 TREE_OPERAND (base0, 0),
9627 TREE_OPERAND (base1, 0))))
9628 || operand_equal_p (base0, base1, 0))
9630 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9631 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9632 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9633 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9634 return fold_build2_loc (loc, PLUS_EXPR, type,
9635 base_offset,
9636 fold_build2_loc (loc, MULT_EXPR, type,
9637 diff, esz));
9639 return NULL_TREE;
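/* For example, "&a[i] - &a[j]" for an int array folds to the byte
   difference "(i - j) * 4"; nested references such as
   "&a[i][k] - &a[j][l]" recurse through the outer ARRAY_REFs
   first. */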
9642 /* If the real or vector real constant CST of type TYPE has an exact
9643 inverse, return it, else return NULL. */
9645 static tree
9646 exact_inverse (tree type, tree cst)
9648 REAL_VALUE_TYPE r;
9649 tree unit_type, *elts;
9650 machine_mode mode;
9651 unsigned vec_nelts, i;
9653 switch (TREE_CODE (cst))
9655 case REAL_CST:
9656 r = TREE_REAL_CST (cst);
9658 if (exact_real_inverse (TYPE_MODE (type), &r))
9659 return build_real (type, r);
9661 return NULL_TREE;
9663 case VECTOR_CST:
9664 vec_nelts = VECTOR_CST_NELTS (cst);
9665 elts = XALLOCAVEC (tree, vec_nelts);
9666 unit_type = TREE_TYPE (type);
9667 mode = TYPE_MODE (unit_type);
9669 for (i = 0; i < vec_nelts; i++)
9671 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9672 if (!exact_real_inverse (mode, &r))
9673 return NULL_TREE;
9674 elts[i] = build_real (unit_type, r);
9677 return build_vector (type, elts);
9679 default:
9680 return NULL_TREE;
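/* For example, 4.0 has the exact inverse 0.25, so a division by 4.0
   can become a multiplication by 0.25; 3.0 has no exact binary
   inverse and yields NULL_TREE. For a VECTOR_CST every element must
   invert exactly. */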
9684 /* Mask out the tz least significant bits of X of type TYPE where
9685 tz is the number of trailing zeroes in Y. */
9686 static wide_int
9687 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9689 int tz = wi::ctz (y);
9690 if (tz > 0)
9691 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9692 return x;
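/* E.g. if Y ends in three zero bits, the result is X with its low
   three bits cleared; if Y is odd (tz == 0), X is returned
   unchanged. */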
9695 /* Return true when T is an address and is known to be nonzero.
9696 For floating point we further ensure that T is not denormal.
9697 Similar logic is present in nonzero_address in rtlanal.h.
9699 If the return value is based on the assumption that signed overflow
9700 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9701 change *STRICT_OVERFLOW_P. */
9703 static bool
9704 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9706 tree type = TREE_TYPE (t);
9707 enum tree_code code;
9709 /* Doing something useful for floating point would need more work. */
9710 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9711 return false;
9713 code = TREE_CODE (t);
9714 switch (TREE_CODE_CLASS (code))
9716 case tcc_unary:
9717 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9718 strict_overflow_p);
9719 case tcc_binary:
9720 case tcc_comparison:
9721 return tree_binary_nonzero_warnv_p (code, type,
9722 TREE_OPERAND (t, 0),
9723 TREE_OPERAND (t, 1),
9724 strict_overflow_p);
9725 case tcc_constant:
9726 case tcc_declaration:
9727 case tcc_reference:
9728 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9730 default:
9731 break;
9734 switch (code)
9736 case TRUTH_NOT_EXPR:
9737 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9738 strict_overflow_p);
9740 case TRUTH_AND_EXPR:
9741 case TRUTH_OR_EXPR:
9742 case TRUTH_XOR_EXPR:
9743 return tree_binary_nonzero_warnv_p (code, type,
9744 TREE_OPERAND (t, 0),
9745 TREE_OPERAND (t, 1),
9746 strict_overflow_p);
9748 case COND_EXPR:
9749 case CONSTRUCTOR:
9750 case OBJ_TYPE_REF:
9751 case ASSERT_EXPR:
9752 case ADDR_EXPR:
9753 case WITH_SIZE_EXPR:
9754 case SSA_NAME:
9755 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9757 case COMPOUND_EXPR:
9758 case MODIFY_EXPR:
9759 case BIND_EXPR:
9760 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9761 strict_overflow_p);
9763 case SAVE_EXPR:
9764 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9765 strict_overflow_p);
9767 case CALL_EXPR:
9769 tree fndecl = get_callee_fndecl (t);
9770 if (!fndecl) return false;
9771 if (flag_delete_null_pointer_checks && !flag_check_new
9772 && DECL_IS_OPERATOR_NEW (fndecl)
9773 && !TREE_NOTHROW (fndecl))
9774 return true;
9775 if (flag_delete_null_pointer_checks
9776 && lookup_attribute ("returns_nonnull",
9777 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9778 return true;
9779 return alloca_call_p (t);
9782 default:
9783 break;
9785 return false;
9788 /* Return true when T is an address and is known to be nonzero.
9789 Handle warnings about undefined signed overflow. */
9791 static bool
9792 tree_expr_nonzero_p (tree t)
9794 bool ret, strict_overflow_p;
9796 strict_overflow_p = false;
9797 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9798 if (strict_overflow_p)
9799 fold_overflow_warning (("assuming signed overflow does not occur when "
9800 "determining that expression is always "
9801 "non-zero"),
9802 WARN_STRICT_OVERFLOW_MISC);
9803 return ret;
9806 /* Fold a binary expression of code CODE and type TYPE with operands
9807 OP0 and OP1. LOC is the location of the resulting expression.
9808 Return the folded expression if folding is successful. Otherwise,
9809 return NULL_TREE. */
9811 tree
9812 fold_binary_loc (location_t loc,
9813 enum tree_code code, tree type, tree op0, tree op1)
9815 enum tree_code_class kind = TREE_CODE_CLASS (code);
9816 tree arg0, arg1, tem;
9817 tree t1 = NULL_TREE;
9818 bool strict_overflow_p;
9819 unsigned int prec;
9821 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9822 && TREE_CODE_LENGTH (code) == 2
9823 && op0 != NULL_TREE
9824 && op1 != NULL_TREE);
9826 arg0 = op0;
9827 arg1 = op1;
9829 /* Strip any conversions that don't change the mode. This is
9830 safe for every expression, except for a comparison expression
9831 because its signedness is derived from its operands. So, in
9832 the latter case, only strip conversions that don't change the
9833 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9834 preserved.
9836 Note that this is done as an internal manipulation within the
9837 constant folder, in order to find the simplest representation
9838 of the arguments so that their form can be studied. In any
9839 case, the appropriate type conversions should be put back in
9840 the tree that will get out of the constant folder. */
9842 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9844 STRIP_SIGN_NOPS (arg0);
9845 STRIP_SIGN_NOPS (arg1);
9847 else
9849 STRIP_NOPS (arg0);
9850 STRIP_NOPS (arg1);
9853 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9854 constant but we can't do arithmetic on them. */
9855 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9856 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9857 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9858 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9859 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9860 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9861 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9863 if (kind == tcc_binary)
9865 /* Make sure type and arg0 have the same saturating flag. */
9866 gcc_assert (TYPE_SATURATING (type)
9867 == TYPE_SATURATING (TREE_TYPE (arg0)));
9868 tem = const_binop (code, arg0, arg1);
9870 else if (kind == tcc_comparison)
9871 tem = fold_relational_const (code, type, arg0, arg1);
9872 else
9873 tem = NULL_TREE;
9875 if (tem != NULL_TREE)
9877 if (TREE_TYPE (tem) != type)
9878 tem = fold_convert_loc (loc, type, tem);
9879 return tem;
9883 /* If this is a commutative operation, and ARG0 is a constant, move it
9884 to ARG1 to reduce the number of tests below. */
9885 if (commutative_tree_code (code)
9886 && tree_swap_operands_p (arg0, arg1, true))
9887 return fold_build2_loc (loc, code, type, op1, op0);
9889 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9890 to ARG1 to reduce the number of tests below. */
9891 if (kind == tcc_comparison
9892 && tree_swap_operands_p (arg0, arg1, true))
9893 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9895 tem = generic_simplify (loc, code, type, op0, op1);
9896 if (tem)
9897 return tem;
9899 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9901 First check for cases where an arithmetic operation is applied to a
9902 compound, conditional, or comparison operation. Push the arithmetic
9903 operation inside the compound or conditional to see if any folding
9904 can then be done. Convert comparison to conditional for this purpose.
9905 This also optimizes non-constant cases that used to be done in
9906 expand_expr.
9908 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9909 one of the operands is a comparison and the other is a comparison, a
9910 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9911 code below would make the expression more complex. Change it to a
9912 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9913 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9915 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9916 || code == EQ_EXPR || code == NE_EXPR)
9917 && TREE_CODE (type) != VECTOR_TYPE
9918 && ((truth_value_p (TREE_CODE (arg0))
9919 && (truth_value_p (TREE_CODE (arg1))
9920 || (TREE_CODE (arg1) == BIT_AND_EXPR
9921 && integer_onep (TREE_OPERAND (arg1, 1)))))
9922 || (truth_value_p (TREE_CODE (arg1))
9923 && (truth_value_p (TREE_CODE (arg0))
9924 || (TREE_CODE (arg0) == BIT_AND_EXPR
9925 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9927 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9928 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9929 : TRUTH_XOR_EXPR,
9930 boolean_type_node,
9931 fold_convert_loc (loc, boolean_type_node, arg0),
9932 fold_convert_loc (loc, boolean_type_node, arg1));
9934 if (code == EQ_EXPR)
9935 tem = invert_truthvalue_loc (loc, tem);
9937 return fold_convert_loc (loc, type, tem);
9940 if (TREE_CODE_CLASS (code) == tcc_binary
9941 || TREE_CODE_CLASS (code) == tcc_comparison)
9943 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9945 tem = fold_build2_loc (loc, code, type,
9946 fold_convert_loc (loc, TREE_TYPE (op0),
9947 TREE_OPERAND (arg0, 1)), op1);
9948 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9949 tem);
9951 if (TREE_CODE (arg1) == COMPOUND_EXPR
9952 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9954 tem = fold_build2_loc (loc, code, type, op0,
9955 fold_convert_loc (loc, TREE_TYPE (op1),
9956 TREE_OPERAND (arg1, 1)));
9957 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9958 tem);
9961 if (TREE_CODE (arg0) == COND_EXPR
9962 || TREE_CODE (arg0) == VEC_COND_EXPR
9963 || COMPARISON_CLASS_P (arg0))
9965 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9966 arg0, arg1,
9967 /*cond_first_p=*/1);
9968 if (tem != NULL_TREE)
9969 return tem;
9972 if (TREE_CODE (arg1) == COND_EXPR
9973 || TREE_CODE (arg1) == VEC_COND_EXPR
9974 || COMPARISON_CLASS_P (arg1))
9976 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9977 arg1, arg0,
9978 /*cond_first_p=*/0);
9979 if (tem != NULL_TREE)
9980 return tem;
9984 switch (code)
9986 case MEM_REF:
9987 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9988 if (TREE_CODE (arg0) == ADDR_EXPR
9989 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9991 tree iref = TREE_OPERAND (arg0, 0);
9992 return fold_build2 (MEM_REF, type,
9993 TREE_OPERAND (iref, 0),
9994 int_const_binop (PLUS_EXPR, arg1,
9995 TREE_OPERAND (iref, 1)));
9998 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9999 if (TREE_CODE (arg0) == ADDR_EXPR
10000 && handled_component_p (TREE_OPERAND (arg0, 0)))
10002 tree base;
10003 HOST_WIDE_INT coffset;
10004 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10005 &coffset);
10006 if (!base)
10007 return NULL_TREE;
10008 return fold_build2 (MEM_REF, type,
10009 build_fold_addr_expr (base),
10010 int_const_binop (PLUS_EXPR, arg1,
10011 size_int (coffset)));
10014 return NULL_TREE;
10016 case POINTER_PLUS_EXPR:
10017 /* 0 +p index -> (type)index */
10018 if (integer_zerop (arg0))
10019 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10021 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10022 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10023 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10024 return fold_convert_loc (loc, type,
10025 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10026 fold_convert_loc (loc, sizetype,
10027 arg1),
10028 fold_convert_loc (loc, sizetype,
10029 arg0)));
10031 /* (PTR +p B) +p A -> PTR +p (B + A) */
10032 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10034 tree inner;
10035 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10036 tree arg00 = TREE_OPERAND (arg0, 0);
10037 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10038 arg01, fold_convert_loc (loc, sizetype, arg1));
10039 return fold_convert_loc (loc, type,
10040 fold_build_pointer_plus_loc (loc,
10041 arg00, inner));
10044 /* PTR_CST +p CST -> CST1 */
10045 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10046 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10047 fold_convert_loc (loc, type, arg1));
10049 return NULL_TREE;
10051 case PLUS_EXPR:
10052 /* A + (-B) -> A - B */
10053 if (TREE_CODE (arg1) == NEGATE_EXPR
10054 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10055 return fold_build2_loc (loc, MINUS_EXPR, type,
10056 fold_convert_loc (loc, type, arg0),
10057 fold_convert_loc (loc, type,
10058 TREE_OPERAND (arg1, 0)));
10059 /* (-A) + B -> B - A */
10060 if (TREE_CODE (arg0) == NEGATE_EXPR
10061 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10062 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10063 return fold_build2_loc (loc, MINUS_EXPR, type,
10064 fold_convert_loc (loc, type, arg1),
10065 fold_convert_loc (loc, type,
10066 TREE_OPERAND (arg0, 0)));
10068 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10070 /* Convert ~A + 1 to -A. */
10071 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10072 && integer_each_onep (arg1))
10073 return fold_build1_loc (loc, NEGATE_EXPR, type,
10074 fold_convert_loc (loc, type,
10075 TREE_OPERAND (arg0, 0)));
10077 /* ~X + X is -1. */
10078 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10079 && !TYPE_OVERFLOW_TRAPS (type))
10081 tree tem = TREE_OPERAND (arg0, 0);
10083 STRIP_NOPS (tem);
10084 if (operand_equal_p (tem, arg1, 0))
10086 t1 = build_all_ones_cst (type);
10087 return omit_one_operand_loc (loc, type, t1, arg1);
10091 /* X + ~X is -1. */
10092 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10093 && !TYPE_OVERFLOW_TRAPS (type))
10095 tree tem = TREE_OPERAND (arg1, 0);
10097 STRIP_NOPS (tem);
10098 if (operand_equal_p (arg0, tem, 0))
10100 t1 = build_all_ones_cst (type);
10101 return omit_one_operand_loc (loc, type, t1, arg0);
10105 /* X + (X / CST) * -CST is X % CST. */
10106 if (TREE_CODE (arg1) == MULT_EXPR
10107 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10108 && operand_equal_p (arg0,
10109 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10111 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10112 tree cst1 = TREE_OPERAND (arg1, 1);
10113 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10114 cst1, cst0);
10115 if (sum && integer_zerop (sum))
10116 return fold_convert_loc (loc, type,
10117 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10118 TREE_TYPE (arg0), arg0,
10119 cst0));
10123 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10124 one. Make sure the type is not saturating and has the signedness of
10125 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10126 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10127 if ((TREE_CODE (arg0) == MULT_EXPR
10128 || TREE_CODE (arg1) == MULT_EXPR)
10129 && !TYPE_SATURATING (type)
10130 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10131 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10132 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10134 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10135 if (tem)
10136 return tem;
10139 if (! FLOAT_TYPE_P (type))
10141 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10142 with a constant, and the two constants have no bits in common,
10143 we should treat this as a BIT_IOR_EXPR since this may produce more
10144 simplifications. */
10145 if (TREE_CODE (arg0) == BIT_AND_EXPR
10146 && TREE_CODE (arg1) == BIT_AND_EXPR
10147 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10148 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10149 && wi::bit_and (TREE_OPERAND (arg0, 1),
10150 TREE_OPERAND (arg1, 1)) == 0)
10152 code = BIT_IOR_EXPR;
10153 goto bit_ior;
10156 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10157 (plus (plus (mult) (mult)) (foo)) so that we can
10158 take advantage of the factoring cases below. */
10159 if (TYPE_OVERFLOW_WRAPS (type)
10160 && (((TREE_CODE (arg0) == PLUS_EXPR
10161 || TREE_CODE (arg0) == MINUS_EXPR)
10162 && TREE_CODE (arg1) == MULT_EXPR)
10163 || ((TREE_CODE (arg1) == PLUS_EXPR
10164 || TREE_CODE (arg1) == MINUS_EXPR)
10165 && TREE_CODE (arg0) == MULT_EXPR)))
10167 tree parg0, parg1, parg, marg;
10168 enum tree_code pcode;
10170 if (TREE_CODE (arg1) == MULT_EXPR)
10171 parg = arg0, marg = arg1;
10172 else
10173 parg = arg1, marg = arg0;
10174 pcode = TREE_CODE (parg);
10175 parg0 = TREE_OPERAND (parg, 0);
10176 parg1 = TREE_OPERAND (parg, 1);
10177 STRIP_NOPS (parg0);
10178 STRIP_NOPS (parg1);
10180 if (TREE_CODE (parg0) == MULT_EXPR
10181 && TREE_CODE (parg1) != MULT_EXPR)
10182 return fold_build2_loc (loc, pcode, type,
10183 fold_build2_loc (loc, PLUS_EXPR, type,
10184 fold_convert_loc (loc, type,
10185 parg0),
10186 fold_convert_loc (loc, type,
10187 marg)),
10188 fold_convert_loc (loc, type, parg1));
10189 if (TREE_CODE (parg0) != MULT_EXPR
10190 && TREE_CODE (parg1) == MULT_EXPR)
10191 return
10192 fold_build2_loc (loc, PLUS_EXPR, type,
10193 fold_convert_loc (loc, type, parg0),
10194 fold_build2_loc (loc, pcode, type,
10195 fold_convert_loc (loc, type, marg),
10196 fold_convert_loc (loc, type,
10197 parg1)));
10200 else
10202 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10203 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10204 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10206 /* Likewise if the operands are reversed. */
10207 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10208 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10210 /* Convert X + -C into X - C. */
10211 if (TREE_CODE (arg1) == REAL_CST
10212 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10214 tem = fold_negate_const (arg1, type);
10215 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10216 return fold_build2_loc (loc, MINUS_EXPR, type,
10217 fold_convert_loc (loc, type, arg0),
10218 fold_convert_loc (loc, type, tem));
10221 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10222 to __complex__ ( x, y ). This is not the same for SNaNs or
10223 if signed zeros are involved. */
10224 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10225 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10226 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10228 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10229 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10230 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10231 bool arg0rz = false, arg0iz = false;
10232 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10233 || (arg0i && (arg0iz = real_zerop (arg0i))))
10235 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10236 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10237 if (arg0rz && arg1i && real_zerop (arg1i))
10239 tree rp = arg1r ? arg1r
10240 : build1 (REALPART_EXPR, rtype, arg1);
10241 tree ip = arg0i ? arg0i
10242 : build1 (IMAGPART_EXPR, rtype, arg0);
10243 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10245 else if (arg0iz && arg1r && real_zerop (arg1r))
10247 tree rp = arg0r ? arg0r
10248 : build1 (REALPART_EXPR, rtype, arg0);
10249 tree ip = arg1i ? arg1i
10250 : build1 (IMAGPART_EXPR, rtype, arg1);
10251 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10256 if (flag_unsafe_math_optimizations
10257 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10258 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10259 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10260 return tem;
10262 /* Convert x+x into x*2.0. */
10263 if (operand_equal_p (arg0, arg1, 0)
10264 && SCALAR_FLOAT_TYPE_P (type))
10265 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10266 build_real (type, dconst2));
10268 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10269 We associate floats only if the user has specified
10270 -fassociative-math. */
10271 if (flag_associative_math
10272 && TREE_CODE (arg1) == PLUS_EXPR
10273 && TREE_CODE (arg0) != MULT_EXPR)
10275 tree tree10 = TREE_OPERAND (arg1, 0);
10276 tree tree11 = TREE_OPERAND (arg1, 1);
10277 if (TREE_CODE (tree11) == MULT_EXPR
10278 && TREE_CODE (tree10) == MULT_EXPR)
10280 tree tree0;
10281 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10282 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10285 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10286 We associate floats only if the user has specified
10287 -fassociative-math. */
10288 if (flag_associative_math
10289 && TREE_CODE (arg0) == PLUS_EXPR
10290 && TREE_CODE (arg1) != MULT_EXPR)
10292 tree tree00 = TREE_OPERAND (arg0, 0);
10293 tree tree01 = TREE_OPERAND (arg0, 1);
10294 if (TREE_CODE (tree01) == MULT_EXPR
10295 && TREE_CODE (tree00) == MULT_EXPR)
10297 tree tree0;
10298 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10299 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10304 bit_rotate:
10305 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10306 is a rotate of A by C1 bits. */
10307 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10308 is a rotate of A by B bits. */
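/* For example, for unsigned 32-bit x, "(x << 3) + (x >> 29)" is
   recognized as rotating x left by 3, and "(x << n) + (x >> (32 - n))"
   as a rotate by the variable amount n. */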
10310 enum tree_code code0, code1;
10311 tree rtype;
10312 code0 = TREE_CODE (arg0);
10313 code1 = TREE_CODE (arg1);
10314 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10315 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10316 && operand_equal_p (TREE_OPERAND (arg0, 0),
10317 TREE_OPERAND (arg1, 0), 0)
10318 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10319 TYPE_UNSIGNED (rtype))
10320 /* Only create rotates in complete modes. Other cases are not
10321 expanded properly. */
10322 && (element_precision (rtype)
10323 == element_precision (TYPE_MODE (rtype))))
10325 tree tree01, tree11;
10326 enum tree_code code01, code11;
10328 tree01 = TREE_OPERAND (arg0, 1);
10329 tree11 = TREE_OPERAND (arg1, 1);
10330 STRIP_NOPS (tree01);
10331 STRIP_NOPS (tree11);
10332 code01 = TREE_CODE (tree01);
10333 code11 = TREE_CODE (tree11);
10334 if (code01 == INTEGER_CST
10335 && code11 == INTEGER_CST
10336 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10337 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10339 tem = build2_loc (loc, LROTATE_EXPR,
10340 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10341 TREE_OPERAND (arg0, 0),
10342 code0 == LSHIFT_EXPR ? tree01 : tree11);
10343 return fold_convert_loc (loc, type, tem);
10345 else if (code11 == MINUS_EXPR)
10347 tree tree110, tree111;
10348 tree110 = TREE_OPERAND (tree11, 0);
10349 tree111 = TREE_OPERAND (tree11, 1);
10350 STRIP_NOPS (tree110);
10351 STRIP_NOPS (tree111);
10352 if (TREE_CODE (tree110) == INTEGER_CST
10353 && 0 == compare_tree_int (tree110,
10354 element_precision
10355 (TREE_TYPE (TREE_OPERAND
10356 (arg0, 0))))
10357 && operand_equal_p (tree01, tree111, 0))
10358 return
10359 fold_convert_loc (loc, type,
10360 build2 ((code0 == LSHIFT_EXPR
10361 ? LROTATE_EXPR
10362 : RROTATE_EXPR),
10363 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10364 TREE_OPERAND (arg0, 0), tree01));
10366 else if (code01 == MINUS_EXPR)
10368 tree tree010, tree011;
10369 tree010 = TREE_OPERAND (tree01, 0);
10370 tree011 = TREE_OPERAND (tree01, 1);
10371 STRIP_NOPS (tree010);
10372 STRIP_NOPS (tree011);
10373 if (TREE_CODE (tree010) == INTEGER_CST
10374 && 0 == compare_tree_int (tree010,
10375 element_precision
10376 (TREE_TYPE (TREE_OPERAND
10377 (arg0, 0))))
10378 && operand_equal_p (tree11, tree011, 0))
10379 return fold_convert_loc
10380 (loc, type,
10381 build2 ((code0 != LSHIFT_EXPR
10382 ? LROTATE_EXPR
10383 : RROTATE_EXPR),
10384 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10385 TREE_OPERAND (arg0, 0), tree11));
10390 associate:
10391 /* In most languages, we can't associate operations on floats through
10392 parentheses. Rather than remember where the parentheses were, we
10393 don't associate floats at all, unless the user has specified
10394 -fassociative-math.
10395 And, we need to make sure type is not saturating. */
10397 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10398 && !TYPE_SATURATING (type))
10400 tree var0, con0, lit0, minus_lit0;
10401 tree var1, con1, lit1, minus_lit1;
10402 tree atype = type;
10403 bool ok = true;
10405 /* Split both trees into variables, constants, and literals. Then
10406 associate each group together, the constants with literals,
10407 then the result with variables. This increases the chances of
10408 literals being recombined later and of generating relocatable
10409 expressions for the sum of a constant and literal. */
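/* For instance, "(x + 1) + (y + 2)" splits into variables x, y and
   literals 1, 2, which reassociate below to "(x + y) + 3" so the
   constants combine. */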
10410 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10411 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10412 code == MINUS_EXPR);
10414 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10415 if (code == MINUS_EXPR)
10416 code = PLUS_EXPR;
10418 /* With undefined overflow prefer doing association in a type
10419 which wraps on overflow, if that is one of the operand types. */
10420 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10421 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10423 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10424 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10425 atype = TREE_TYPE (arg0);
10426 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10427 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10428 atype = TREE_TYPE (arg1);
10429 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10432 /* With undefined overflow we can only associate constants with one
10433 variable, and constants whose association doesn't overflow. */
10434 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10435 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10437 if (var0 && var1)
10439 tree tmp0 = var0;
10440 tree tmp1 = var1;
10442 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10443 tmp0 = TREE_OPERAND (tmp0, 0);
10444 if (CONVERT_EXPR_P (tmp0)
10445 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10446 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10447 <= TYPE_PRECISION (atype)))
10448 tmp0 = TREE_OPERAND (tmp0, 0);
10449 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10450 tmp1 = TREE_OPERAND (tmp1, 0);
10451 if (CONVERT_EXPR_P (tmp1)
10452 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10453 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10454 <= TYPE_PRECISION (atype)))
10455 tmp1 = TREE_OPERAND (tmp1, 0);
10456 /* The only case we can still associate with two variables
10457 is if they are the same, modulo negation and bit-pattern
10458 preserving conversions. */
10459 if (!operand_equal_p (tmp0, tmp1, 0))
10460 ok = false;
10464 /* Only do something if we found more than two objects. Otherwise,
10465 nothing has changed and we risk infinite recursion. */
10466 if (ok
10467 && (2 < ((var0 != 0) + (var1 != 0)
10468 + (con0 != 0) + (con1 != 0)
10469 + (lit0 != 0) + (lit1 != 0)
10470 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10472 bool any_overflows = false;
10473 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10474 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10475 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10476 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10477 var0 = associate_trees (loc, var0, var1, code, atype);
10478 con0 = associate_trees (loc, con0, con1, code, atype);
10479 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10480 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10481 code, atype);
10483 /* Preserve the MINUS_EXPR if the negative part of the literal is
10484 greater than the positive part. Otherwise, the multiplicative
10485 folding code (i.e. extract_muldiv) may be fooled in case
10486 unsigned constants are subtracted, like in the following
10487 example: ((X*2 + 4) - 8U)/2. */
10488 if (minus_lit0 && lit0)
10490 if (TREE_CODE (lit0) == INTEGER_CST
10491 && TREE_CODE (minus_lit0) == INTEGER_CST
10492 && tree_int_cst_lt (lit0, minus_lit0))
10494 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10495 MINUS_EXPR, atype);
10496 lit0 = 0;
10498 else
10500 lit0 = associate_trees (loc, lit0, minus_lit0,
10501 MINUS_EXPR, atype);
10502 minus_lit0 = 0;
10506 /* Don't introduce overflows through reassociation. */
10507 if (!any_overflows
10508 && ((lit0 && TREE_OVERFLOW (lit0))
10509 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10510 return NULL_TREE;
10512 if (minus_lit0)
10514 if (con0 == 0)
10515 return
10516 fold_convert_loc (loc, type,
10517 associate_trees (loc, var0, minus_lit0,
10518 MINUS_EXPR, atype));
10519 else
10521 con0 = associate_trees (loc, con0, minus_lit0,
10522 MINUS_EXPR, atype);
10523 return
10524 fold_convert_loc (loc, type,
10525 associate_trees (loc, var0, con0,
10526 PLUS_EXPR, atype));
10530 con0 = associate_trees (loc, con0, lit0, code, atype);
10531 return
10532 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10533 code, atype));
10537 return NULL_TREE;
10539 case MINUS_EXPR:
10540 /* Pointer simplifications for subtraction, simple reassociations. */
10541 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10543 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10544 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10545 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10547 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10548 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10549 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10550 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10551 return fold_build2_loc (loc, PLUS_EXPR, type,
10552 fold_build2_loc (loc, MINUS_EXPR, type,
10553 arg00, arg10),
10554 fold_build2_loc (loc, MINUS_EXPR, type,
10555 arg01, arg11));
10557 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10558 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10560 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10561 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10562 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10563 fold_convert_loc (loc, type, arg1));
10564 if (tmp)
10565 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10567 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10568 simplifies. */
10569 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10571 tree arg10 = fold_convert_loc (loc, type,
10572 TREE_OPERAND (arg1, 0));
10573 tree arg11 = fold_convert_loc (loc, type,
10574 TREE_OPERAND (arg1, 1));
10575 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10576 fold_convert_loc (loc, type, arg0),
10577 arg10);
10578 if (tmp)
10579 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10582 /* A - (-B) -> A + B */
10583 if (TREE_CODE (arg1) == NEGATE_EXPR)
10584 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10585 fold_convert_loc (loc, type,
10586 TREE_OPERAND (arg1, 0)));
10587 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10588 if (TREE_CODE (arg0) == NEGATE_EXPR
10589 && negate_expr_p (arg1)
10590 && reorder_operands_p (arg0, arg1))
10591 return fold_build2_loc (loc, MINUS_EXPR, type,
10592 fold_convert_loc (loc, type,
10593 negate_expr (arg1)),
10594 fold_convert_loc (loc, type,
10595 TREE_OPERAND (arg0, 0)));
10596 /* Convert -A - 1 to ~A. */
10597 if (TREE_CODE (arg0) == NEGATE_EXPR
10598 && integer_each_onep (arg1)
10599 && !TYPE_OVERFLOW_TRAPS (type))
10600 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10601 fold_convert_loc (loc, type,
10602 TREE_OPERAND (arg0, 0)));
10604 /* Convert -1 - A to ~A. */
10605 if (TREE_CODE (type) != COMPLEX_TYPE
10606 && integer_all_onesp (arg0))
10607 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10610 /* X - (X / Y) * Y is X % Y. */
10611 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10612 && TREE_CODE (arg1) == MULT_EXPR
10613 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10614 && operand_equal_p (arg0,
10615 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10616 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10617 TREE_OPERAND (arg1, 1), 0))
10618 return
10619 fold_convert_loc (loc, type,
10620 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10621 arg0, TREE_OPERAND (arg1, 1)));
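/* For example, with X = 17 and Y = 5: X / Y truncates to 3, so
   X - (X / Y) * Y = 17 - 15 = 2, which is 17 % 5. Truncating
   division pairs with TRUNC_MOD_EXPR for negative operands too:
   -17 / 5 = -3 and -17 - (-15) = -2 = -17 % 5.  */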
10623 if (! FLOAT_TYPE_P (type))
10625 if (integer_zerop (arg0))
10626 return negate_expr (fold_convert_loc (loc, type, arg1));
10628 /* Fold A - (A & B) into ~B & A. */
10629 if (!TREE_SIDE_EFFECTS (arg0)
10630 && TREE_CODE (arg1) == BIT_AND_EXPR)
10632 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10634 tree arg10 = fold_convert_loc (loc, type,
10635 TREE_OPERAND (arg1, 0));
10636 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10637 fold_build1_loc (loc, BIT_NOT_EXPR,
10638 type, arg10),
10639 fold_convert_loc (loc, type, arg0));
10641 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10643 tree arg11 = fold_convert_loc (loc,
10644 type, TREE_OPERAND (arg1, 1));
10645 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10646 fold_build1_loc (loc, BIT_NOT_EXPR,
10647 type, arg11),
10648 fold_convert_loc (loc, type, arg0));
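/* The bits of A & B are a subset of the bits of A, so the
   subtraction cannot borrow and merely clears them, leaving
   A & ~B. E.g. A = 0b1100, B = 0b1010: A & B = 0b1000 and
   A - (A & B) = 0b0100 = ~B & A.  */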
10652 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10653 any power of 2 minus 1. */
10654 if (TREE_CODE (arg0) == BIT_AND_EXPR
10655 && TREE_CODE (arg1) == BIT_AND_EXPR
10656 && operand_equal_p (TREE_OPERAND (arg0, 0),
10657 TREE_OPERAND (arg1, 0), 0))
10659 tree mask0 = TREE_OPERAND (arg0, 1);
10660 tree mask1 = TREE_OPERAND (arg1, 1);
10661 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10663 if (operand_equal_p (tem, mask1, 0))
10665 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10666 TREE_OPERAND (arg0, 0), mask1);
10667 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
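/* E.g. with B = 7 (0b0111, a power of 2 minus 1) and A = 0b1010:
   A & ~B = 0b1000 and A & B = 0b0010, so the left-hand side is
   8 - 2 = 6, while A ^ B = 0b1101 = 13 and 13 - 7 = 6 as well.  */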
10672 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10673 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10674 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10676 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10677 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10678 (-ARG1 + ARG0) reduces to -ARG1. */
10679 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10680 return negate_expr (fold_convert_loc (loc, type, arg1));
10682 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10683 __complex__ ( x, -y ). This is not the same for SNaNs or if
10684 signed zeros are involved. */
10685 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10686 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10687 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10689 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10690 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10691 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10692 bool arg0rz = false, arg0iz = false;
10693 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10694 || (arg0i && (arg0iz = real_zerop (arg0i))))
10696 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10697 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10698 if (arg0rz && arg1i && real_zerop (arg1i))
10700 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10701 arg1r ? arg1r
10702 : build1 (REALPART_EXPR, rtype, arg1));
10703 tree ip = arg0i ? arg0i
10704 : build1 (IMAGPART_EXPR, rtype, arg0);
10705 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10707 else if (arg0iz && arg1r && real_zerop (arg1r))
10709 tree rp = arg0r ? arg0r
10710 : build1 (REALPART_EXPR, rtype, arg0);
10711 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10712 arg1i ? arg1i
10713 : build1 (IMAGPART_EXPR, rtype, arg1));
10714 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10719 /* A - B -> A + (-B) if B is easily negatable. */
10720 if (negate_expr_p (arg1)
10721 && ((FLOAT_TYPE_P (type)
10722 /* Avoid this transformation if B is a positive REAL_CST. */
10723 && (TREE_CODE (arg1) != REAL_CST
10724 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10725 || INTEGRAL_TYPE_P (type)))
10726 return fold_build2_loc (loc, PLUS_EXPR, type,
10727 fold_convert_loc (loc, type, arg0),
10728 fold_convert_loc (loc, type,
10729 negate_expr (arg1)));
10731 /* Try folding difference of addresses. */
10733 HOST_WIDE_INT diff;
10735 if ((TREE_CODE (arg0) == ADDR_EXPR
10736 || TREE_CODE (arg1) == ADDR_EXPR)
10737 && ptr_difference_const (arg0, arg1, &diff))
10738 return build_int_cst_type (type, diff);
10741 /* Fold &a[i] - &a[j] to i-j. */
10742 if (TREE_CODE (arg0) == ADDR_EXPR
10743 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10744 && TREE_CODE (arg1) == ADDR_EXPR
10745 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10747 tree tem = fold_addr_of_array_ref_difference (loc, type,
10748 TREE_OPERAND (arg0, 0),
10749 TREE_OPERAND (arg1, 0));
10750 if (tem)
10751 return tem;
10754 if (FLOAT_TYPE_P (type)
10755 && flag_unsafe_math_optimizations
10756 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10757 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10758 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10759 return tem;
10761 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same,
10762 or one of them being 1. Make sure the type is not saturating and has the
10763 signedness of the stripped operands, as fold_plusminus_mult_expr will re-associate.
10764 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10765 if ((TREE_CODE (arg0) == MULT_EXPR
10766 || TREE_CODE (arg1) == MULT_EXPR)
10767 && !TYPE_SATURATING (type)
10768 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10769 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10770 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10772 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10773 if (tem)
10774 return tem;
10777 goto associate;
10779 case MULT_EXPR:
10780 /* (-A) * (-B) -> A * B */
10781 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10782 return fold_build2_loc (loc, MULT_EXPR, type,
10783 fold_convert_loc (loc, type,
10784 TREE_OPERAND (arg0, 0)),
10785 fold_convert_loc (loc, type,
10786 negate_expr (arg1)));
10787 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10788 return fold_build2_loc (loc, MULT_EXPR, type,
10789 fold_convert_loc (loc, type,
10790 negate_expr (arg0)),
10791 fold_convert_loc (loc, type,
10792 TREE_OPERAND (arg1, 0)));
10794 if (! FLOAT_TYPE_P (type))
10796 /* Transform x * -1 into -x. Make sure to do the negation
10797 on the original operand with conversions not stripped
10798 because we can only strip non-sign-changing conversions. */
10799 if (integer_minus_onep (arg1))
10800 return fold_convert_loc (loc, type, negate_expr (op0));
10801 /* Transform x * -C into -x * C if x is easily negatable. */
10802 if (TREE_CODE (arg1) == INTEGER_CST
10803 && tree_int_cst_sgn (arg1) == -1
10804 && negate_expr_p (arg0)
10805 && (tem = negate_expr (arg1)) != arg1
10806 && !TREE_OVERFLOW (tem))
10807 return fold_build2_loc (loc, MULT_EXPR, type,
10808 fold_convert_loc (loc, type,
10809 negate_expr (arg0)),
10810 tem);
10812 /* (a * (1 << b)) is (a << b) */
10813 if (TREE_CODE (arg1) == LSHIFT_EXPR
10814 && integer_onep (TREE_OPERAND (arg1, 0)))
10815 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10816 TREE_OPERAND (arg1, 1));
10817 if (TREE_CODE (arg0) == LSHIFT_EXPR
10818 && integer_onep (TREE_OPERAND (arg0, 0)))
10819 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10820 TREE_OPERAND (arg0, 1));
10822 /* (A + A) * C -> A * 2 * C */
10823 if (TREE_CODE (arg0) == PLUS_EXPR
10824 && TREE_CODE (arg1) == INTEGER_CST
10825 && operand_equal_p (TREE_OPERAND (arg0, 0),
10826 TREE_OPERAND (arg0, 1), 0))
10827 return fold_build2_loc (loc, MULT_EXPR, type,
10828 omit_one_operand_loc (loc, type,
10829 TREE_OPERAND (arg0, 0),
10830 TREE_OPERAND (arg0, 1)),
10831 fold_build2_loc (loc, MULT_EXPR, type,
10832 build_int_cst (type, 2), arg1));
10834 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10835 sign-changing only. */
10836 if (TREE_CODE (arg1) == INTEGER_CST
10837 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10838 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10839 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
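/* X /[ex] C denotes EXACT_DIV_EXPR, where X is known to be a multiple
   of C; e.g. ((T) (20 /[ex] 4)) * 4 folds to (T) 20 when the
   conversion (T) changes only the sign, not the precision.  */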
10841 strict_overflow_p = false;
10842 if (TREE_CODE (arg1) == INTEGER_CST
10843 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10844 &strict_overflow_p)))
10846 if (strict_overflow_p)
10847 fold_overflow_warning (("assuming signed overflow does not "
10848 "occur when simplifying "
10849 "multiplication"),
10850 WARN_STRICT_OVERFLOW_MISC);
10851 return fold_convert_loc (loc, type, tem);
10854 /* Optimize z * conj(z) for integer complex numbers. */
10855 if (TREE_CODE (arg0) == CONJ_EXPR
10856 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10857 return fold_mult_zconjz (loc, type, arg1);
10858 if (TREE_CODE (arg1) == CONJ_EXPR
10859 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10860 return fold_mult_zconjz (loc, type, arg0);
10862 else
10864 /* Maybe fold x * 0 to 0. The expressions aren't the same
10865 when x is NaN, since x * 0 is also NaN. Nor are they the
10866 same in modes with signed zeros, since multiplying a
10867 negative value by 0 gives -0, not +0. */
10868 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10869 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10870 && real_zerop (arg1))
10871 return omit_one_operand_loc (loc, type, arg1, arg0);
10872 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
10873 Likewise for complex arithmetic with signed zeros. */
10874 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10875 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10876 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10877 && real_onep (arg1))
10878 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10880 /* Transform x * -1.0 into -x. */
10881 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10882 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10883 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10884 && real_minus_onep (arg1))
10885 return fold_convert_loc (loc, type, negate_expr (arg0));
10887 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10888 the result for floating point types due to rounding, so it is applied
10889 only if -fassociative-math was specified. */
10890 if (flag_associative_math
10891 && TREE_CODE (arg0) == RDIV_EXPR
10892 && TREE_CODE (arg1) == REAL_CST
10893 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10895 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10896 arg1);
10897 if (tem)
10898 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10899 TREE_OPERAND (arg0, 1));
10902 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10903 if (operand_equal_p (arg0, arg1, 0))
10905 tree tem = fold_strip_sign_ops (arg0);
10906 if (tem != NULL_TREE)
10908 tem = fold_convert_loc (loc, type, tem);
10909 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10913 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10914 This is not the same for NaNs or if signed zeros are
10915 involved. */
10916 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10917 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10918 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10919 && TREE_CODE (arg1) == COMPLEX_CST
10920 && real_zerop (TREE_REALPART (arg1)))
10922 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10923 if (real_onep (TREE_IMAGPART (arg1)))
10924 return
10925 fold_build2_loc (loc, COMPLEX_EXPR, type,
10926 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10927 rtype, arg0)),
10928 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10929 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10930 return
10931 fold_build2_loc (loc, COMPLEX_EXPR, type,
10932 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10933 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10934 rtype, arg0)));
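/* E.g. with z = a + b*I: z * I = a*I + b*I*I = -b + a*I and
   z * -I = b - a*I, which is exactly what the two branches above
   assemble from the real and imaginary parts of z.  */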
10937 /* Optimize z * conj(z) for floating point complex numbers.
10938 Guarded by flag_unsafe_math_optimizations as non-finite
10939 imaginary components don't produce scalar results. */
10940 if (flag_unsafe_math_optimizations
10941 && TREE_CODE (arg0) == CONJ_EXPR
10942 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10943 return fold_mult_zconjz (loc, type, arg1);
10944 if (flag_unsafe_math_optimizations
10945 && TREE_CODE (arg1) == CONJ_EXPR
10946 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10947 return fold_mult_zconjz (loc, type, arg0);
10949 if (flag_unsafe_math_optimizations)
10951 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10952 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10954 /* Optimizations of root(...)*root(...). */
10955 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10957 tree rootfn, arg;
10958 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10959 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10961 /* Optimize sqrt(x)*sqrt(x) as x. */
10962 if (BUILTIN_SQRT_P (fcode0)
10963 && operand_equal_p (arg00, arg10, 0)
10964 && ! HONOR_SNANS (TYPE_MODE (type)))
10965 return arg00;
10967 /* Optimize root(x)*root(y) as root(x*y). */
10968 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10969 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10970 return build_call_expr_loc (loc, rootfn, 1, arg);
10973 /* Optimize expN(x)*expN(y) as expN(x+y). */
10974 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10976 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10977 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10978 CALL_EXPR_ARG (arg0, 0),
10979 CALL_EXPR_ARG (arg1, 0));
10980 return build_call_expr_loc (loc, expfn, 1, arg);
10983 /* Optimizations of pow(...)*pow(...). */
10984 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10985 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10986 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10988 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10989 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10990 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10991 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10993 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10994 if (operand_equal_p (arg01, arg11, 0))
10996 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10997 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10998 arg00, arg10);
10999 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11002 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11003 if (operand_equal_p (arg00, arg10, 0))
11005 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11006 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11007 arg01, arg11);
11008 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11012 /* Optimize tan(x)*cos(x) as sin(x). */
11013 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11014 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11015 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11016 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11017 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11018 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11019 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11020 CALL_EXPR_ARG (arg1, 0), 0))
11022 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11024 if (sinfn != NULL_TREE)
11025 return build_call_expr_loc (loc, sinfn, 1,
11026 CALL_EXPR_ARG (arg0, 0));
11029 /* Optimize x*pow(x,c) as pow(x,c+1). */
11030 if (fcode1 == BUILT_IN_POW
11031 || fcode1 == BUILT_IN_POWF
11032 || fcode1 == BUILT_IN_POWL)
11034 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11035 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11036 if (TREE_CODE (arg11) == REAL_CST
11037 && !TREE_OVERFLOW (arg11)
11038 && operand_equal_p (arg0, arg10, 0))
11040 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11041 REAL_VALUE_TYPE c;
11042 tree arg;
11044 c = TREE_REAL_CST (arg11);
11045 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11046 arg = build_real (type, c);
11047 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11051 /* Optimize pow(x,c)*x as pow(x,c+1). */
11052 if (fcode0 == BUILT_IN_POW
11053 || fcode0 == BUILT_IN_POWF
11054 || fcode0 == BUILT_IN_POWL)
11056 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11057 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11058 if (TREE_CODE (arg01) == REAL_CST
11059 && !TREE_OVERFLOW (arg01)
11060 && operand_equal_p (arg1, arg00, 0))
11062 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11063 REAL_VALUE_TYPE c;
11064 tree arg;
11066 c = TREE_REAL_CST (arg01);
11067 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11068 arg = build_real (type, c);
11069 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11073 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11074 if (!in_gimple_form
11075 && optimize
11076 && operand_equal_p (arg0, arg1, 0))
11078 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11080 if (powfn)
11082 tree arg = build_real (type, dconst2);
11083 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11088 goto associate;
11090 case BIT_IOR_EXPR:
11091 bit_ior:
11092 /* ~X | X is -1. */
11093 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11094 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11096 t1 = build_zero_cst (type);
11097 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11098 return omit_one_operand_loc (loc, type, t1, arg1);
11101 /* X | ~X is -1. */
11102 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11103 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11105 t1 = build_zero_cst (type);
11106 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11107 return omit_one_operand_loc (loc, type, t1, arg0);
11110 /* Canonicalize (X & C1) | C2. */
11111 if (TREE_CODE (arg0) == BIT_AND_EXPR
11112 && TREE_CODE (arg1) == INTEGER_CST
11113 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11115 int width = TYPE_PRECISION (type), w;
11116 wide_int c1 = TREE_OPERAND (arg0, 1);
11117 wide_int c2 = arg1;
11119 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11120 if ((c1 & c2) == c1)
11121 return omit_one_operand_loc (loc, type, arg1,
11122 TREE_OPERAND (arg0, 0));
11124 wide_int msk = wi::mask (width, false,
11125 TYPE_PRECISION (TREE_TYPE (arg1)));
11127 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11128 if (msk.and_not (c1 | c2) == 0)
11129 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11130 TREE_OPERAND (arg0, 0), arg1);
11132 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11133 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11134 mode which allows further optimizations. */
11135 c1 &= msk;
11136 c2 &= msk;
11137 wide_int c3 = c1.and_not (c2);
11138 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11140 wide_int mask = wi::mask (w, false,
11141 TYPE_PRECISION (type));
11142 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11144 c3 = mask;
11145 break;
11149 if (c3 != c1)
11150 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11151 fold_build2_loc (loc, BIT_AND_EXPR, type,
11152 TREE_OPERAND (arg0, 0),
11153 wide_int_to_tree (type,
11154 c3)),
11155 arg1);
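/* Worked example of the three steps above, with 8-bit operands:
   (X & 0x03) | 0x0f is 0x0f since C1 & C2 == C1; (X & 0xf0) | 0x0f
   becomes X | 0x0f since C1 | C2 == 0xff; and (X & 0x0f) | 0x05
   becomes (X & 0x0a) | 0x05, dropping from C1 the bits that C2
   forces to 1 anyway.  */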
11158 /* (X & Y) | Y is (X, Y). */
11159 if (TREE_CODE (arg0) == BIT_AND_EXPR
11160 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11161 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11162 /* (X & Y) | X is (Y, X). */
11163 if (TREE_CODE (arg0) == BIT_AND_EXPR
11164 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11165 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11166 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11167 /* X | (X & Y) is (Y, X). */
11168 if (TREE_CODE (arg1) == BIT_AND_EXPR
11169 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11170 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11171 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11172 /* X | (Y & X) is (Y, X). */
11173 if (TREE_CODE (arg1) == BIT_AND_EXPR
11174 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11175 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11176 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11178 /* (X & ~Y) | (~X & Y) is X ^ Y */
11179 if (TREE_CODE (arg0) == BIT_AND_EXPR
11180 && TREE_CODE (arg1) == BIT_AND_EXPR)
11182 tree a0, a1, l0, l1, n0, n1;
11184 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11185 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11187 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11188 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11190 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11191 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11193 if ((operand_equal_p (n0, a0, 0)
11194 && operand_equal_p (n1, a1, 0))
11195 || (operand_equal_p (n0, a1, 0)
11196 && operand_equal_p (n1, a0, 0)))
11197 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
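/* Only the bits where X and Y differ survive X & ~Y or ~X & Y, so
   their union is X ^ Y; e.g. X = 0b1100, Y = 0b1010 gives
   0b0100 | 0b0010 = 0b0110 = X ^ Y.  */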
11200 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11201 if (t1 != NULL_TREE)
11202 return t1;
11204 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11206 This results in more efficient code for machines without a NAND
11207 instruction. Combine will canonicalize to the first form
11208 which will allow use of NAND instructions provided by the
11209 backend if they exist. */
11210 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11211 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11213 return
11214 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11215 build2 (BIT_AND_EXPR, type,
11216 fold_convert_loc (loc, type,
11217 TREE_OPERAND (arg0, 0)),
11218 fold_convert_loc (loc, type,
11219 TREE_OPERAND (arg1, 0))));
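/* This is one of De Morgan's laws, ~a | ~b == ~(a & b); its dual,
   ~a & ~b == ~(a | b), is applied in the BIT_AND_EXPR case below.  */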
11222 /* See if this can be simplified into a rotate first. If that
11223 is unsuccessful continue in the association code. */
11224 goto bit_rotate;
11226 case BIT_XOR_EXPR:
11227 /* ~X ^ X is -1. */
11228 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11229 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11231 t1 = build_zero_cst (type);
11232 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11233 return omit_one_operand_loc (loc, type, t1, arg1);
11236 /* X ^ ~X is -1. */
11237 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11238 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11240 t1 = build_zero_cst (type);
11241 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11242 return omit_one_operand_loc (loc, type, t1, arg0);
11245 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11246 with a constant, and the two constants have no bits in common,
11247 we should treat this as a BIT_IOR_EXPR since this may produce more
11248 simplifications. */
11249 if (TREE_CODE (arg0) == BIT_AND_EXPR
11250 && TREE_CODE (arg1) == BIT_AND_EXPR
11251 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11252 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11253 && wi::bit_and (TREE_OPERAND (arg0, 1),
11254 TREE_OPERAND (arg1, 1)) == 0)
11256 code = BIT_IOR_EXPR;
11257 goto bit_ior;
11260 /* (X | Y) ^ X -> Y & ~X */
11261 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11262 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11264 tree t2 = TREE_OPERAND (arg0, 1);
11265 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11266 arg1);
11267 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11268 fold_convert_loc (loc, type, t2),
11269 fold_convert_loc (loc, type, t1));
11270 return t1;
11273 /* (Y | X) ^ X -> Y & ~X */
11274 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11275 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11277 tree t2 = TREE_OPERAND (arg0, 0);
11278 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11279 arg1);
11280 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11281 fold_convert_loc (loc, type, t2),
11282 fold_convert_loc (loc, type, t1));
11283 return t1;
11286 /* X ^ (X | Y) -> Y & ~X */
11287 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11288 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11290 tree t2 = TREE_OPERAND (arg1, 1);
11291 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11292 arg0);
11293 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11294 fold_convert_loc (loc, type, t2),
11295 fold_convert_loc (loc, type, t1));
11296 return t1;
11299 /* X ^ (Y | X) -> Y & ~X */
11300 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11301 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11303 tree t2 = TREE_OPERAND (arg1, 0);
11304 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11305 arg0);
11306 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11307 fold_convert_loc (loc, type, t2),
11308 fold_convert_loc (loc, type, t1));
11309 return t1;
11312 /* Convert ~X ^ ~Y to X ^ Y. */
11313 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11314 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11315 return fold_build2_loc (loc, code, type,
11316 fold_convert_loc (loc, type,
11317 TREE_OPERAND (arg0, 0)),
11318 fold_convert_loc (loc, type,
11319 TREE_OPERAND (arg1, 0)));
11321 /* Convert ~X ^ C to X ^ ~C. */
11322 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11323 && TREE_CODE (arg1) == INTEGER_CST)
11324 return fold_build2_loc (loc, code, type,
11325 fold_convert_loc (loc, type,
11326 TREE_OPERAND (arg0, 0)),
11327 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11329 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11330 if (TREE_CODE (arg0) == BIT_AND_EXPR
11331 && INTEGRAL_TYPE_P (type)
11332 && integer_onep (TREE_OPERAND (arg0, 1))
11333 && integer_onep (arg1))
11334 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11335 build_zero_cst (TREE_TYPE (arg0)));
11337 /* Fold (X & Y) ^ Y as ~X & Y. */
11338 if (TREE_CODE (arg0) == BIT_AND_EXPR
11339 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11341 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11342 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11343 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11344 fold_convert_loc (loc, type, arg1));
11346 /* Fold (X & Y) ^ X as ~Y & X. */
11347 if (TREE_CODE (arg0) == BIT_AND_EXPR
11348 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11349 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11351 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11352 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11353 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11354 fold_convert_loc (loc, type, arg1));
11356 /* Fold X ^ (X & Y) as X & ~Y. */
11357 if (TREE_CODE (arg1) == BIT_AND_EXPR
11358 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11360 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11361 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11362 fold_convert_loc (loc, type, arg0),
11363 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11365 /* Fold X ^ (Y & X) as ~Y & X. */
11366 if (TREE_CODE (arg1) == BIT_AND_EXPR
11367 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11368 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11370 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11371 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11372 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11373 fold_convert_loc (loc, type, arg0));
11376 /* See if this can be simplified into a rotate first. If that
11377 is unsuccessful continue in the association code. */
11378 goto bit_rotate;
11380 case BIT_AND_EXPR:
11381 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11382 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11383 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11384 || (TREE_CODE (arg0) == EQ_EXPR
11385 && integer_zerop (TREE_OPERAND (arg0, 1))))
11386 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11387 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11389 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11390 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11391 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11392 || (TREE_CODE (arg1) == EQ_EXPR
11393 && integer_zerop (TREE_OPERAND (arg1, 1))))
11394 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11395 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11397 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11398 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11399 && TREE_CODE (arg1) == INTEGER_CST
11400 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11402 tree tmp1 = fold_convert_loc (loc, type, arg1);
11403 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11404 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11405 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11406 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11407 return
11408 fold_convert_loc (loc, type,
11409 fold_build2_loc (loc, BIT_IOR_EXPR,
11410 type, tmp2, tmp3));
11413 /* (X | Y) & Y is (X, Y). */
11414 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11415 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11416 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11417 /* (X | Y) & X is (Y, X). */
11418 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11419 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11420 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11421 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11422 /* X & (X | Y) is (Y, X). */
11423 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11424 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11425 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11426 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11427 /* X & (Y | X) is (Y, X). */
11428 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11429 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11430 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11431 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11433 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11434 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11435 && INTEGRAL_TYPE_P (type)
11436 && integer_onep (TREE_OPERAND (arg0, 1))
11437 && integer_onep (arg1))
11439 tree tem2;
11440 tem = TREE_OPERAND (arg0, 0);
11441 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11442 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11443 tem, tem2);
11444 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11445 build_zero_cst (TREE_TYPE (tem)));
11447 /* Fold ~X & 1 as (X & 1) == 0. */
11448 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11449 && INTEGRAL_TYPE_P (type)
11450 && integer_onep (arg1))
11452 tree tem2;
11453 tem = TREE_OPERAND (arg0, 0);
11454 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11455 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11456 tem, tem2);
11457 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11458 build_zero_cst (TREE_TYPE (tem)));
11460 /* Fold !X & 1 as X == 0. */
11461 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11462 && integer_onep (arg1))
11464 tem = TREE_OPERAND (arg0, 0);
11465 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11466 build_zero_cst (TREE_TYPE (tem)));
11469 /* Fold (X ^ Y) & Y as ~X & Y. */
11470 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11471 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11473 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11474 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11475 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11476 fold_convert_loc (loc, type, arg1));
11478 /* Fold (X ^ Y) & X as ~Y & X. */
11479 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11480 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11481 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11483 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11484 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11485 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11486 fold_convert_loc (loc, type, arg1));
11488 /* Fold X & (X ^ Y) as X & ~Y. */
11489 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11490 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11492 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11493 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11494 fold_convert_loc (loc, type, arg0),
11495 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11497 /* Fold X & (Y ^ X) as ~Y & X. */
11498 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11499 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11500 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11502 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11503 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11504 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11505 fold_convert_loc (loc, type, arg0));
11508 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11509 multiple of 1 << CST. */
11510 if (TREE_CODE (arg1) == INTEGER_CST)
11512 wide_int cst1 = arg1;
11513 wide_int ncst1 = -cst1;
11514 if ((cst1 & ncst1) == ncst1
11515 && multiple_of_p (type, arg0,
11516 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11517 return fold_convert_loc (loc, type, arg0);
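/* E.g. with CST = 3 the mask is -(1 << 3) = ~7; if Y is a constant
   multiple of 8, then X * Y is one as well, its low three bits are
   already zero, and the BIT_AND_EXPR is an identity.  */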
11520 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11521 bits from CST2. */
11522 if (TREE_CODE (arg1) == INTEGER_CST
11523 && TREE_CODE (arg0) == MULT_EXPR
11524 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11526 wide_int warg1 = arg1;
11527 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11529 if (masked == 0)
11530 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11531 arg0, arg1);
11532 else if (masked != warg1)
11534 /* Avoid the transform if arg1 is a mask of some
11535 mode which allows further optimizations. */
11536 int pop = wi::popcount (warg1);
11537 if (!(pop >= BITS_PER_UNIT
11538 && exact_log2 (pop) != -1
11539 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11540 return fold_build2_loc (loc, code, type, op0,
11541 wide_int_to_tree (type, masked));
11545 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11546 ((A & N) + B) & M -> (A + B) & M
11547 Similarly if (N & M) == 0,
11548 ((A | N) + B) & M -> (A + B) & M
11549 and for - instead of + (or unary - instead of +)
11550 and/or ^ instead of |.
11551 If B is constant and (B & M) == 0, fold into A & M. */
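/* E.g. with M = 0xff (cst = 8) and N = 0x1ff, (N & M) == M, so
   ((A & 0x1ff) + B) & 0xff folds to (A + B) & 0xff: the inner mask
   clears only bits above bit 8, and since carries propagate upward
   those bits cannot influence the low eight bits of the sum.
   Likewise (N & M) == 0 for N = 0x100, so ((A | 0x100) + B) & 0xff
   also folds to (A + B) & 0xff.  */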
11552 if (TREE_CODE (arg1) == INTEGER_CST)
11554 wide_int cst1 = arg1;
11555 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11556 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11557 && (TREE_CODE (arg0) == PLUS_EXPR
11558 || TREE_CODE (arg0) == MINUS_EXPR
11559 || TREE_CODE (arg0) == NEGATE_EXPR)
11560 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11561 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11563 tree pmop[2];
11564 int which = 0;
11565 wide_int cst0;
11567 /* Now we know that arg0 is (C + D) or (C - D) or
11568 -C, and that arg1 (M) is (1LL << cst) - 1.
11569 Store C into PMOP[0] and D into PMOP[1]. */
11570 pmop[0] = TREE_OPERAND (arg0, 0);
11571 pmop[1] = NULL;
11572 if (TREE_CODE (arg0) != NEGATE_EXPR)
11574 pmop[1] = TREE_OPERAND (arg0, 1);
11575 which = 1;
11578 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11579 which = -1;
11581 for (; which >= 0; which--)
11582 switch (TREE_CODE (pmop[which]))
11584 case BIT_AND_EXPR:
11585 case BIT_IOR_EXPR:
11586 case BIT_XOR_EXPR:
11587 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11588 != INTEGER_CST)
11589 break;
11590 cst0 = TREE_OPERAND (pmop[which], 1);
11591 cst0 &= cst1;
11592 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11594 if (cst0 != cst1)
11595 break;
11597 else if (cst0 != 0)
11598 break;
11599 /* If C or D is of the form (A & N) where
11600 (N & M) == M, or of the form (A | N) or
11601 (A ^ N) where (N & M) == 0, replace it with A. */
11602 pmop[which] = TREE_OPERAND (pmop[which], 0);
11603 break;
11604 case INTEGER_CST:
11605 /* If C or D is a constant N where (N & M) == 0, it can be
11606 omitted (assumed 0). */
11607 if ((TREE_CODE (arg0) == PLUS_EXPR
11608 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11609 && (cst1 & pmop[which]) == 0)
11610 pmop[which] = NULL;
11611 break;
11612 default:
11613 break;
11616 /* Only build anything new if we optimized one or both arguments
11617 above. */
11618 if (pmop[0] != TREE_OPERAND (arg0, 0)
11619 || (TREE_CODE (arg0) != NEGATE_EXPR
11620 && pmop[1] != TREE_OPERAND (arg0, 1)))
11622 tree utype = TREE_TYPE (arg0);
11623 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11625 /* Perform the operations in a type that has defined
11626 overflow behavior. */
11627 utype = unsigned_type_for (TREE_TYPE (arg0));
11628 if (pmop[0] != NULL)
11629 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11630 if (pmop[1] != NULL)
11631 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11634 if (TREE_CODE (arg0) == NEGATE_EXPR)
11635 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11636 else if (TREE_CODE (arg0) == PLUS_EXPR)
11638 if (pmop[0] != NULL && pmop[1] != NULL)
11639 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11640 pmop[0], pmop[1]);
11641 else if (pmop[0] != NULL)
11642 tem = pmop[0];
11643 else if (pmop[1] != NULL)
11644 tem = pmop[1];
11645 else
11646 return build_int_cst (type, 0);
11648 else if (pmop[0] == NULL)
11649 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11650 else
11651 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11652 pmop[0], pmop[1]);
11653 /* TEM is now the new binary +, - or unary - replacement. */
11654 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11655 fold_convert_loc (loc, utype, arg1));
11656 return fold_convert_loc (loc, type, tem);
11661 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11662 if (t1 != NULL_TREE)
11663 return t1;
11664 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11665 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11666 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11668 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11670 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11671 if (mask == -1)
11672 return
11673 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11676 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11678 This results in more efficient code for machines without a NOR
11679 instruction. Combine will canonicalize to the first form
11680 which will allow use of NOR instructions provided by the
11681 backend if they exist. */
11682 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11683 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11685 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11686 build2 (BIT_IOR_EXPR, type,
11687 fold_convert_loc (loc, type,
11688 TREE_OPERAND (arg0, 0)),
11689 fold_convert_loc (loc, type,
11690 TREE_OPERAND (arg1, 0))));
11693 /* If arg0 is derived from the address of an object or function, we may
11694 be able to fold this expression using the object or function's
11695 alignment. */
11696 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11698 unsigned HOST_WIDE_INT modulus, residue;
11699 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11701 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11702 integer_onep (arg1));
11704 /* This works because modulus is a power of 2. If this weren't the
11705 case, we'd have to replace it by its greatest power-of-2
11706 divisor: modulus & -modulus. */
11707 if (low < modulus)
11708 return build_int_cst (type, residue & low);
11711 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11712 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11713 if the new mask might be further optimized. */
11714 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11715 || TREE_CODE (arg0) == RSHIFT_EXPR)
11716 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11717 && TREE_CODE (arg1) == INTEGER_CST
11718 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11719 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11720 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11721 < TYPE_PRECISION (TREE_TYPE (arg0))))
11723 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11724 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11725 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11726 tree shift_type = TREE_TYPE (arg0);
11728 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11729 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11730 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11731 && TYPE_PRECISION (TREE_TYPE (arg0))
11732 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11734 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11735 tree arg00 = TREE_OPERAND (arg0, 0);
11736 /* See if more bits can be proven as zero because of
11737 zero extension. */
11738 if (TREE_CODE (arg00) == NOP_EXPR
11739 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11741 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11742 if (TYPE_PRECISION (inner_type)
11743 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11744 && TYPE_PRECISION (inner_type) < prec)
11746 prec = TYPE_PRECISION (inner_type);
11747 /* See if we can shorten the right shift. */
11748 if (shiftc < prec)
11749 shift_type = inner_type;
11750 /* Otherwise X >> C1 is all zeros, so we'll optimize
11751 it into (X, 0) later on by making sure zerobits
11752 is all ones. */
11755 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11756 if (shiftc < prec)
11758 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11759 zerobits <<= prec - shiftc;
11761 /* For an arithmetic shift, if the sign bit could be set, zerobits
11762 can actually contain sign bits, so no transformation is
11763 possible, unless MASK masks them all away. In that
11764 case the shift needs to be converted into a logical shift. */
11765 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11766 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11768 if ((mask & zerobits) == 0)
11769 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11770 else
11771 zerobits = 0;
11775 /* ((X << 16) & 0xff00) is (X, 0). */
11776 if ((mask & zerobits) == mask)
11777 return omit_one_operand_loc (loc, type,
11778 build_int_cst (type, 0), arg0);
11780 newmask = mask | zerobits;
11781 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11783 /* Only do the transformation if NEWMASK is some integer
11784 mode's mask. */
11785 for (prec = BITS_PER_UNIT;
11786 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11787 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11788 break;
11789 if (prec < HOST_BITS_PER_WIDE_INT
11790 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11792 tree newmaskt;
11794 if (shift_type != TREE_TYPE (arg0))
11796 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11797 fold_convert_loc (loc, shift_type,
11798 TREE_OPERAND (arg0, 0)),
11799 TREE_OPERAND (arg0, 1));
11800 tem = fold_convert_loc (loc, type, tem);
11802 else
11803 tem = op0;
11804 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11805 if (!tree_int_cst_equal (newmaskt, arg1))
11806 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11811 goto associate;
11813 case RDIV_EXPR:
11814 /* Don't touch a floating-point divide by zero unless the mode
11815 of the constant can represent infinity. */
11816 if (TREE_CODE (arg1) == REAL_CST
11817 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11818 && real_zerop (arg1))
11819 return NULL_TREE;
11821 /* Optimize A / A to 1.0 if we don't care about
11822 NaNs or Infinities. Skip the transformation
11823 for non-real operands. */
11824 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11825 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11826 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11827 && operand_equal_p (arg0, arg1, 0))
11829 tree r = build_real (TREE_TYPE (arg0), dconst1);
11831 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11834 /* The complex version of the above A / A optimization. */
11835 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11836 && operand_equal_p (arg0, arg1, 0))
11838 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11839 if (! HONOR_NANS (TYPE_MODE (elem_type))
11840 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11842 tree r = build_real (elem_type, dconst1);
11843 /* omit_two_operands will call fold_convert for us. */
11844 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11848 /* (-A) / (-B) -> A / B */
11849 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11850 return fold_build2_loc (loc, RDIV_EXPR, type,
11851 TREE_OPERAND (arg0, 0),
11852 negate_expr (arg1));
11853 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11854 return fold_build2_loc (loc, RDIV_EXPR, type,
11855 negate_expr (arg0),
11856 TREE_OPERAND (arg1, 0));
11858 /* In IEEE floating point, x/1 is not equivalent to x for SNaNs. */
11859 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11860 && real_onep (arg1))
11861 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11863 /* In IEEE floating point, x/-1 is not equivalent to -x for SNaNs. */
11864 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11865 && real_minus_onep (arg1))
11866 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11867 negate_expr (arg0)));
11869 /* If ARG1 is a constant, we can convert this to a multiply by the
11870 reciprocal. This does not have the same rounding properties,
11871 so only do this if -freciprocal-math. We can actually
11872 always safely do it if ARG1 is a power of two, but it's hard to
11873 tell if it is or not in a portable manner. */
11874 if (optimize
11875 && (TREE_CODE (arg1) == REAL_CST
11876 || (TREE_CODE (arg1) == COMPLEX_CST
11877 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11878 || (TREE_CODE (arg1) == VECTOR_CST
11879 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11881 if (flag_reciprocal_math
11882 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11883 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11884 /* Find the reciprocal if optimizing and the result is exact.
11885 TODO: Complex reciprocal not implemented. */
11886 if (TREE_CODE (arg1) != COMPLEX_CST)
11888 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11890 if (inverse)
11891 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
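/* E.g. x / 4.0 becomes x * 0.25 when optimizing, because 0.25 is
   exact; x / 3.0 becomes x * (1.0 / 3.0) only under
   -freciprocal-math, since the reciprocal of 3.0 rounds.  */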
11894 /* Convert A/B/C to A/(B*C). */
11895 if (flag_reciprocal_math
11896 && TREE_CODE (arg0) == RDIV_EXPR)
11897 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11898 fold_build2_loc (loc, MULT_EXPR, type,
11899 TREE_OPERAND (arg0, 1), arg1));
11901 /* Convert A/(B/C) to (A/B)*C. */
11902 if (flag_reciprocal_math
11903 && TREE_CODE (arg1) == RDIV_EXPR)
11904 return fold_build2_loc (loc, MULT_EXPR, type,
11905 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11906 TREE_OPERAND (arg1, 0)),
11907 TREE_OPERAND (arg1, 1));
11909 /* Convert C1/(X*C2) into (C1/C2)/X. */
11910 if (flag_reciprocal_math
11911 && TREE_CODE (arg1) == MULT_EXPR
11912 && TREE_CODE (arg0) == REAL_CST
11913 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11915 tree tem = const_binop (RDIV_EXPR, arg0,
11916 TREE_OPERAND (arg1, 1));
11917 if (tem)
11918 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11919 TREE_OPERAND (arg1, 0));
11922 if (flag_unsafe_math_optimizations)
11924 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11925 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11927 /* Optimize sin(x)/cos(x) as tan(x). */
11928 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11929 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11930 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11931 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11932 CALL_EXPR_ARG (arg1, 0), 0))
11934 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11936 if (tanfn != NULL_TREE)
11937 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11940 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11941 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11942 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11943 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11944 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11945 CALL_EXPR_ARG (arg1, 0), 0))
11947 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11949 if (tanfn != NULL_TREE)
11951 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11952 CALL_EXPR_ARG (arg0, 0));
11953 return fold_build2_loc (loc, RDIV_EXPR, type,
11954 build_real (type, dconst1), tmp);
11958 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11959 NaNs or Infinities. */
11960 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11961 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11962 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11964 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11965 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11967 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11968 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11969 && operand_equal_p (arg00, arg01, 0))
11971 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11973 if (cosfn != NULL_TREE)
11974 return build_call_expr_loc (loc, cosfn, 1, arg00);
11978 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11979 NaNs or Infinities. */
11980 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11981 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11982 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11984 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11985 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11987 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11988 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11989 && operand_equal_p (arg00, arg01, 0))
11991 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11993 if (cosfn != NULL_TREE)
11995 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11996 return fold_build2_loc (loc, RDIV_EXPR, type,
11997 build_real (type, dconst1),
11998 tmp);
12003 /* Optimize pow(x,c)/x as pow(x,c-1). */
12004 if (fcode0 == BUILT_IN_POW
12005 || fcode0 == BUILT_IN_POWF
12006 || fcode0 == BUILT_IN_POWL)
12008 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12009 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12010 if (TREE_CODE (arg01) == REAL_CST
12011 && !TREE_OVERFLOW (arg01)
12012 && operand_equal_p (arg1, arg00, 0))
12014 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12015 REAL_VALUE_TYPE c;
12016 tree arg;
12018 c = TREE_REAL_CST (arg01);
12019 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12020 arg = build_real (type, c);
12021 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12025 /* Optimize a/root(b/c) into a*root(c/b). */
12026 if (BUILTIN_ROOT_P (fcode1))
12028 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12030 if (TREE_CODE (rootarg) == RDIV_EXPR)
12032 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12033 tree b = TREE_OPERAND (rootarg, 0);
12034 tree c = TREE_OPERAND (rootarg, 1);
12036 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12038 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12039 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12043 /* Optimize x/expN(y) into x*expN(-y). */
12044 if (BUILTIN_EXPONENT_P (fcode1))
12046 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12047 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12048 arg1 = build_call_expr_loc (loc,
12049 expfn, 1,
12050 fold_convert_loc (loc, type, arg));
12051 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12054 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12055 if (fcode1 == BUILT_IN_POW
12056 || fcode1 == BUILT_IN_POWF
12057 || fcode1 == BUILT_IN_POWL)
12059 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12060 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12061 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12062 tree neg11 = fold_convert_loc (loc, type,
12063 negate_expr (arg11));
12064 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12065 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12068 return NULL_TREE;
12070 case TRUNC_DIV_EXPR:
12071 /* Optimize (X & (-A)) / A where A is a power of 2,
12072 to X >> log2(A) */
12073 if (TREE_CODE (arg0) == BIT_AND_EXPR
12074 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12075 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12077 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12078 arg1, TREE_OPERAND (arg0, 1));
12079 if (sum && integer_zerop (sum)) {
12080 tree pow2 = build_int_cst (integer_type_node,
12081 wi::exact_log2 (arg1));
12082 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12083 TREE_OPERAND (arg0, 0), pow2);
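/* E.g. with A = 8: X & -8 rounds X down to a multiple of 8, and
   dividing that multiple by 8 yields floor (X / 8), which is the
   arithmetic shift X >> 3 = X >> log2 (8).  */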
12087 /* Fall through */
12089 case FLOOR_DIV_EXPR:
12090 /* Simplify A / (B << N) where A and B are positive and B is
12091 a power of 2, to A >> (N + log2(B)). */
12092 strict_overflow_p = false;
12093 if (TREE_CODE (arg1) == LSHIFT_EXPR
12094 && (TYPE_UNSIGNED (type)
12095 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12097 tree sval = TREE_OPERAND (arg1, 0);
12098 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12100 tree sh_cnt = TREE_OPERAND (arg1, 1);
12101 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12102 wi::exact_log2 (sval));
12104 if (strict_overflow_p)
12105 fold_overflow_warning (("assuming signed overflow does not "
12106 "occur when simplifying A / (B << N)"),
12107 WARN_STRICT_OVERFLOW_MISC);
12109 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12110 sh_cnt, pow2);
12111 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12112 fold_convert_loc (loc, type, arg0), sh_cnt);
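/* E.g. with B = 4 and N = 2: A / (4 << 2) = A / 16, and since
   log2 (4) = 2 this is A >> (N + 2) = A >> 4.  */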
12116 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12117 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12118 if (INTEGRAL_TYPE_P (type)
12119 && TYPE_UNSIGNED (type)
12120 && code == FLOOR_DIV_EXPR)
12121 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12123 /* Fall through */
12125 case ROUND_DIV_EXPR:
12126 case CEIL_DIV_EXPR:
12127 case EXACT_DIV_EXPR:
12128 if (integer_zerop (arg1))
12129 return NULL_TREE;
12130 /* X / -1 is -X. */
12131 if (!TYPE_UNSIGNED (type)
12132 && TREE_CODE (arg1) == INTEGER_CST
12133 && wi::eq_p (arg1, -1))
12134 return fold_convert_loc (loc, type, negate_expr (arg0));
12136 /* Convert -A / -B to A / B when the type is signed and overflow is
12137 undefined. */
12138 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12139 && TREE_CODE (arg0) == NEGATE_EXPR
12140 && negate_expr_p (arg1))
12142 if (INTEGRAL_TYPE_P (type))
12143 fold_overflow_warning (("assuming signed overflow does not occur "
12144 "when distributing negation across "
12145 "division"),
12146 WARN_STRICT_OVERFLOW_MISC);
12147 return fold_build2_loc (loc, code, type,
12148 fold_convert_loc (loc, type,
12149 TREE_OPERAND (arg0, 0)),
12150 fold_convert_loc (loc, type,
12151 negate_expr (arg1)));
12153 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12154 && TREE_CODE (arg1) == NEGATE_EXPR
12155 && negate_expr_p (arg0))
12157 if (INTEGRAL_TYPE_P (type))
12158 fold_overflow_warning (("assuming signed overflow does not occur "
12159 "when distributing negation across "
12160 "division"),
12161 WARN_STRICT_OVERFLOW_MISC);
12162 return fold_build2_loc (loc, code, type,
12163 fold_convert_loc (loc, type,
12164 negate_expr (arg0)),
12165 fold_convert_loc (loc, type,
12166 TREE_OPERAND (arg1, 0)));
12169 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12170 operation, EXACT_DIV_EXPR.
12172 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12173 At one time others generated faster code, but it's not clear whether they
12174 do after the last round of changes to the DIV code in expmed.c. */
12175 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12176 && multiple_of_p (type, arg0, arg1))
12177 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12179 strict_overflow_p = false;
12180 if (TREE_CODE (arg1) == INTEGER_CST
12181 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12182 &strict_overflow_p)))
12184 if (strict_overflow_p)
12185 fold_overflow_warning (("assuming signed overflow does not occur "
12186 "when simplifying division"),
12187 WARN_STRICT_OVERFLOW_MISC);
12188 return fold_convert_loc (loc, type, tem);
12191 return NULL_TREE;
12193 case CEIL_MOD_EXPR:
12194 case FLOOR_MOD_EXPR:
12195 case ROUND_MOD_EXPR:
12196 case TRUNC_MOD_EXPR:
12197 /* X % -1 is zero. */
12198 if (!TYPE_UNSIGNED (type)
12199 && TREE_CODE (arg1) == INTEGER_CST
12200 && wi::eq_p (arg1, -1))
12201 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12203 /* X % -C is the same as X % C. */
12204 if (code == TRUNC_MOD_EXPR
12205 && TYPE_SIGN (type) == SIGNED
12206 && TREE_CODE (arg1) == INTEGER_CST
12207 && !TREE_OVERFLOW (arg1)
12208 && wi::neg_p (arg1)
12209 && !TYPE_OVERFLOW_TRAPS (type)
12210 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12211 && !sign_bit_p (arg1, arg1))
12212 return fold_build2_loc (loc, code, type,
12213 fold_convert_loc (loc, type, arg0),
12214 fold_convert_loc (loc, type,
12215 negate_expr (arg1)));
12217 /* X % -Y is the same as X % Y. */
12218 if (code == TRUNC_MOD_EXPR
12219 && !TYPE_UNSIGNED (type)
12220 && TREE_CODE (arg1) == NEGATE_EXPR
12221 && !TYPE_OVERFLOW_TRAPS (type))
12222 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12223 fold_convert_loc (loc, type,
12224 TREE_OPERAND (arg1, 0)));
12226 strict_overflow_p = false;
12227 if (TREE_CODE (arg1) == INTEGER_CST
12228 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12229 &strict_overflow_p)))
12231 if (strict_overflow_p)
12232 fold_overflow_warning (("assuming signed overflow does not occur "
12233 "when simplifying modulus"),
12234 WARN_STRICT_OVERFLOW_MISC);
12235 return fold_convert_loc (loc, type, tem);
12238 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12239 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12240 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12241 && (TYPE_UNSIGNED (type)
12242 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12244 tree c = arg1;
12245 /* Also optimize A % (C << N) where C is a power of 2,
12246 to A & ((C << N) - 1). */
12247 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12248 c = TREE_OPERAND (arg1, 0);
12250 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12252 tree mask
12253 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12254 build_int_cst (TREE_TYPE (arg1), 1));
12255 if (strict_overflow_p)
12256 fold_overflow_warning (("assuming signed overflow does not "
12257 "occur when simplifying "
12258 "X % (power of two)"),
12259 WARN_STRICT_OVERFLOW_MISC);
12260 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12261 fold_convert_loc (loc, type, arg0),
12262 fold_convert_loc (loc, type, mask));
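  /* Worked examples (illustrative), for nonnegative X:
       x % 8        becomes  x & 7
       x % (4 << n) becomes  x & ((4 << n) - 1)
     since a power of two minus one is a mask of the low-order bits.  */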
12266 return NULL_TREE;
12268 case LROTATE_EXPR:
12269 case RROTATE_EXPR:
12270 if (integer_all_onesp (arg0))
12271 return omit_one_operand_loc (loc, type, arg0, arg1);
12272 goto shift;
12274 case RSHIFT_EXPR:
12275 /* Optimize -1 >> x for arithmetic right shifts. */
12276 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12277 && tree_expr_nonnegative_p (arg1))
12278 return omit_one_operand_loc (loc, type, arg0, arg1);
12279 /* ... fall through ... */
12281 case LSHIFT_EXPR:
12282 shift:
12283 if (integer_zerop (arg1))
12284 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12285 if (integer_zerop (arg0))
12286 return omit_one_operand_loc (loc, type, arg0, arg1);
12288 /* Prefer vector1 << scalar to vector1 << vector2
12289 if vector2 is uniform. */
12290 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12291 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12292 return fold_build2_loc (loc, code, type, op0, tem);
12294 /* Since negative shift count is not well-defined,
12295 don't try to compute it in the compiler. */
12296 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12297 return NULL_TREE;
12299 prec = element_precision (type);
12301 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12302 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12303 && tree_to_uhwi (arg1) < prec
12304 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12305 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12307 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12308 + tree_to_uhwi (arg1));
12310 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12311 being well defined. */
12312 if (low >= prec)
12314 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12315 low = low % prec;
12316 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12317 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12318 TREE_OPERAND (arg0, 0));
12319 else
12320 low = prec - 1;
12323 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12324 build_int_cst (TREE_TYPE (arg1), low));
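  /* Worked example (illustrative): (x << 3) << 5 becomes x << 8.
     If the combined count reaches the precision, e.g. (x << 20) << 20
     on a 32-bit unsigned type, the result folds to 0; a signed
     (x >> 20) >> 20 clamps to x >> 31, and rotate counts wrap
     modulo the precision.  */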
12327 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12328 into x & ((unsigned)-1 >> c) for unsigned types. */
12329 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12330 || (TYPE_UNSIGNED (type)
12331 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12332 && tree_fits_uhwi_p (arg1)
12333 && tree_to_uhwi (arg1) < prec
12334 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12335 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12337 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12338 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12339 tree lshift;
12340 tree arg00;
12342 if (low0 == low1)
12344 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12346 lshift = build_minus_one_cst (type);
12347 lshift = const_binop (code, lshift, arg1);
12349 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
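  /* Worked example (illustrative), for a 32-bit unsigned x:
       (x >> 4) << 4  becomes  x & 0xfffffff0
       (x << 4) >> 4  becomes  x & 0x0fffffff
     i.e. the shift pair merely clears the bits shifted out.  */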
12353 /* Rewrite an LROTATE_EXPR by a constant into an
12354 RROTATE_EXPR by a new constant. */
12355 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12357 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12358 tem = const_binop (MINUS_EXPR, tem, arg1);
12359 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
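  /* Worked example (illustrative): on a 32-bit type, a rotate left
     by 5 is canonicalized to a rotate right by 27, so later code only
     has to handle RROTATE_EXPR by a constant.  */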
12362 /* If we have a rotate of a bit operation with the rotate count and
12363 the second operand of the bit operation both constant,
12364 permute the two operations. */
12365 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12366 && (TREE_CODE (arg0) == BIT_AND_EXPR
12367 || TREE_CODE (arg0) == BIT_IOR_EXPR
12368 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12369 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12370 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12371 fold_build2_loc (loc, code, type,
12372 TREE_OPERAND (arg0, 0), arg1),
12373 fold_build2_loc (loc, code, type,
12374 TREE_OPERAND (arg0, 1), arg1));
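  /* Worked example (illustrative), 32-bit: rotating (x & 0xff00)
     right by 8 becomes (x rrotate 8) & (0xff00 rrotate 8), i.e.
     (x rrotate 8) & 0xff, letting the mask fold to a constant.  */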
12376 /* Two consecutive rotates adding up to some integer
12377 multiple of the precision of the type can be ignored. */
12378 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12379 && TREE_CODE (arg0) == RROTATE_EXPR
12380 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12381 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12382 prec) == 0)
12383 return TREE_OPERAND (arg0, 0);
12385 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12386 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12387 if the latter can be further optimized. */
12388 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12389 && TREE_CODE (arg0) == BIT_AND_EXPR
12390 && TREE_CODE (arg1) == INTEGER_CST
12391 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12393 tree mask = fold_build2_loc (loc, code, type,
12394 fold_convert_loc (loc, type,
12395 TREE_OPERAND (arg0, 1)),
12396 arg1);
12397 tree shift = fold_build2_loc (loc, code, type,
12398 fold_convert_loc (loc, type,
12399 TREE_OPERAND (arg0, 0)),
12400 arg1);
12401 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12402 if (tem)
12403 return tem;
12406 return NULL_TREE;
12408 case MIN_EXPR:
12409 if (operand_equal_p (arg0, arg1, 0))
12410 return omit_one_operand_loc (loc, type, arg0, arg1);
12411 if (INTEGRAL_TYPE_P (type)
12412 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12413 return omit_one_operand_loc (loc, type, arg1, arg0);
12414 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12415 if (tem)
12416 return tem;
12417 goto associate;
12419 case MAX_EXPR:
12420 if (operand_equal_p (arg0, arg1, 0))
12421 return omit_one_operand_loc (loc, type, arg0, arg1);
12422 if (INTEGRAL_TYPE_P (type)
12423 && TYPE_MAX_VALUE (type)
12424 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12425 return omit_one_operand_loc (loc, type, arg1, arg0);
12426 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12427 if (tem)
12428 return tem;
12429 goto associate;
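  /* Worked example (illustrative): MIN (x, INT_MIN) folds to INT_MIN
     and MAX (x, INT_MAX) folds to INT_MAX, keeping x only for its
     side effects.  */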
12431 case TRUTH_ANDIF_EXPR:
12432 /* Note that the operands of this must be ints
12433 and their values must be 0 or 1.
12434 ("true" is a fixed value perhaps depending on the language.) */
12435 /* If first arg is constant zero, return it. */
12436 if (integer_zerop (arg0))
12437 return fold_convert_loc (loc, type, arg0);
12438 case TRUTH_AND_EXPR:
12439 /* If either arg is constant true, drop it. */
12440 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12441 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12442 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12443 /* Preserve sequence points. */
12444 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12445 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12446 /* If second arg is constant zero, result is zero, but first arg
12447 must be evaluated. */
12448 if (integer_zerop (arg1))
12449 return omit_one_operand_loc (loc, type, arg1, arg0);
12450 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12451 case will be handled here. */
12452 if (integer_zerop (arg0))
12453 return omit_one_operand_loc (loc, type, arg0, arg1);
12455 /* !X && X is always false. */
12456 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12457 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12458 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12459 /* X && !X is always false. */
12460 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12461 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12462 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12464 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12465 means A >= Y && A != MAX, but in this case we know that
12466 A < X <= MAX. */
12468 if (!TREE_SIDE_EFFECTS (arg0)
12469 && !TREE_SIDE_EFFECTS (arg1))
12471 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12472 if (tem && !operand_equal_p (tem, arg0, 0))
12473 return fold_build2_loc (loc, code, type, tem, arg1);
12475 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12476 if (tem && !operand_equal_p (tem, arg1, 0))
12477 return fold_build2_loc (loc, code, type, arg0, tem);
12480 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12481 != NULL_TREE)
12482 return tem;
12484 return NULL_TREE;
12486 case TRUTH_ORIF_EXPR:
12487 /* Note that the operands of this must be ints
12488 and their values must be 0 or true.
12489 ("true" is a fixed value perhaps depending on the language.) */
12490 /* If first arg is constant true, return it. */
12491 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12492 return fold_convert_loc (loc, type, arg0);
12493 case TRUTH_OR_EXPR:
12494 /* If either arg is constant zero, drop it. */
12495 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12496 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12497 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12498 /* Preserve sequence points. */
12499 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12500 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12501 /* If second arg is constant true, result is true, but we must
12502 evaluate first arg. */
12503 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12504 return omit_one_operand_loc (loc, type, arg1, arg0);
12505 /* Likewise for first arg, but note this only occurs here for
12506 TRUTH_OR_EXPR. */
12507 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12508 return omit_one_operand_loc (loc, type, arg0, arg1);
12510 /* !X || X is always true. */
12511 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12512 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12513 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12514 /* X || !X is always true. */
12515 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12516 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12517 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12519 /* (X && !Y) || (!X && Y) is X ^ Y */
12520 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12521 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12523 tree a0, a1, l0, l1, n0, n1;
12525 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12526 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12528 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12529 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12531 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12532 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12534 if ((operand_equal_p (n0, a0, 0)
12535 && operand_equal_p (n1, a1, 0))
12536 || (operand_equal_p (n0, a1, 0)
12537 && operand_equal_p (n1, a0, 0)))
12538 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
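  /* Worked example (illustrative): for arg0 = (x && !y) and
     arg1 = (!x && y), the negated operands n0 = !x and n1 = y match
     a0 and a1, so the result is rebuilt as l0 ^ n1 = x ^ y.  */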
12541 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12542 != NULL_TREE)
12543 return tem;
12545 return NULL_TREE;
12547 case TRUTH_XOR_EXPR:
12548 /* If the second arg is constant zero, drop it. */
12549 if (integer_zerop (arg1))
12550 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12551 /* If the second arg is constant true, this is a logical inversion. */
12552 if (integer_onep (arg1))
12554 tem = invert_truthvalue_loc (loc, arg0);
12555 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12557 /* Identical arguments cancel to zero. */
12558 if (operand_equal_p (arg0, arg1, 0))
12559 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12561 /* !X ^ X is always true. */
12562 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12563 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12564 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12566 /* X ^ !X is always true. */
12567 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12568 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12569 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12571 return NULL_TREE;
12573 case EQ_EXPR:
12574 case NE_EXPR:
12575 STRIP_NOPS (arg0);
12576 STRIP_NOPS (arg1);
12578 tem = fold_comparison (loc, code, type, op0, op1);
12579 if (tem != NULL_TREE)
12580 return tem;
12582 /* bool_var != 0 becomes bool_var. */
12583 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12584 && code == NE_EXPR)
12585 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12587 /* bool_var == 1 becomes bool_var. */
12588 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12589 && code == EQ_EXPR)
12590 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12592 /* bool_var != 1 becomes !bool_var. */
12593 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12594 && code == NE_EXPR)
12595 return fold_convert_loc (loc, type,
12596 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12597 TREE_TYPE (arg0), arg0));
12599 /* bool_var == 0 becomes !bool_var. */
12600 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12601 && code == EQ_EXPR)
12602 return fold_convert_loc (loc, type,
12603 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12604 TREE_TYPE (arg0), arg0));
12606 /* !exp != 0 becomes !exp */
12607 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12608 && code == NE_EXPR)
12609 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12611 /* If this is an equality comparison of the address of two non-weak,
12612 unaliased symbols neither of which is extern (since we do not
12613 have access to attributes for externs), then we know the result. */
12614 if (TREE_CODE (arg0) == ADDR_EXPR
12615 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12616 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12617 && ! lookup_attribute ("alias",
12618 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12619 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12620 && TREE_CODE (arg1) == ADDR_EXPR
12621 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12622 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12623 && ! lookup_attribute ("alias",
12624 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12625 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12627 /* We know that we're looking at the address of two
12628 non-weak, unaliased, static _DECL nodes.
12630 It is both wasteful and incorrect to call operand_equal_p
12631 to compare the two ADDR_EXPR nodes. It is wasteful in that
12632 all we need to do is test pointer equality for the arguments
12633 to the two ADDR_EXPR nodes. It is incorrect to use
12634 operand_equal_p as that function is NOT equivalent to a
12635 C equality test. It can in fact return false for two
12636 objects which would test as equal using the C equality
12637 operator. */
12638 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12639 return constant_boolean_node (equal
12640 ? code == EQ_EXPR : code != EQ_EXPR,
12641 type);
12644 /* Similarly for a NEGATE_EXPR. */
12645 if (TREE_CODE (arg0) == NEGATE_EXPR
12646 && TREE_CODE (arg1) == INTEGER_CST
12647 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12648 arg1)))
12649 && TREE_CODE (tem) == INTEGER_CST
12650 && !TREE_OVERFLOW (tem))
12651 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12653 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12654 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12655 && TREE_CODE (arg1) == INTEGER_CST
12656 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12657 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12658 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12659 fold_convert_loc (loc,
12660 TREE_TYPE (arg0),
12661 arg1),
12662 TREE_OPERAND (arg0, 1)));
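  /* Worked example (illustrative): (x ^ 3) == 5 becomes x == 6,
     since 3 ^ 5 == 6 folds at compile time.  */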
12664 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12665 if ((TREE_CODE (arg0) == PLUS_EXPR
12666 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12667 || TREE_CODE (arg0) == MINUS_EXPR)
12668 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12669 0)),
12670 arg1, 0)
12671 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12672 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12674 tree val = TREE_OPERAND (arg0, 1);
12675 return omit_two_operands_loc (loc, type,
12676 fold_build2_loc (loc, code, type,
12677 val,
12678 build_int_cst (TREE_TYPE (val),
12679 0)),
12680 TREE_OPERAND (arg0, 0), arg1);
12683 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12684 if (TREE_CODE (arg0) == MINUS_EXPR
12685 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12686 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12687 1)),
12688 arg1, 0)
12689 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12691 return omit_two_operands_loc (loc, type,
12692 code == NE_EXPR
12693 ? boolean_true_node : boolean_false_node,
12694 TREE_OPERAND (arg0, 1), arg1);
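  /* Worked example (illustrative): 7 - x == x would require
     2 * x == 7, which has no integer solution since 7 is odd, so
     the comparison folds to false (and != folds to true).  */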
12697 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12698 if (TREE_CODE (arg0) == ABS_EXPR
12699 && (integer_zerop (arg1) || real_zerop (arg1)))
12700 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12702 /* If this is an EQ or NE comparison with zero and ARG0 is
12703 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12704 two operations, but the latter can be done in one less insn
12705 on machines that have only two-operand insns or on which a
12706 constant cannot be the first operand. */
12707 if (TREE_CODE (arg0) == BIT_AND_EXPR
12708 && integer_zerop (arg1))
12710 tree arg00 = TREE_OPERAND (arg0, 0);
12711 tree arg01 = TREE_OPERAND (arg0, 1);
12712 if (TREE_CODE (arg00) == LSHIFT_EXPR
12713 && integer_onep (TREE_OPERAND (arg00, 0)))
12715 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12716 arg01, TREE_OPERAND (arg00, 1));
12717 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12718 build_int_cst (TREE_TYPE (arg0), 1));
12719 return fold_build2_loc (loc, code, type,
12720 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12721 arg1);
12723 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12724 && integer_onep (TREE_OPERAND (arg01, 0)))
12726 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12727 arg00, TREE_OPERAND (arg01, 1));
12728 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12729 build_int_cst (TREE_TYPE (arg0), 1));
12730 return fold_build2_loc (loc, code, type,
12731 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12732 arg1);
12736 /* If this is an NE or EQ comparison of zero against the result of a
12737 signed MOD operation whose second operand is a power of 2, make
12738 the MOD operation unsigned since it is simpler and equivalent. */
12739 if (integer_zerop (arg1)
12740 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12741 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12742 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12743 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12744 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12745 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12747 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12748 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12749 fold_convert_loc (loc, newtype,
12750 TREE_OPERAND (arg0, 0)),
12751 fold_convert_loc (loc, newtype,
12752 TREE_OPERAND (arg0, 1)));
12754 return fold_build2_loc (loc, code, type, newmod,
12755 fold_convert_loc (loc, newtype, arg1));
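  /* Worked example (illustrative): for signed int x,
     (x % 4) == 0 becomes ((unsigned) x % 4U) == 0, which the
     power-of-two rule above can then reduce to
     ((unsigned) x & 3U) == 0.  */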
12758 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12759 C1 is a valid shift constant, and C2 is a power of two, i.e.
12760 a single bit. */
12761 if (TREE_CODE (arg0) == BIT_AND_EXPR
12762 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12763 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12764 == INTEGER_CST
12765 && integer_pow2p (TREE_OPERAND (arg0, 1))
12766 && integer_zerop (arg1))
12768 tree itype = TREE_TYPE (arg0);
12769 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12770 prec = TYPE_PRECISION (itype);
12772 /* Check for a valid shift count. */
12773 if (wi::ltu_p (arg001, prec))
12775 tree arg01 = TREE_OPERAND (arg0, 1);
12776 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12777 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12778 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12779 can be rewritten as (X & (C2 << C1)) != 0. */
12780 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12782 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12783 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12784 return fold_build2_loc (loc, code, type, tem,
12785 fold_convert_loc (loc, itype, arg1));
12787 /* Otherwise, for signed (arithmetic) shifts,
12788 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12789 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12790 else if (!TYPE_UNSIGNED (itype))
12791 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12792 arg000, build_int_cst (itype, 0));
12793 /* Otherwise, for unsigned (logical) shifts,
12794 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12795 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12796 else
12797 return omit_one_operand_loc (loc, type,
12798 code == EQ_EXPR ? integer_one_node
12799 : integer_zero_node,
12800 arg000);
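  /* Worked examples (illustrative), for a 32-bit x:
       ((x >> 3) & 4) != 0   becomes  (x & 32) != 0   (4 << 3 fits)
       ((x >> 31) & 2) != 0  becomes  x < 0           (signed x)
       ((x >> 31) & 2) != 0  becomes  (x, false)      (unsigned x)  */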
12804 /* If we have (A & C) == C where C is a power of 2, convert this into
12805 (A & C) != 0. Similarly for NE_EXPR. */
12806 if (TREE_CODE (arg0) == BIT_AND_EXPR
12807 && integer_pow2p (TREE_OPERAND (arg0, 1))
12808 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12809 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12810 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12811 integer_zero_node));
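  /* Worked example (illustrative): (a & 8) == 8 tests a single bit,
     so it is canonicalized to (a & 8) != 0.  */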
12813 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12814 bit, then fold the expression into A < 0 or A >= 0. */
12815 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12816 if (tem)
12817 return tem;
12819 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12820 Similarly for NE_EXPR. */
12821 if (TREE_CODE (arg0) == BIT_AND_EXPR
12822 && TREE_CODE (arg1) == INTEGER_CST
12823 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12825 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12826 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12827 TREE_OPERAND (arg0, 1));
12828 tree dandnotc
12829 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12830 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12831 notc);
12832 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12833 if (integer_nonzerop (dandnotc))
12834 return omit_one_operand_loc (loc, type, rslt, arg0);
12837 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12838 Similarly for NE_EXPR. */
12839 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12840 && TREE_CODE (arg1) == INTEGER_CST
12841 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12843 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12844 tree candnotd
12845 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12846 TREE_OPERAND (arg0, 1),
12847 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12848 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12849 if (integer_nonzerop (candnotd))
12850 return omit_one_operand_loc (loc, type, rslt, arg0);
12853 /* If this is a comparison of a field, we may be able to simplify it. */
12854 if ((TREE_CODE (arg0) == COMPONENT_REF
12855 || TREE_CODE (arg0) == BIT_FIELD_REF)
12856 /* Handle the constant case even without -O
12857 to make sure the warnings are given. */
12858 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12860 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12861 if (t1)
12862 return t1;
12865 /* Optimize comparisons of strlen vs zero to a compare of the
12866 first character of the string vs zero. To wit,
12867 strlen(ptr) == 0 => *ptr == 0
12868 strlen(ptr) != 0 => *ptr != 0
12869 Other cases should reduce to one of these two (or a constant)
12870 due to the return value of strlen being unsigned. */
12871 if (TREE_CODE (arg0) == CALL_EXPR
12872 && integer_zerop (arg1))
12874 tree fndecl = get_callee_fndecl (arg0);
12876 if (fndecl
12877 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12878 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12879 && call_expr_nargs (arg0) == 1
12880 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12882 tree iref = build_fold_indirect_ref_loc (loc,
12883 CALL_EXPR_ARG (arg0, 0));
12884 return fold_build2_loc (loc, code, type, iref,
12885 build_int_cst (TREE_TYPE (iref), 0));
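  /* Worked example (illustrative): strlen (p) == 0 becomes *p == 0,
     testing only the first character instead of walking the whole
     string.  */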
12889 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12890 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12891 if (TREE_CODE (arg0) == RSHIFT_EXPR
12892 && integer_zerop (arg1)
12893 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12895 tree arg00 = TREE_OPERAND (arg0, 0);
12896 tree arg01 = TREE_OPERAND (arg0, 1);
12897 tree itype = TREE_TYPE (arg00);
12898 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
12900 if (TYPE_UNSIGNED (itype))
12902 itype = signed_type_for (itype);
12903 arg00 = fold_convert_loc (loc, itype, arg00);
12905 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12906 type, arg00, build_zero_cst (itype));
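  /* Worked example (illustrative): for a 32-bit int x,
     (x >> 31) != 0 becomes x < 0 and (x >> 31) == 0 becomes x >= 0;
     an unsigned x is first converted to the signed type.  */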
12910 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12911 if (integer_zerop (arg1)
12912 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12913 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12914 TREE_OPERAND (arg0, 1));
12916 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12917 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12918 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12919 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12920 build_zero_cst (TREE_TYPE (arg0)));
12921 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12922 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12923 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12924 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12925 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12926 build_zero_cst (TREE_TYPE (arg0)));
12928 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12929 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12930 && TREE_CODE (arg1) == INTEGER_CST
12931 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12932 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12933 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12934 TREE_OPERAND (arg0, 1), arg1));
12936 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12937 (X & C) == 0 when C is a single bit. */
12938 if (TREE_CODE (arg0) == BIT_AND_EXPR
12939 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12940 && integer_zerop (arg1)
12941 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12943 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12944 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12945 TREE_OPERAND (arg0, 1));
12946 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12947 type, tem,
12948 fold_convert_loc (loc, TREE_TYPE (arg0),
12949 arg1));
12952 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12953 constant C is a power of two, i.e. a single bit. */
12954 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12955 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12956 && integer_zerop (arg1)
12957 && integer_pow2p (TREE_OPERAND (arg0, 1))
12958 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12959 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12961 tree arg00 = TREE_OPERAND (arg0, 0);
12962 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12963 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12966 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12967 when C is a power of two, i.e. a single bit. */
12968 if (TREE_CODE (arg0) == BIT_AND_EXPR
12969 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12970 && integer_zerop (arg1)
12971 && integer_pow2p (TREE_OPERAND (arg0, 1))
12972 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12973 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12975 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12976 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12977 arg000, TREE_OPERAND (arg0, 1));
12978 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12979 tem, build_int_cst (TREE_TYPE (tem), 0));
12982 if (integer_zerop (arg1)
12983 && tree_expr_nonzero_p (arg0))
12985 tree res = constant_boolean_node (code == NE_EXPR, type);
12986 return omit_one_operand_loc (loc, type, res, arg0);
12989 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12990 if (TREE_CODE (arg0) == NEGATE_EXPR
12991 && TREE_CODE (arg1) == NEGATE_EXPR)
12992 return fold_build2_loc (loc, code, type,
12993 TREE_OPERAND (arg0, 0),
12994 fold_convert_loc (loc, TREE_TYPE (arg0),
12995 TREE_OPERAND (arg1, 0)));
12997 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12998 if (TREE_CODE (arg0) == BIT_AND_EXPR
12999 && TREE_CODE (arg1) == BIT_AND_EXPR)
13001 tree arg00 = TREE_OPERAND (arg0, 0);
13002 tree arg01 = TREE_OPERAND (arg0, 1);
13003 tree arg10 = TREE_OPERAND (arg1, 0);
13004 tree arg11 = TREE_OPERAND (arg1, 1);
13005 tree itype = TREE_TYPE (arg0);
13007 if (operand_equal_p (arg01, arg11, 0))
13008 return fold_build2_loc (loc, code, type,
13009 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13010 fold_build2_loc (loc,
13011 BIT_XOR_EXPR, itype,
13012 arg00, arg10),
13013 arg01),
13014 build_zero_cst (itype));
13016 if (operand_equal_p (arg01, arg10, 0))
13017 return fold_build2_loc (loc, code, type,
13018 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13019 fold_build2_loc (loc,
13020 BIT_XOR_EXPR, itype,
13021 arg00, arg11),
13022 arg01),
13023 build_zero_cst (itype));
13025 if (operand_equal_p (arg00, arg11, 0))
13026 return fold_build2_loc (loc, code, type,
13027 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13028 fold_build2_loc (loc,
13029 BIT_XOR_EXPR, itype,
13030 arg01, arg10),
13031 arg00),
13032 build_zero_cst (itype));
13034 if (operand_equal_p (arg00, arg10, 0))
13035 return fold_build2_loc (loc, code, type,
13036 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13037 fold_build2_loc (loc,
13038 BIT_XOR_EXPR, itype,
13039 arg01, arg11),
13040 arg00),
13041 build_zero_cst (itype));
13044 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13045 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13047 tree arg00 = TREE_OPERAND (arg0, 0);
13048 tree arg01 = TREE_OPERAND (arg0, 1);
13049 tree arg10 = TREE_OPERAND (arg1, 0);
13050 tree arg11 = TREE_OPERAND (arg1, 1);
13051 tree itype = TREE_TYPE (arg0);
13053 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13054 operand_equal_p guarantees no side-effects so we don't need
13055 to use omit_one_operand on Z. */
13056 if (operand_equal_p (arg01, arg11, 0))
13057 return fold_build2_loc (loc, code, type, arg00,
13058 fold_convert_loc (loc, TREE_TYPE (arg00),
13059 arg10));
13060 if (operand_equal_p (arg01, arg10, 0))
13061 return fold_build2_loc (loc, code, type, arg00,
13062 fold_convert_loc (loc, TREE_TYPE (arg00),
13063 arg11));
13064 if (operand_equal_p (arg00, arg11, 0))
13065 return fold_build2_loc (loc, code, type, arg01,
13066 fold_convert_loc (loc, TREE_TYPE (arg01),
13067 arg10));
13068 if (operand_equal_p (arg00, arg10, 0))
13069 return fold_build2_loc (loc, code, type, arg01,
13070 fold_convert_loc (loc, TREE_TYPE (arg01),
13071 arg11));
13073 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13074 if (TREE_CODE (arg01) == INTEGER_CST
13075 && TREE_CODE (arg11) == INTEGER_CST)
13077 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13078 fold_convert_loc (loc, itype, arg11));
13079 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13080 return fold_build2_loc (loc, code, type, tem,
13081 fold_convert_loc (loc, itype, arg10));
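  /* Worked example (illustrative): (x ^ 3) == (y ^ 5) becomes
     (x ^ 6) == y, since xoring both sides by 5 cancels one constant
     and merges the other (3 ^ 5 == 6).  */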
13085 /* Attempt to simplify equality/inequality comparisons of complex
13086 values. Only lower the comparison if the result is known or
13087 can be simplified to a single scalar comparison. */
13088 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13089 || TREE_CODE (arg0) == COMPLEX_CST)
13090 && (TREE_CODE (arg1) == COMPLEX_EXPR
13091 || TREE_CODE (arg1) == COMPLEX_CST))
13093 tree real0, imag0, real1, imag1;
13094 tree rcond, icond;
13096 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13098 real0 = TREE_OPERAND (arg0, 0);
13099 imag0 = TREE_OPERAND (arg0, 1);
13101 else
13103 real0 = TREE_REALPART (arg0);
13104 imag0 = TREE_IMAGPART (arg0);
13107 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13109 real1 = TREE_OPERAND (arg1, 0);
13110 imag1 = TREE_OPERAND (arg1, 1);
13112 else
13114 real1 = TREE_REALPART (arg1);
13115 imag1 = TREE_IMAGPART (arg1);
13118 rcond = fold_binary_loc (loc, code, type, real0, real1);
13119 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13121 if (integer_zerop (rcond))
13123 if (code == EQ_EXPR)
13124 return omit_two_operands_loc (loc, type, boolean_false_node,
13125 imag0, imag1);
13126 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13128 else
13130 if (code == NE_EXPR)
13131 return omit_two_operands_loc (loc, type, boolean_true_node,
13132 imag0, imag1);
13133 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13137 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13138 if (icond && TREE_CODE (icond) == INTEGER_CST)
13140 if (integer_zerop (icond))
13142 if (code == EQ_EXPR)
13143 return omit_two_operands_loc (loc, type, boolean_false_node,
13144 real0, real1);
13145 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13147 else
13149 if (code == NE_EXPR)
13150 return omit_two_operands_loc (loc, type, boolean_true_node,
13151 real0, real1);
13152 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13157 return NULL_TREE;
13159 case LT_EXPR:
13160 case GT_EXPR:
13161 case LE_EXPR:
13162 case GE_EXPR:
13163 tem = fold_comparison (loc, code, type, op0, op1);
13164 if (tem != NULL_TREE)
13165 return tem;
13167 /* Transform comparisons of the form X +- C CMP X. */
13168 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13169 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13170 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13171 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13172 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13173 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13175 tree arg01 = TREE_OPERAND (arg0, 1);
13176 enum tree_code code0 = TREE_CODE (arg0);
13177 int is_positive;
13179 if (TREE_CODE (arg01) == REAL_CST)
13180 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13181 else
13182 is_positive = tree_int_cst_sgn (arg01);
13184 /* (X - c) > X becomes false. */
13185 if (code == GT_EXPR
13186 && ((code0 == MINUS_EXPR && is_positive >= 0)
13187 || (code0 == PLUS_EXPR && is_positive <= 0)))
13189 if (TREE_CODE (arg01) == INTEGER_CST
13190 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13191 fold_overflow_warning (("assuming signed overflow does not "
13192 "occur when assuming that (X - c) > X "
13193 "is always false"),
13194 WARN_STRICT_OVERFLOW_ALL);
13195 return constant_boolean_node (0, type);
13198 /* Likewise (X + c) < X becomes false. */
13199 if (code == LT_EXPR
13200 && ((code0 == PLUS_EXPR && is_positive >= 0)
13201 || (code0 == MINUS_EXPR && is_positive <= 0)))
13203 if (TREE_CODE (arg01) == INTEGER_CST
13204 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13205 fold_overflow_warning (("assuming signed overflow does not "
13206 "occur when assuming that "
13207 "(X + c) < X is always false"),
13208 WARN_STRICT_OVERFLOW_ALL);
13209 return constant_boolean_node (0, type);
13212 /* Convert (X - c) <= X to true. */
13213 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13214 && code == LE_EXPR
13215 && ((code0 == MINUS_EXPR && is_positive >= 0)
13216 || (code0 == PLUS_EXPR && is_positive <= 0)))
13218 if (TREE_CODE (arg01) == INTEGER_CST
13219 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13220 fold_overflow_warning (("assuming signed overflow does not "
13221 "occur when assuming that "
13222 "(X - c) <= X is always true"),
13223 WARN_STRICT_OVERFLOW_ALL);
13224 return constant_boolean_node (1, type);
13227 /* Convert (X + c) >= X to true. */
13228 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13229 && code == GE_EXPR
13230 && ((code0 == PLUS_EXPR && is_positive >= 0)
13231 || (code0 == MINUS_EXPR && is_positive <= 0)))
13233 if (TREE_CODE (arg01) == INTEGER_CST
13234 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13235 fold_overflow_warning (("assuming signed overflow does not "
13236 "occur when assuming that "
13237 "(X + c) >= X is always true"),
13238 WARN_STRICT_OVERFLOW_ALL);
13239 return constant_boolean_node (1, type);
13242 if (TREE_CODE (arg01) == INTEGER_CST)
13244 /* Convert X + c > X and X - c < X to true for integers. */
13245 if (code == GT_EXPR
13246 && ((code0 == PLUS_EXPR && is_positive > 0)
13247 || (code0 == MINUS_EXPR && is_positive < 0)))
13249 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13250 fold_overflow_warning (("assuming signed overflow does "
13251 "not occur when assuming that "
13252 "(X + c) > X is always true"),
13253 WARN_STRICT_OVERFLOW_ALL);
13254 return constant_boolean_node (1, type);
13257 if (code == LT_EXPR
13258 && ((code0 == MINUS_EXPR && is_positive > 0)
13259 || (code0 == PLUS_EXPR && is_positive < 0)))
13261 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13262 fold_overflow_warning (("assuming signed overflow does "
13263 "not occur when assuming that "
13264 "(X - c) < X is always true"),
13265 WARN_STRICT_OVERFLOW_ALL);
13266 return constant_boolean_node (1, type);
13269 /* Convert X + c <= X and X - c >= X to false for integers. */
13270 if (code == LE_EXPR
13271 && ((code0 == PLUS_EXPR && is_positive > 0)
13272 || (code0 == MINUS_EXPR && is_positive < 0)))
13274 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13275 fold_overflow_warning (("assuming signed overflow does "
13276 "not occur when assuming that "
13277 "(X + c) <= X is always false"),
13278 WARN_STRICT_OVERFLOW_ALL);
13279 return constant_boolean_node (0, type);
13282 if (code == GE_EXPR
13283 && ((code0 == MINUS_EXPR && is_positive > 0)
13284 || (code0 == PLUS_EXPR && is_positive < 0)))
13286 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13287 fold_overflow_warning (("assuming signed overflow does "
13288 "not occur when assuming that "
13289 "(X - c) >= X is always false"),
13290 WARN_STRICT_OVERFLOW_ALL);
13291 return constant_boolean_node (0, type);
13296 /* Comparisons with the highest or lowest possible integer of
13297 the specified precision will have known values. */
13299 tree arg1_type = TREE_TYPE (arg1);
13300 unsigned int prec = TYPE_PRECISION (arg1_type);
13302 if (TREE_CODE (arg1) == INTEGER_CST
13303 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13305 wide_int max = wi::max_value (arg1_type);
13306 wide_int signed_max = wi::max_value (prec, SIGNED);
13307 wide_int min = wi::min_value (arg1_type);
13309 if (wi::eq_p (arg1, max))
13310 switch (code)
13312 case GT_EXPR:
13313 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13315 case GE_EXPR:
13316 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13318 case LE_EXPR:
13319 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13321 case LT_EXPR:
13322 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13324 /* The GE_EXPR and LT_EXPR cases above are not normally
13325 reached because of previous transformations. */
13327 default:
13328 break;
13330 else if (wi::eq_p (arg1, max - 1))
13331 switch (code)
13333 case GT_EXPR:
13334 arg1 = const_binop (PLUS_EXPR, arg1,
13335 build_int_cst (TREE_TYPE (arg1), 1));
13336 return fold_build2_loc (loc, EQ_EXPR, type,
13337 fold_convert_loc (loc,
13338 TREE_TYPE (arg1), arg0),
13339 arg1);
13340 case LE_EXPR:
13341 arg1 = const_binop (PLUS_EXPR, arg1,
13342 build_int_cst (TREE_TYPE (arg1), 1));
13343 return fold_build2_loc (loc, NE_EXPR, type,
13344 fold_convert_loc (loc, TREE_TYPE (arg1),
13345 arg0),
13346 arg1);
13347 default:
13348 break;
13350 else if (wi::eq_p (arg1, min))
13351 switch (code)
13353 case LT_EXPR:
13354 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13356 case LE_EXPR:
13357 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13359 case GE_EXPR:
13360 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13362 case GT_EXPR:
13363 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13365 default:
13366 break;
13368 else if (wi::eq_p (arg1, min + 1))
13369 switch (code)
13371 case GE_EXPR:
13372 arg1 = const_binop (MINUS_EXPR, arg1,
13373 build_int_cst (TREE_TYPE (arg1), 1));
13374 return fold_build2_loc (loc, NE_EXPR, type,
13375 fold_convert_loc (loc,
13376 TREE_TYPE (arg1), arg0),
13377 arg1);
13378 case LT_EXPR:
13379 arg1 = const_binop (MINUS_EXPR, arg1,
13380 build_int_cst (TREE_TYPE (arg1), 1));
13381 return fold_build2_loc (loc, EQ_EXPR, type,
13382 fold_convert_loc (loc, TREE_TYPE (arg1),
13383 arg0),
13384 arg1);
13385 default:
13386 break;
13389 else if (wi::eq_p (arg1, signed_max)
13390 && TYPE_UNSIGNED (arg1_type)
13391 /* We will flip the signedness of the comparison operator
13392 associated with the mode of arg1, so the sign bit is
13393 specified by this mode. Check that arg1 is the signed
13394 max associated with this sign bit. */
13395 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13396 /* signed_type does not work on pointer types. */
13397 && INTEGRAL_TYPE_P (arg1_type))
13399 /* The following case also applies to X < signed_max+1
13400 and X >= signed_max+1 because of previous transformations. */
13401 if (code == LE_EXPR || code == GT_EXPR)
13403 tree st = signed_type_for (arg1_type);
13404 return fold_build2_loc (loc,
13405 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13406 type, fold_convert_loc (loc, st, arg0),
13407 build_int_cst (st, 0));
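  /* Worked example (illustrative): for a 32-bit unsigned x,
     x <= 0x7fffffff becomes (int) x >= 0 and x > 0x7fffffff becomes
     (int) x < 0, i.e. a plain sign-bit test.  */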
13413 /* If we are comparing an ABS_EXPR with a constant, we can
13414 convert all the cases into explicit comparisons, but they may
13415 well not be faster than doing the ABS and one comparison.
13416 But ABS (X) <= C is a range comparison, which becomes a subtraction
13417 and a comparison, and is probably faster. */
13418 if (code == LE_EXPR
13419 && TREE_CODE (arg1) == INTEGER_CST
13420 && TREE_CODE (arg0) == ABS_EXPR
13421 && ! TREE_SIDE_EFFECTS (arg0)
13422 && (0 != (tem = negate_expr (arg1)))
13423 && TREE_CODE (tem) == INTEGER_CST
13424 && !TREE_OVERFLOW (tem))
13425 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13426 build2 (GE_EXPR, type,
13427 TREE_OPERAND (arg0, 0), tem),
13428 build2 (LE_EXPR, type,
13429 TREE_OPERAND (arg0, 0), arg1));
13431 /* Convert ABS_EXPR<x> >= 0 to true. */
13432 strict_overflow_p = false;
13433 if (code == GE_EXPR
13434 && (integer_zerop (arg1)
13435 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13436 && real_zerop (arg1)))
13437 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13439 if (strict_overflow_p)
13440 fold_overflow_warning (("assuming signed overflow does not occur "
13441 "when simplifying comparison of "
13442 "absolute value and zero"),
13443 WARN_STRICT_OVERFLOW_CONDITIONAL);
13444 return omit_one_operand_loc (loc, type,
13445 constant_boolean_node (true, type),
13446 arg0);
13449 /* Convert ABS_EXPR<x> < 0 to false. */
13450 strict_overflow_p = false;
13451 if (code == LT_EXPR
13452 && (integer_zerop (arg1) || real_zerop (arg1))
13453 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13455 if (strict_overflow_p)
13456 fold_overflow_warning (("assuming signed overflow does not occur "
13457 "when simplifying comparison of "
13458 "absolute value and zero"),
13459 WARN_STRICT_OVERFLOW_CONDITIONAL);
13460 return omit_one_operand_loc (loc, type,
13461 constant_boolean_node (false, type),
13462 arg0);
13465 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13466 and similarly for >= into !=. */
13467 if ((code == LT_EXPR || code == GE_EXPR)
13468 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13469 && TREE_CODE (arg1) == LSHIFT_EXPR
13470 && integer_onep (TREE_OPERAND (arg1, 0)))
13471 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13472 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13473 TREE_OPERAND (arg1, 1)),
13474 build_zero_cst (TREE_TYPE (arg0)));
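  /* Worked example (illustrative), unsigned x:
       x < (1 << y)   becomes  (x >> y) == 0
       x >= (1 << y)  becomes  (x >> y) != 0
     since x < 2**y exactly when no bit at or above position y is
     set.  */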
13476 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13477 otherwise Y might be >= # of bits in X's type and thus e.g.
13478 (unsigned char) (1 << Y) for Y == 15 might be 0.
13479 If the cast is widening, then 1 << Y should have unsigned type,
13480 otherwise if Y is number of bits in the signed shift type minus 1,
13481 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13482 Y == 31 might be 0xffffffff80000000. */
13483 if ((code == LT_EXPR || code == GE_EXPR)
13484 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13485 && CONVERT_EXPR_P (arg1)
13486 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13487 && (TYPE_PRECISION (TREE_TYPE (arg1))
13488 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13489 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13490 || (TYPE_PRECISION (TREE_TYPE (arg1))
13491 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13492 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13494 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13495 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13496 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13497 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13498 build_zero_cst (TREE_TYPE (arg0)));
13501 return NULL_TREE;
13503 case UNORDERED_EXPR:
13504 case ORDERED_EXPR:
13505 case UNLT_EXPR:
13506 case UNLE_EXPR:
13507 case UNGT_EXPR:
13508 case UNGE_EXPR:
13509 case UNEQ_EXPR:
13510 case LTGT_EXPR:
13511 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13513 t1 = fold_relational_const (code, type, arg0, arg1);
13514 if (t1 != NULL_TREE)
13515 return t1;
13518 /* If the first operand is NaN, the result is constant. */
13519 if (TREE_CODE (arg0) == REAL_CST
13520 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13521 && (code != LTGT_EXPR || ! flag_trapping_math))
13523 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13524 ? integer_zero_node
13525 : integer_one_node;
13526 return omit_one_operand_loc (loc, type, t1, arg1);
13529 /* If the second operand is NaN, the result is constant. */
13530 if (TREE_CODE (arg1) == REAL_CST
13531 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13532 && (code != LTGT_EXPR || ! flag_trapping_math))
13534 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13535 ? integer_zero_node
13536 : integer_one_node;
13537 return omit_one_operand_loc (loc, type, t1, arg0);
13540 /* Simplify unordered comparison of something with itself. */
13541 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13542 && operand_equal_p (arg0, arg1, 0))
13543 return constant_boolean_node (1, type);
13545 if (code == LTGT_EXPR
13546 && !flag_trapping_math
13547 && operand_equal_p (arg0, arg1, 0))
13548 return constant_boolean_node (0, type);
13550 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13552 tree targ0 = strip_float_extensions (arg0);
13553 tree targ1 = strip_float_extensions (arg1);
13554 tree newtype = TREE_TYPE (targ0);
13556 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13557 newtype = TREE_TYPE (targ1);
13559 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13560 return fold_build2_loc (loc, code, type,
13561 fold_convert_loc (loc, newtype, targ0),
13562 fold_convert_loc (loc, newtype, targ1));
13565 return NULL_TREE;
13567 case COMPOUND_EXPR:
13568 /* When pedantic, a compound expression can be neither an lvalue
13569 nor an integer constant expression. */
13570 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13571 return NULL_TREE;
13572 /* Don't let (0, 0) be a null pointer constant. */
13573 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13574 : fold_convert_loc (loc, type, arg1);
13575 return pedantic_non_lvalue_loc (loc, tem);
13577 case COMPLEX_EXPR:
13578 if ((TREE_CODE (arg0) == REAL_CST
13579 && TREE_CODE (arg1) == REAL_CST)
13580 || (TREE_CODE (arg0) == INTEGER_CST
13581 && TREE_CODE (arg1) == INTEGER_CST))
13582 return build_complex (type, arg0, arg1);
13583 if (TREE_CODE (arg0) == REALPART_EXPR
13584 && TREE_CODE (arg1) == IMAGPART_EXPR
13585 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13586 && operand_equal_p (TREE_OPERAND (arg0, 0),
13587 TREE_OPERAND (arg1, 0), 0))
13588 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13589 TREE_OPERAND (arg1, 0));
13590 return NULL_TREE;
13592 case ASSERT_EXPR:
13593 /* An ASSERT_EXPR should never be passed to fold_binary. */
13594 gcc_unreachable ();
13596 case VEC_PACK_TRUNC_EXPR:
13597 case VEC_PACK_FIX_TRUNC_EXPR:
13599 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13600 tree *elts;
13602 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13603 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13604 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13605 return NULL_TREE;
13607 elts = XALLOCAVEC (tree, nelts);
13608 if (!vec_cst_ctor_to_array (arg0, elts)
13609 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13610 return NULL_TREE;
13612 for (i = 0; i < nelts; i++)
13614 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13615 ? NOP_EXPR : FIX_TRUNC_EXPR,
13616 TREE_TYPE (type), elts[i]);
13617 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13618 return NULL_TREE;
13621 return build_vector (type, elts);
13624 case VEC_WIDEN_MULT_LO_EXPR:
13625 case VEC_WIDEN_MULT_HI_EXPR:
13626 case VEC_WIDEN_MULT_EVEN_EXPR:
13627 case VEC_WIDEN_MULT_ODD_EXPR:
13629 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13630 unsigned int out, ofs, scale;
13631 tree *elts;
13633 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13634 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13635 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13636 return NULL_TREE;
13638 elts = XALLOCAVEC (tree, nelts * 4);
13639 if (!vec_cst_ctor_to_array (arg0, elts)
13640 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13641 return NULL_TREE;
13643 if (code == VEC_WIDEN_MULT_LO_EXPR)
13644 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13645 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13646 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13647 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13648 scale = 1, ofs = 0;
13649 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13650 scale = 1, ofs = 1;
13652 for (out = 0; out < nelts; out++)
13654 unsigned int in1 = (out << scale) + ofs;
13655 unsigned int in2 = in1 + nelts * 2;
13656 tree t1, t2;
13658 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13659 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13661 if (t1 == NULL_TREE || t2 == NULL_TREE)
13662 return NULL_TREE;
13663 elts[out] = const_binop (MULT_EXPR, t1, t2);
13664 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13665 return NULL_TREE;
13668 return build_vector (type, elts);
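  /* Illustrative sketch (an assumption about a typical target): with
     V4HI inputs widened to a V2SI result, EVEN multiplies input lanes
     {0,2}, ODD lanes {1,3}, while LO and HI take one half of the
     lanes, BYTES_BIG_ENDIAN selecting which half counts as "low".  */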
13671 default:
13672 return NULL_TREE;
13673 } /* switch (code) */
13676 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13677 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13678 of GOTO_EXPR. */
13680 static tree
13681 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13683 switch (TREE_CODE (*tp))
13685 case LABEL_EXPR:
13686 return *tp;
13688 case GOTO_EXPR:
13689 *walk_subtrees = 0;
13691 /* ... fall through ... */
13693 default:
13694 return NULL_TREE;
13698 /* Return whether the sub-tree ST contains a label which is accessible from
13699 outside the sub-tree. */
13701 static bool
13702 contains_label_p (tree st)
13704 return
13705 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13708 /* Fold a ternary expression of code CODE and type TYPE with operands
13709 OP0, OP1, and OP2. Return the folded expression if folding is
13710 successful. Otherwise, return NULL_TREE. */
13712 tree
13713 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13714 tree op0, tree op1, tree op2)
13716 tree tem;
13717 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13718 enum tree_code_class kind = TREE_CODE_CLASS (code);
13720 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13721 && TREE_CODE_LENGTH (code) == 3);
13723 /* If this is a commutative operation, and OP0 is a constant, move it
13724 to OP1 to reduce the number of tests below. */
13725 if (commutative_ternary_tree_code (code)
13726 && tree_swap_operands_p (op0, op1, true))
13727 return fold_build3_loc (loc, code, type, op1, op0, op2);
13729 tem = generic_simplify (loc, code, type, op0, op1, op2);
13730 if (tem)
13731 return tem;
13733 /* Strip any conversions that don't change the mode. This is safe
13734 for every expression, except for a comparison expression because
13735 its signedness is derived from its operands. So, in the latter
13736 case, only strip conversions that don't change the signedness.
13738 Note that this is done as an internal manipulation within the
13739 constant folder, in order to find the simplest representation of
13740 the arguments so that their form can be studied. In any case,
13741 the appropriate type conversions should be put back in the tree
13742 that will get out of the constant folder. */
13743 if (op0)
13745 arg0 = op0;
13746 STRIP_NOPS (arg0);
13749 if (op1)
13751 arg1 = op1;
13752 STRIP_NOPS (arg1);
13755 if (op2)
13757 arg2 = op2;
13758 STRIP_NOPS (arg2);
13761 switch (code)
13763 case COMPONENT_REF:
13764 if (TREE_CODE (arg0) == CONSTRUCTOR
13765 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13767 unsigned HOST_WIDE_INT idx;
13768 tree field, value;
13769 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13770 if (field == arg1)
13771 return value;
13773 return NULL_TREE;
13775 case COND_EXPR:
13776 case VEC_COND_EXPR:
13777 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13778 so all simple results must be passed through pedantic_non_lvalue. */
13779 if (TREE_CODE (arg0) == INTEGER_CST)
13781 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13782 tem = integer_zerop (arg0) ? op2 : op1;
13783 /* Only optimize constant conditions when the selected branch
13784 has the same type as the COND_EXPR. This avoids optimizing
13785 away "c ? x : throw", where the throw has a void type.
13786 Avoid throwing away that operand which contains a label. */
13787 if ((!TREE_SIDE_EFFECTS (unused_op)
13788 || !contains_label_p (unused_op))
13789 && (! VOID_TYPE_P (TREE_TYPE (tem))
13790 || VOID_TYPE_P (type)))
13791 return pedantic_non_lvalue_loc (loc, tem);
13792 return NULL_TREE;
13794 else if (TREE_CODE (arg0) == VECTOR_CST)
13796 if (integer_all_onesp (arg0))
13797 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13798 if (integer_zerop (arg0))
13799 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13801 if ((TREE_CODE (arg1) == VECTOR_CST
13802 || TREE_CODE (arg1) == CONSTRUCTOR)
13803 && (TREE_CODE (arg2) == VECTOR_CST
13804 || TREE_CODE (arg2) == CONSTRUCTOR))
13806 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13807 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13808 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13809 for (i = 0; i < nelts; i++)
13811 tree val = VECTOR_CST_ELT (arg0, i);
13812 if (integer_all_onesp (val))
13813 sel[i] = i;
13814 else if (integer_zerop (val))
13815 sel[i] = nelts + i;
13816 else /* Currently unreachable. */
13817 return NULL_TREE;
13819 tree t = fold_vec_perm (type, arg1, arg2, sel);
13820 if (t != NULL_TREE)
13821 return t;
13825 if (operand_equal_p (arg1, op2, 0))
13826 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13828 /* If we have A op B ? A : C, we may be able to convert this to a
13829 simpler expression, depending on the operation and the values
13830 of B and C.  Signed zeros prevent all of these transformations,
13831 for reasons given above each one.
13833 Also try swapping the arguments and inverting the conditional.  */
13834 if (COMPARISON_CLASS_P (arg0)
13835 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13836 arg1, TREE_OPERAND (arg0, 1))
13837 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13838 {
13839 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13840 if (tem)
13841 return tem;
13842 }
13844 if (COMPARISON_CLASS_P (arg0)
13845 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13846 op2,
13847 TREE_OPERAND (arg0, 1))
13848 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13849 {
13850 location_t loc0 = expr_location_or (arg0, loc);
13851 tem = fold_invert_truthvalue (loc0, arg0);
13852 if (tem && COMPARISON_CLASS_P (tem))
13853 {
13854 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13855 if (tem)
13856 return tem;
13857 }
13858 }
13860 /* If the second operand is simpler than the third, swap them
13861 since that produces better jump optimization results.  */
13862 if (truth_value_p (TREE_CODE (arg0))
13863 && tree_swap_operands_p (op1, op2, false))
13864 {
13865 location_t loc0 = expr_location_or (arg0, loc);
13866 /* See if this can be inverted.  If it can't, possibly because
13867 it was a floating-point inequality comparison, don't do
13868 anything.  */
13869 tem = fold_invert_truthvalue (loc0, arg0);
13870 if (tem)
13871 return fold_build3_loc (loc, code, type, tem, op2, op1);
13872 }
13874 /* Convert A ? 1 : 0 to simply A. */
13875 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13876 : (integer_onep (op1)
13877 && !VECTOR_TYPE_P (type)))
13878 && integer_zerop (op2)
13879 /* If we try to convert OP0 to our type, the
13880 call to fold will try to move the conversion inside
13881 a COND, which will recurse. In that case, the COND_EXPR
13882 is probably the best choice, so leave it alone. */
13883 && type == TREE_TYPE (arg0))
13884 return pedantic_non_lvalue_loc (loc, arg0);
13886 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13887 over COND_EXPR in cases such as floating point comparisons. */
13888 if (integer_zerop (op1)
13889 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13890 : (integer_onep (op2)
13891 && !VECTOR_TYPE_P (type)))
13892 && truth_value_p (TREE_CODE (arg0)))
13893 return pedantic_non_lvalue_loc (loc,
13894 fold_convert_loc (loc, type,
13895 invert_truthvalue_loc (loc,
13896 arg0)));
13898 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
13899 if (TREE_CODE (arg0) == LT_EXPR
13900 && integer_zerop (TREE_OPERAND (arg0, 1))
13901 && integer_zerop (op2)
13902 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13903 {
13904 /* sign_bit_p looks through both zero and sign extensions,
13905 but for this optimization only sign extensions are
13906 usable.  */
13907 tree tem2 = TREE_OPERAND (arg0, 0);
13908 while (tem != tem2)
13909 {
13910 if (TREE_CODE (tem2) != NOP_EXPR
13911 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13912 {
13913 tem = NULL_TREE;
13914 break;
13915 }
13916 tem2 = TREE_OPERAND (tem2, 0);
13917 }
13918 /* sign_bit_p only checks ARG1 bits within A's precision.
13919 If <sign bit of A> has wider type than A, bits outside
13920 of A's precision in <sign bit of A> need to be checked.
13921 If they are all 0, this optimization needs to be done
13922 in unsigned A's type, if they are all 1 in signed A's type,
13923 otherwise this can't be done.  */
13924 if (tem
13925 && TYPE_PRECISION (TREE_TYPE (tem))
13926 < TYPE_PRECISION (TREE_TYPE (arg1))
13927 && TYPE_PRECISION (TREE_TYPE (tem))
13928 < TYPE_PRECISION (type))
13929 {
13930 int inner_width, outer_width;
13931 tree tem_type;
13933 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13934 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13935 if (outer_width > TYPE_PRECISION (type))
13936 outer_width = TYPE_PRECISION (type);
13938 wide_int mask = wi::shifted_mask
13939 (inner_width, outer_width - inner_width, false,
13940 TYPE_PRECISION (TREE_TYPE (arg1)));
13942 wide_int common = mask & arg1;
13943 if (common == mask)
13944 {
13945 tem_type = signed_type_for (TREE_TYPE (tem));
13946 tem = fold_convert_loc (loc, tem_type, tem);
13947 }
13948 else if (common == 0)
13949 {
13950 tem_type = unsigned_type_for (TREE_TYPE (tem));
13951 tem = fold_convert_loc (loc, tem_type, tem);
13952 }
13953 else
13954 tem = NULL;
13955 }
13957 if (tem)
13958 return
13959 fold_convert_loc (loc, type,
13960 fold_build2_loc (loc, BIT_AND_EXPR,
13961 TREE_TYPE (tem), tem,
13962 fold_convert_loc (loc,
13963 TREE_TYPE (tem),
13964 arg1)));
13965 }
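/* Editorial illustration, not in the original sources: with A of type
   signed char and <sign bit of A> the 32-bit constant 0x80,
   inner_width is 8 and outer_width 32, so the mask above covers bits
   8..31 (wi::shifted_mask (8, 24, false, 32) == 0xffffff00).  None of
   those bits are set in 0x80, so common == 0 and the AND is performed
   in unsigned char; had ARG1 been the sign-extended 0xffffff80,
   common would equal mask and signed char would be used instead.  */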
13967 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
13968 already handled above.  */
13969 if (TREE_CODE (arg0) == BIT_AND_EXPR
13970 && integer_onep (TREE_OPERAND (arg0, 1))
13971 && integer_zerop (op2)
13972 && integer_pow2p (arg1))
13973 {
13974 tree tem = TREE_OPERAND (arg0, 0);
13975 STRIP_NOPS (tem);
13976 if (TREE_CODE (tem) == RSHIFT_EXPR
13977 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13978 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13979 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13980 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13981 TREE_OPERAND (tem, 0), arg1);
13982 }
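/* Editorial illustration, not in the original sources: this matches
   expressions such as

     (a >> 3) & 1 ? 8 : 0

   where tree_log2 (8) == 3 equals the shift count, so the whole
   conditional folds to the single bit test a & 8.  */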
13984 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13985 is probably obsolete because the first operand should be a
13986 truth value (that's why we have the two cases above), but let's
13987 leave it in until we can confirm this for all front-ends. */
13988 if (integer_zerop (op2)
13989 && TREE_CODE (arg0) == NE_EXPR
13990 && integer_zerop (TREE_OPERAND (arg0, 1))
13991 && integer_pow2p (arg1)
13992 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13993 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13994 arg1, OEP_ONLY_CONST))
13995 return pedantic_non_lvalue_loc (loc,
13996 fold_convert_loc (loc, type,
13997 TREE_OPERAND (arg0, 0)));
13999 /* Disable the transformations below for vectors, since
14000 fold_binary_op_with_conditional_arg may undo them immediately,
14001 yielding an infinite loop. */
14002 if (code == VEC_COND_EXPR)
14003 return NULL_TREE;
14005 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14006 if (integer_zerop (op2)
14007 && truth_value_p (TREE_CODE (arg0))
14008 && truth_value_p (TREE_CODE (arg1))
14009 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14010 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14011 : TRUTH_ANDIF_EXPR,
14012 type, fold_convert_loc (loc, type, arg0), arg1);
14014 /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
14015 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14016 && truth_value_p (TREE_CODE (arg0))
14017 && truth_value_p (TREE_CODE (arg1))
14018 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14019 {
14020 location_t loc0 = expr_location_or (arg0, loc);
14021 /* Only perform transformation if ARG0 is easily inverted.  */
14022 tem = fold_invert_truthvalue (loc0, arg0);
14023 if (tem)
14024 return fold_build2_loc (loc, code == VEC_COND_EXPR
14025 ? BIT_IOR_EXPR
14026 : TRUTH_ORIF_EXPR,
14027 type, fold_convert_loc (loc, type, tem),
14028 arg1);
14029 }
14031 /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
14032 if (integer_zerop (arg1)
14033 && truth_value_p (TREE_CODE (arg0))
14034 && truth_value_p (TREE_CODE (op2))
14035 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14036 {
14037 location_t loc0 = expr_location_or (arg0, loc);
14038 /* Only perform transformation if ARG0 is easily inverted.  */
14039 tem = fold_invert_truthvalue (loc0, arg0);
14040 if (tem)
14041 return fold_build2_loc (loc, code == VEC_COND_EXPR
14042 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14043 type, fold_convert_loc (loc, type, tem),
14044 op2);
14045 }
14047 /* Convert A ? 1 : B into A || B if A and B are truth values.  */
14048 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14049 && truth_value_p (TREE_CODE (arg0))
14050 && truth_value_p (TREE_CODE (op2))
14051 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14052 return fold_build2_loc (loc, code == VEC_COND_EXPR
14053 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14054 type, fold_convert_loc (loc, type, arg0), op2);
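/* Editorial summary, not in the original sources: taken together, the
   four conversions above rewrite

     a ? b : 0  ==>  a && b          a ? b : 1  ==>  !a || b
     a ? 0 : b  ==>  !a && b         a ? 1 : b  ==>  a || b

   using BIT_AND_EXPR/BIT_IOR_EXPR instead of the TRUTH_* codes in the
   VEC_COND_EXPR case.  */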
14056 return NULL_TREE;
14058 case CALL_EXPR:
14059 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14060 of fold_ternary on them. */
14061 gcc_unreachable ();
14063 case BIT_FIELD_REF:
14064 if ((TREE_CODE (arg0) == VECTOR_CST
14065 || (TREE_CODE (arg0) == CONSTRUCTOR
14066 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14067 && (type == TREE_TYPE (TREE_TYPE (arg0))
14068 || (TREE_CODE (type) == VECTOR_TYPE
14069 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14070 {
14071 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14072 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14073 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14074 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14076 if (n != 0
14077 && (idx % width) == 0
14078 && (n % width) == 0
14079 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14080 {
14081 idx = idx / width;
14082 n = n / width;
14084 if (TREE_CODE (arg0) == VECTOR_CST)
14085 {
14086 if (n == 1)
14087 return VECTOR_CST_ELT (arg0, idx);
14089 tree *vals = XALLOCAVEC (tree, n);
14090 for (unsigned i = 0; i < n; ++i)
14091 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14092 return build_vector (type, vals);
14093 }
14095 /* Constructor elements can be subvectors.  */
14096 unsigned HOST_WIDE_INT k = 1;
14097 if (CONSTRUCTOR_NELTS (arg0) != 0)
14098 {
14099 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14100 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14101 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14102 }
14104 /* We keep an exact subset of the constructor elements.  */
14105 if ((idx % k) == 0 && (n % k) == 0)
14106 {
14107 if (CONSTRUCTOR_NELTS (arg0) == 0)
14108 return build_constructor (type, NULL);
14109 idx /= k;
14110 n /= k;
14111 if (n == 1)
14112 {
14113 if (idx < CONSTRUCTOR_NELTS (arg0))
14114 return CONSTRUCTOR_ELT (arg0, idx)->value;
14115 return build_zero_cst (type);
14116 }
14118 vec<constructor_elt, va_gc> *vals;
14119 vec_alloc (vals, n);
14120 for (unsigned i = 0;
14121 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14122 ++i)
14123 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14124 CONSTRUCTOR_ELT
14125 (arg0, idx + i)->value);
14126 return build_constructor (type, vals);
14127 }
14128 /* The bitfield references a single constructor element.  */
14129 else if (idx + n <= (idx / k + 1) * k)
14130 {
14131 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14132 return build_zero_cst (type);
14133 else if (n == k)
14134 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14135 else
14136 return fold_build3_loc (loc, code, type,
14137 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14138 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14139 }
14140 }
14141 }
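/* Editorial illustration, not in the original sources: for a V4SI
   constant V, each element is width == 32 bits wide, so

     BIT_FIELD_REF <V, 32, 64>  ==>  VECTOR_CST_ELT (V, 2)
     BIT_FIELD_REF <V, 64, 0>   ==>  the V2SI constant {V[0], V[1]}

   provided the reference falls on element boundaries as checked
   above.  */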
14143 /* A bit-field-ref that referenced the full argument can be stripped. */
14144 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14145 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14146 && integer_zerop (op2))
14147 return fold_convert_loc (loc, type, arg0);
14149 /* On constants we can use native encode/interpret to constant
14150 fold (nearly) all BIT_FIELD_REFs.  */
14151 if (CONSTANT_CLASS_P (arg0)
14152 && can_native_interpret_type_p (type)
14153 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14154 /* This limitation should not be necessary; we just need to
14155 round this up to mode size.  */
14156 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14157 /* Need bit-shifting of the buffer to relax the following.  */
14158 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14159 {
14160 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14161 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14162 unsigned HOST_WIDE_INT clen;
14163 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14164 /* ??? We cannot tell native_encode_expr to start at
14165 some random byte only.  So limit us to a reasonable amount
14166 of work.  */
14167 if (clen <= 4096)
14168 {
14169 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14170 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14171 if (len > 0
14172 && len * BITS_PER_UNIT >= bitpos + bitsize)
14173 {
14174 tree v = native_interpret_expr (type,
14175 b + bitpos / BITS_PER_UNIT,
14176 bitsize / BITS_PER_UNIT);
14177 if (v)
14178 return v;
14179 }
14180 }
14181 }
14183 return NULL_TREE;
14185 case FMA_EXPR:
14186 /* For integers we can decompose the FMA if possible. */
14187 if (TREE_CODE (arg0) == INTEGER_CST
14188 && TREE_CODE (arg1) == INTEGER_CST)
14189 return fold_build2_loc (loc, PLUS_EXPR, type,
14190 const_binop (MULT_EXPR, arg0, arg1), arg2);
14191 if (integer_zerop (arg2))
14192 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14194 return fold_fma (loc, type, arg0, arg1, arg2);
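/* Editorial illustration, not in the original sources: with all
   operands constant, FMA_EXPR <3, 4, 5> folds to 3 * 4 + 5 == 17, and
   a zero addend reduces FMA_EXPR <a, b, 0> to the plain product
   a * b.  */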
14196 case VEC_PERM_EXPR:
14197 if (TREE_CODE (arg2) == VECTOR_CST)
14198 {
14199 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14200 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14201 bool need_mask_canon = false;
14202 bool all_in_vec0 = true;
14203 bool all_in_vec1 = true;
14204 bool maybe_identity = true;
14205 bool single_arg = (op0 == op1);
14206 bool changed = false;
14208 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14209 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14210 for (i = 0; i < nelts; i++)
14211 {
14212 tree val = VECTOR_CST_ELT (arg2, i);
14213 if (TREE_CODE (val) != INTEGER_CST)
14214 return NULL_TREE;
14216 /* Make sure that the perm value is in an acceptable
14217 range.  */
14218 wide_int t = val;
14219 if (wi::gtu_p (t, mask))
14220 {
14221 need_mask_canon = true;
14222 sel[i] = t.to_uhwi () & mask;
14223 }
14224 else
14225 sel[i] = t.to_uhwi ();
14227 if (sel[i] < nelts)
14228 all_in_vec1 = false;
14229 else
14230 all_in_vec0 = false;
14232 if ((sel[i] & (nelts-1)) != i)
14233 maybe_identity = false;
14234 }
14236 if (maybe_identity)
14237 {
14238 if (all_in_vec0)
14239 return op0;
14240 if (all_in_vec1)
14241 return op1;
14242 }
14244 if (all_in_vec0)
14245 op1 = op0;
14246 else if (all_in_vec1)
14247 {
14248 op0 = op1;
14249 for (i = 0; i < nelts; i++)
14250 sel[i] -= nelts;
14251 need_mask_canon = true;
14252 }
14254 if ((TREE_CODE (op0) == VECTOR_CST
14255 || TREE_CODE (op0) == CONSTRUCTOR)
14256 && (TREE_CODE (op1) == VECTOR_CST
14257 || TREE_CODE (op1) == CONSTRUCTOR))
14258 {
14259 tree t = fold_vec_perm (type, op0, op1, sel);
14260 if (t != NULL_TREE)
14261 return t;
14262 }
14264 if (op0 == op1 && !single_arg)
14265 changed = true;
14267 if (need_mask_canon && arg2 == op2)
14268 {
14269 tree *tsel = XALLOCAVEC (tree, nelts);
14270 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14271 for (i = 0; i < nelts; i++)
14272 tsel[i] = build_int_cst (eltype, sel[i]);
14273 op2 = build_vector (TREE_TYPE (arg2), tsel);
14274 changed = true;
14275 }
14277 if (changed)
14278 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14279 }
14280 return NULL_TREE;
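/* Editorial illustration, not in the original sources: for V4SI
   operands a selector element of 9 exceeds the two-operand mask 7 and
   is canonicalized to 9 & 7 == 1, while a selector such as
   <4, 5, 6, 7> refers only to OP1 and is rewritten as the identity
   permutation on that single operand.  */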
14282 default:
14283 return NULL_TREE;
14284 } /* switch (code) */
14285 }
14287 /* Perform constant folding and related simplification of EXPR.
14288 The related simplifications include x*1 => x, x*0 => 0, etc.,
14289 and application of the associative law.
14290 NOP_EXPR conversions may be removed freely (as long as we
14291 are careful not to change the type of the overall expression).
14292 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14293 but we can constant-fold them if they have constant operands. */
14295 #ifdef ENABLE_FOLD_CHECKING
14296 # define fold(x) fold_1 (x)
14297 static tree fold_1 (tree);
14298 static
14299 #endif
14300 tree
14301 fold (tree expr)
14302 {
14303 const tree t = expr;
14304 enum tree_code code = TREE_CODE (t);
14305 enum tree_code_class kind = TREE_CODE_CLASS (code);
14306 tree tem;
14307 location_t loc = EXPR_LOCATION (expr);
14309 /* Return right away if a constant. */
14310 if (kind == tcc_constant)
14311 return t;
14313 /* CALL_EXPR-like objects with variable numbers of operands are
14314 treated specially. */
14315 if (kind == tcc_vl_exp)
14316 {
14317 if (code == CALL_EXPR)
14318 {
14319 tem = fold_call_expr (loc, expr, false);
14320 return tem ? tem : expr;
14321 }
14322 return expr;
14323 }
14325 if (IS_EXPR_CODE_CLASS (kind))
14326 {
14327 tree type = TREE_TYPE (t);
14328 tree op0, op1, op2;
14330 switch (TREE_CODE_LENGTH (code))
14331 {
14332 case 1:
14333 op0 = TREE_OPERAND (t, 0);
14334 tem = fold_unary_loc (loc, code, type, op0);
14335 return tem ? tem : expr;
14336 case 2:
14337 op0 = TREE_OPERAND (t, 0);
14338 op1 = TREE_OPERAND (t, 1);
14339 tem = fold_binary_loc (loc, code, type, op0, op1);
14340 return tem ? tem : expr;
14341 case 3:
14342 op0 = TREE_OPERAND (t, 0);
14343 op1 = TREE_OPERAND (t, 1);
14344 op2 = TREE_OPERAND (t, 2);
14345 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14346 return tem ? tem : expr;
14347 default:
14348 break;
14349 }
14350 }
14352 switch (code)
14353 {
14354 case ARRAY_REF:
14355 {
14356 tree op0 = TREE_OPERAND (t, 0);
14357 tree op1 = TREE_OPERAND (t, 1);
14359 if (TREE_CODE (op1) == INTEGER_CST
14360 && TREE_CODE (op0) == CONSTRUCTOR
14361 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14362 {
14363 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14364 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14365 unsigned HOST_WIDE_INT begin = 0;
14367 /* Find a matching index by means of a binary search.  */
14368 while (begin != end)
14369 {
14370 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14371 tree index = (*elts)[middle].index;
14373 if (TREE_CODE (index) == INTEGER_CST
14374 && tree_int_cst_lt (index, op1))
14375 begin = middle + 1;
14376 else if (TREE_CODE (index) == INTEGER_CST
14377 && tree_int_cst_lt (op1, index))
14378 end = middle;
14379 else if (TREE_CODE (index) == RANGE_EXPR
14380 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14381 begin = middle + 1;
14382 else if (TREE_CODE (index) == RANGE_EXPR
14383 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14384 end = middle;
14385 else
14386 return (*elts)[middle].value;
14387 }
14388 }
14390 return t;
14391 }
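/* Editorial illustration, not in the original sources: folding c[5]
   where c is initialized by { [0] = 10, [5] = 20, [8 ... 9] = 30 }
   binary-searches the index-sorted element list and returns 20; a
   RANGE_EXPR index such as [8 ... 9] matches any subscript between
   its two bounds.  */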
14393 /* Return a VECTOR_CST if possible. */
14394 case CONSTRUCTOR:
14395 {
14396 tree type = TREE_TYPE (t);
14397 if (TREE_CODE (type) != VECTOR_TYPE)
14398 return t;
14400 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14401 unsigned HOST_WIDE_INT idx, pos = 0;
14402 tree value;
14404 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14405 {
14406 if (!CONSTANT_CLASS_P (value))
14407 return t;
14408 if (TREE_CODE (value) == VECTOR_CST)
14409 {
14410 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14411 vec[pos++] = VECTOR_CST_ELT (value, i);
14412 }
14413 else
14414 vec[pos++] = value;
14415 }
14416 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14417 vec[pos] = build_zero_cst (TREE_TYPE (type));
14419 return build_vector (type, vec);
14420 }
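/* Editorial illustration, not in the original sources: for a V4SI
   type the constructor { 1, 2 } becomes the constant vector
   <1, 2, 0, 0>, the unmentioned trailing elements being implicitly
   zero.  */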
14422 case CONST_DECL:
14423 return fold (DECL_INITIAL (t));
14425 default:
14426 return t;
14427 } /* switch (code) */
14428 }
14430 #ifdef ENABLE_FOLD_CHECKING
14431 #undef fold
14433 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14434 hash_table<pointer_hash<const tree_node> > *);
14435 static void fold_check_failed (const_tree, const_tree);
14436 void print_fold_checksum (const_tree);
14438 /* When --enable-checking=fold, compute a digest of expr before
14439 and after actual fold call to see if fold did not accidentally
14440 change original expr. */
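/* Editorial note, not in the original sources: with
   --enable-checking=fold the "fold" above is #defined to fold_1, so
   this wrapper can hash EXPR with fold_checksum_tree before and after
   the real fold_1 call and report any accidental modification of the
   input tree through fold_check_failed.  */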
14442 tree
14443 fold (tree expr)
14444 {
14445 tree ret;
14446 struct md5_ctx ctx;
14447 unsigned char checksum_before[16], checksum_after[16];
14448 hash_table<pointer_hash<const tree_node> > ht (32);
14450 md5_init_ctx (&ctx);
14451 fold_checksum_tree (expr, &ctx, &ht);
14452 md5_finish_ctx (&ctx, checksum_before);
14453 ht.empty ();
14455 ret = fold_1 (expr);
14457 md5_init_ctx (&ctx);
14458 fold_checksum_tree (expr, &ctx, &ht);
14459 md5_finish_ctx (&ctx, checksum_after);
14461 if (memcmp (checksum_before, checksum_after, 16))
14462 fold_check_failed (expr, ret);
14464 return ret;
14465 }
14467 void
14468 print_fold_checksum (const_tree expr)
14469 {
14470 struct md5_ctx ctx;
14471 unsigned char checksum[16], cnt;
14472 hash_table<pointer_hash<const tree_node> > ht (32);
14474 md5_init_ctx (&ctx);
14475 fold_checksum_tree (expr, &ctx, &ht);
14476 md5_finish_ctx (&ctx, checksum);
14477 for (cnt = 0; cnt < 16; ++cnt)
14478 fprintf (stderr, "%02x", checksum[cnt]);
14479 putc ('\n', stderr);
14480 }
14482 static void
14483 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14484 {
14485 internal_error ("fold check: original tree changed by fold");
14486 }
14488 static void
14489 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14490 hash_table<pointer_hash <const tree_node> > *ht)
14491 {
14492 const tree_node **slot;
14493 enum tree_code code;
14494 union tree_node buf;
14495 int i, len;
14497 recursive_label:
14498 if (expr == NULL)
14499 return;
14500 slot = ht->find_slot (expr, INSERT);
14501 if (*slot != NULL)
14502 return;
14503 *slot = expr;
14504 code = TREE_CODE (expr);
14505 if (TREE_CODE_CLASS (code) == tcc_declaration
14506 && DECL_ASSEMBLER_NAME_SET_P (expr))
14507 {
14508 /* Allow DECL_ASSEMBLER_NAME to be modified.  */
14509 memcpy ((char *) &buf, expr, tree_size (expr));
14510 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14511 expr = (tree) &buf;
14512 }
14513 else if (TREE_CODE_CLASS (code) == tcc_type
14514 && (TYPE_POINTER_TO (expr)
14515 || TYPE_REFERENCE_TO (expr)
14516 || TYPE_CACHED_VALUES_P (expr)
14517 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14518 || TYPE_NEXT_VARIANT (expr)))
14519 {
14520 /* Allow these fields to be modified.  */
14521 tree tmp;
14522 memcpy ((char *) &buf, expr, tree_size (expr));
14523 expr = tmp = (tree) &buf;
14524 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14525 TYPE_POINTER_TO (tmp) = NULL;
14526 TYPE_REFERENCE_TO (tmp) = NULL;
14527 TYPE_NEXT_VARIANT (tmp) = NULL;
14528 if (TYPE_CACHED_VALUES_P (tmp))
14529 {
14530 TYPE_CACHED_VALUES_P (tmp) = 0;
14531 TYPE_CACHED_VALUES (tmp) = NULL;
14532 }
14533 }
14534 md5_process_bytes (expr, tree_size (expr), ctx);
14535 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14536 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14537 if (TREE_CODE_CLASS (code) != tcc_type
14538 && TREE_CODE_CLASS (code) != tcc_declaration
14539 && code != TREE_LIST
14540 && code != SSA_NAME
14541 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14542 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14543 switch (TREE_CODE_CLASS (code))
14544 {
14545 case tcc_constant:
14546 switch (code)
14547 {
14548 case STRING_CST:
14549 md5_process_bytes (TREE_STRING_POINTER (expr),
14550 TREE_STRING_LENGTH (expr), ctx);
14551 break;
14552 case COMPLEX_CST:
14553 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14554 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14555 break;
14556 case VECTOR_CST:
14557 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14558 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14559 break;
14560 default:
14561 break;
14562 }
14563 break;
14564 case tcc_exceptional:
14565 switch (code)
14566 {
14567 case TREE_LIST:
14568 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14569 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14570 expr = TREE_CHAIN (expr);
14571 goto recursive_label;
14572 break;
14573 case TREE_VEC:
14574 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14575 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14576 break;
14577 default:
14578 break;
14579 }
14580 break;
14581 case tcc_expression:
14582 case tcc_reference:
14583 case tcc_comparison:
14584 case tcc_unary:
14585 case tcc_binary:
14586 case tcc_statement:
14587 case tcc_vl_exp:
14588 len = TREE_OPERAND_LENGTH (expr);
14589 for (i = 0; i < len; ++i)
14590 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14591 break;
14592 case tcc_declaration:
14593 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14594 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14595 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14596 {
14597 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14598 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14599 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14600 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14601 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14602 }
14604 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14605 {
14606 if (TREE_CODE (expr) == FUNCTION_DECL)
14607 {
14608 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14609 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14610 }
14611 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14612 }
14613 break;
14614 case tcc_type:
14615 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14616 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14617 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14618 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14619 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14620 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14621 if (INTEGRAL_TYPE_P (expr)
14622 || SCALAR_FLOAT_TYPE_P (expr))
14623 {
14624 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14625 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14626 }
14627 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14628 if (TREE_CODE (expr) == RECORD_TYPE
14629 || TREE_CODE (expr) == UNION_TYPE
14630 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14631 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14632 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14633 break;
14634 default:
14635 break;
14636 }
14637 }
14639 /* Helper function for outputting the checksum of a tree T. When
14640 debugging with gdb, you can "define mynext" to be "next" followed
14641 by "call debug_fold_checksum (op0)", then just trace down till the
14642 outputs differ. */
14644 DEBUG_FUNCTION void
14645 debug_fold_checksum (const_tree t)
14646 {
14647 int i;
14648 unsigned char checksum[16];
14649 struct md5_ctx ctx;
14650 hash_table<pointer_hash<const tree_node> > ht (32);
14652 md5_init_ctx (&ctx);
14653 fold_checksum_tree (t, &ctx, &ht);
14654 md5_finish_ctx (&ctx, checksum);
14655 ht.empty ();
14657 for (i = 0; i < 16; i++)
14658 fprintf (stderr, "%d ", checksum[i]);
14660 fprintf (stderr, "\n");
14661 }
14663 #endif
14665 /* Fold a unary tree expression with code CODE of type TYPE with an
14666 operand OP0. LOC is the location of the resulting expression.
14667 Return a folded expression if successful. Otherwise, return a tree
14668 expression with code CODE of type TYPE with an operand OP0. */
14670 tree
14671 fold_build1_stat_loc (location_t loc,
14672 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14673 {
14674 tree tem;
14675 #ifdef ENABLE_FOLD_CHECKING
14676 unsigned char checksum_before[16], checksum_after[16];
14677 struct md5_ctx ctx;
14678 hash_table<pointer_hash<const tree_node> > ht (32);
14680 md5_init_ctx (&ctx);
14681 fold_checksum_tree (op0, &ctx, &ht);
14682 md5_finish_ctx (&ctx, checksum_before);
14683 ht.empty ();
14684 #endif
14686 tem = fold_unary_loc (loc, code, type, op0);
14687 if (!tem)
14688 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14690 #ifdef ENABLE_FOLD_CHECKING
14691 md5_init_ctx (&ctx);
14692 fold_checksum_tree (op0, &ctx, &ht);
14693 md5_finish_ctx (&ctx, checksum_after);
14695 if (memcmp (checksum_before, checksum_after, 16))
14696 fold_check_failed (op0, tem);
14697 #endif
14698 return tem;
14699 }
14701 /* Fold a binary tree expression with code CODE of type TYPE with
14702 operands OP0 and OP1. LOC is the location of the resulting
14703 expression. Return a folded expression if successful. Otherwise,
14704 return a tree expression with code CODE of type TYPE with operands
14705 OP0 and OP1. */
14707 tree
14708 fold_build2_stat_loc (location_t loc,
14709 enum tree_code code, tree type, tree op0, tree op1
14710 MEM_STAT_DECL)
14711 {
14712 tree tem;
14713 #ifdef ENABLE_FOLD_CHECKING
14714 unsigned char checksum_before_op0[16],
14715 checksum_before_op1[16],
14716 checksum_after_op0[16],
14717 checksum_after_op1[16];
14718 struct md5_ctx ctx;
14719 hash_table<pointer_hash<const tree_node> > ht (32);
14721 md5_init_ctx (&ctx);
14722 fold_checksum_tree (op0, &ctx, &ht);
14723 md5_finish_ctx (&ctx, checksum_before_op0);
14724 ht.empty ();
14726 md5_init_ctx (&ctx);
14727 fold_checksum_tree (op1, &ctx, &ht);
14728 md5_finish_ctx (&ctx, checksum_before_op1);
14729 ht.empty ();
14730 #endif
14732 tem = fold_binary_loc (loc, code, type, op0, op1);
14733 if (!tem)
14734 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14736 #ifdef ENABLE_FOLD_CHECKING
14737 md5_init_ctx (&ctx);
14738 fold_checksum_tree (op0, &ctx, &ht);
14739 md5_finish_ctx (&ctx, checksum_after_op0);
14740 ht.empty ();
14742 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14743 fold_check_failed (op0, tem);
14745 md5_init_ctx (&ctx);
14746 fold_checksum_tree (op1, &ctx, &ht);
14747 md5_finish_ctx (&ctx, checksum_after_op1);
14749 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14750 fold_check_failed (op1, tem);
14751 #endif
14752 return tem;
14753 }
14755 /* Fold a ternary tree expression with code CODE of type TYPE with
14756 operands OP0, OP1, and OP2. Return a folded expression if
14757 successful. Otherwise, return a tree expression with code CODE of
14758 type TYPE with operands OP0, OP1, and OP2. */
14760 tree
14761 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14762 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14764 tree tem;
14765 #ifdef ENABLE_FOLD_CHECKING
14766 unsigned char checksum_before_op0[16],
14767 checksum_before_op1[16],
14768 checksum_before_op2[16],
14769 checksum_after_op0[16],
14770 checksum_after_op1[16],
14771 checksum_after_op2[16];
14772 struct md5_ctx ctx;
14773 hash_table<pointer_hash<const tree_node> > ht (32);
14775 md5_init_ctx (&ctx);
14776 fold_checksum_tree (op0, &ctx, &ht);
14777 md5_finish_ctx (&ctx, checksum_before_op0);
14778 ht.empty ();
14780 md5_init_ctx (&ctx);
14781 fold_checksum_tree (op1, &ctx, &ht);
14782 md5_finish_ctx (&ctx, checksum_before_op1);
14783 ht.empty ();
14785 md5_init_ctx (&ctx);
14786 fold_checksum_tree (op2, &ctx, &ht);
14787 md5_finish_ctx (&ctx, checksum_before_op2);
14788 ht.empty ();
14789 #endif
14791 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14792 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14793 if (!tem)
14794 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14796 #ifdef ENABLE_FOLD_CHECKING
14797 md5_init_ctx (&ctx);
14798 fold_checksum_tree (op0, &ctx, &ht);
14799 md5_finish_ctx (&ctx, checksum_after_op0);
14800 ht.empty ();
14802 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14803 fold_check_failed (op0, tem);
14805 md5_init_ctx (&ctx);
14806 fold_checksum_tree (op1, &ctx, &ht);
14807 md5_finish_ctx (&ctx, checksum_after_op1);
14808 ht.empty ();
14810 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14811 fold_check_failed (op1, tem);
14813 md5_init_ctx (&ctx);
14814 fold_checksum_tree (op2, &ctx, &ht);
14815 md5_finish_ctx (&ctx, checksum_after_op2);
14817 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14818 fold_check_failed (op2, tem);
14819 #endif
14820 return tem;
14821 }
14823 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
14824 arguments in ARGARRAY, and a null static chain.
14825 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14826 of type TYPE from the given operands as constructed by build_call_array. */
14828 tree
14829 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14830 int nargs, tree *argarray)
14831 {
14832 tree tem;
14833 #ifdef ENABLE_FOLD_CHECKING
14834 unsigned char checksum_before_fn[16],
14835 checksum_before_arglist[16],
14836 checksum_after_fn[16],
14837 checksum_after_arglist[16];
14838 struct md5_ctx ctx;
14839 hash_table<pointer_hash<const tree_node> > ht (32);
14840 int i;
14842 md5_init_ctx (&ctx);
14843 fold_checksum_tree (fn, &ctx, &ht);
14844 md5_finish_ctx (&ctx, checksum_before_fn);
14845 ht.empty ();
14847 md5_init_ctx (&ctx);
14848 for (i = 0; i < nargs; i++)
14849 fold_checksum_tree (argarray[i], &ctx, &ht);
14850 md5_finish_ctx (&ctx, checksum_before_arglist);
14851 ht.empty ();
14852 #endif
14854 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14856 #ifdef ENABLE_FOLD_CHECKING
14857 md5_init_ctx (&ctx);
14858 fold_checksum_tree (fn, &ctx, &ht);
14859 md5_finish_ctx (&ctx, checksum_after_fn);
14860 ht.empty ();
14862 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14863 fold_check_failed (fn, tem);
14865 md5_init_ctx (&ctx);
14866 for (i = 0; i < nargs; i++)
14867 fold_checksum_tree (argarray[i], &ctx, &ht);
14868 md5_finish_ctx (&ctx, checksum_after_arglist);
14870 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14871 fold_check_failed (NULL_TREE, tem);
14872 #endif
14873 return tem;
14874 }
14876 /* Perform constant folding and related simplification of initializer
14877 expression EXPR. These behave identically to "fold_buildN" but ignore
14878 potential run-time traps and exceptions that fold must preserve. */
14880 #define START_FOLD_INIT \
14881 int saved_signaling_nans = flag_signaling_nans;\
14882 int saved_trapping_math = flag_trapping_math;\
14883 int saved_rounding_math = flag_rounding_math;\
14884 int saved_trapv = flag_trapv;\
14885 int saved_folding_initializer = folding_initializer;\
14886 flag_signaling_nans = 0;\
14887 flag_trapping_math = 0;\
14888 flag_rounding_math = 0;\
14889 flag_trapv = 0;\
14890 folding_initializer = 1;
14892 #define END_FOLD_INIT \
14893 flag_signaling_nans = saved_signaling_nans;\
14894 flag_trapping_math = saved_trapping_math;\
14895 flag_rounding_math = saved_rounding_math;\
14896 flag_trapv = saved_trapv;\
14897 folding_initializer = saved_folding_initializer;
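/* Editorial illustration, not in the original sources: under these
   macros an initializer such as 1.0 / 0.0 may be folded to +Inf even
   though, with -ftrapping-math in effect, the same division in
   ordinary code must be left alone because it could trap at run
   time.  */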
14899 tree
14900 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14901 tree type, tree op)
14902 {
14903 tree result;
14904 START_FOLD_INIT;
14906 result = fold_build1_loc (loc, code, type, op);
14908 END_FOLD_INIT;
14909 return result;
14910 }
14912 tree
14913 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14914 tree type, tree op0, tree op1)
14915 {
14916 tree result;
14917 START_FOLD_INIT;
14919 result = fold_build2_loc (loc, code, type, op0, op1);
14921 END_FOLD_INIT;
14922 return result;
14923 }
14925 tree
14926 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14927 int nargs, tree *argarray)
14928 {
14929 tree result;
14930 START_FOLD_INIT;
14932 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14934 END_FOLD_INIT;
14935 return result;
14936 }
14938 #undef START_FOLD_INIT
14939 #undef END_FOLD_INIT
14941 /* Determine if first argument is a multiple of second argument. Return 0 if
14942 it is not, or we cannot easily determine it to be.
14944 An example of the sort of thing we care about (at this point; this routine
14945 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14946 fold cases do now) is discovering that
14948 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14950 is a multiple of
14952 SAVE_EXPR (J * 8)
14954 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14956 This code also handles discovering that
14958 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14960 is a multiple of 8 so we don't have to worry about dealing with a
14961 possible remainder.
14963 Note that we *look* inside a SAVE_EXPR only to determine how it was
14964 calculated; it is not safe for fold to do much of anything else with the
14965 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14966 at run time. For example, the latter example above *cannot* be implemented
14967 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14968 evaluation time of the original SAVE_EXPR is not necessarily the same at
14969 the time the new expression is evaluated. The only optimization of this
14970 sort that would be valid is changing
14972 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14974 divided by 8 to
14976 SAVE_EXPR (I) * SAVE_EXPR (J)
14978 (where the same SAVE_EXPR (J) is used in the original and the
14979 transformed version). */
14982 multiple_of_p (tree type, const_tree top, const_tree bottom)
14984 if (operand_equal_p (top, bottom, 0))
14985 return 1;
14987 if (TREE_CODE (type) != INTEGER_TYPE)
14988 return 0;
14990 switch (TREE_CODE (top))
14991 {
14992 case BIT_AND_EXPR:
14993 /* Bitwise and provides a power of two multiple. If the mask is
14994 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14995 if (!integer_pow2p (bottom))
14996 return 0;
14997 /* FALLTHRU */
14999 case MULT_EXPR:
15000 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15001 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15003 case PLUS_EXPR:
15004 case MINUS_EXPR:
15005 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15006 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15008 case LSHIFT_EXPR:
15009 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15010 {
15011 tree op1, t1;
15013 op1 = TREE_OPERAND (top, 1);
15014 /* const_binop may not detect overflow correctly,
15015 so check for it explicitly here.  */
15016 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15017 && 0 != (t1 = fold_convert (type,
15018 const_binop (LSHIFT_EXPR,
15019 size_one_node,
15020 op1)))
15021 && !TREE_OVERFLOW (t1))
15022 return multiple_of_p (type, t1, bottom);
15023 }
15024 return 0;
15026 case NOP_EXPR:
15027 /* Can't handle conversions from non-integral or wider integral type. */
15028 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15029 || (TYPE_PRECISION (type)
15030 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15031 return 0;
15033 /* .. fall through ... */
15035 case SAVE_EXPR:
15036 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15038 case COND_EXPR:
15039 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15040 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15042 case INTEGER_CST:
15043 if (TREE_CODE (bottom) != INTEGER_CST
15044 || integer_zerop (bottom)
15045 || (TYPE_UNSIGNED (type)
15046 && (tree_int_cst_sgn (top) < 0
15047 || tree_int_cst_sgn (bottom) < 0)))
15048 return 0;
15049 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15050 SIGNED);
15052 default:
15053 return 0;
15054 }
15055 }
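/* Editorial illustration, not in the original sources: multiple_of_p
   returns 1 for TOP = J * 8 and BOTTOM = 4 through the MULT_EXPR case
   (8 is a multiple of 4), and for TOP = I << 3 and BOTTOM = 8 through
   the LSHIFT_EXPR case, which rewrites the shift as a multiplication
   by 1 << 3.  */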
15057 /* Return true if CODE or TYPE is known to be non-negative. */
15059 static bool
15060 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15061 {
15062 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15063 && truth_value_p (code))
15064 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15065 have a signed:1 type (where the values are -1 and 0).  */
15066 return true;
15067 return false;
15068 }
15070 /* Return true if (CODE OP0) is known to be non-negative. If the return
15071 value is based on the assumption that signed overflow is undefined,
15072 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15073 *STRICT_OVERFLOW_P. */
15075 bool
15076 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15077 bool *strict_overflow_p)
15078 {
15079 if (TYPE_UNSIGNED (type))
15080 return true;
15082 switch (code)
15083 {
15084 case ABS_EXPR:
15085 /* We can't return 1 if flag_wrapv is set because
15086 ABS_EXPR<INT_MIN> = INT_MIN. */
15087 if (!INTEGRAL_TYPE_P (type))
15088 return true;
15089 if (TYPE_OVERFLOW_UNDEFINED (type))
15090 {
15091 *strict_overflow_p = true;
15092 return true;
15093 }
15094 break;
15096 case NON_LVALUE_EXPR:
15097 case FLOAT_EXPR:
15098 case FIX_TRUNC_EXPR:
15099 return tree_expr_nonnegative_warnv_p (op0,
15100 strict_overflow_p);
15102 case NOP_EXPR:
15103 {
15104 tree inner_type = TREE_TYPE (op0);
15105 tree outer_type = type;
15107 if (TREE_CODE (outer_type) == REAL_TYPE)
15108 {
15109 if (TREE_CODE (inner_type) == REAL_TYPE)
15110 return tree_expr_nonnegative_warnv_p (op0,
15111 strict_overflow_p);
15112 if (INTEGRAL_TYPE_P (inner_type))
15113 {
15114 if (TYPE_UNSIGNED (inner_type))
15115 return true;
15116 return tree_expr_nonnegative_warnv_p (op0,
15117 strict_overflow_p);
15118 }
15119 }
15120 else if (INTEGRAL_TYPE_P (outer_type))
15121 {
15122 if (TREE_CODE (inner_type) == REAL_TYPE)
15123 return tree_expr_nonnegative_warnv_p (op0,
15124 strict_overflow_p);
15125 if (INTEGRAL_TYPE_P (inner_type))
15126 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15127 && TYPE_UNSIGNED (inner_type);
15128 }
15129 }
15130 break;
15132 default:
15133 return tree_simple_nonnegative_warnv_p (code, type);
15134 }
15136 /* We don't know the sign of `t', so be conservative and return false.  */
15137 return false;
15138 }
15140 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15141 value is based on the assumption that signed overflow is undefined,
15142 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15143 *STRICT_OVERFLOW_P. */
15145 bool
15146 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15147 tree op1, bool *strict_overflow_p)
15148 {
15149 if (TYPE_UNSIGNED (type))
15150 return true;
15152 switch (code)
15153 {
15154 case POINTER_PLUS_EXPR:
15155 case PLUS_EXPR:
15156 if (FLOAT_TYPE_P (type))
15157 return (tree_expr_nonnegative_warnv_p (op0,
15158 strict_overflow_p)
15159 && tree_expr_nonnegative_warnv_p (op1,
15160 strict_overflow_p));
15162 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15163 both unsigned and at least 2 bits shorter than the result. */
15164 if (TREE_CODE (type) == INTEGER_TYPE
15165 && TREE_CODE (op0) == NOP_EXPR
15166 && TREE_CODE (op1) == NOP_EXPR)
15167 {
15168 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15169 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15170 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15171 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15172 {
15173 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15174 TYPE_PRECISION (inner2)) + 1;
15175 return prec < TYPE_PRECISION (type);
15176 }
15177 }
15178 break;
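/* Editorial illustration, not in the original sources: two unsigned
   short values widened to a 32-bit int sum to at most
   0xffff + 0xffff == 0x1fffe, which needs only 17 bits, and 17 < 32,
   so the PLUS_EXPR can never produce a negative value.  */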
15180 case MULT_EXPR:
15181 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15182 {
15183 /* x * x is always non-negative for floating point x
15184 or without overflow.  */
15185 if (operand_equal_p (op0, op1, 0)
15186 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15187 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15188 {
15189 if (TYPE_OVERFLOW_UNDEFINED (type))
15190 *strict_overflow_p = true;
15191 return true;
15192 }
15193 }
15195 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15196 both unsigned and their total number of bits is shorter than the result.  */
15197 if (TREE_CODE (type) == INTEGER_TYPE
15198 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15199 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15200 {
15201 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15202 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15203 : TREE_TYPE (op0);
15204 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15205 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15206 : TREE_TYPE (op1);
15208 bool unsigned0 = TYPE_UNSIGNED (inner0);
15209 bool unsigned1 = TYPE_UNSIGNED (inner1);
15211 if (TREE_CODE (op0) == INTEGER_CST)
15212 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15214 if (TREE_CODE (op1) == INTEGER_CST)
15215 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15217 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15218 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15219 {
15220 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15221 ? tree_int_cst_min_precision (op0, UNSIGNED)
15222 : TYPE_PRECISION (inner0);
15224 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15225 ? tree_int_cst_min_precision (op1, UNSIGNED)
15226 : TYPE_PRECISION (inner1);
15228 return precision0 + precision1 < TYPE_PRECISION (type);
15229 }
15230 }
15231 return false;
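/* Editorial illustration, not in the original sources: an unsigned
   char times an unsigned char widened to a 32-bit int needs at most
   8 + 8 == 16 bits (255 * 255 == 65025), so the product is provably
   nonnegative; for INTEGER_CST operands the tighter
   tree_int_cst_min_precision bound is used instead of the type
   precision.  */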
15233 case BIT_AND_EXPR:
15234 case MAX_EXPR:
15235 return (tree_expr_nonnegative_warnv_p (op0,
15236 strict_overflow_p)
15237 || tree_expr_nonnegative_warnv_p (op1,
15238 strict_overflow_p));
15240 case BIT_IOR_EXPR:
15241 case BIT_XOR_EXPR:
15242 case MIN_EXPR:
15243 case RDIV_EXPR:
15244 case TRUNC_DIV_EXPR:
15245 case CEIL_DIV_EXPR:
15246 case FLOOR_DIV_EXPR:
15247 case ROUND_DIV_EXPR:
15248 return (tree_expr_nonnegative_warnv_p (op0,
15249 strict_overflow_p)
15250 && tree_expr_nonnegative_warnv_p (op1,
15251 strict_overflow_p));
15253 case TRUNC_MOD_EXPR:
15254 case CEIL_MOD_EXPR:
15255 case FLOOR_MOD_EXPR:
15256 case ROUND_MOD_EXPR:
15257 return tree_expr_nonnegative_warnv_p (op0,
15258 strict_overflow_p);
15259 default:
15260 return tree_simple_nonnegative_warnv_p (code, type);
15261 }
15263 /* We don't know the sign of `t', so be conservative and return false.  */
15264 return false;
15265 }
15267 /* Return true if T is known to be non-negative. If the return
15268 value is based on the assumption that signed overflow is undefined,
15269 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15270 *STRICT_OVERFLOW_P. */
15272 bool
15273 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15274 {
15275 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15276 return true;
15278 switch (TREE_CODE (t))
15279 {
15280 case INTEGER_CST:
15281 return tree_int_cst_sgn (t) >= 0;
15283 case REAL_CST:
15284 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15286 case FIXED_CST:
15287 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15289 case COND_EXPR:
15290 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15291 strict_overflow_p)
15292 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15293 strict_overflow_p));
15294 default:
15295 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15296 TREE_TYPE (t));
15297 }
15298 /* We don't know the sign of `t', so be conservative and return false.  */
15299 return false;
15300 }
15302 /* Return true if T is known to be non-negative. If the return
15303 value is based on the assumption that signed overflow is undefined,
15304 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15305 *STRICT_OVERFLOW_P. */
15307 bool
15308 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15309 tree arg0, tree arg1, bool *strict_overflow_p)
15310 {
15311 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15312 switch (DECL_FUNCTION_CODE (fndecl))
15313 {
15314 CASE_FLT_FN (BUILT_IN_ACOS):
15315 CASE_FLT_FN (BUILT_IN_ACOSH):
15316 CASE_FLT_FN (BUILT_IN_CABS):
15317 CASE_FLT_FN (BUILT_IN_COSH):
15318 CASE_FLT_FN (BUILT_IN_ERFC):
15319 CASE_FLT_FN (BUILT_IN_EXP):
15320 CASE_FLT_FN (BUILT_IN_EXP10):
15321 CASE_FLT_FN (BUILT_IN_EXP2):
15322 CASE_FLT_FN (BUILT_IN_FABS):
15323 CASE_FLT_FN (BUILT_IN_FDIM):
15324 CASE_FLT_FN (BUILT_IN_HYPOT):
15325 CASE_FLT_FN (BUILT_IN_POW10):
15326 CASE_INT_FN (BUILT_IN_FFS):
15327 CASE_INT_FN (BUILT_IN_PARITY):
15328 CASE_INT_FN (BUILT_IN_POPCOUNT):
15329 CASE_INT_FN (BUILT_IN_CLZ):
15330 CASE_INT_FN (BUILT_IN_CLRSB):
15331 case BUILT_IN_BSWAP32:
15332 case BUILT_IN_BSWAP64:
15333 /* Always true. */
15334 return true;
15336 CASE_FLT_FN (BUILT_IN_SQRT):
15337 /* sqrt(-0.0) is -0.0. */
15338 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15339 return true;
15340 return tree_expr_nonnegative_warnv_p (arg0,
15341 strict_overflow_p);
15343 CASE_FLT_FN (BUILT_IN_ASINH):
15344 CASE_FLT_FN (BUILT_IN_ATAN):
15345 CASE_FLT_FN (BUILT_IN_ATANH):
15346 CASE_FLT_FN (BUILT_IN_CBRT):
15347 CASE_FLT_FN (BUILT_IN_CEIL):
15348 CASE_FLT_FN (BUILT_IN_ERF):
15349 CASE_FLT_FN (BUILT_IN_EXPM1):
15350 CASE_FLT_FN (BUILT_IN_FLOOR):
15351 CASE_FLT_FN (BUILT_IN_FMOD):
15352 CASE_FLT_FN (BUILT_IN_FREXP):
15353 CASE_FLT_FN (BUILT_IN_ICEIL):
15354 CASE_FLT_FN (BUILT_IN_IFLOOR):
15355 CASE_FLT_FN (BUILT_IN_IRINT):
15356 CASE_FLT_FN (BUILT_IN_IROUND):
15357 CASE_FLT_FN (BUILT_IN_LCEIL):
15358 CASE_FLT_FN (BUILT_IN_LDEXP):
15359 CASE_FLT_FN (BUILT_IN_LFLOOR):
15360 CASE_FLT_FN (BUILT_IN_LLCEIL):
15361 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15362 CASE_FLT_FN (BUILT_IN_LLRINT):
15363 CASE_FLT_FN (BUILT_IN_LLROUND):
15364 CASE_FLT_FN (BUILT_IN_LRINT):
15365 CASE_FLT_FN (BUILT_IN_LROUND):
15366 CASE_FLT_FN (BUILT_IN_MODF):
15367 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15368 CASE_FLT_FN (BUILT_IN_RINT):
15369 CASE_FLT_FN (BUILT_IN_ROUND):
15370 CASE_FLT_FN (BUILT_IN_SCALB):
15371 CASE_FLT_FN (BUILT_IN_SCALBLN):
15372 CASE_FLT_FN (BUILT_IN_SCALBN):
15373 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15374 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15375 CASE_FLT_FN (BUILT_IN_SINH):
15376 CASE_FLT_FN (BUILT_IN_TANH):
15377 CASE_FLT_FN (BUILT_IN_TRUNC):
15378 /* True if the 1st argument is nonnegative. */
15379 return tree_expr_nonnegative_warnv_p (arg0,
15380 strict_overflow_p);
15382 CASE_FLT_FN (BUILT_IN_FMAX):
15383 /* True if the 1st OR 2nd arguments are nonnegative. */
15384 return (tree_expr_nonnegative_warnv_p (arg0,
15385 strict_overflow_p)
15386 || (tree_expr_nonnegative_warnv_p (arg1,
15387 strict_overflow_p)));
15389 CASE_FLT_FN (BUILT_IN_FMIN):
15390 /* True if the 1st AND 2nd arguments are nonnegative. */
15391 return (tree_expr_nonnegative_warnv_p (arg0,
15392 strict_overflow_p)
15393 && (tree_expr_nonnegative_warnv_p (arg1,
15394 strict_overflow_p)));
15396 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15397 /* True if the 2nd argument is nonnegative. */
15398 return tree_expr_nonnegative_warnv_p (arg1,
15399 strict_overflow_p);
15401 CASE_FLT_FN (BUILT_IN_POWI):
15402 /* True if the 1st argument is nonnegative or the second
15403 argument is an even integer. */
15404 if (TREE_CODE (arg1) == INTEGER_CST
15405 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15406 return true;
15407 return tree_expr_nonnegative_warnv_p (arg0,
15408 strict_overflow_p);
15410 CASE_FLT_FN (BUILT_IN_POW):
15411 /* True if the 1st argument is nonnegative or the second
15412 argument is an even integer valued real. */
15413 if (TREE_CODE (arg1) == REAL_CST)
15414 {
15415 REAL_VALUE_TYPE c;
15416 HOST_WIDE_INT n;
15418 c = TREE_REAL_CST (arg1);
15419 n = real_to_integer (&c);
15420 if ((n & 1) == 0)
15421 {
15422 REAL_VALUE_TYPE cint;
15423 real_from_integer (&cint, VOIDmode, n, SIGNED);
15424 if (real_identical (&c, &cint))
15425 return true;
15426 }
15427 }
15428 return tree_expr_nonnegative_warnv_p (arg0,
15429 strict_overflow_p);
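/* Editorial illustration, not in the original sources: pow (x, 2.0)
   is known nonnegative for any x because the exponent is an even
   integer-valued real, whereas pow (x, 2.5) is nonnegative only when
   x itself can be shown nonnegative.  */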
15431 default:
15432 break;
15433 }
15434 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15435 type);
15436 }
15438 /* Return true if T is known to be non-negative. If the return
15439 value is based on the assumption that signed overflow is undefined,
15440 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15441 *STRICT_OVERFLOW_P. */
15443 static bool
15444 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15445 {
15446 enum tree_code code = TREE_CODE (t);
15447 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15448 return true;
15450 switch (code)
15451 {
15452 case TARGET_EXPR:
15453 {
15454 tree temp = TARGET_EXPR_SLOT (t);
15455 t = TARGET_EXPR_INITIAL (t);
15457 /* If the initializer is non-void, then it's a normal expression
15458 that will be assigned to the slot.  */
15459 if (!VOID_TYPE_P (t))
15460 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15462 /* Otherwise, the initializer sets the slot in some way.  One common
15463 way is an assignment statement at the end of the initializer.  */
15464 while (1)
15465 {
15466 if (TREE_CODE (t) == BIND_EXPR)
15467 t = expr_last (BIND_EXPR_BODY (t));
15468 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15469 || TREE_CODE (t) == TRY_CATCH_EXPR)
15470 t = expr_last (TREE_OPERAND (t, 0));
15471 else if (TREE_CODE (t) == STATEMENT_LIST)
15472 t = expr_last (t);
15473 else
15474 break;
15475 }
15476 if (TREE_CODE (t) == MODIFY_EXPR
15477 && TREE_OPERAND (t, 0) == temp)
15478 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15479 strict_overflow_p);
15481 return false;
15482 }
15484 case CALL_EXPR:
15485 {
15486 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15487 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15489 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15490 get_callee_fndecl (t),
15491 arg0,
15492 arg1,
15493 strict_overflow_p);
15494 }
15495 case COMPOUND_EXPR:
15496 case MODIFY_EXPR:
15497 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15498 strict_overflow_p);
15499 case BIND_EXPR:
15500 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15501 strict_overflow_p);
15502 case SAVE_EXPR:
15503 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15504 strict_overflow_p);
15506 default:
15507 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15508 TREE_TYPE (t));
15509 }
15511 /* We don't know the sign of `t', so be conservative and return false.  */
15512 return false;
15513 }
15515 /* Return true if T is known to be non-negative. If the return
15516 value is based on the assumption that signed overflow is undefined,
15517 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15518 *STRICT_OVERFLOW_P. */
15520 bool
15521 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15522 {
15523 enum tree_code code;
15524 if (t == error_mark_node)
15525 return false;
15527 code = TREE_CODE (t);
15528 switch (TREE_CODE_CLASS (code))
15529 {
15530 case tcc_binary:
15531 case tcc_comparison:
15532 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15533 TREE_TYPE (t),
15534 TREE_OPERAND (t, 0),
15535 TREE_OPERAND (t, 1),
15536 strict_overflow_p);
15538 case tcc_unary:
15539 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15540 TREE_TYPE (t),
15541 TREE_OPERAND (t, 0),
15542 strict_overflow_p);
15544 case tcc_constant:
15545 case tcc_declaration:
15546 case tcc_reference:
15547 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15549 default:
15550 break;
15551 }
15553 switch (code)
15554 {
15555 case TRUTH_AND_EXPR:
15556 case TRUTH_OR_EXPR:
15557 case TRUTH_XOR_EXPR:
15558 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15559 TREE_TYPE (t),
15560 TREE_OPERAND (t, 0),
15561 TREE_OPERAND (t, 1),
15562 strict_overflow_p);
15563 case TRUTH_NOT_EXPR:
15564 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15565 TREE_TYPE (t),
15566 TREE_OPERAND (t, 0),
15567 strict_overflow_p);
15569 case COND_EXPR:
15570 case CONSTRUCTOR:
15571 case OBJ_TYPE_REF:
15572 case ASSERT_EXPR:
15573 case ADDR_EXPR:
15574 case WITH_SIZE_EXPR:
15575 case SSA_NAME:
15576 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15578 default:
15579 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15580 }
15581 }
15583 /* Return true if `t' is known to be non-negative. Handle warnings
15584 about undefined signed overflow. */
15586 bool
15587 tree_expr_nonnegative_p (tree t)
15588 {
15589 bool ret, strict_overflow_p;
15591 strict_overflow_p = false;
15592 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15593 if (strict_overflow_p)
15594 fold_overflow_warning (("assuming signed overflow does not occur when "
15595 "determining that expression is always "
15596 "non-negative"),
15597 WARN_STRICT_OVERFLOW_MISC);
15598 return ret;
15599 }
15602 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15603 For floating point we further ensure that T is not denormal.
15604 Similar logic is present in nonzero_address in rtlanal.c.
15606 If the return value is based on the assumption that signed overflow
15607 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15608 change *STRICT_OVERFLOW_P. */
15610 bool
15611 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15612 bool *strict_overflow_p)
15613 {
15614 switch (code)
15615 {
15616 case ABS_EXPR:
15617 return tree_expr_nonzero_warnv_p (op0,
15618 strict_overflow_p);
15620 case NOP_EXPR:
15621 {
15622 tree inner_type = TREE_TYPE (op0);
15623 tree outer_type = type;
15625 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15626 && tree_expr_nonzero_warnv_p (op0,
15627 strict_overflow_p));
15628 }
15629 break;
15631 case NON_LVALUE_EXPR:
15632 return tree_expr_nonzero_warnv_p (op0,
15633 strict_overflow_p);
15635 default:
15636 break;
15637 }
15639 return false;
15640 }
15642 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15643 For floating point we further ensure that T is not denormal.
15644 Similar logic is present in nonzero_address in rtlanal.c.
15646 If the return value is based on the assumption that signed overflow
15647 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15648 change *STRICT_OVERFLOW_P. */
15650 bool
15651 tree_binary_nonzero_warnv_p (enum tree_code code,
15652 tree type,
15653 tree op0,
15654 tree op1, bool *strict_overflow_p)
15655 {
15656 bool sub_strict_overflow_p;
15657 switch (code)
15658 {
15659 case POINTER_PLUS_EXPR:
15660 case PLUS_EXPR:
15661 if (TYPE_OVERFLOW_UNDEFINED (type))
15662 {
15663 /* In the presence of negative values it is hard
15664 to say anything.  */
15665 sub_strict_overflow_p = false;
15666 if (!tree_expr_nonnegative_warnv_p (op0,
15667 &sub_strict_overflow_p)
15668 || !tree_expr_nonnegative_warnv_p (op1,
15669 &sub_strict_overflow_p))
15670 return false;
15671 /* One of operands must be positive and the other non-negative.  */
15672 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15673 overflows, on a twos-complement machine the sum of two
15674 nonnegative numbers can never be zero.  */
15675 return (tree_expr_nonzero_warnv_p (op0,
15676 strict_overflow_p)
15677 || tree_expr_nonzero_warnv_p (op1,
15678 strict_overflow_p));
15679 }
15680 break;
15682 case MULT_EXPR:
15683 if (TYPE_OVERFLOW_UNDEFINED (type))
15684 {
15685 if (tree_expr_nonzero_warnv_p (op0,
15686 strict_overflow_p)
15687 && tree_expr_nonzero_warnv_p (op1,
15688 strict_overflow_p))
15689 {
15690 *strict_overflow_p = true;
15691 return true;
15692 }
15693 }
15694 break;
15696 case MIN_EXPR:
15697 sub_strict_overflow_p = false;
15698 if (tree_expr_nonzero_warnv_p (op0,
15699 &sub_strict_overflow_p)
15700 && tree_expr_nonzero_warnv_p (op1,
15701 &sub_strict_overflow_p))
15702 {
15703 if (sub_strict_overflow_p)
15704 *strict_overflow_p = true;
15705 }
15706 break;
15708 case MAX_EXPR:
15709 sub_strict_overflow_p = false;
15710 if (tree_expr_nonzero_warnv_p (op0,
15711 &sub_strict_overflow_p))
15712 {
15713 if (sub_strict_overflow_p)
15714 *strict_overflow_p = true;
15716 /* When both operands are nonzero, then MAX must be too.  */
15717 if (tree_expr_nonzero_warnv_p (op1,
15718 strict_overflow_p))
15719 return true;
15721 /* MAX where operand 0 is positive is positive.  */
15722 return tree_expr_nonnegative_warnv_p (op0,
15723 strict_overflow_p);
15724 }
15725 /* MAX where operand 1 is positive is positive.  */
15726 else if (tree_expr_nonzero_warnv_p (op1,
15727 &sub_strict_overflow_p)
15728 && tree_expr_nonnegative_warnv_p (op1,
15729 &sub_strict_overflow_p))
15730 {
15731 if (sub_strict_overflow_p)
15732 *strict_overflow_p = true;
15733 return true;
15734 }
15735 break;
15737 case BIT_IOR_EXPR:
15738 return (tree_expr_nonzero_warnv_p (op1,
15739 strict_overflow_p)
15740 || tree_expr_nonzero_warnv_p (op0,
15741 strict_overflow_p));
15743 default:
15744 break;
15747 return false;
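/* Editorial illustration, not in the original source: a sketch of the
   PLUS_EXPR rule above, assuming signed overflow is undefined for
   integer_type_node (the default without -fwrapv). Two nonnegative
   operands, at least one nonzero, give a nonzero sum; illustration
   only, hence the #if 0. */
#if 0
static void
example_binary_nonzero (void)
{
  bool sow = false;
  tree one = build_int_cst (integer_type_node, 1);
  tree two = build_int_cst (integer_type_node, 2);
  /* 1 + 2: both operands nonnegative and nonzero, so the sum is
     nonzero; *strict_overflow_p stays false, per the comment above.  */
  gcc_assert (tree_binary_nonzero_warnv_p (PLUS_EXPR, integer_type_node,
					   one, two, &sow));
}
#endif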
15750 /* Return true when T is known to be nonzero; typically T is an address.
15751 For floating point we further ensure that T is not denormal.
15752 Similar logic is present in nonzero_address in rtlanal.h.
15754 If the return value is based on the assumption that signed overflow
15755 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15756 change *STRICT_OVERFLOW_P. */
15758 bool
15759 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15761 bool sub_strict_overflow_p;
15762 switch (TREE_CODE (t))
15764 case INTEGER_CST:
15765 return !integer_zerop (t);
15767 case ADDR_EXPR:
15769 tree base = TREE_OPERAND (t, 0);
15771 if (!DECL_P (base))
15772 base = get_base_address (base);
15774 if (!base)
15775 return false;
15777 /* For objects in the symbol table, check whether we know they are nonzero.
15778 Don't do anything for variables and functions before the symtab is built;
15779 it is quite possible that they will be declared weak later. */
15780 if (DECL_P (base) && decl_in_symtab_p (base))
15782 struct symtab_node *symbol;
15784 symbol = symtab_node::get_create (base);
15785 if (symbol)
15786 return symbol->nonzero_address ();
15787 else
15788 return false;
15791 /* Function local objects are never NULL. */
15792 if (DECL_P (base)
15793 && (DECL_CONTEXT (base)
15794 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15795 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15796 return true;
15798 /* Constants are never weak. */
15799 if (CONSTANT_CLASS_P (base))
15800 return true;
15802 return false;
15805 case COND_EXPR:
15806 sub_strict_overflow_p = false;
15807 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15808 &sub_strict_overflow_p)
15809 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15810 &sub_strict_overflow_p))
15812 if (sub_strict_overflow_p)
15813 *strict_overflow_p = true;
15814 return true;
15816 break;
15818 default:
15819 break;
15821 return false;
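/* Editorial illustration, not in the original source: the simplest
   cases above, exercised directly. A nonzero INTEGER_CST is trivially
   nonzero and a zero one is not; the sketch assumes the global
   integer_zero_node from tree.h. */
#if 0
static void
example_single_nonzero (void)
{
  bool sow = false;
  gcc_assert (tree_single_nonzero_warnv_p
		(build_int_cst (integer_type_node, 42), &sow));
  gcc_assert (!tree_single_nonzero_warnv_p (integer_zero_node, &sow));
}
#endif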
15824 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15825 attempt to fold the expression to a constant without modifying TYPE,
15826 OP0 or OP1.
15828 If the expression can be simplified to a constant, then return
15829 the constant. If the expression cannot be simplified to a
15830 constant, then return NULL_TREE. */
15832 tree
15833 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15835 tree tem = fold_binary (code, type, op0, op1);
15836 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15839 /* Given the components of a unary expression CODE, TYPE and OP0,
15840 attempt to fold the expression to a constant without modifying
15841 TYPE or OP0.
15843 If the expression can be simplified to a constant, then return
15844 the constant. If the expression cannot be simplified to a
15845 constant, then return NULL_TREE. */
15847 tree
15848 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15850 tree tem = fold_unary (code, type, op0);
15851 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
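/* Editorial illustration, not in the original source: a minimal sketch
   of both fold_*_to_constant entry points. Constant operands fold to
   an INTEGER_CST; anything that does not fold to a constant yields
   NULL_TREE. */
#if 0
static void
example_fold_to_constant (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				      two, three);
  tree neg = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, two);
  gcc_assert (sum && tree_to_shwi (sum) == 5);	/* 2 + 3 -> 5 */
  gcc_assert (neg && tree_to_shwi (neg) == -2);	/* -(2) -> -2 */
}
#endif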
15854 /* If EXP represents referencing an element in a constant string
15855 (either via pointer arithmetic or array indexing), return the
15856 tree representing the value accessed, otherwise return NULL. */
15858 tree
15859 fold_read_from_constant_string (tree exp)
15861 if ((TREE_CODE (exp) == INDIRECT_REF
15862 || TREE_CODE (exp) == ARRAY_REF)
15863 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15865 tree exp1 = TREE_OPERAND (exp, 0);
15866 tree index;
15867 tree string;
15868 location_t loc = EXPR_LOCATION (exp);
15870 if (TREE_CODE (exp) == INDIRECT_REF)
15871 string = string_constant (exp1, &index);
15872 else
15874 tree low_bound = array_ref_low_bound (exp);
15875 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15877 /* Optimize the special case of a zero lower bound.
15879 We convert the low_bound to sizetype to avoid some problems
15880 with constant folding. (E.g. suppose the lower bound is 1,
15881 and its mode is QI. Without the conversion, (ARRAY
15882 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15883 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15884 if (! integer_zerop (low_bound))
15885 index = size_diffop_loc (loc, index,
15886 fold_convert_loc (loc, sizetype, low_bound));
15888 string = exp1;
15891 if (string
15892 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15893 && TREE_CODE (string) == STRING_CST
15894 && TREE_CODE (index) == INTEGER_CST
15895 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15896 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15897 == MODE_INT)
15898 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15899 return build_int_cst_type (TREE_TYPE (exp),
15900 (TREE_STRING_POINTER (string)
15901 [TREE_INT_CST_LOW (index)]));
15903 return NULL;
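/* Editorial illustration, not in the original source: the C-level
   effect of the folding above. Indexing a string literal with a
   constant index within the string's length reads the character
   straight out of the STRING_CST at compile time. */
#if 0
static int
example_read_from_constant_string (void)
{
  return "abc"[1];	/* folded to the character constant 'b' (98) */
}
#endif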
15906 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15907 an integer constant, real, or fixed-point constant.
15909 TYPE is the type of the result. */
15911 static tree
15912 fold_negate_const (tree arg0, tree type)
15914 tree t = NULL_TREE;
15916 switch (TREE_CODE (arg0))
15918 case INTEGER_CST:
15920 bool overflow;
15921 wide_int val = wi::neg (arg0, &overflow);
15922 t = force_fit_type (type, val, 1,
15923 (overflow | TREE_OVERFLOW (arg0))
15924 && !TYPE_UNSIGNED (type));
15925 break;
15928 case REAL_CST:
15929 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15930 break;
15932 case FIXED_CST:
15934 FIXED_VALUE_TYPE f;
15935 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15936 &(TREE_FIXED_CST (arg0)), NULL,
15937 TYPE_SATURATING (type));
15938 t = build_fixed (type, f);
15939 /* Propagate overflow flags. */
15940 if (overflow_p | TREE_OVERFLOW (arg0))
15941 TREE_OVERFLOW (t) = 1;
15942 break;
15945 default:
15946 gcc_unreachable ();
15949 return t;
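/* Editorial illustration, not in the original source: negating the most
   negative value of a signed type wraps, so the INTEGER_CST case above
   marks the result with TREE_OVERFLOW. A sketch assuming a 32-bit
   integer_type_node. */
#if 0
static void
example_fold_negate (void)
{
  tree min = build_int_cst (integer_type_node, INT_MIN);
  tree neg = fold_negate_const (min, integer_type_node);
  gcc_assert (TREE_OVERFLOW (neg));	/* -INT_MIN is not representable */
}
#endif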
15952 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15953 an integer constant or real constant.
15955 TYPE is the type of the result. */
15957 tree
15958 fold_abs_const (tree arg0, tree type)
15960 tree t = NULL_TREE;
15962 switch (TREE_CODE (arg0))
15964 case INTEGER_CST:
15966 /* If the value is unsigned or non-negative, then the absolute value
15967 is the same as the ordinary value. */
15968 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15969 t = arg0;
15971 /* If the value is negative, then the absolute value is
15972 its negation. */
15973 else
15975 bool overflow;
15976 wide_int val = wi::neg (arg0, &overflow);
15977 t = force_fit_type (type, val, -1,
15978 overflow | TREE_OVERFLOW (arg0));
15981 break;
15983 case REAL_CST:
15984 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15985 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15986 else
15987 t = arg0;
15988 break;
15990 default:
15991 gcc_unreachable ();
15994 return t;
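/* Editorial illustration, not in the original source: the same boundary
   case for abs. |INT_MIN| is not representable in a signed 32-bit
   type, so the negation path above propagates the overflow flag. */
#if 0
static void
example_fold_abs (void)
{
  tree min = build_int_cst (integer_type_node, INT_MIN);
  tree abs = fold_abs_const (min, integer_type_node);
  gcc_assert (TREE_OVERFLOW (abs));
}
#endif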
15997 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15998 constant. TYPE is the type of the result. */
16000 static tree
16001 fold_not_const (const_tree arg0, tree type)
16003 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16005 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
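/* Editorial illustration, not in the original source: bitwise-not of
   the zero constant gives the all-ones value, i.e. -1 in a signed
   type. */
#if 0
static void
example_fold_not (void)
{
  tree t = fold_not_const (integer_zero_node, integer_type_node);
  gcc_assert (integer_all_onesp (t));	/* ~0 == -1 */
}
#endif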
16008 /* Given CODE, a relational operator, the target type, TYPE and two
16009 constant operands OP0 and OP1, return the result of the
16010 relational operation. If the result is not a compile time
16011 constant, then return NULL_TREE. */
16013 static tree
16014 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16016 int result, invert;
16018 /* From here on, the only cases we handle are when the result is
16019 known to be a constant. */
16021 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16023 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16024 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16026 /* Handle the cases where either operand is a NaN. */
16027 if (real_isnan (c0) || real_isnan (c1))
16029 switch (code)
16031 case EQ_EXPR:
16032 case ORDERED_EXPR:
16033 result = 0;
16034 break;
16036 case NE_EXPR:
16037 case UNORDERED_EXPR:
16038 case UNLT_EXPR:
16039 case UNLE_EXPR:
16040 case UNGT_EXPR:
16041 case UNGE_EXPR:
16042 case UNEQ_EXPR:
16043 result = 1;
16044 break;
16046 case LT_EXPR:
16047 case LE_EXPR:
16048 case GT_EXPR:
16049 case GE_EXPR:
16050 case LTGT_EXPR:
16051 if (flag_trapping_math)
16052 return NULL_TREE;
16053 result = 0;
16054 break;
16056 default:
16057 gcc_unreachable ();
16060 return constant_boolean_node (result, type);
16063 return constant_boolean_node (real_compare (code, c0, c1), type);
16066 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16068 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16069 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16070 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16073 /* Handle equality/inequality of complex constants. */
16074 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16076 tree rcond = fold_relational_const (code, type,
16077 TREE_REALPART (op0),
16078 TREE_REALPART (op1));
16079 tree icond = fold_relational_const (code, type,
16080 TREE_IMAGPART (op0),
16081 TREE_IMAGPART (op1));
16082 if (code == EQ_EXPR)
16083 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16084 else if (code == NE_EXPR)
16085 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16086 else
16087 return NULL_TREE;
16090 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16092 unsigned count = VECTOR_CST_NELTS (op0);
16093 tree *elts = XALLOCAVEC (tree, count);
16094 gcc_assert (VECTOR_CST_NELTS (op1) == count
16095 && TYPE_VECTOR_SUBPARTS (type) == count);
16097 for (unsigned i = 0; i < count; i++)
16099 tree elem_type = TREE_TYPE (type);
16100 tree elem0 = VECTOR_CST_ELT (op0, i);
16101 tree elem1 = VECTOR_CST_ELT (op1, i);
16103 tree tem = fold_relational_const (code, elem_type,
16104 elem0, elem1);
16106 if (tem == NULL_TREE)
16107 return NULL_TREE;
16109 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16112 return build_vector (type, elts);
16115 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16117 To compute GT, swap the arguments and do LT.
16118 To compute GE, do LT and invert the result.
16119 To compute LE, swap the arguments, do LT and invert the result.
16120 To compute NE, do EQ and invert the result.
16122 Therefore, the code below must handle only EQ and LT. */
16124 if (code == LE_EXPR || code == GT_EXPR)
16126 tree tem = op0;
16127 op0 = op1;
16128 op1 = tem;
16129 code = swap_tree_comparison (code);
16132 /* Note that it is safe to invert for real values here because we
16133 have already handled the one case where it matters. */
16135 invert = 0;
16136 if (code == NE_EXPR || code == GE_EXPR)
16138 invert = 1;
16139 code = invert_tree_comparison (code, false);
16142 /* Compute a result for LT or EQ if args permit;
16143 otherwise return NULL_TREE. */
16144 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16146 if (code == EQ_EXPR)
16147 result = tree_int_cst_equal (op0, op1);
16148 else
16149 result = tree_int_cst_lt (op0, op1);
16151 else
16152 return NULL_TREE;
16154 if (invert)
16155 result ^= 1;
16156 return constant_boolean_node (result, type);
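/* Editorial illustration, not in the original source: the NaN handling
   above, exercised directly. EQ against a NaN folds to false and NE
   to true, while the signalling comparisons (LT, LE, GT, GE, LTGT)
   are left unfolded when -ftrapping-math is in effect. A sketch using
   real_nan from real.h. */
#if 0
static void
example_relational_nan (void)
{
  REAL_VALUE_TYPE r;
  real_nan (&r, "", 1, TYPE_MODE (double_type_node));
  tree nan = build_real (double_type_node, r);
  tree eq = fold_relational_const (EQ_EXPR, boolean_type_node, nan, nan);
  tree ne = fold_relational_const (NE_EXPR, boolean_type_node, nan, nan);
  gcc_assert (integer_zerop (eq));	/* NaN == NaN is false */
  gcc_assert (integer_onep (ne));	/* NaN != NaN is true */
}
#endif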
16159 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16160 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16161 itself. */
16163 tree
16164 fold_build_cleanup_point_expr (tree type, tree expr)
16166 /* If the expression does not have side effects then we don't have to wrap
16167 it with a cleanup point expression. */
16168 if (!TREE_SIDE_EFFECTS (expr))
16169 return expr;
16171 /* If the expression is a return, check whether the expression inside
16172 the return, or the right-hand side of the modify expression inside
16173 the return, has side effects. If either has none, we don't need to
16174 wrap the expression in a cleanup point expression. Note we don't check
16175 the left-hand side of the modify because it should always be the return decl. */
16176 if (TREE_CODE (expr) == RETURN_EXPR)
16178 tree op = TREE_OPERAND (expr, 0);
16179 if (!op || !TREE_SIDE_EFFECTS (op))
16180 return expr;
16181 op = TREE_OPERAND (op, 1);
16182 if (!TREE_SIDE_EFFECTS (op))
16183 return expr;
16186 return build1 (CLEANUP_POINT_EXPR, type, expr);
16189 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16190 of an indirection through OP0, or NULL_TREE if no simplification is
16191 possible. */
16193 tree
16194 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16196 tree sub = op0;
16197 tree subtype;
16199 STRIP_NOPS (sub);
16200 subtype = TREE_TYPE (sub);
16201 if (!POINTER_TYPE_P (subtype))
16202 return NULL_TREE;
16204 if (TREE_CODE (sub) == ADDR_EXPR)
16206 tree op = TREE_OPERAND (sub, 0);
16207 tree optype = TREE_TYPE (op);
16208 /* *&CONST_DECL -> to the value of the const decl. */
16209 if (TREE_CODE (op) == CONST_DECL)
16210 return DECL_INITIAL (op);
16211 /* *&p => p; make sure to handle *&"str"[cst] here. */
16212 if (type == optype)
16214 tree fop = fold_read_from_constant_string (op);
16215 if (fop)
16216 return fop;
16217 else
16218 return op;
16220 /* *(foo *)&fooarray => fooarray[0] */
16221 else if (TREE_CODE (optype) == ARRAY_TYPE
16222 && type == TREE_TYPE (optype)
16223 && (!in_gimple_form
16224 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16226 tree type_domain = TYPE_DOMAIN (optype);
16227 tree min_val = size_zero_node;
16228 if (type_domain && TYPE_MIN_VALUE (type_domain))
16229 min_val = TYPE_MIN_VALUE (type_domain);
16230 if (in_gimple_form
16231 && TREE_CODE (min_val) != INTEGER_CST)
16232 return NULL_TREE;
16233 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16234 NULL_TREE, NULL_TREE);
16236 /* *(foo *)&complexfoo => __real__ complexfoo */
16237 else if (TREE_CODE (optype) == COMPLEX_TYPE
16238 && type == TREE_TYPE (optype))
16239 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16240 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16241 else if (TREE_CODE (optype) == VECTOR_TYPE
16242 && type == TREE_TYPE (optype))
16244 tree part_width = TYPE_SIZE (type);
16245 tree index = bitsize_int (0);
16246 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16250 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16251 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16253 tree op00 = TREE_OPERAND (sub, 0);
16254 tree op01 = TREE_OPERAND (sub, 1);
16256 STRIP_NOPS (op00);
16257 if (TREE_CODE (op00) == ADDR_EXPR)
16259 tree op00type;
16260 op00 = TREE_OPERAND (op00, 0);
16261 op00type = TREE_TYPE (op00);
16263 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16264 if (TREE_CODE (op00type) == VECTOR_TYPE
16265 && type == TREE_TYPE (op00type))
16267 HOST_WIDE_INT offset = tree_to_shwi (op01);
16268 tree part_width = TYPE_SIZE (type);
16269 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
16270 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16271 tree index = bitsize_int (indexi);
16273 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16274 return fold_build3_loc (loc,
16275 BIT_FIELD_REF, type, op00,
16276 part_width, index);
16279 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16280 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16281 && type == TREE_TYPE (op00type))
16283 tree size = TYPE_SIZE_UNIT (type);
16284 if (tree_int_cst_equal (size, op01))
16285 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16287 /* ((foo *)&fooarray)[1] => fooarray[1] */
16288 else if (TREE_CODE (op00type) == ARRAY_TYPE
16289 && type == TREE_TYPE (op00type))
16291 tree type_domain = TYPE_DOMAIN (op00type);
16292 tree min_val = size_zero_node;
16293 if (type_domain && TYPE_MIN_VALUE (type_domain))
16294 min_val = TYPE_MIN_VALUE (type_domain);
16295 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16296 TYPE_SIZE_UNIT (type));
16297 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16298 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16299 NULL_TREE, NULL_TREE);
16304 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16305 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16306 && type == TREE_TYPE (TREE_TYPE (subtype))
16307 && (!in_gimple_form
16308 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16310 tree type_domain;
16311 tree min_val = size_zero_node;
16312 sub = build_fold_indirect_ref_loc (loc, sub);
16313 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16314 if (type_domain && TYPE_MIN_VALUE (type_domain))
16315 min_val = TYPE_MIN_VALUE (type_domain);
16316 if (in_gimple_form
16317 && TREE_CODE (min_val) != INTEGER_CST)
16318 return NULL_TREE;
16319 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16320 NULL_TREE);
16323 return NULL_TREE;
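/* Editorial illustration, not in the original source: the source-level
   shapes the simplifications above correspond to, written as plain
   GNU C. */
#if 0
static int
example_fold_indirect_ref (void)
{
  int fooarray[4] = { 1, 2, 3, 4 };
  _Complex double complexfoo = 1.0;
  int a = *(int *) &fooarray;		/* => fooarray[0] */
  int b = ((int *) &fooarray)[1];	/* => fooarray[1] */
  double c = *(double *) &complexfoo;	/* => __real__ complexfoo */
  return a + b + (int) c;
}
#endif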
16326 /* Builds an expression for an indirection through T, simplifying some
16327 cases. */
16329 tree
16330 build_fold_indirect_ref_loc (location_t loc, tree t)
16332 tree type = TREE_TYPE (TREE_TYPE (t));
16333 tree sub = fold_indirect_ref_1 (loc, type, t);
16335 if (sub)
16336 return sub;
16338 return build1_loc (loc, INDIRECT_REF, type, t);
16341 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16343 tree
16344 fold_indirect_ref_loc (location_t loc, tree t)
16346 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16348 if (sub)
16349 return sub;
16350 else
16351 return t;
16354 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16355 whose result is ignored. The type of the returned tree need not be
16356 the same as that of the original expression. */
16358 tree
16359 fold_ignored_result (tree t)
16361 if (!TREE_SIDE_EFFECTS (t))
16362 return integer_zero_node;
16364 for (;;)
16365 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16367 case tcc_unary:
16368 t = TREE_OPERAND (t, 0);
16369 break;
16371 case tcc_binary:
16372 case tcc_comparison:
16373 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16374 t = TREE_OPERAND (t, 0);
16375 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16376 t = TREE_OPERAND (t, 1);
16377 else
16378 return t;
16379 break;
16381 case tcc_expression:
16382 switch (TREE_CODE (t))
16384 case COMPOUND_EXPR:
16385 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16386 return t;
16387 t = TREE_OPERAND (t, 0);
16388 break;
16390 case COND_EXPR:
16391 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16392 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16393 return t;
16394 t = TREE_OPERAND (t, 0);
16395 break;
16397 default:
16398 return t;
16400 break;
16402 default:
16403 return t;
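/* Editorial illustration, not in the original source: when a value is
   ignored, only the side-effecting parts need to survive. The helper
   side_effect below is hypothetical, purely for the sketch. */
#if 0
extern int side_effect (void);		/* hypothetical */
static void
example_ignored_result (int x)
{
  /* fold_ignored_result reduces the ignored "x + side_effect ()"
     to just the call: the addition itself has no side effects.  */
  (void) (x + side_effect ());
}
#endif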
16407 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16409 tree
16410 round_up_loc (location_t loc, tree value, unsigned int divisor)
16412 tree div = NULL_TREE;
16414 if (divisor == 1)
16415 return value;
16417 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16418 have to do anything. Only do this when VALUE is not a constant,
16419 because for a constant this check is more expensive than just
16420 doing the rounding. */
16421 if (TREE_CODE (value) != INTEGER_CST)
16423 div = build_int_cst (TREE_TYPE (value), divisor);
16425 if (multiple_of_p (TREE_TYPE (value), value, div))
16426 return value;
16429 /* If divisor is a power of two, simplify this to bit manipulation. */
16430 if (divisor == (divisor & -divisor))
16432 if (TREE_CODE (value) == INTEGER_CST)
16434 wide_int val = value;
16435 bool overflow_p;
16437 if ((val & (divisor - 1)) == 0)
16438 return value;
16440 overflow_p = TREE_OVERFLOW (value);
16441 val &= ~(divisor - 1);
16442 val += divisor;
16443 if (val == 0)
16444 overflow_p = true;
16446 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16448 else
16450 tree t;
16452 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16453 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16454 t = build_int_cst (TREE_TYPE (value), -divisor);
16455 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16458 else
16460 if (!div)
16461 div = build_int_cst (TREE_TYPE (value), divisor);
16462 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16463 value = size_binop_loc (loc, MULT_EXPR, value, div);
16466 return value;
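/* Editorial illustration, not in the original source: the power-of-two
   path above is the classic mask trick, e.g. round_up (21, 8) == 24.
   The same identity in plain C: */
#if 0
static unsigned int
example_round_up_pow2 (unsigned int value, unsigned int divisor)
{
  /* DIVISOR must be a power of two.  */
  return (value + divisor - 1) & ~(divisor - 1);
}
#endif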
16469 /* Likewise, but round down. */
16471 tree
16472 round_down_loc (location_t loc, tree value, int divisor)
16474 tree div = NULL_TREE;
16476 gcc_assert (divisor > 0);
16477 if (divisor == 1)
16478 return value;
16480 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16481 have to do anything. Only do this when VALUE is not a constant,
16482 because for a constant this check is more expensive than just
16483 doing the rounding. */
16484 if (TREE_CODE (value) != INTEGER_CST)
16486 div = build_int_cst (TREE_TYPE (value), divisor);
16488 if (multiple_of_p (TREE_TYPE (value), value, div))
16489 return value;
16492 /* If divisor is a power of two, simplify this to bit manipulation. */
16493 if (divisor == (divisor & -divisor))
16495 tree t;
16497 t = build_int_cst (TREE_TYPE (value), -divisor);
16498 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16500 else
16502 if (!div)
16503 div = build_int_cst (TREE_TYPE (value), divisor);
16504 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16505 value = size_binop_loc (loc, MULT_EXPR, value, div);
16508 return value;
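/* Editorial illustration, not in the original source: rounding down to
   a power of two just masks off the low bits, e.g.
   round_down (21, 8) == 16. */
#if 0
static unsigned int
example_round_down_pow2 (unsigned int value, unsigned int divisor)
{
  /* DIVISOR must be a power of two.  */
  return value & ~(divisor - 1);
}
#endif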
16511 /* Returns the pointer to the base of the object addressed by EXP and
16512 extracts the information about the offset of the access, storing the
16513 bit offset in *PBITPOS and the variable offset in *POFFSET. */
16515 static tree
16516 split_address_to_core_and_offset (tree exp,
16517 HOST_WIDE_INT *pbitpos, tree *poffset)
16519 tree core;
16520 machine_mode mode;
16521 int unsignedp, volatilep;
16522 HOST_WIDE_INT bitsize;
16523 location_t loc = EXPR_LOCATION (exp);
16525 if (TREE_CODE (exp) == ADDR_EXPR)
16527 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16528 poffset, &mode, &unsignedp, &volatilep,
16529 false);
16530 core = build_fold_addr_expr_loc (loc, core);
16532 else
16534 core = exp;
16535 *pbitpos = 0;
16536 *poffset = NULL_TREE;
16539 return core;
16542 /* Returns true if addresses of E1 and E2 differ by a constant, false
16543 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16545 bool
16546 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16548 tree core1, core2;
16549 HOST_WIDE_INT bitpos1, bitpos2;
16550 tree toffset1, toffset2, tdiff, type;
16552 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16553 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16555 if (bitpos1 % BITS_PER_UNIT != 0
16556 || bitpos2 % BITS_PER_UNIT != 0
16557 || !operand_equal_p (core1, core2, 0))
16558 return false;
16560 if (toffset1 && toffset2)
16562 type = TREE_TYPE (toffset1);
16563 if (type != TREE_TYPE (toffset2))
16564 toffset2 = fold_convert (type, toffset2);
16566 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16567 if (!cst_and_fits_in_hwi (tdiff))
16568 return false;
16570 *diff = int_cst_value (tdiff);
16572 else if (toffset1 || toffset2)
16574 /* If only one of the offsets is non-constant, the difference cannot
16575 be a constant. */
16576 return false;
16578 else
16579 *diff = 0;
16581 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16582 return true;
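/* Editorial illustration, not in the original source: the source-level
   fact ptr_difference_const relies on. Two addresses with the same
   core and constant offsets differ by a compile-time constant. */
#if 0
static long
example_ptr_difference (void)
{
  static int a[10];
  /* &a[5] and &a[2] share the core "a"; the difference is always
     3 * sizeof (int) bytes.  */
  return (long) ((char *) &a[5] - (char *) &a[2]);
}
#endif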
16585 /* Simplify the floating point expression EXP when the sign of the
16586 result is not significant. Return NULL_TREE if no simplification
16587 is possible. */
16589 tree
16590 fold_strip_sign_ops (tree exp)
16592 tree arg0, arg1;
16593 location_t loc = EXPR_LOCATION (exp);
16595 switch (TREE_CODE (exp))
16597 case ABS_EXPR:
16598 case NEGATE_EXPR:
16599 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16600 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16602 case MULT_EXPR:
16603 case RDIV_EXPR:
16604 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16605 return NULL_TREE;
16606 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16607 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16608 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16609 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16610 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16611 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16612 break;
16614 case COMPOUND_EXPR:
16615 arg0 = TREE_OPERAND (exp, 0);
16616 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16617 if (arg1)
16618 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16619 break;
16621 case COND_EXPR:
16622 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16623 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16624 if (arg0 || arg1)
16625 return fold_build3_loc (loc,
16626 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16627 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16628 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16629 break;
16631 case CALL_EXPR:
16633 const enum built_in_function fcode = builtin_mathfn_code (exp);
16634 switch (fcode)
16636 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16637 /* Strip the copysign call and return its first argument. */
16638 arg0 = CALL_EXPR_ARG (exp, 0);
16639 arg1 = CALL_EXPR_ARG (exp, 1);
16640 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16642 default:
16643 /* Strip sign ops from the argument of "odd" math functions. */
16644 if (negate_mathfn_p (fcode))
16646 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16647 if (arg0)
16648 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16650 break;
16653 break;
16655 default:
16656 break;
16658 return NULL_TREE;
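/* Editorial illustration, not in the original source: inside fabs the
   sign of the argument is irrelevant, so negations and copysign calls
   in the argument can be stripped, as the MULT_EXPR and builtin cases
   above do. A plain-C view of the transformation: */
#if 0
static double
example_strip_sign_ops (double x, double y)
{
  /* Folds as if it were fabs (x * x): the copysign and the negation
     only affect the sign, which fabs discards.  */
  return __builtin_fabs (__builtin_copysign (x, y) * -x);
}
#endif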