/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "cgraph.h"
#include "generic-match.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
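
/* A minimal usage sketch (an illustrative addition, not part of the
   original sources); the constants below are chosen just for the
   example.  */
#if 0
  tree twelve = build_int_cst (integer_type_node, 12);
  tree four = build_int_cst (integer_type_node, 4);
  tree five = build_int_cst (integer_type_node, 5);

  tree q1 = div_if_zero_remainder (twelve, four); /* INTEGER_CST 3.  */
  tree q2 = div_if_zero_remainder (twelve, five); /* NULL_TREE: 12 % 5 != 0.  */
#endif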
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
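
/* Illustrative sketch (an addition, not part of the original file) of
   the intended calling pattern for the deferral machinery above: the
   caller brackets a fold that may exploit undefined signed overflow
   and only lets the warning fire if the folded result is used.  */
#if 0
static tree
fold_expr_deferring_warnings (tree expr, gimple stmt)
{
  fold_defer_overflow_warnings ();
  tree res = fold (expr);
  /* Issue the deferred warning only if folding changed something,
     i.e. the possibly overflow-dependent result is actually used.  */
  fold_undefer_overflow_warnings (res != expr, stmt, 0);
  return res;
}
#endif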
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
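
/* For example (illustrative addition): sine is odd, so the CALL_EXPR
   cases below can rewrite -sin (x) as sin (-x); cosine is even and is
   deliberately absent from the list above.  X here is a hypothetical
   operand standing in for a real expression.  */
#if 0
  tree sin_x = build_call_expr (builtin_decl_explicit (BUILT_IN_SIN), 1, x);
  tree folded = negate_expr (sin_x);  /* becomes sin (-x) */
#endif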
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
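
/* Illustrative sketch (not in the original sources): the minimum value
   of a signed type is the one constant whose negation is not
   representable in the same type.  */
#if 0
  tree int_min = TYPE_MIN_VALUE (integer_type_node);
  tree one = build_int_cst (integer_type_node, 1);
  gcc_assert (!may_negate_without_overflow_p (int_min)); /* -INT_MIN overflows.  */
  gcc_assert (may_negate_without_overflow_p (one));
#endif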
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
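
/* Worked example (an illustrative addition) for the PLUS_EXPR case
   above, using a hypothetical variable A: -(A + 5) folds to (-5) - A
   because the INTEGER_CST operand is cheap to negate.  */
#if 0
  tree a = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                       get_identifier ("a"), integer_type_node);
  tree sum = build2 (PLUS_EXPR, integer_type_node, a,
                     build_int_cst (integer_type_node, 5));
  tree neg = fold_negate_expr (UNKNOWN_LOCATION, sum);
  /* NEG is now MINUS_EXPR <-5, a>.  */
#endif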
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
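
/* Illustrative round trip (an addition, not original code) showing how
   split_tree and associate_trees cooperate; IN here is a hypothetical
   PLUS_EXPR such as (a + 3).  */
#if 0
  tree con, lit, minus_lit;
  /* For IN == a + 3: VAR becomes a, *LITP becomes 3, *CONP stays null.  */
  tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);
  /* Recombine the pieces; for these inputs this rebuilds a + 3.  */
  tree again = associate_trees (UNKNOWN_LOCATION,
                                associate_trees (UNKNOWN_LOCATION, var, con,
                                                 PLUS_EXPR, TREE_TYPE (in)),
                                lit, PLUS_EXPR, TREE_TYPE (in));
#endif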
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
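
/* Minimal sketch (an illustrative addition): folding 7 + 5 to the
   INTEGER_CST 12 at compile time.  An overflowed result comes back
   with TREE_OVERFLOW set rather than being rejected.  */
#if 0
  tree seven = build_int_cst (integer_type_node, 7);
  tree five = build_int_cst (integer_type_node, 5);
  tree twelve = int_const_binop (PLUS_EXPR, seven, five);
#endif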
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_RSHIFT_EXPR is endian dependent.
             For reductions, if !BYTES_BIG_ENDIAN then compiler picks first
             vector element, but last element if BYTES_BIG_ENDIAN.  */
          if (BYTES_BIG_ENDIAN)
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
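
/* Minimal sketch (an illustrative addition): const_binop dispatches on
   the kind of constant, so the same entry point folds 1.0 + 2.0 just
   like the integer case shown for int_const_binop above.  */
#if 0
  tree one = build_real (double_type_node, dconst1);
  tree two = build_real (double_type_node, dconst2);
  tree three = const_binop (PLUS_EXPR, one, two);  /* REAL_CST 3.0 */
#endif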
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
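
/* Typical middle-end usage (an illustrative addition) via the
   size_binop macro, which supplies UNKNOWN_LOCATION; IDX is a
   hypothetical sizetype expression.  */
#if 0
  /* Byte offset of element IDX in an array of 4-byte elements.  */
  tree off = size_binop (MULT_EXPR, size_int (4), idx);
#endif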
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
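
/* Illustrative sketch (not original): sizetype is unsigned, so the
   difference is returned in ssizetype, where a negative result is
   representable.  */
#if 0
  tree d = size_diffop_loc (UNKNOWN_LOCATION, size_int (4), size_int (9));
  /* D is the ssizetype constant -5, not a huge unsigned value.  */
#endif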
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
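
/* Illustrative sketch (an addition) of the saturating semantics
   described above: 1e30 does not fit in int, so the conversion yields
   TYPE_MAX_VALUE with TREE_OVERFLOW set, and a NaN would yield zero.  */
#if 0
  REAL_VALUE_TYPE big;
  real_from_string (&big, "1e30");
  tree r = build_real (double_type_node, big);
  tree i = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, r);
#endif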
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
2076 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2078 default:
2079 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2080 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2081 gcc_unreachable ();
2083 fold_convert_exit:
2084 protected_set_expr_location_unshare (tem, loc);
2085 return tem;
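/* Illustrative note (not part of the original source): for constant
   operands the switch above bottoms out in fold_convert_const, so
   converting the INTEGER_CST 3 to a REAL_TYPE yields the REAL_CST 3.0
   immediately via build_real_from_int_cst; an explicit FLOAT_EXPR,
   NOP_EXPR or FIXED_CONVERT_EXPR node is only built for a
   non-constant ARG.  */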
2088 /* Return false if expr can be assumed not to be an lvalue, true
2089 otherwise. */
2091 static bool
2092 maybe_lvalue_p (const_tree x)
2094 /* We only need to wrap lvalue tree codes. */
2095 switch (TREE_CODE (x))
2097 case VAR_DECL:
2098 case PARM_DECL:
2099 case RESULT_DECL:
2100 case LABEL_DECL:
2101 case FUNCTION_DECL:
2102 case SSA_NAME:
2104 case COMPONENT_REF:
2105 case MEM_REF:
2106 case INDIRECT_REF:
2107 case ARRAY_REF:
2108 case ARRAY_RANGE_REF:
2109 case BIT_FIELD_REF:
2110 case OBJ_TYPE_REF:
2112 case REALPART_EXPR:
2113 case IMAGPART_EXPR:
2114 case PREINCREMENT_EXPR:
2115 case PREDECREMENT_EXPR:
2116 case SAVE_EXPR:
2117 case TRY_CATCH_EXPR:
2118 case WITH_CLEANUP_EXPR:
2119 case COMPOUND_EXPR:
2120 case MODIFY_EXPR:
2121 case TARGET_EXPR:
2122 case COND_EXPR:
2123 case BIND_EXPR:
2124 break;
2126 default:
2127 /* Assume the worst for front-end tree codes. */
2128 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2129 break;
2130 return false;
2133 return true;
2136 /* Return an expr equal to X but certainly not valid as an lvalue. */
2138 tree
2139 non_lvalue_loc (location_t loc, tree x)
2141 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2142 us. */
2143 if (in_gimple_form)
2144 return x;
2146 if (! maybe_lvalue_p (x))
2147 return x;
2148 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2151 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2152 Zero means allow extended lvalues. */
2154 int pedantic_lvalues;
2156 /* When pedantic, return an expr equal to X but certainly not valid as a
2157 pedantic lvalue. Otherwise, return X. */
2159 static tree
2160 pedantic_non_lvalue_loc (location_t loc, tree x)
2162 if (pedantic_lvalues)
2163 return non_lvalue_loc (loc, x);
2165 return protected_set_expr_location_unshare (x, loc);
2168 /* Given a tree comparison code, return the code that is the logical inverse.
2169 It is generally not safe to do this for floating-point comparisons, except
2170 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2171 ERROR_MARK in this case. */
2173 enum tree_code
2174 invert_tree_comparison (enum tree_code code, bool honor_nans)
2176 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2177 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2178 return ERROR_MARK;
2180 switch (code)
2182 case EQ_EXPR:
2183 return NE_EXPR;
2184 case NE_EXPR:
2185 return EQ_EXPR;
2186 case GT_EXPR:
2187 return honor_nans ? UNLE_EXPR : LE_EXPR;
2188 case GE_EXPR:
2189 return honor_nans ? UNLT_EXPR : LT_EXPR;
2190 case LT_EXPR:
2191 return honor_nans ? UNGE_EXPR : GE_EXPR;
2192 case LE_EXPR:
2193 return honor_nans ? UNGT_EXPR : GT_EXPR;
2194 case LTGT_EXPR:
2195 return UNEQ_EXPR;
2196 case UNEQ_EXPR:
2197 return LTGT_EXPR;
2198 case UNGT_EXPR:
2199 return LE_EXPR;
2200 case UNGE_EXPR:
2201 return LT_EXPR;
2202 case UNLT_EXPR:
2203 return GE_EXPR;
2204 case UNLE_EXPR:
2205 return GT_EXPR;
2206 case ORDERED_EXPR:
2207 return UNORDERED_EXPR;
2208 case UNORDERED_EXPR:
2209 return ORDERED_EXPR;
2210 default:
2211 gcc_unreachable ();
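/* Illustrative note (not part of the original source): when NaNs are
   honored, the inverse of a < b is a UNGE b rather than a >= b, since
   both a < b and a >= b are false on a NaN operand.  If NaNs are
   honored and flag_trapping_math is also set, the function refuses
   (returns ERROR_MARK) for everything but EQ/NE/ORDERED/UNORDERED,
   because rewriting !(a < b) as a UNGE b would lose the
   invalid-operand exception the ordered comparison raises on a
   NaN.  */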
2215 /* Similar, but return the comparison that results if the operands are
2216 swapped. This is safe for floating-point. */
2218 enum tree_code
2219 swap_tree_comparison (enum tree_code code)
2221 switch (code)
2223 case EQ_EXPR:
2224 case NE_EXPR:
2225 case ORDERED_EXPR:
2226 case UNORDERED_EXPR:
2227 case LTGT_EXPR:
2228 case UNEQ_EXPR:
2229 return code;
2230 case GT_EXPR:
2231 return LT_EXPR;
2232 case GE_EXPR:
2233 return LE_EXPR;
2234 case LT_EXPR:
2235 return GT_EXPR;
2236 case LE_EXPR:
2237 return GE_EXPR;
2238 case UNGT_EXPR:
2239 return UNLT_EXPR;
2240 case UNGE_EXPR:
2241 return UNLE_EXPR;
2242 case UNLT_EXPR:
2243 return UNGT_EXPR;
2244 case UNLE_EXPR:
2245 return UNGE_EXPR;
2246 default:
2247 gcc_unreachable ();
2252 /* Convert a comparison tree code from an enum tree_code representation
2253 into a compcode bit-based encoding. This function is the inverse of
2254 compcode_to_comparison. */
2256 static enum comparison_code
2257 comparison_to_compcode (enum tree_code code)
2259 switch (code)
2261 case LT_EXPR:
2262 return COMPCODE_LT;
2263 case EQ_EXPR:
2264 return COMPCODE_EQ;
2265 case LE_EXPR:
2266 return COMPCODE_LE;
2267 case GT_EXPR:
2268 return COMPCODE_GT;
2269 case NE_EXPR:
2270 return COMPCODE_NE;
2271 case GE_EXPR:
2272 return COMPCODE_GE;
2273 case ORDERED_EXPR:
2274 return COMPCODE_ORD;
2275 case UNORDERED_EXPR:
2276 return COMPCODE_UNORD;
2277 case UNLT_EXPR:
2278 return COMPCODE_UNLT;
2279 case UNEQ_EXPR:
2280 return COMPCODE_UNEQ;
2281 case UNLE_EXPR:
2282 return COMPCODE_UNLE;
2283 case UNGT_EXPR:
2284 return COMPCODE_UNGT;
2285 case LTGT_EXPR:
2286 return COMPCODE_LTGT;
2287 case UNGE_EXPR:
2288 return COMPCODE_UNGE;
2289 default:
2290 gcc_unreachable ();
2294 /* Convert a compcode bit-based encoding of a comparison operator back
2295 to GCC's enum tree_code representation. This function is the
2296 inverse of comparison_to_compcode. */
2298 static enum tree_code
2299 compcode_to_comparison (enum comparison_code code)
2301 switch (code)
2303 case COMPCODE_LT:
2304 return LT_EXPR;
2305 case COMPCODE_EQ:
2306 return EQ_EXPR;
2307 case COMPCODE_LE:
2308 return LE_EXPR;
2309 case COMPCODE_GT:
2310 return GT_EXPR;
2311 case COMPCODE_NE:
2312 return NE_EXPR;
2313 case COMPCODE_GE:
2314 return GE_EXPR;
2315 case COMPCODE_ORD:
2316 return ORDERED_EXPR;
2317 case COMPCODE_UNORD:
2318 return UNORDERED_EXPR;
2319 case COMPCODE_UNLT:
2320 return UNLT_EXPR;
2321 case COMPCODE_UNEQ:
2322 return UNEQ_EXPR;
2323 case COMPCODE_UNLE:
2324 return UNLE_EXPR;
2325 case COMPCODE_UNGT:
2326 return UNGT_EXPR;
2327 case COMPCODE_LTGT:
2328 return LTGT_EXPR;
2329 case COMPCODE_UNGE:
2330 return UNGE_EXPR;
2331 default:
2332 gcc_unreachable ();
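/* Illustrative note (not part of the original source): the compcode
   encoding is chosen so that ANDing or ORing two predicates on the
   same operands is just the bitwise AND/OR of their codes, e.g.

     comparison_to_compcode (LT_EXPR) | comparison_to_compcode (EQ_EXPR)
       == comparison_to_compcode (LE_EXPR)

   which is the property combine_comparisons below relies on.  */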
2336 /* Return a tree for the comparison which is the combination of
2337 doing the AND or OR (depending on CODE) of the two operations LCODE
2338 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2339 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2340 if this makes the transformation invalid. */
2342 tree
2343 combine_comparisons (location_t loc,
2344 enum tree_code code, enum tree_code lcode,
2345 enum tree_code rcode, tree truth_type,
2346 tree ll_arg, tree lr_arg)
2348 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2349 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2350 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2351 int compcode;
2353 switch (code)
2355 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2356 compcode = lcompcode & rcompcode;
2357 break;
2359 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2360 compcode = lcompcode | rcompcode;
2361 break;
2363 default:
2364 return NULL_TREE;
2367 if (!honor_nans)
2369 /* Eliminate unordered comparisons, as well as LTGT and ORD
2370 which are not used unless the mode has NaNs. */
2371 compcode &= ~COMPCODE_UNORD;
2372 if (compcode == COMPCODE_LTGT)
2373 compcode = COMPCODE_NE;
2374 else if (compcode == COMPCODE_ORD)
2375 compcode = COMPCODE_TRUE;
2377 else if (flag_trapping_math)
2379 /* Check that the original operation and the optimized ones will trap
2380 under the same condition. */
2381 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2382 && (lcompcode != COMPCODE_EQ)
2383 && (lcompcode != COMPCODE_ORD);
2384 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2385 && (rcompcode != COMPCODE_EQ)
2386 && (rcompcode != COMPCODE_ORD);
2387 bool trap = (compcode & COMPCODE_UNORD) == 0
2388 && (compcode != COMPCODE_EQ)
2389 && (compcode != COMPCODE_ORD);
2391 /* In a short-circuited boolean expression the LHS might be
2392 such that the RHS, if evaluated, will never trap. For
2393 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2394 if neither x nor y is NaN. (This is a mixed blessing: for
2395 example, the expression above will never trap, hence
2396 optimizing it to x < y would be invalid). */
2397 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2398 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2399 rtrap = false;
2401 /* If the comparison was short-circuited, and only the RHS
2402 trapped, we may now generate a spurious trap. */
2403 if (rtrap && !ltrap
2404 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2405 return NULL_TREE;
2407 /* If we changed the conditions that cause a trap, we lose. */
2408 if ((ltrap || rtrap) != trap)
2409 return NULL_TREE;
2412 if (compcode == COMPCODE_TRUE)
2413 return constant_boolean_node (true, truth_type);
2414 else if (compcode == COMPCODE_FALSE)
2415 return constant_boolean_node (false, truth_type);
2416 else
2418 enum tree_code tcode;
2420 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2421 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
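/* Illustrative example (not part of the original source; LOC, X and Y
   are hypothetical, with X and Y trees of an integer type):

     tree t = combine_comparisons (loc, TRUTH_ANDIF_EXPR, LT_EXPR,
                                   GT_EXPR, boolean_type_node, x, y);

   The two compcodes AND to COMPCODE_FALSE, so T is the constant false
   node.  With TRUTH_ORIF_EXPR they OR to the LTGT compcode, which for
   a type without NaNs is canonicalized to NE, giving x != y.  */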
2425 /* Return nonzero if two operands (typically of the same tree node)
2426 are necessarily equal. If either argument has side-effects this
2427 function returns zero. FLAGS modifies behavior as follows:
2429 If OEP_ONLY_CONST is set, only return nonzero for constants.
2430 This function tests whether the operands are indistinguishable;
2431 it does not test whether they are equal using C's == operation.
2432 The distinction is important for IEEE floating point, because
2433 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2434 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2436 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2437 even though it may hold multiple values during a function.
2438 This is because a GCC tree node guarantees that nothing else is
2439 executed between the evaluation of its "operands" (which may often
2440 be evaluated in arbitrary order). Hence if the operands themselves
2441 don't have side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2442 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2443 unset means assuming isochronic (or instantaneous) tree equivalence.
2444 Unless comparing arbitrary expression trees, such as from different
2445 statements, this flag can usually be left unset.
2447 If OEP_PURE_SAME is set, then pure functions with identical arguments
2448 are considered the same. It is used when the caller has other ways
2449 to ensure that global memory is unchanged in between. */
2451 int
2452 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2454 /* If either is ERROR_MARK, they aren't equal. */
2455 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2456 || TREE_TYPE (arg0) == error_mark_node
2457 || TREE_TYPE (arg1) == error_mark_node)
2458 return 0;
2460 /* Similar, if either does not have a type (like a released SSA name),
2461 they aren't equal. */
2462 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2463 return 0;
2465 /* Check equality of integer constants before bailing out due to
2466 precision differences. */
2467 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2468 return tree_int_cst_equal (arg0, arg1);
2470 /* If the two types don't have the same signedness, then we can't consider
2471 them equal. We must check this before the STRIP_NOPS calls
2472 because they may change the signedness of the arguments. As pointers
2473 strictly don't have a signedness, require either two pointers or
2474 two non-pointers as well. */
2475 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2476 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2477 return 0;
2479 /* We cannot consider pointers to different address spaces equal. */
2480 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2481 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2482 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2483 return 0;
2485 /* If the two types don't have the same precision, then it is not safe
2486 to strip NOPs. */
2487 if (element_precision (TREE_TYPE (arg0))
2488 != element_precision (TREE_TYPE (arg1)))
2489 return 0;
2491 STRIP_NOPS (arg0);
2492 STRIP_NOPS (arg1);
2494 /* In case both args are comparisons but with different comparison
2495 code, try to swap the comparison operands of one arg to produce
2496 a match and compare that variant. */
2497 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2498 && COMPARISON_CLASS_P (arg0)
2499 && COMPARISON_CLASS_P (arg1))
2501 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2503 if (TREE_CODE (arg0) == swap_code)
2504 return operand_equal_p (TREE_OPERAND (arg0, 0),
2505 TREE_OPERAND (arg1, 1), flags)
2506 && operand_equal_p (TREE_OPERAND (arg0, 1),
2507 TREE_OPERAND (arg1, 0), flags);
2510 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2511 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2512 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2513 return 0;
2515 /* This is needed for conversions and for COMPONENT_REF.
2516 Might as well play it safe and always test this. */
2517 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2518 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2519 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2520 return 0;
2522 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2523 We don't care about side effects in that case because the SAVE_EXPR
2524 takes care of that for us. In all other cases, two expressions are
2525 equal if they have no side effects. If we have two identical
2526 expressions with side effects that should be treated the same due
2527 to the only side effects being identical SAVE_EXPR's, that will
2528 be detected in the recursive calls below.
2529 If we are taking an invariant address of two identical objects
2530 they are necessarily equal as well. */
2531 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2532 && (TREE_CODE (arg0) == SAVE_EXPR
2533 || (flags & OEP_CONSTANT_ADDRESS_OF)
2534 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2535 return 1;
2537 /* Next handle constant cases, those for which we can return 1 even
2538 if ONLY_CONST is set. */
2539 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2540 switch (TREE_CODE (arg0))
2542 case INTEGER_CST:
2543 return tree_int_cst_equal (arg0, arg1);
2545 case FIXED_CST:
2546 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2547 TREE_FIXED_CST (arg1));
2549 case REAL_CST:
2550 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2551 TREE_REAL_CST (arg1)))
2552 return 1;
2555 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2557 /* If we do not distinguish between signed and unsigned zero,
2558 consider them equal. */
2559 if (real_zerop (arg0) && real_zerop (arg1))
2560 return 1;
2562 return 0;
2564 case VECTOR_CST:
2566 unsigned i;
2568 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2569 return 0;
2571 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2573 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2574 VECTOR_CST_ELT (arg1, i), flags))
2575 return 0;
2577 return 1;
2580 case COMPLEX_CST:
2581 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2582 flags)
2583 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2584 flags));
2586 case STRING_CST:
2587 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2588 && ! memcmp (TREE_STRING_POINTER (arg0),
2589 TREE_STRING_POINTER (arg1),
2590 TREE_STRING_LENGTH (arg0)));
2592 case ADDR_EXPR:
2593 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2594 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2595 ? OEP_CONSTANT_ADDRESS_OF : 0);
2596 default:
2597 break;
2600 if (flags & OEP_ONLY_CONST)
2601 return 0;
2603 /* Define macros to test an operand from arg0 and arg1 for equality and a
2604 variant that allows null and views null as being different from any
2605 non-null value. In the latter case, if either is null, they both
2606 must be; otherwise, do the normal comparison. */
2607 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2608 TREE_OPERAND (arg1, N), flags)
2610 #define OP_SAME_WITH_NULL(N) \
2611 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2612 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2614 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2616 case tcc_unary:
2617 /* Two conversions are equal only if signedness and modes match. */
2618 switch (TREE_CODE (arg0))
2620 CASE_CONVERT:
2621 case FIX_TRUNC_EXPR:
2622 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2623 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2624 return 0;
2625 break;
2626 default:
2627 break;
2630 return OP_SAME (0);
2633 case tcc_comparison:
2634 case tcc_binary:
2635 if (OP_SAME (0) && OP_SAME (1))
2636 return 1;
2638 /* For commutative ops, allow the other order. */
2639 return (commutative_tree_code (TREE_CODE (arg0))
2640 && operand_equal_p (TREE_OPERAND (arg0, 0),
2641 TREE_OPERAND (arg1, 1), flags)
2642 && operand_equal_p (TREE_OPERAND (arg0, 1),
2643 TREE_OPERAND (arg1, 0), flags));
2645 case tcc_reference:
2646 /* If either of the pointer (or reference) expressions we are
2647 dereferencing contain a side effect, these cannot be equal,
2648 but their addresses can be. */
2649 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2650 && (TREE_SIDE_EFFECTS (arg0)
2651 || TREE_SIDE_EFFECTS (arg1)))
2652 return 0;
2654 switch (TREE_CODE (arg0))
2656 case INDIRECT_REF:
2657 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2658 return OP_SAME (0);
2660 case REALPART_EXPR:
2661 case IMAGPART_EXPR:
2662 return OP_SAME (0);
2664 case TARGET_MEM_REF:
2665 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2666 /* Require equal extra operands and then fall through to MEM_REF
2667 handling of the two common operands. */
2668 if (!OP_SAME_WITH_NULL (2)
2669 || !OP_SAME_WITH_NULL (3)
2670 || !OP_SAME_WITH_NULL (4))
2671 return 0;
2672 /* Fallthru. */
2673 case MEM_REF:
2674 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2675 /* Require equal access sizes, and similar pointer types.
2676 We can have incomplete types for array references of
2677 variable-sized arrays from the Fortran frontend
2678 though. Also verify the types are compatible. */
2679 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2680 || (TYPE_SIZE (TREE_TYPE (arg0))
2681 && TYPE_SIZE (TREE_TYPE (arg1))
2682 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2683 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2684 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2685 && alias_ptr_types_compatible_p
2686 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2687 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2688 && OP_SAME (0) && OP_SAME (1));
2690 case ARRAY_REF:
2691 case ARRAY_RANGE_REF:
2692 /* Operands 2 and 3 may be null.
2693 Compare the array index by value first if it is constant, as we
2694 may have different types but the same value here. */
2695 if (!OP_SAME (0))
2696 return 0;
2697 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2698 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2699 TREE_OPERAND (arg1, 1))
2700 || OP_SAME (1))
2701 && OP_SAME_WITH_NULL (2)
2702 && OP_SAME_WITH_NULL (3));
2704 case COMPONENT_REF:
2705 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2706 may be NULL when we're called to compare MEM_EXPRs. */
2707 if (!OP_SAME_WITH_NULL (0)
2708 || !OP_SAME (1))
2709 return 0;
2710 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2711 return OP_SAME_WITH_NULL (2);
2713 case BIT_FIELD_REF:
2714 if (!OP_SAME (0))
2715 return 0;
2716 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2717 return OP_SAME (1) && OP_SAME (2);
2719 default:
2720 return 0;
2723 case tcc_expression:
2724 switch (TREE_CODE (arg0))
2726 case ADDR_EXPR:
2727 case TRUTH_NOT_EXPR:
2728 return OP_SAME (0);
2730 case TRUTH_ANDIF_EXPR:
2731 case TRUTH_ORIF_EXPR:
2732 return OP_SAME (0) && OP_SAME (1);
2734 case FMA_EXPR:
2735 case WIDEN_MULT_PLUS_EXPR:
2736 case WIDEN_MULT_MINUS_EXPR:
2737 if (!OP_SAME (2))
2738 return 0;
2739 /* The multiplication operands are commutative. */
2740 /* FALLTHRU */
2742 case TRUTH_AND_EXPR:
2743 case TRUTH_OR_EXPR:
2744 case TRUTH_XOR_EXPR:
2745 if (OP_SAME (0) && OP_SAME (1))
2746 return 1;
2748 /* Otherwise take into account that this is a commutative operation. */
2749 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2750 TREE_OPERAND (arg1, 1), flags)
2751 && operand_equal_p (TREE_OPERAND (arg0, 1),
2752 TREE_OPERAND (arg1, 0), flags));
2754 case COND_EXPR:
2755 case VEC_COND_EXPR:
2756 case DOT_PROD_EXPR:
2757 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2759 default:
2760 return 0;
2763 case tcc_vl_exp:
2764 switch (TREE_CODE (arg0))
2766 case CALL_EXPR:
2767 /* If the CALL_EXPRs call different functions, then they
2768 clearly cannot be equal. */
2769 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2770 flags))
2771 return 0;
2774 unsigned int cef = call_expr_flags (arg0);
2775 if (flags & OEP_PURE_SAME)
2776 cef &= ECF_CONST | ECF_PURE;
2777 else
2778 cef &= ECF_CONST;
2779 if (!cef)
2780 return 0;
2783 /* Now see if all the arguments are the same. */
2785 const_call_expr_arg_iterator iter0, iter1;
2786 const_tree a0, a1;
2787 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2788 a1 = first_const_call_expr_arg (arg1, &iter1);
2789 a0 && a1;
2790 a0 = next_const_call_expr_arg (&iter0),
2791 a1 = next_const_call_expr_arg (&iter1))
2792 if (! operand_equal_p (a0, a1, flags))
2793 return 0;
2795 /* If we get here and both argument lists are exhausted
2796 then the CALL_EXPRs are equal. */
2797 return ! (a0 || a1);
2799 default:
2800 return 0;
2803 case tcc_declaration:
2804 /* Consider __builtin_sqrt equal to sqrt. */
2805 return (TREE_CODE (arg0) == FUNCTION_DECL
2806 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2807 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2808 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2810 default:
2811 return 0;
2814 #undef OP_SAME
2815 #undef OP_SAME_WITH_NULL
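/* Illustrative note (not part of the original source):
   "indistinguishable" above is stricter than C's ==.  Two REAL_CSTs
   -0.0 and 0.0 compare unequal here whenever signed zeros are
   honored, even though -0.0 == 0.0 is true; and with OEP_ONLY_CONST
   set even a VAR_DECL compared against itself yields 0, because the
   function returns before reaching the non-constant cases.  */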
2818 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2819 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2821 When in doubt, return 0. */
2823 static int
2824 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2826 int unsignedp1, unsignedpo;
2827 tree primarg0, primarg1, primother;
2828 unsigned int correct_width;
2830 if (operand_equal_p (arg0, arg1, 0))
2831 return 1;
2833 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2834 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2835 return 0;
2837 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2838 and see if the inner values are the same. This removes any
2839 signedness comparison, which doesn't matter here. */
2840 primarg0 = arg0, primarg1 = arg1;
2841 STRIP_NOPS (primarg0);
2842 STRIP_NOPS (primarg1);
2843 if (operand_equal_p (primarg0, primarg1, 0))
2844 return 1;
2846 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2847 actual comparison operand, ARG0.
2849 First throw away any conversions to wider types
2850 already present in the operands. */
2852 primarg1 = get_narrower (arg1, &unsignedp1);
2853 primother = get_narrower (other, &unsignedpo);
2855 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2856 if (unsignedp1 == unsignedpo
2857 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2858 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2860 tree type = TREE_TYPE (arg0);
2862 /* Make sure shorter operand is extended the right way
2863 to match the longer operand. */
2864 primarg1 = fold_convert (signed_or_unsigned_type_for
2865 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2867 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2868 return 1;
2871 return 0;
2874 /* See if ARG is an expression that is either a comparison or is performing
2875 arithmetic on comparisons. The comparisons must only be comparing
2876 two different values, which will be stored in *CVAL1 and *CVAL2; if
2877 they are nonzero it means that some operands have already been found.
2878 No variables may be used anywhere else in the expression except in the
2879 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2880 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2882 If this is true, return 1. Otherwise, return zero. */
2884 static int
2885 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2887 enum tree_code code = TREE_CODE (arg);
2888 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2890 /* We can handle some of the tcc_expression cases here. */
2891 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2892 tclass = tcc_unary;
2893 else if (tclass == tcc_expression
2894 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2895 || code == COMPOUND_EXPR))
2896 tclass = tcc_binary;
2898 else if (tclass == tcc_expression && code == SAVE_EXPR
2899 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2901 /* If we've already found a CVAL1 or CVAL2, this expression is
2902 too complex to handle. */
2903 if (*cval1 || *cval2)
2904 return 0;
2906 tclass = tcc_unary;
2907 *save_p = 1;
2910 switch (tclass)
2912 case tcc_unary:
2913 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2915 case tcc_binary:
2916 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2917 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2918 cval1, cval2, save_p));
2920 case tcc_constant:
2921 return 1;
2923 case tcc_expression:
2924 if (code == COND_EXPR)
2925 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2926 cval1, cval2, save_p)
2927 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2928 cval1, cval2, save_p)
2929 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2930 cval1, cval2, save_p));
2931 return 0;
2933 case tcc_comparison:
2934 /* First see if we can handle the first operand, then the second. For
2935 the second operand, we know *CVAL1 can't be zero. It must be that
2936 one side of the comparison is each of the values; test for the
2937 case where this isn't true by failing if the two operands
2938 are the same. */
2940 if (operand_equal_p (TREE_OPERAND (arg, 0),
2941 TREE_OPERAND (arg, 1), 0))
2942 return 0;
2944 if (*cval1 == 0)
2945 *cval1 = TREE_OPERAND (arg, 0);
2946 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2947 ;
2948 else if (*cval2 == 0)
2949 *cval2 = TREE_OPERAND (arg, 0);
2950 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2951 ;
2952 else
2953 return 0;
2955 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2956 ;
2957 else if (*cval2 == 0)
2958 *cval2 = TREE_OPERAND (arg, 1);
2959 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2960 ;
2961 else
2962 return 0;
2964 return 1;
2966 default:
2967 return 0;
2971 /* ARG is a tree that is known to contain just arithmetic operations and
2972 comparisons. Evaluate the operations in the tree substituting NEW0 for
2973 any occurrence of OLD0 as an operand of a comparison and likewise for
2974 NEW1 and OLD1. */
2976 static tree
2977 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2978 tree old1, tree new1)
2980 tree type = TREE_TYPE (arg);
2981 enum tree_code code = TREE_CODE (arg);
2982 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2984 /* We can handle some of the tcc_expression cases here. */
2985 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2986 tclass = tcc_unary;
2987 else if (tclass == tcc_expression
2988 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2989 tclass = tcc_binary;
2991 switch (tclass)
2993 case tcc_unary:
2994 return fold_build1_loc (loc, code, type,
2995 eval_subst (loc, TREE_OPERAND (arg, 0),
2996 old0, new0, old1, new1));
2998 case tcc_binary:
2999 return fold_build2_loc (loc, code, type,
3000 eval_subst (loc, TREE_OPERAND (arg, 0),
3001 old0, new0, old1, new1),
3002 eval_subst (loc, TREE_OPERAND (arg, 1),
3003 old0, new0, old1, new1));
3005 case tcc_expression:
3006 switch (code)
3008 case SAVE_EXPR:
3009 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3010 old1, new1);
3012 case COMPOUND_EXPR:
3013 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3014 old1, new1);
3016 case COND_EXPR:
3017 return fold_build3_loc (loc, code, type,
3018 eval_subst (loc, TREE_OPERAND (arg, 0),
3019 old0, new0, old1, new1),
3020 eval_subst (loc, TREE_OPERAND (arg, 1),
3021 old0, new0, old1, new1),
3022 eval_subst (loc, TREE_OPERAND (arg, 2),
3023 old0, new0, old1, new1));
3024 default:
3025 break;
3027 /* Fall through - ??? */
3029 case tcc_comparison:
3031 tree arg0 = TREE_OPERAND (arg, 0);
3032 tree arg1 = TREE_OPERAND (arg, 1);
3034 /* We need to check both for exact equality and tree equality. The
3035 former will be true if the operand has a side-effect. In that
3036 case, we know the operand occurred exactly once. */
3038 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3039 arg0 = new0;
3040 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3041 arg0 = new1;
3043 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3044 arg1 = new0;
3045 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3046 arg1 = new1;
3048 return fold_build2_loc (loc, code, type, arg0, arg1);
3051 default:
3052 return arg;
3056 /* Return a tree for the case when the result of an expression is RESULT
3057 converted to TYPE and OMITTED was previously an operand of the expression
3058 but is now not needed (e.g., we folded OMITTED * 0).
3060 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3061 the conversion of RESULT to TYPE. */
3063 tree
3064 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3066 tree t = fold_convert_loc (loc, type, result);
3068 /* If the resulting operand is an empty statement, just return the omitted
3069 statement cast to void. */
3070 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3071 return build1_loc (loc, NOP_EXPR, void_type_node,
3072 fold_ignored_result (omitted));
3074 if (TREE_SIDE_EFFECTS (omitted))
3075 return build2_loc (loc, COMPOUND_EXPR, type,
3076 fold_ignored_result (omitted), t);
3078 return non_lvalue_loc (loc, t);
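/* Illustrative example (not part of the original source): when folding
   f () * 0 the product is known to be 0, but the call still has to be
   evaluated, so

     omit_one_operand_loc (loc, type, integer_zero_node, call);

   (with CALL the tree for f ()) yields the COMPOUND_EXPR (f (), 0).
   If OMITTED had no side effects, the converted RESULT would be
   returned on its own.  */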
3081 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3083 static tree
3084 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3085 tree omitted)
3087 tree t = fold_convert_loc (loc, type, result);
3089 /* If the resulting operand is an empty statement, just return the omitted
3090 statement cast to void. */
3091 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3092 return build1_loc (loc, NOP_EXPR, void_type_node,
3093 fold_ignored_result (omitted));
3095 if (TREE_SIDE_EFFECTS (omitted))
3096 return build2_loc (loc, COMPOUND_EXPR, type,
3097 fold_ignored_result (omitted), t);
3099 return pedantic_non_lvalue_loc (loc, t);
3102 /* Return a tree for the case when the result of an expression is RESULT
3103 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3104 of the expression but are now not needed.
3106 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3107 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3108 evaluated before OMITTED2. Otherwise, if neither has side effects,
3109 just do the conversion of RESULT to TYPE. */
3111 tree
3112 omit_two_operands_loc (location_t loc, tree type, tree result,
3113 tree omitted1, tree omitted2)
3115 tree t = fold_convert_loc (loc, type, result);
3117 if (TREE_SIDE_EFFECTS (omitted2))
3118 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3119 if (TREE_SIDE_EFFECTS (omitted1))
3120 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3122 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3126 /* Return a simplified tree node for the truth-negation of ARG. This
3127 never alters ARG itself. We assume that ARG is an operation that
3128 returns a truth value (0 or 1).
3130 FIXME: one would think we would fold the result, but it causes
3131 problems with the dominator optimizer. */
3133 static tree
3134 fold_truth_not_expr (location_t loc, tree arg)
3136 tree type = TREE_TYPE (arg);
3137 enum tree_code code = TREE_CODE (arg);
3138 location_t loc1, loc2;
3140 /* If this is a comparison, we can simply invert it, except for
3141 floating-point non-equality comparisons, in which case we just
3142 enclose a TRUTH_NOT_EXPR around what we have. */
3144 if (TREE_CODE_CLASS (code) == tcc_comparison)
3146 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3147 if (FLOAT_TYPE_P (op_type)
3148 && flag_trapping_math
3149 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3150 && code != NE_EXPR && code != EQ_EXPR)
3151 return NULL_TREE;
3153 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3154 if (code == ERROR_MARK)
3155 return NULL_TREE;
3157 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3158 TREE_OPERAND (arg, 1));
3161 switch (code)
3163 case INTEGER_CST:
3164 return constant_boolean_node (integer_zerop (arg), type);
3166 case TRUTH_AND_EXPR:
3167 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3168 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3169 return build2_loc (loc, TRUTH_OR_EXPR, type,
3170 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3171 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3173 case TRUTH_OR_EXPR:
3174 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3175 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3176 return build2_loc (loc, TRUTH_AND_EXPR, type,
3177 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3178 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3180 case TRUTH_XOR_EXPR:
3181 /* Here we can invert either operand. We invert the first operand
3182 unless the second operand is a TRUTH_NOT_EXPR, in which case our
3183 result is the XOR of the first operand with the inside of the
3184 negation of the second operand. */
3186 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3187 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3188 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3189 else
3190 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3191 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3192 TREE_OPERAND (arg, 1));
3194 case TRUTH_ANDIF_EXPR:
3195 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3196 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3197 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3198 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3199 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3201 case TRUTH_ORIF_EXPR:
3202 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3203 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3204 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3205 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3206 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3208 case TRUTH_NOT_EXPR:
3209 return TREE_OPERAND (arg, 0);
3211 case COND_EXPR:
3213 tree arg1 = TREE_OPERAND (arg, 1);
3214 tree arg2 = TREE_OPERAND (arg, 2);
3216 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3217 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3219 /* A COND_EXPR may have a throw as one operand, which
3220 then has void type. Just leave void operands
3221 as they are. */
3222 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3223 VOID_TYPE_P (TREE_TYPE (arg1))
3224 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3225 VOID_TYPE_P (TREE_TYPE (arg2))
3226 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3229 case COMPOUND_EXPR:
3230 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3231 return build2_loc (loc, COMPOUND_EXPR, type,
3232 TREE_OPERAND (arg, 0),
3233 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3235 case NON_LVALUE_EXPR:
3236 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3237 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3239 CASE_CONVERT:
3240 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3241 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3243 /* ... fall through ... */
3245 case FLOAT_EXPR:
3246 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3247 return build1_loc (loc, TREE_CODE (arg), type,
3248 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3250 case BIT_AND_EXPR:
3251 if (!integer_onep (TREE_OPERAND (arg, 1)))
3252 return NULL_TREE;
3253 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3255 case SAVE_EXPR:
3256 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3258 case CLEANUP_POINT_EXPR:
3259 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3260 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3261 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3263 default:
3264 return NULL_TREE;
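/* Illustrative note (not part of the original source): the
   TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's laws,
   so !(a && b) is rewritten to !a || !b with each operand negated
   recursively through invert_truthvalue_loc, while a comparison is
   inverted directly: !(a == b) simply becomes a != b.  */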
3268 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3269 assume that ARG is an operation that returns a truth value (0 or 1
3270 for scalars, 0 or -1 for vectors). Return the folded expression if
3271 folding is successful. Otherwise, return NULL_TREE. */
3273 static tree
3274 fold_invert_truthvalue (location_t loc, tree arg)
3276 tree type = TREE_TYPE (arg);
3277 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3278 ? BIT_NOT_EXPR
3279 : TRUTH_NOT_EXPR,
3280 type, arg);
3283 /* Return a simplified tree node for the truth-negation of ARG. This
3284 never alters ARG itself. We assume that ARG is an operation that
3285 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3287 tree
3288 invert_truthvalue_loc (location_t loc, tree arg)
3290 if (TREE_CODE (arg) == ERROR_MARK)
3291 return arg;
3293 tree type = TREE_TYPE (arg);
3294 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3295 ? BIT_NOT_EXPR
3296 : TRUTH_NOT_EXPR,
3297 type, arg);
3300 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3301 operands are another bit-wise operation with a common input. If so,
3302 distribute the bit operations to save an operation and possibly two if
3303 constants are involved. For example, convert
3304 (A | B) & (A | C) into A | (B & C)
3305 Further simplification will occur if B and C are constants.
3307 If this optimization cannot be done, 0 will be returned. */
3309 static tree
3310 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3311 tree arg0, tree arg1)
3313 tree common;
3314 tree left, right;
3316 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3317 || TREE_CODE (arg0) == code
3318 || (TREE_CODE (arg0) != BIT_AND_EXPR
3319 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3320 return 0;
3322 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3324 common = TREE_OPERAND (arg0, 0);
3325 left = TREE_OPERAND (arg0, 1);
3326 right = TREE_OPERAND (arg1, 1);
3328 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3330 common = TREE_OPERAND (arg0, 0);
3331 left = TREE_OPERAND (arg0, 1);
3332 right = TREE_OPERAND (arg1, 0);
3334 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3336 common = TREE_OPERAND (arg0, 1);
3337 left = TREE_OPERAND (arg0, 0);
3338 right = TREE_OPERAND (arg1, 1);
3340 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3342 common = TREE_OPERAND (arg0, 1);
3343 left = TREE_OPERAND (arg0, 0);
3344 right = TREE_OPERAND (arg1, 0);
3346 else
3347 return 0;
3349 common = fold_convert_loc (loc, type, common);
3350 left = fold_convert_loc (loc, type, left);
3351 right = fold_convert_loc (loc, type, right);
3352 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3353 fold_build2_loc (loc, code, type, left, right));
3356 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3357 with code CODE. This optimization is unsafe. */
3358 static tree
3359 distribute_real_division (location_t loc, enum tree_code code, tree type,
3360 tree arg0, tree arg1)
3362 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3363 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3365 /* (A / C) +- (B / C) -> (A +- B) / C. */
3366 if (mul0 == mul1
3367 && operand_equal_p (TREE_OPERAND (arg0, 1),
3368 TREE_OPERAND (arg1, 1), 0))
3369 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3370 fold_build2_loc (loc, code, type,
3371 TREE_OPERAND (arg0, 0),
3372 TREE_OPERAND (arg1, 0)),
3373 TREE_OPERAND (arg0, 1));
3375 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3376 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3377 TREE_OPERAND (arg1, 0), 0)
3378 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3379 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3381 REAL_VALUE_TYPE r0, r1;
3382 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3383 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3384 if (!mul0)
3385 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3386 if (!mul1)
3387 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3388 real_arithmetic (&r0, code, &r0, &r1);
3389 return fold_build2_loc (loc, MULT_EXPR, type,
3390 TREE_OPERAND (arg0, 0),
3391 build_real (type, r0));
3394 return NULL_TREE;
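/* Illustrative example (not part of the original source): by the
   second pattern above, x / 2.0 + x / 4.0 becomes x * 0.75, since
   1/2.0 + 1/4.0 is evaluated at compile time.  This is exactly why
   the header comment calls the transformation unsafe: one rounded
   multiply need not equal two separately rounded divisions plus an
   add, so a caller may only use it when unsafe floating-point
   reassociation is permitted.  */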
3397 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3398 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3400 static tree
3401 make_bit_field_ref (location_t loc, tree inner, tree type,
3402 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3404 tree result, bftype;
3406 if (bitpos == 0)
3408 tree size = TYPE_SIZE (TREE_TYPE (inner));
3409 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3410 || POINTER_TYPE_P (TREE_TYPE (inner)))
3411 && tree_fits_shwi_p (size)
3412 && tree_to_shwi (size) == bitsize)
3413 return fold_convert_loc (loc, type, inner);
3416 bftype = type;
3417 if (TYPE_PRECISION (bftype) != bitsize
3418 || TYPE_UNSIGNED (bftype) == !unsignedp)
3419 bftype = build_nonstandard_integer_type (bitsize, 0);
3421 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3422 size_int (bitsize), bitsize_int (bitpos));
3424 if (bftype != type)
3425 result = fold_convert_loc (loc, type, result);
3427 return result;
3430 /* Optimize a bit-field compare.
3432 There are two cases: the first is a compare against a constant and the
3433 second is a comparison of two items where the fields are at the same
3434 bit position relative to the start of a chunk (byte, halfword, word)
3435 large enough to contain it. In these cases we can avoid the shift
3436 implicit in bitfield extractions.
3438 For constants, we emit a compare of the shifted constant with the
3439 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3440 compared. For two fields at the same position, we do the ANDs with the
3441 similar mask and compare the result of the ANDs.
3443 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3444 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3445 are the left and right operands of the comparison, respectively.
3447 If the optimization described above can be done, we return the resulting
3448 tree. Otherwise we return zero. */
3450 static tree
3451 optimize_bit_field_compare (location_t loc, enum tree_code code,
3452 tree compare_type, tree lhs, tree rhs)
3454 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3455 tree type = TREE_TYPE (lhs);
3456 tree unsigned_type;
3457 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3458 enum machine_mode lmode, rmode, nmode;
3459 int lunsignedp, runsignedp;
3460 int lvolatilep = 0, rvolatilep = 0;
3461 tree linner, rinner = NULL_TREE;
3462 tree mask;
3463 tree offset;
3465 /* Get all the information about the extractions being done. If the bit size
3466 is the same as the size of the underlying object, we aren't doing an
3467 extraction at all and so can do nothing. We also don't want to
3468 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3469 then will no longer be able to replace it. */
3470 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3471 &lunsignedp, &lvolatilep, false);
3472 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3473 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3474 return 0;
3476 if (!const_p)
3478 /* If this is not a constant, we can only do something if bit positions,
3479 sizes, and signedness are the same. */
3480 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3481 &runsignedp, &rvolatilep, false);
3483 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3484 || lunsignedp != runsignedp || offset != 0
3485 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3486 return 0;
3489 /* See if we can find a mode to refer to this field. We should be able to,
3490 but fail if we can't. */
3491 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3492 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3493 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3494 TYPE_ALIGN (TREE_TYPE (rinner))),
3495 word_mode, false);
3496 if (nmode == VOIDmode)
3497 return 0;
3499 /* Set signed and unsigned types of the precision of this mode for the
3500 shifts below. */
3501 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3503 /* Compute the bit position and size for the new reference and our offset
3504 within it. If the new reference is the same size as the original, we
3505 won't optimize anything, so return zero. */
3506 nbitsize = GET_MODE_BITSIZE (nmode);
3507 nbitpos = lbitpos & ~ (nbitsize - 1);
3508 lbitpos -= nbitpos;
3509 if (nbitsize == lbitsize)
3510 return 0;
3512 if (BYTES_BIG_ENDIAN)
3513 lbitpos = nbitsize - lbitsize - lbitpos;
3515 /* Make the mask to be used against the extracted field. */
3516 mask = build_int_cst_type (unsigned_type, -1);
3517 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3518 mask = const_binop (RSHIFT_EXPR, mask,
3519 size_int (nbitsize - lbitsize - lbitpos));
3521 if (! const_p)
3522 /* If not comparing with a constant, just rework the comparison
3523 and return. */
3524 return fold_build2_loc (loc, code, compare_type,
3525 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3526 make_bit_field_ref (loc, linner,
3527 unsigned_type,
3528 nbitsize, nbitpos,
3529 1),
3530 mask),
3531 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3532 make_bit_field_ref (loc, rinner,
3533 unsigned_type,
3534 nbitsize, nbitpos,
3535 1),
3536 mask));
3538 /* Otherwise, we are handling the constant case. See if the constant is too
3539 big for the field. Warn and return a tree for 0 (false) if so. We do
3540 this not only for its own sake, but to avoid having to test for this
3541 error case below. If we didn't, we might generate wrong code.
3543 For unsigned fields, the constant shifted right by the field length should
3544 be all zero. For signed fields, the high-order bits should agree with
3545 the sign bit. */
3547 if (lunsignedp)
3549 if (wi::lrshift (rhs, lbitsize) != 0)
3551 warning (0, "comparison is always %d due to width of bit-field",
3552 code == NE_EXPR);
3553 return constant_boolean_node (code == NE_EXPR, compare_type);
3556 else
3558 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3559 if (tem != 0 && tem != -1)
3561 warning (0, "comparison is always %d due to width of bit-field",
3562 code == NE_EXPR);
3563 return constant_boolean_node (code == NE_EXPR, compare_type);
3567 /* Single-bit compares should always be against zero. */
3568 if (lbitsize == 1 && ! integer_zerop (rhs))
3570 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3571 rhs = build_int_cst (type, 0);
3574 /* Make a new bitfield reference, shift the constant over the
3575 appropriate number of bits and mask it with the computed mask
3576 (in case this was a signed field). If we changed it, make a new one. */
3577 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3579 rhs = const_binop (BIT_AND_EXPR,
3580 const_binop (LSHIFT_EXPR,
3581 fold_convert_loc (loc, unsigned_type, rhs),
3582 size_int (lbitpos)),
3583 mask);
3585 lhs = build2_loc (loc, code, compare_type,
3586 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3587 return lhs;
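/* Illustrative example (not part of the original source; the layout is
   hypothetical and endian-dependent): given

     struct s { unsigned a : 3; unsigned b : 9; } x;

   a test like x.b == 5 is rewritten to roughly

     (WORD & MASK) == (((unsigned) 5 << 3) & MASK)

   where WORD is a word-mode load containing the field and MASK covers
   bits 3..11, avoiding the shift-and-extend sequence of an actual
   bitfield extraction.  */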
3590 /* Subroutine for fold_truth_andor_1: decode a field reference.
3592 If EXP is a comparison reference, we return the innermost reference.
3594 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3595 set to the starting bit number.
3597 If the innermost field can be completely contained in a mode-sized
3598 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3600 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3601 otherwise it is not changed.
3603 *PUNSIGNEDP is set to the signedness of the field.
3605 *PMASK is set to the mask used. This is either contained in a
3606 BIT_AND_EXPR or derived from the width of the field.
3608 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3610 Return 0 if this is not a component reference or is one that we can't
3611 do anything with. */
3613 static tree
3614 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3615 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3616 int *punsignedp, int *pvolatilep,
3617 tree *pmask, tree *pand_mask)
3619 tree outer_type = 0;
3620 tree and_mask = 0;
3621 tree mask, inner, offset;
3622 tree unsigned_type;
3623 unsigned int precision;
3625 /* All the optimizations using this function assume integer fields.
3626 There are problems with FP fields since the type_for_size call
3627 below can fail for, e.g., XFmode. */
3628 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3629 return 0;
3631 /* We are interested in the bare arrangement of bits, so strip everything
3632 that doesn't affect the machine mode. However, record the type of the
3633 outermost expression if it may matter below. */
3634 if (CONVERT_EXPR_P (exp)
3635 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3636 outer_type = TREE_TYPE (exp);
3637 STRIP_NOPS (exp);
3639 if (TREE_CODE (exp) == BIT_AND_EXPR)
3641 and_mask = TREE_OPERAND (exp, 1);
3642 exp = TREE_OPERAND (exp, 0);
3643 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3644 if (TREE_CODE (and_mask) != INTEGER_CST)
3645 return 0;
3648 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3649 punsignedp, pvolatilep, false);
3650 if ((inner == exp && and_mask == 0)
3651 || *pbitsize < 0 || offset != 0
3652 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3653 return 0;
3655 /* If the number of bits in the reference is the same as the bitsize of
3656 the outer type, then the outer type gives the signedness. Otherwise
3657 (in case of a small bitfield) the signedness is unchanged. */
3658 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3659 *punsignedp = TYPE_UNSIGNED (outer_type);
3661 /* Compute the mask to access the bitfield. */
3662 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3663 precision = TYPE_PRECISION (unsigned_type);
3665 mask = build_int_cst_type (unsigned_type, -1);
3667 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3668 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3670 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3671 if (and_mask != 0)
3672 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3673 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3675 *pmask = mask;
3676 *pand_mask = and_mask;
3677 return inner;
3680 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3681 bit positions and MASK is SIGNED. */
3683 static int
3684 all_ones_mask_p (const_tree mask, unsigned int size)
3686 tree type = TREE_TYPE (mask);
3687 unsigned int precision = TYPE_PRECISION (type);
3689 /* If this function returns true when the type of the mask is
3690 UNSIGNED, then there will be errors. In particular see
3691 gcc.c-torture/execute/990326-1.c. There does not appear to be
3692 any documentation paper trail as to why this is so. But the
3693 pre-wide-int code worked with that restriction, and it has been
3694 preserved here. */
3695 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3696 return false;
3698 return wi::mask (size, false, precision) == mask;
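/* Illustrative note (not part of the original source): with a signed
   8-bit TYPE, a MASK of 15 (binary 1111) and a SIZE of 4 satisfy the
   test above, while the same mask in an unsigned type is rejected by
   the TYPE_SIGN check regardless of its value.  */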
3701 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3702 represents the sign bit of EXP's type. If EXP represents a sign
3703 or zero extension, also test VAL against the unextended type.
3704 The return value is the (sub)expression whose sign bit is VAL,
3705 or NULL_TREE otherwise. */
3707 static tree
3708 sign_bit_p (tree exp, const_tree val)
3710 int width;
3711 tree t;
3713 /* Tree EXP must have an integral type. */
3714 t = TREE_TYPE (exp);
3715 if (! INTEGRAL_TYPE_P (t))
3716 return NULL_TREE;
3718 /* Tree VAL must be an integer constant. */
3719 if (TREE_CODE (val) != INTEGER_CST
3720 || TREE_OVERFLOW (val))
3721 return NULL_TREE;
3723 width = TYPE_PRECISION (t);
3724 if (wi::only_sign_bit_p (val, width))
3725 return exp;
3727 /* Handle extension from a narrower type. */
3728 if (TREE_CODE (exp) == NOP_EXPR
3729 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3730 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3732 return NULL_TREE;
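/* Illustrative note (not part of the original source): for a 16-bit
   signed EXP, sign_bit_p returns EXP when VAL is the INTEGER_CST with
   only bit 15 set (0x8000); callers use this to turn mask tests of
   the form (x & 0x8000) != 0 into the cheaper x < 0.  */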
3735 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3736 to be evaluated unconditionally. */
3738 static int
3739 simple_operand_p (const_tree exp)
3741 /* Strip any conversions that don't change the machine mode. */
3742 STRIP_NOPS (exp);
3744 return (CONSTANT_CLASS_P (exp)
3745 || TREE_CODE (exp) == SSA_NAME
3746 || (DECL_P (exp)
3747 && ! TREE_ADDRESSABLE (exp)
3748 && ! TREE_THIS_VOLATILE (exp)
3749 && ! DECL_NONLOCAL (exp)
3750 /* Don't regard global variables as simple. They may be
3751 allocated in ways unknown to the compiler (shared memory,
3752 #pragma weak, etc). */
3753 && ! TREE_PUBLIC (exp)
3754 && ! DECL_EXTERNAL (exp)
3755 /* Weakrefs are not safe to be read, since they can be NULL.
3756 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3757 have DECL_WEAK flag set. */
3758 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3759 /* Loading a static variable is unduly expensive, but global
3760 registers aren't expensive. */
3761 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3764 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3765 to be evaluated unconditionally.
3766 In addition to simple_operand_p, we assume that comparisons, conversions,
3767 and logic-not operations are simple if their operands are simple, too. */
3769 static bool
3770 simple_operand_p_2 (tree exp)
3772 enum tree_code code;
3774 if (TREE_SIDE_EFFECTS (exp)
3775 || tree_could_trap_p (exp))
3776 return false;
3778 while (CONVERT_EXPR_P (exp))
3779 exp = TREE_OPERAND (exp, 0);
3781 code = TREE_CODE (exp);
3783 if (TREE_CODE_CLASS (code) == tcc_comparison)
3784 return (simple_operand_p (TREE_OPERAND (exp, 0))
3785 && simple_operand_p (TREE_OPERAND (exp, 1)));
3787 if (code == TRUTH_NOT_EXPR)
3788 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3790 return simple_operand_p (exp);
3794 /* The following functions are subroutines to fold_range_test and allow it to
3795 try to change a logical combination of comparisons into a range test.
3797 For example, both
3798 X == 2 || X == 3 || X == 4 || X == 5
3799 and
3800 X >= 2 && X <= 5
3801 are converted to
3802 (unsigned) (X - 2) <= 3
3804 We describe each set of comparisons as being either inside or outside
3805 a range, using a variable named like IN_P, and then describe the
3806 range with a lower and upper bound. If one of the bounds is omitted,
3807 it represents either the highest or lowest value of the type.
3809 In the comments below, we represent a range by two numbers in brackets
3810 preceded by a "+" to designate being inside that range, or a "-" to
3811 designate being outside that range, so the condition can be inverted by
3812 flipping the prefix. An omitted bound is represented by a "-". For
3813 example, "- [-, 10]" means being outside the range starting at the lowest
3814 possible value and ending at 10, in other words, being greater than 10.
3815 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3816 always false.
3818 We set up things so that the missing bounds are handled in a consistent
3819 manner so neither a missing bound nor "true" and "false" need to be
3820 handled using a special case. */
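/* As a concrete sketch in source terms (overflow of the signed
   subtraction is assumed undefined, which is what *STRICT_OVERFLOW_P
   tracks below), the two functions here compute the same truth value
   for every X:

       static int orig (int x)   { return x == 2 || x == 3 || x == 4 || x == 5; }
       static int folded (int x) { return (unsigned) (x - 2) <= 3; }

   The unsigned subtraction wraps values below 2 far above 3, so one
   comparison checks both bounds.  */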
3822 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3823 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3824 and UPPER1_P are nonzero if the respective argument is an upper bound
3825 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3826 must be specified for a comparison. ARG1 will be converted to ARG0's
3827 type if both are specified. */
3829 static tree
3830 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3831 tree arg1, int upper1_p)
3833 tree tem;
3834 int result;
3835 int sgn0, sgn1;
3837 /* If neither arg represents infinity, do the normal operation.
3838 Else, if not a comparison, return infinity. Else handle the special
3839 comparison rules. Note that most of the cases below won't occur, but
3840 are handled for consistency. */
3842 if (arg0 != 0 && arg1 != 0)
3844 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3845 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3846 STRIP_NOPS (tem);
3847 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3850 if (TREE_CODE_CLASS (code) != tcc_comparison)
3851 return 0;
3853 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3854 for neither. In real maths, we cannot assume open ended ranges are
3855 the same. But, this is computer arithmetic, where numbers are finite.
3856 We can therefore model any unbounded range as being bounded by a
3857 value Z, Z being greater than any representable number. This permits
3858 us to treat unbounded ranges as equal. */
3859 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3860 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3861 switch (code)
3863 case EQ_EXPR:
3864 result = sgn0 == sgn1;
3865 break;
3866 case NE_EXPR:
3867 result = sgn0 != sgn1;
3868 break;
3869 case LT_EXPR:
3870 result = sgn0 < sgn1;
3871 break;
3872 case LE_EXPR:
3873 result = sgn0 <= sgn1;
3874 break;
3875 case GT_EXPR:
3876 result = sgn0 > sgn1;
3877 break;
3878 case GE_EXPR:
3879 result = sgn0 >= sgn1;
3880 break;
3881 default:
3882 gcc_unreachable ();
3885 return constant_boolean_node (result, type);
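/* Under this encoding, e.g. range_binop (LT_EXPR, type, c, 0, 0, 1)
   with a constant C and an omitted upper bound compares SGN0 == 0
   against SGN1 == 1 and yields true: every representable value is
   below a missing (infinite) upper bound.  */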
3888 /* Helper routine for make_range. Perform one step for it, return
3889 new expression if the loop should continue or NULL_TREE if it should
3890 stop. */
3892 tree
3893 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3894 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3895 bool *strict_overflow_p)
3897 tree arg0_type = TREE_TYPE (arg0);
3898 tree n_low, n_high, low = *p_low, high = *p_high;
3899 int in_p = *p_in_p, n_in_p;
3901 switch (code)
3903 case TRUTH_NOT_EXPR:
3904 /* We can only do something if the range is testing for zero. */
3905 if (low == NULL_TREE || high == NULL_TREE
3906 || ! integer_zerop (low) || ! integer_zerop (high))
3907 return NULL_TREE;
3908 *p_in_p = ! in_p;
3909 return arg0;
3911 case EQ_EXPR: case NE_EXPR:
3912 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3913 /* We can only do something if the range is testing for zero
3914 and if the second operand is an integer constant. Note that
3915 saying something is "in" the range we make is done by
3916 complementing IN_P, since it is set for the initial case of
3917 being not equal to zero; "out" is leaving it alone. */
3918 if (low == NULL_TREE || high == NULL_TREE
3919 || ! integer_zerop (low) || ! integer_zerop (high)
3920 || TREE_CODE (arg1) != INTEGER_CST)
3921 return NULL_TREE;
3923 switch (code)
3925 case NE_EXPR: /* - [c, c] */
3926 low = high = arg1;
3927 break;
3928 case EQ_EXPR: /* + [c, c] */
3929 in_p = ! in_p, low = high = arg1;
3930 break;
3931 case GT_EXPR: /* - [-, c] */
3932 low = 0, high = arg1;
3933 break;
3934 case GE_EXPR: /* + [c, -] */
3935 in_p = ! in_p, low = arg1, high = 0;
3936 break;
3937 case LT_EXPR: /* - [c, -] */
3938 low = arg1, high = 0;
3939 break;
3940 case LE_EXPR: /* + [-, c] */
3941 in_p = ! in_p, low = 0, high = arg1;
3942 break;
3943 default:
3944 gcc_unreachable ();
3947 /* If this is an unsigned comparison, we also know that EXP is
3948 greater than or equal to zero. We base the range tests we make
3949 on that fact, so we record it here so we can parse existing
3950 range tests. We test arg0_type since often the return type
3951 of, e.g. EQ_EXPR, is boolean. */
3952 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3954 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3955 in_p, low, high, 1,
3956 build_int_cst (arg0_type, 0),
3957 NULL_TREE))
3958 return NULL_TREE;
3960 in_p = n_in_p, low = n_low, high = n_high;
3962 /* If the high bound is missing, but we have a nonzero low
3963 bound, reverse the range so it goes from zero to the low bound
3964 minus 1. */
3965 if (high == 0 && low && ! integer_zerop (low))
3967 in_p = ! in_p;
3968 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3969 build_int_cst (TREE_TYPE (low), 1), 0);
3970 low = build_int_cst (arg0_type, 0);
3974 *p_low = low;
3975 *p_high = high;
3976 *p_in_p = in_p;
3977 return arg0;
3979 case NEGATE_EXPR:
3980 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3981 low and high are non-NULL, then the normalize code below will do the right thing. */
3982 if (!TYPE_UNSIGNED (arg0_type)
3983 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3985 if (low == NULL_TREE)
3986 low = TYPE_MIN_VALUE (arg0_type);
3987 if (high == NULL_TREE)
3988 high = TYPE_MAX_VALUE (arg0_type);
3991 /* (-x) IN [a,b] -> x in [-b, -a] */
3992 n_low = range_binop (MINUS_EXPR, exp_type,
3993 build_int_cst (exp_type, 0),
3994 0, high, 1);
3995 n_high = range_binop (MINUS_EXPR, exp_type,
3996 build_int_cst (exp_type, 0),
3997 0, low, 0);
3998 if (n_high != 0 && TREE_OVERFLOW (n_high))
3999 return NULL_TREE;
4000 goto normalize;
4002 case BIT_NOT_EXPR:
4003 /* ~ X -> -X - 1 */
4004 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4005 build_int_cst (exp_type, 1));
4007 case PLUS_EXPR:
4008 case MINUS_EXPR:
4009 if (TREE_CODE (arg1) != INTEGER_CST)
4010 return NULL_TREE;
4012 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4013 move a constant to the other side. */
4014 if (!TYPE_UNSIGNED (arg0_type)
4015 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4016 return NULL_TREE;
4018 /* If EXP is signed, any overflow in the computation is undefined,
4019 so we don't worry about it so long as our computations on
4020 the bounds don't overflow. For unsigned, overflow is defined
4021 and this is exactly the right thing. */
4022 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4023 arg0_type, low, 0, arg1, 0);
4024 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4025 arg0_type, high, 1, arg1, 0);
4026 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4027 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4028 return NULL_TREE;
4030 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4031 *strict_overflow_p = true;
4033 normalize:
4034 /* Check for an unsigned range which has wrapped around the maximum
4035 value thus making n_high < n_low, and normalize it. */
4036 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4038 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4039 build_int_cst (TREE_TYPE (n_high), 1), 0);
4040 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4041 build_int_cst (TREE_TYPE (n_low), 1), 0);
4043 /* If the range is of the form +/- [ x+1, x ], we won't
4044 be able to normalize it. But then, it represents the
4045 whole range or the empty set, so make it
4046 +/- [ -, - ]. */
4047 if (tree_int_cst_equal (n_low, low)
4048 && tree_int_cst_equal (n_high, high))
4049 low = high = 0;
4050 else
4051 in_p = ! in_p;
4053 else
4054 low = n_low, high = n_high;
4056 *p_low = low;
4057 *p_high = high;
4058 *p_in_p = in_p;
4059 return arg0;
4061 CASE_CONVERT:
4062 case NON_LVALUE_EXPR:
4063 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4064 return NULL_TREE;
4066 if (! INTEGRAL_TYPE_P (arg0_type)
4067 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4068 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4069 return NULL_TREE;
4071 n_low = low, n_high = high;
4073 if (n_low != 0)
4074 n_low = fold_convert_loc (loc, arg0_type, n_low);
4076 if (n_high != 0)
4077 n_high = fold_convert_loc (loc, arg0_type, n_high);
4079 /* If we're converting arg0, which has an unsigned type, to exp's
4080 signed type, we will be doing the comparison as unsigned.
4081 The tests above have already verified that LOW and HIGH
4082 are both positive.
4084 So we have to ensure that we will handle large unsigned
4085 values the same way that the current signed bounds treat
4086 negative values. */
4088 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4090 tree high_positive;
4091 tree equiv_type;
4092 /* For fixed-point modes, we need to pass the saturating flag
4093 as the 2nd parameter. */
4094 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4095 equiv_type
4096 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4097 TYPE_SATURATING (arg0_type));
4098 else
4099 equiv_type
4100 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4102 /* A range without an upper bound is, naturally, unbounded.
4103 Since convert would have cropped a very large value, use
4104 the max value for the destination type. */
4105 high_positive
4106 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4107 : TYPE_MAX_VALUE (arg0_type);
4109 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4110 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4111 fold_convert_loc (loc, arg0_type,
4112 high_positive),
4113 build_int_cst (arg0_type, 1));
4115 /* If the low bound is specified, "and" the range with the
4116 range for which the original unsigned value will be
4117 positive. */
4118 if (low != 0)
4120 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4121 1, fold_convert_loc (loc, arg0_type,
4122 integer_zero_node),
4123 high_positive))
4124 return NULL_TREE;
4126 in_p = (n_in_p == in_p);
4128 else
4130 /* Otherwise, "or" the range with the range of the input
4131 that will be interpreted as negative. */
4132 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4133 1, fold_convert_loc (loc, arg0_type,
4134 integer_zero_node),
4135 high_positive))
4136 return NULL_TREE;
4138 in_p = (in_p != n_in_p);
4142 *p_low = n_low;
4143 *p_high = n_high;
4144 *p_in_p = in_p;
4145 return arg0;
4147 default:
4148 return NULL_TREE;
4152 /* Given EXP, a logical expression, set the range it is testing into
4153 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4154 actually being tested. *PLOW and *PHIGH will be made of the same
4155 type as the returned expression. If EXP is not a comparison, we
4156 will most likely not be returning a useful value and range. Set
4157 *STRICT_OVERFLOW_P to true if the return value is only valid
4158 because signed overflow is undefined; otherwise, do not change
4159 *STRICT_OVERFLOW_P. */
4161 tree
4162 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4163 bool *strict_overflow_p)
4165 enum tree_code code;
4166 tree arg0, arg1 = NULL_TREE;
4167 tree exp_type, nexp;
4168 int in_p;
4169 tree low, high;
4170 location_t loc = EXPR_LOCATION (exp);
4172 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4173 and see if we can refine the range. Some of the cases below may not
4174 happen, but it doesn't seem worth worrying about this. We "continue"
4175 the outer loop when we've changed something; otherwise we "break"
4176 the switch, which will "break" the while. */
4178 in_p = 0;
4179 low = high = build_int_cst (TREE_TYPE (exp), 0);
4181 while (1)
4183 code = TREE_CODE (exp);
4184 exp_type = TREE_TYPE (exp);
4185 arg0 = NULL_TREE;
4187 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4189 if (TREE_OPERAND_LENGTH (exp) > 0)
4190 arg0 = TREE_OPERAND (exp, 0);
4191 if (TREE_CODE_CLASS (code) == tcc_binary
4192 || TREE_CODE_CLASS (code) == tcc_comparison
4193 || (TREE_CODE_CLASS (code) == tcc_expression
4194 && TREE_OPERAND_LENGTH (exp) > 1))
4195 arg1 = TREE_OPERAND (exp, 1);
4197 if (arg0 == NULL_TREE)
4198 break;
4200 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4201 &high, &in_p, strict_overflow_p);
4202 if (nexp == NULL_TREE)
4203 break;
4204 exp = nexp;
4207 /* If EXP is a constant, we can evaluate whether this is true or false. */
4208 if (TREE_CODE (exp) == INTEGER_CST)
4210 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4211 exp, 0, low, 0))
4212 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4213 exp, 1, high, 1)));
4214 low = high = 0;
4215 exp = 0;
4218 *pin_p = in_p, *plow = low, *phigh = high;
4219 return exp;
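/* For example, for a signed int X, calling make_range on the
   expression X > 4 finishes with EXP == X, *PIN_P == 0, *PLOW
   omitted and *PHIGH == 4, i.e. the range "- [-, 4]": X outside
   [lowest, 4], which is exactly X > 4.  */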
4222 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4223 type, TYPE, return an expression to test if EXP is in (or out of, depending
4224 on IN_P) the range. Return 0 if the test couldn't be created. */
4226 tree
4227 build_range_check (location_t loc, tree type, tree exp, int in_p,
4228 tree low, tree high)
4230 tree etype = TREE_TYPE (exp), value;
4232 #ifdef HAVE_canonicalize_funcptr_for_compare
4233 /* Disable this optimization for function pointer expressions
4234 on targets that require function pointer canonicalization. */
4235 if (HAVE_canonicalize_funcptr_for_compare
4236 && TREE_CODE (etype) == POINTER_TYPE
4237 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4238 return NULL_TREE;
4239 #endif
4241 if (! in_p)
4243 value = build_range_check (loc, type, exp, 1, low, high);
4244 if (value != 0)
4245 return invert_truthvalue_loc (loc, value);
4247 return 0;
4250 if (low == 0 && high == 0)
4251 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4253 if (low == 0)
4254 return fold_build2_loc (loc, LE_EXPR, type, exp,
4255 fold_convert_loc (loc, etype, high));
4257 if (high == 0)
4258 return fold_build2_loc (loc, GE_EXPR, type, exp,
4259 fold_convert_loc (loc, etype, low));
4261 if (operand_equal_p (low, high, 0))
4262 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4263 fold_convert_loc (loc, etype, low));
4265 if (integer_zerop (low))
4267 if (! TYPE_UNSIGNED (etype))
4269 etype = unsigned_type_for (etype);
4270 high = fold_convert_loc (loc, etype, high);
4271 exp = fold_convert_loc (loc, etype, exp);
4273 return build_range_check (loc, type, exp, 1, 0, high);
4276 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4277 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4279 int prec = TYPE_PRECISION (etype);
4281 if (wi::mask (prec - 1, false, prec) == high)
4283 if (TYPE_UNSIGNED (etype))
4285 tree signed_etype = signed_type_for (etype);
4286 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4287 etype
4288 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4289 else
4290 etype = signed_etype;
4291 exp = fold_convert_loc (loc, etype, exp);
4293 return fold_build2_loc (loc, GT_EXPR, type, exp,
4294 build_int_cst (etype, 0));
4298 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4299 This requires wrap-around arithmetic for the type of the expression.
4300 First make sure that arithmetic in this type is valid, then make sure
4301 that it wraps around. */
4302 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4303 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4304 TYPE_UNSIGNED (etype));
4306 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4308 tree utype, minv, maxv;
4310 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4311 for the type in question, as we rely on this here. */
4312 utype = unsigned_type_for (etype);
4313 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4314 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4315 build_int_cst (TREE_TYPE (maxv), 1), 1);
4316 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4318 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4319 minv, 1, maxv, 1)))
4320 etype = utype;
4321 else
4322 return 0;
4325 high = fold_convert_loc (loc, etype, high);
4326 low = fold_convert_loc (loc, etype, low);
4327 exp = fold_convert_loc (loc, etype, exp);
4329 value = const_binop (MINUS_EXPR, high, low);
4332 if (POINTER_TYPE_P (etype))
4334 if (value != 0 && !TREE_OVERFLOW (value))
4336 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4337 return build_range_check (loc, type,
4338 fold_build_pointer_plus_loc (loc, exp, low),
4339 1, build_int_cst (etype, 0), value);
4341 return 0;
4344 if (value != 0 && !TREE_OVERFLOW (value))
4345 return build_range_check (loc, type,
4346 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4347 1, build_int_cst (etype, 0), value);
4349 return 0;
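/* For example, build_range_check (loc, boolean_type_node, x, 1, low, high)
   with a signed int X, LOW == 2 and HIGH == 5 takes the wrap-around
   path above and yields the equivalent of (unsigned) x - 2 <= 3, one
   unsigned comparison for the two-sided test 2 <= x && x <= 5.  */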
4352 /* Return the predecessor of VAL in its type, handling the infinite case. */
4354 static tree
4355 range_predecessor (tree val)
4357 tree type = TREE_TYPE (val);
4359 if (INTEGRAL_TYPE_P (type)
4360 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4361 return 0;
4362 else
4363 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4364 build_int_cst (TREE_TYPE (val), 1), 0);
4367 /* Return the successor of VAL in its type, handling the infinite case. */
4369 static tree
4370 range_successor (tree val)
4372 tree type = TREE_TYPE (val);
4374 if (INTEGRAL_TYPE_P (type)
4375 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4376 return 0;
4377 else
4378 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4379 build_int_cst (TREE_TYPE (val), 1), 0);
4382 /* Given two ranges, see if we can merge them into one. Return 1 if we
4383 can, 0 if we can't. Set the output range into the specified parameters. */
4385 bool
4386 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4387 tree high0, int in1_p, tree low1, tree high1)
4389 int no_overlap;
4390 int subset;
4391 int temp;
4392 tree tem;
4393 int in_p;
4394 tree low, high;
4395 int lowequal = ((low0 == 0 && low1 == 0)
4396 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4397 low0, 0, low1, 0)));
4398 int highequal = ((high0 == 0 && high1 == 0)
4399 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4400 high0, 1, high1, 1)));
4402 /* Make range 0 be the range that starts first, or ends last if they
4403 start at the same value. Swap them if it isn't. */
4404 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4405 low0, 0, low1, 0))
4406 || (lowequal
4407 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4408 high1, 1, high0, 1))))
4410 temp = in0_p, in0_p = in1_p, in1_p = temp;
4411 tem = low0, low0 = low1, low1 = tem;
4412 tem = high0, high0 = high1, high1 = tem;
4415 /* Now flag two cases, whether the ranges are disjoint or whether the
4416 second range is totally subsumed in the first. Note that the tests
4417 below are simplified by the ones above. */
4418 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4419 high0, 1, low1, 0));
4420 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4421 high1, 1, high0, 1));
4423 /* We now have four cases, depending on whether we are including or
4424 excluding the two ranges. */
4425 if (in0_p && in1_p)
4427 /* If they don't overlap, the result is false. If the second range
4428 is a subset it is the result. Otherwise, the range is from the start
4429 of the second to the end of the first. */
4430 if (no_overlap)
4431 in_p = 0, low = high = 0;
4432 else if (subset)
4433 in_p = 1, low = low1, high = high1;
4434 else
4435 in_p = 1, low = low1, high = high0;
4438 else if (in0_p && ! in1_p)
4440 /* If they don't overlap, the result is the first range. If they are
4441 equal, the result is false. If the second range is a subset of the
4442 first, and the ranges begin at the same place, we go from just after
4443 the end of the second range to the end of the first. If the second
4444 range is not a subset of the first, or if it is a subset and both
4445 ranges end at the same place, the range starts at the start of the
4446 first range and ends just before the second range.
4447 Otherwise, we can't describe this as a single range. */
4448 if (no_overlap)
4449 in_p = 1, low = low0, high = high0;
4450 else if (lowequal && highequal)
4451 in_p = 0, low = high = 0;
4452 else if (subset && lowequal)
4454 low = range_successor (high1);
4455 high = high0;
4456 in_p = 1;
4457 if (low == 0)
4459 /* We are in the weird situation where high0 > high1 but
4460 high1 has no successor. Punt. */
4461 return 0;
4464 else if (! subset || highequal)
4466 low = low0;
4467 high = range_predecessor (low1);
4468 in_p = 1;
4469 if (high == 0)
4471 /* low0 < low1 but low1 has no predecessor. Punt. */
4472 return 0;
4475 else
4476 return 0;
4479 else if (! in0_p && in1_p)
4481 /* If they don't overlap, the result is the second range. If the second
4482 is a subset of the first, the result is false. Otherwise,
4483 the range starts just after the first range and ends at the
4484 end of the second. */
4485 if (no_overlap)
4486 in_p = 1, low = low1, high = high1;
4487 else if (subset || highequal)
4488 in_p = 0, low = high = 0;
4489 else
4491 low = range_successor (high0);
4492 high = high1;
4493 in_p = 1;
4494 if (low == 0)
4496 /* high1 > high0 but high0 has no successor. Punt. */
4497 return 0;
4502 else
4504 /* The case where we are excluding both ranges. Here the complex case
4505 is if they don't overlap. In that case, the only time we have a
4506 range is if they are adjacent. If the second is a subset of the
4507 first, the result is the first. Otherwise, the range to exclude
4508 starts at the beginning of the first range and ends at the end of the
4509 second. */
4510 if (no_overlap)
4512 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4513 range_successor (high0),
4514 1, low1, 0)))
4515 in_p = 0, low = low0, high = high1;
4516 else
4518 /* Canonicalize - [min, x] into - [-, x]. */
4519 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4520 switch (TREE_CODE (TREE_TYPE (low0)))
4522 case ENUMERAL_TYPE:
4523 if (TYPE_PRECISION (TREE_TYPE (low0))
4524 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4525 break;
4526 /* FALLTHROUGH */
4527 case INTEGER_TYPE:
4528 if (tree_int_cst_equal (low0,
4529 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4530 low0 = 0;
4531 break;
4532 case POINTER_TYPE:
4533 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4534 && integer_zerop (low0))
4535 low0 = 0;
4536 break;
4537 default:
4538 break;
4541 /* Canonicalize - [x, max] into - [x, -]. */
4542 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4543 switch (TREE_CODE (TREE_TYPE (high1)))
4545 case ENUMERAL_TYPE:
4546 if (TYPE_PRECISION (TREE_TYPE (high1))
4547 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4548 break;
4549 /* FALLTHROUGH */
4550 case INTEGER_TYPE:
4551 if (tree_int_cst_equal (high1,
4552 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4553 high1 = 0;
4554 break;
4555 case POINTER_TYPE:
4556 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4557 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4558 high1, 1,
4559 build_int_cst (TREE_TYPE (high1), 1),
4560 1)))
4561 high1 = 0;
4562 break;
4563 default:
4564 break;
4567 /* The ranges might be also adjacent between the maximum and
4568 minimum values of the given type. For
4569 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4570 return + [x + 1, y - 1]. */
4571 if (low0 == 0 && high1 == 0)
4573 low = range_successor (high0);
4574 high = range_predecessor (low1);
4575 if (low == 0 || high == 0)
4576 return 0;
4578 in_p = 1;
4580 else
4581 return 0;
4584 else if (subset)
4585 in_p = 0, low = low0, high = high0;
4586 else
4587 in_p = 0, low = low0, high = high1;
4590 *pin_p = in_p, *plow = low, *phigh = high;
4591 return 1;
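/* For example, merging + [2, 5] with + [4, 9] (IN0_P == IN1_P == 1)
   finds the ranges overlapping with neither a subset of the other,
   and the first case above produces + [4, 5]: the conjunction of
   2 <= x <= 5 and 4 <= x <= 9.  */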
4595 /* Subroutine of fold, looking inside expressions of the form
4596 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4597 of the COND_EXPR. This function is also used to optimize
4598 A op B ? C : A, by reversing the comparison first.
4600 Return a folded expression whose code is not a COND_EXPR
4601 anymore, or NULL_TREE if no folding opportunity is found. */
4603 static tree
4604 fold_cond_expr_with_comparison (location_t loc, tree type,
4605 tree arg0, tree arg1, tree arg2)
4607 enum tree_code comp_code = TREE_CODE (arg0);
4608 tree arg00 = TREE_OPERAND (arg0, 0);
4609 tree arg01 = TREE_OPERAND (arg0, 1);
4610 tree arg1_type = TREE_TYPE (arg1);
4611 tree tem;
4613 STRIP_NOPS (arg1);
4614 STRIP_NOPS (arg2);
4616 /* If we have A op 0 ? A : -A, consider applying the following
4617 transformations:
4619 A == 0? A : -A same as -A
4620 A != 0? A : -A same as A
4621 A >= 0? A : -A same as abs (A)
4622 A > 0? A : -A same as abs (A)
4623 A <= 0? A : -A same as -abs (A)
4624 A < 0? A : -A same as -abs (A)
4626 None of these transformations work for modes with signed
4627 zeros. If A is +/-0, the first two transformations will
4628 change the sign of the result (from +0 to -0, or vice
4629 versa). The last four will fix the sign of the result,
4630 even though the original expressions could be positive or
4631 negative, depending on the sign of A.
4633 Note that all these transformations are correct if A is
4634 NaN, since the two alternatives (A and -A) are also NaNs. */
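/* For instance, when signed zeros are not honored, a > 0.0 ? a : -a
   with a double A folds to ABS_EXPR <a> via the GT_EXPR arm below,
   and a <= 0.0 ? a : -a to its negation; the unordered variants
   additionally require that flag_trapping_math is clear.  */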
4635 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4636 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4637 ? real_zerop (arg01)
4638 : integer_zerop (arg01))
4639 && ((TREE_CODE (arg2) == NEGATE_EXPR
4640 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4641 /* In the case that A is of the form X-Y, '-A' (arg2) may
4642 have already been folded to Y-X, check for that. */
4643 || (TREE_CODE (arg1) == MINUS_EXPR
4644 && TREE_CODE (arg2) == MINUS_EXPR
4645 && operand_equal_p (TREE_OPERAND (arg1, 0),
4646 TREE_OPERAND (arg2, 1), 0)
4647 && operand_equal_p (TREE_OPERAND (arg1, 1),
4648 TREE_OPERAND (arg2, 0), 0))))
4649 switch (comp_code)
4651 case EQ_EXPR:
4652 case UNEQ_EXPR:
4653 tem = fold_convert_loc (loc, arg1_type, arg1);
4654 return pedantic_non_lvalue_loc (loc,
4655 fold_convert_loc (loc, type,
4656 negate_expr (tem)));
4657 case NE_EXPR:
4658 case LTGT_EXPR:
4659 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4660 case UNGE_EXPR:
4661 case UNGT_EXPR:
4662 if (flag_trapping_math)
4663 break;
4664 /* Fall through. */
4665 case GE_EXPR:
4666 case GT_EXPR:
4667 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4668 arg1 = fold_convert_loc (loc, signed_type_for
4669 (TREE_TYPE (arg1)), arg1);
4670 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4671 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4672 case UNLE_EXPR:
4673 case UNLT_EXPR:
4674 if (flag_trapping_math)
4675 break;
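/* Fall through. */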
4676 case LE_EXPR:
4677 case LT_EXPR:
4678 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4679 arg1 = fold_convert_loc (loc, signed_type_for
4680 (TREE_TYPE (arg1)), arg1);
4681 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4682 return negate_expr (fold_convert_loc (loc, type, tem));
4683 default:
4684 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4685 break;
4688 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4689 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4690 both transformations are correct when A is NaN: A != 0
4691 is then true, and A == 0 is false. */
4693 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4694 && integer_zerop (arg01) && integer_zerop (arg2))
4696 if (comp_code == NE_EXPR)
4697 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4698 else if (comp_code == EQ_EXPR)
4699 return build_zero_cst (type);
4702 /* Try some transformations of A op B ? A : B.
4704 A == B? A : B same as B
4705 A != B? A : B same as A
4706 A >= B? A : B same as max (A, B)
4707 A > B? A : B same as max (B, A)
4708 A <= B? A : B same as min (A, B)
4709 A < B? A : B same as min (B, A)
4711 As above, these transformations don't work in the presence
4712 of signed zeros. For example, if A and B are zeros of
4713 opposite sign, the first two transformations will change
4714 the sign of the result. In the last four, the original
4715 expressions give different results for (A=+0, B=-0) and
4716 (A=-0, B=+0), but the transformed expressions do not.
4718 The first two transformations are correct if either A or B
4719 is a NaN. In the first transformation, the condition will
4720 be false, and B will indeed be chosen. In the case of the
4721 second transformation, the condition A != B will be true,
4722 and A will be chosen.
4724 The conversions to max() and min() are not correct if B is
4725 a number and A is not. The conditions in the original
4726 expressions will be false, so all four give B. The min()
4727 and max() versions would give a NaN instead. */
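/* Thus, when the operands cannot be NaN, a < b ? a : b becomes
   MIN_EXPR <b, a> and a > b ? a : b becomes MAX_EXPR <b, a>; the
   operand order keeps the value returned when the operands compare
   equal, so the result can be converted back to a COND_EXPR.  */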
4728 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4729 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4730 /* Avoid these transformations if the COND_EXPR may be used
4731 as an lvalue in the C++ front-end. PR c++/19199. */
4732 && (in_gimple_form
4733 || VECTOR_TYPE_P (type)
4734 || (strcmp (lang_hooks.name, "GNU C++") != 0
4735 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4736 || ! maybe_lvalue_p (arg1)
4737 || ! maybe_lvalue_p (arg2)))
4739 tree comp_op0 = arg00;
4740 tree comp_op1 = arg01;
4741 tree comp_type = TREE_TYPE (comp_op0);
4743 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4744 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4746 comp_type = type;
4747 comp_op0 = arg1;
4748 comp_op1 = arg2;
4751 switch (comp_code)
4753 case EQ_EXPR:
4754 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4755 case NE_EXPR:
4756 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4757 case LE_EXPR:
4758 case LT_EXPR:
4759 case UNLE_EXPR:
4760 case UNLT_EXPR:
4761 /* In C++ a ?: expression can be an lvalue, so put the
4762 operand which will be used if they are equal first
4763 so that we can convert this back to the
4764 corresponding COND_EXPR. */
4765 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4767 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4768 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4769 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4770 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4771 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4772 comp_op1, comp_op0);
4773 return pedantic_non_lvalue_loc (loc,
4774 fold_convert_loc (loc, type, tem));
4776 break;
4777 case GE_EXPR:
4778 case GT_EXPR:
4779 case UNGE_EXPR:
4780 case UNGT_EXPR:
4781 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4783 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4784 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4785 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4786 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4787 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4788 comp_op1, comp_op0);
4789 return pedantic_non_lvalue_loc (loc,
4790 fold_convert_loc (loc, type, tem));
4792 break;
4793 case UNEQ_EXPR:
4794 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4795 return pedantic_non_lvalue_loc (loc,
4796 fold_convert_loc (loc, type, arg2));
4797 break;
4798 case LTGT_EXPR:
4799 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4800 return pedantic_non_lvalue_loc (loc,
4801 fold_convert_loc (loc, type, arg1));
4802 break;
4803 default:
4804 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4805 break;
4809 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4810 we might still be able to simplify this. For example,
4811 if C1 is one less or one more than C2, this might have started
4812 out as a MIN or MAX and been transformed by this function.
4813 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4815 if (INTEGRAL_TYPE_P (type)
4816 && TREE_CODE (arg01) == INTEGER_CST
4817 && TREE_CODE (arg2) == INTEGER_CST)
4818 switch (comp_code)
4820 case EQ_EXPR:
4821 if (TREE_CODE (arg1) == INTEGER_CST)
4822 break;
4823 /* We can replace A with C1 in this case. */
4824 arg1 = fold_convert_loc (loc, type, arg01);
4825 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4827 case LT_EXPR:
4828 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4829 MIN_EXPR, to preserve the signedness of the comparison. */
4830 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4831 OEP_ONLY_CONST)
4832 && operand_equal_p (arg01,
4833 const_binop (PLUS_EXPR, arg2,
4834 build_int_cst (type, 1)),
4835 OEP_ONLY_CONST))
4837 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4838 fold_convert_loc (loc, TREE_TYPE (arg00),
4839 arg2));
4840 return pedantic_non_lvalue_loc (loc,
4841 fold_convert_loc (loc, type, tem));
4843 break;
4845 case LE_EXPR:
4846 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4847 as above. */
4848 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4849 OEP_ONLY_CONST)
4850 && operand_equal_p (arg01,
4851 const_binop (MINUS_EXPR, arg2,
4852 build_int_cst (type, 1)),
4853 OEP_ONLY_CONST))
4855 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4856 fold_convert_loc (loc, TREE_TYPE (arg00),
4857 arg2));
4858 return pedantic_non_lvalue_loc (loc,
4859 fold_convert_loc (loc, type, tem));
4861 break;
4863 case GT_EXPR:
4864 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4865 MAX_EXPR, to preserve the signedness of the comparison. */
4866 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4867 OEP_ONLY_CONST)
4868 && operand_equal_p (arg01,
4869 const_binop (MINUS_EXPR, arg2,
4870 build_int_cst (type, 1)),
4871 OEP_ONLY_CONST))
4873 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4874 fold_convert_loc (loc, TREE_TYPE (arg00),
4875 arg2));
4876 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4878 break;
4880 case GE_EXPR:
4881 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4882 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4883 OEP_ONLY_CONST)
4884 && operand_equal_p (arg01,
4885 const_binop (PLUS_EXPR, arg2,
4886 build_int_cst (type, 1)),
4887 OEP_ONLY_CONST))
4889 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4890 fold_convert_loc (loc, TREE_TYPE (arg00),
4891 arg2));
4892 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4894 break;
4895 case NE_EXPR:
4896 break;
4897 default:
4898 gcc_unreachable ();
4901 return NULL_TREE;
4906 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4907 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4908 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4909 false) >= 2)
4910 #endif
4912 /* EXP is some logical combination of boolean tests. See if we can
4913 merge it into some range test. Return the new tree if so. */
4915 static tree
4916 fold_range_test (location_t loc, enum tree_code code, tree type,
4917 tree op0, tree op1)
4919 int or_op = (code == TRUTH_ORIF_EXPR
4920 || code == TRUTH_OR_EXPR);
4921 int in0_p, in1_p, in_p;
4922 tree low0, low1, low, high0, high1, high;
4923 bool strict_overflow_p = false;
4924 tree tem, lhs, rhs;
4925 const char * const warnmsg = G_("assuming signed overflow does not occur "
4926 "when simplifying range test");
4928 if (!INTEGRAL_TYPE_P (type))
4929 return 0;
4931 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4932 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4934 /* If this is an OR operation, invert both sides; we will invert
4935 again at the end. */
4936 if (or_op)
4937 in0_p = ! in0_p, in1_p = ! in1_p;
4939 /* If both expressions are the same, if we can merge the ranges, and we
4940 can build the range test, return it or it inverted. If one of the
4941 ranges is always true or always false, consider it to be the same
4942 expression as the other. */
4943 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4944 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4945 in1_p, low1, high1)
4946 && 0 != (tem = (build_range_check (loc, type,
4947 lhs != 0 ? lhs
4948 : rhs != 0 ? rhs : integer_zero_node,
4949 in_p, low, high))))
4951 if (strict_overflow_p)
4952 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4953 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4956 /* On machines where branches are expensive, if this is a
4957 short-circuited branch and the underlying object on both sides
4958 is the same, make a non-short-circuit operation. */
4959 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4960 && lhs != 0 && rhs != 0
4961 && (code == TRUTH_ANDIF_EXPR
4962 || code == TRUTH_ORIF_EXPR)
4963 && operand_equal_p (lhs, rhs, 0))
4965 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4966 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4967 which cases we can't do this. */
4968 if (simple_operand_p (lhs))
4969 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4970 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4971 type, op0, op1);
4973 else if (!lang_hooks.decls.global_bindings_p ()
4974 && !CONTAINS_PLACEHOLDER_P (lhs))
4976 tree common = save_expr (lhs);
4978 if (0 != (lhs = build_range_check (loc, type, common,
4979 or_op ? ! in0_p : in0_p,
4980 low0, high0))
4981 && (0 != (rhs = build_range_check (loc, type, common,
4982 or_op ? ! in1_p : in1_p,
4983 low1, high1))))
4985 if (strict_overflow_p)
4986 fold_overflow_warning (warnmsg,
4987 WARN_STRICT_OVERFLOW_COMPARISON);
4988 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4989 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4990 type, lhs, rhs);
4995 return 0;
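/* A typical win is the digit test mentioned below: the pair
   ch >= '0' && ch <= '9' merges into a single range check that is
   equivalent to (unsigned) (ch - '0') <= 9.  */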
4998 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4999 bit value. Arrange things so the extra bits will be set to zero if and
5000 only if C is sign-extended to its full width. If MASK is nonzero,
5001 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5003 static tree
5004 unextend (tree c, int p, int unsignedp, tree mask)
5006 tree type = TREE_TYPE (c);
5007 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5008 tree temp;
5010 if (p == modesize || unsignedp)
5011 return c;
5013 /* We work by getting just the sign bit into the low-order bit, then
5014 into the high-order bit, then sign-extend. We then XOR that value
5015 with C. */
5016 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5018 /* We must use a signed type in order to get an arithmetic right shift.
5019 However, we must also avoid introducing accidental overflows, so that
5020 a subsequent call to integer_zerop will work. Hence we must
5021 do the type conversion here. At this point, the constant is either
5022 zero or one, and the conversion to a signed type can never overflow.
5023 We could get an overflow if this conversion is done anywhere else. */
5024 if (TYPE_UNSIGNED (type))
5025 temp = fold_convert (signed_type_for (type), temp);
5027 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5028 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5029 if (mask != 0)
5030 temp = const_binop (BIT_AND_EXPR, temp,
5031 fold_convert (TREE_TYPE (c), mask));
5032 /* If necessary, convert the type back to match the type of C. */
5033 if (TYPE_UNSIGNED (type))
5034 temp = fold_convert (type, temp);
5036 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
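/* For example, with P == 8 in a 32-bit type and UNSIGNEDP == 0,
   unextend maps 0x000000ff to 0xffffffff and leaves 0x0000007f
   unchanged: the low P bits are sign-extended to the full width.  */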
5039 /* For an expression that has the form
5040 (A && B) || ~B
5041 or
5042 (A || B) && ~B,
5043 we can drop one of the inner expressions and simplify to
5044 A || ~B
5045 or
5046 A && ~B
5047 LOC is the location of the resulting expression. OP is the inner
5048 logical operation; the left-hand side in the examples above, while CMPOP
5049 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5050 removing a condition that guards another, as in
5051 (A != NULL && A->...) || A == NULL
5052 which we must not transform. If RHS_ONLY is true, only eliminate the
5053 right-most operand of the inner logical operation. */
5055 static tree
5056 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5057 bool rhs_only)
5059 tree type = TREE_TYPE (cmpop);
5060 enum tree_code code = TREE_CODE (cmpop);
5061 enum tree_code truthop_code = TREE_CODE (op);
5062 tree lhs = TREE_OPERAND (op, 0);
5063 tree rhs = TREE_OPERAND (op, 1);
5064 tree orig_lhs = lhs, orig_rhs = rhs;
5065 enum tree_code rhs_code = TREE_CODE (rhs);
5066 enum tree_code lhs_code = TREE_CODE (lhs);
5067 enum tree_code inv_code;
5069 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5070 return NULL_TREE;
5072 if (TREE_CODE_CLASS (code) != tcc_comparison)
5073 return NULL_TREE;
5075 if (rhs_code == truthop_code)
5077 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5078 if (newrhs != NULL_TREE)
5080 rhs = newrhs;
5081 rhs_code = TREE_CODE (rhs);
5084 if (lhs_code == truthop_code && !rhs_only)
5086 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5087 if (newlhs != NULL_TREE)
5089 lhs = newlhs;
5090 lhs_code = TREE_CODE (lhs);
5094 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5095 if (inv_code == rhs_code
5096 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5097 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5098 return lhs;
5099 if (!rhs_only && inv_code == lhs_code
5100 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5101 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5102 return rhs;
5103 if (rhs != orig_rhs || lhs != orig_lhs)
5104 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5105 lhs, rhs);
5106 return NULL_TREE;
5109 /* Find ways of folding logical expressions of LHS and RHS:
5110 Try to merge two comparisons to the same innermost item.
5111 Look for range tests like "ch >= '0' && ch <= '9'".
5112 Look for combinations of simple terms on machines with expensive branches
5113 and evaluate the RHS unconditionally.
5115 For example, if we have p->a == 2 && p->b == 4 and we can make an
5116 object large enough to span both A and B, we can do this with a comparison
5117 against the object ANDed with a mask.
5119 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5120 operations to do this with one comparison.
5122 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5123 function and the one above.
5125 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5126 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5128 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5129 two operands.
5131 We return the simplified tree or 0 if no optimization is possible. */
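/* As a rough sketch of the first example above: given
       struct s { unsigned char a, b; } *p;
   the test p->a == 2 && p->b == 4 can, on a little-endian target,
   merge into a single 16-bit load compared against 0x0402; the exact
   masks and shifts built below depend on BYTES_BIG_ENDIAN and on the
   field layout.  */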
5133 static tree
5134 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5135 tree lhs, tree rhs)
5137 /* If this is the "or" of two comparisons, we can do something if
5138 the comparisons are NE_EXPR. If this is the "and", we can do something
5139 if the comparisons are EQ_EXPR. I.e.,
5140 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5142 WANTED_CODE is this operation code. For single bit fields, we can
5143 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5144 comparison for one-bit fields. */
5146 enum tree_code wanted_code;
5147 enum tree_code lcode, rcode;
5148 tree ll_arg, lr_arg, rl_arg, rr_arg;
5149 tree ll_inner, lr_inner, rl_inner, rr_inner;
5150 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5151 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5152 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5153 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5154 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5155 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5156 enum machine_mode lnmode, rnmode;
5157 tree ll_mask, lr_mask, rl_mask, rr_mask;
5158 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5159 tree l_const, r_const;
5160 tree lntype, rntype, result;
5161 HOST_WIDE_INT first_bit, end_bit;
5162 int volatilep;
5164 /* Start by getting the comparison codes. Fail if anything is volatile.
5165 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5166 it were surrounded with a NE_EXPR. */
5168 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5169 return 0;
5171 lcode = TREE_CODE (lhs);
5172 rcode = TREE_CODE (rhs);
5174 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5176 lhs = build2 (NE_EXPR, truth_type, lhs,
5177 build_int_cst (TREE_TYPE (lhs), 0));
5178 lcode = NE_EXPR;
5181 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5183 rhs = build2 (NE_EXPR, truth_type, rhs,
5184 build_int_cst (TREE_TYPE (rhs), 0));
5185 rcode = NE_EXPR;
5188 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5189 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5190 return 0;
5192 ll_arg = TREE_OPERAND (lhs, 0);
5193 lr_arg = TREE_OPERAND (lhs, 1);
5194 rl_arg = TREE_OPERAND (rhs, 0);
5195 rr_arg = TREE_OPERAND (rhs, 1);
5197 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5198 if (simple_operand_p (ll_arg)
5199 && simple_operand_p (lr_arg))
5201 if (operand_equal_p (ll_arg, rl_arg, 0)
5202 && operand_equal_p (lr_arg, rr_arg, 0))
5204 result = combine_comparisons (loc, code, lcode, rcode,
5205 truth_type, ll_arg, lr_arg);
5206 if (result)
5207 return result;
5209 else if (operand_equal_p (ll_arg, rr_arg, 0)
5210 && operand_equal_p (lr_arg, rl_arg, 0))
5212 result = combine_comparisons (loc, code, lcode,
5213 swap_tree_comparison (rcode),
5214 truth_type, ll_arg, lr_arg);
5215 if (result)
5216 return result;
5220 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5221 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5223 /* If the RHS can be evaluated unconditionally and its operands are
5224 simple, it wins to evaluate the RHS unconditionally on machines
5225 with expensive branches. In this case, this isn't a comparison
5226 that can be merged. */
5228 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5229 false) >= 2
5230 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5231 && simple_operand_p (rl_arg)
5232 && simple_operand_p (rr_arg))
5234 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5235 if (code == TRUTH_OR_EXPR
5236 && lcode == NE_EXPR && integer_zerop (lr_arg)
5237 && rcode == NE_EXPR && integer_zerop (rr_arg)
5238 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5239 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5240 return build2_loc (loc, NE_EXPR, truth_type,
5241 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5242 ll_arg, rl_arg),
5243 build_int_cst (TREE_TYPE (ll_arg), 0));
5245 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5246 if (code == TRUTH_AND_EXPR
5247 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5248 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5249 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5250 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5251 return build2_loc (loc, EQ_EXPR, truth_type,
5252 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5253 ll_arg, rl_arg),
5254 build_int_cst (TREE_TYPE (ll_arg), 0));
5257 /* See if the comparisons can be merged. Then get all the parameters for
5258 each side. */
5260 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5261 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5262 return 0;
5264 volatilep = 0;
5265 ll_inner = decode_field_reference (loc, ll_arg,
5266 &ll_bitsize, &ll_bitpos, &ll_mode,
5267 &ll_unsignedp, &volatilep, &ll_mask,
5268 &ll_and_mask);
5269 lr_inner = decode_field_reference (loc, lr_arg,
5270 &lr_bitsize, &lr_bitpos, &lr_mode,
5271 &lr_unsignedp, &volatilep, &lr_mask,
5272 &lr_and_mask);
5273 rl_inner = decode_field_reference (loc, rl_arg,
5274 &rl_bitsize, &rl_bitpos, &rl_mode,
5275 &rl_unsignedp, &volatilep, &rl_mask,
5276 &rl_and_mask);
5277 rr_inner = decode_field_reference (loc, rr_arg,
5278 &rr_bitsize, &rr_bitpos, &rr_mode,
5279 &rr_unsignedp, &volatilep, &rr_mask,
5280 &rr_and_mask);
5282 /* The inner operation on the lhs of each
5283 comparison must be the same if we are to be able to do anything.
5284 Then see if we have constants. If not, the same must be true for
5285 the rhs's. */
5286 if (volatilep || ll_inner == 0 || rl_inner == 0
5287 || ! operand_equal_p (ll_inner, rl_inner, 0))
5288 return 0;
5290 if (TREE_CODE (lr_arg) == INTEGER_CST
5291 && TREE_CODE (rr_arg) == INTEGER_CST)
5292 l_const = lr_arg, r_const = rr_arg;
5293 else if (lr_inner == 0 || rr_inner == 0
5294 || ! operand_equal_p (lr_inner, rr_inner, 0))
5295 return 0;
5296 else
5297 l_const = r_const = 0;
5299 /* If either comparison code is not correct for our logical operation,
5300 fail. However, we can convert a one-bit comparison against zero into
5301 the opposite comparison against that bit being set in the field. */
5303 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5304 if (lcode != wanted_code)
5306 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5308 /* Make the left operand unsigned, since we are only interested
5309 in the value of one bit. Otherwise we are doing the wrong
5310 thing below. */
5311 ll_unsignedp = 1;
5312 l_const = ll_mask;
5314 else
5315 return 0;
5318 /* This is analogous to the code for l_const above. */
5319 if (rcode != wanted_code)
5321 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5323 rl_unsignedp = 1;
5324 r_const = rl_mask;
5326 else
5327 return 0;
5330 /* See if we can find a mode that contains both fields being compared on
5331 the left. If we can't, fail. Otherwise, update all constants and masks
5332 to be relative to a field of that size. */
5333 first_bit = MIN (ll_bitpos, rl_bitpos);
5334 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5335 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5336 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5337 volatilep);
5338 if (lnmode == VOIDmode)
5339 return 0;
5341 lnbitsize = GET_MODE_BITSIZE (lnmode);
5342 lnbitpos = first_bit & ~ (lnbitsize - 1);
5343 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5344 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5346 if (BYTES_BIG_ENDIAN)
5348 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5349 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5352 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5353 size_int (xll_bitpos));
5354 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5355 size_int (xrl_bitpos));
5357 if (l_const)
5359 l_const = fold_convert_loc (loc, lntype, l_const);
5360 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5361 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5362 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5363 fold_build1_loc (loc, BIT_NOT_EXPR,
5364 lntype, ll_mask))))
5366 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5368 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5371 if (r_const)
5373 r_const = fold_convert_loc (loc, lntype, r_const);
5374 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5375 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5376 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5377 fold_build1_loc (loc, BIT_NOT_EXPR,
5378 lntype, rl_mask))))
5380 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5382 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5386 /* If the right sides are not constant, do the same for them. Also,
5387 disallow this optimization if a size or signedness mismatch occurs
5388 between the left and right sides. */
5389 if (l_const == 0)
5391 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5392 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5393 /* Make sure the two fields on the right
5394 correspond to the left without being swapped. */
5395 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5396 return 0;
5398 first_bit = MIN (lr_bitpos, rr_bitpos);
5399 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5400 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5401 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5402 volatilep);
5403 if (rnmode == VOIDmode)
5404 return 0;
5406 rnbitsize = GET_MODE_BITSIZE (rnmode);
5407 rnbitpos = first_bit & ~ (rnbitsize - 1);
5408 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5409 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5411 if (BYTES_BIG_ENDIAN)
5413 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5414 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5417 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5418 rntype, lr_mask),
5419 size_int (xlr_bitpos));
5420 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5421 rntype, rr_mask),
5422 size_int (xrr_bitpos));
5424 /* Make a mask that corresponds to both fields being compared.
5425 Do this for both items being compared. If the operands are the
5426 same size and the bits being compared are in the same position
5427 then we can do this by masking both and comparing the masked
5428 results. */
5429 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5430 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5431 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5433 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5434 ll_unsignedp || rl_unsignedp);
5435 if (! all_ones_mask_p (ll_mask, lnbitsize))
5436 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5438 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5439 lr_unsignedp || rr_unsignedp);
5440 if (! all_ones_mask_p (lr_mask, rnbitsize))
5441 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5443 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5446 /* There is still another way we can do something: If both pairs of
5447 fields being compared are adjacent, we may be able to make a wider
5448 field containing them both.
5450 Note that we still must mask the lhs/rhs expressions. Furthermore,
5451 the mask must be shifted to account for the shift done by
5452 make_bit_field_ref. */
5453 if ((ll_bitsize + ll_bitpos == rl_bitpos
5454 && lr_bitsize + lr_bitpos == rr_bitpos)
5455 || (ll_bitpos == rl_bitpos + rl_bitsize
5456 && lr_bitpos == rr_bitpos + rr_bitsize))
5458 tree type;
5460 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5461 ll_bitsize + rl_bitsize,
5462 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5463 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5464 lr_bitsize + rr_bitsize,
5465 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5467 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5468 size_int (MIN (xll_bitpos, xrl_bitpos)));
5469 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5470 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5472 /* Convert to the smaller type before masking out unwanted bits. */
5473 type = lntype;
5474 if (lntype != rntype)
5476 if (lnbitsize > rnbitsize)
5478 lhs = fold_convert_loc (loc, rntype, lhs);
5479 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5480 type = rntype;
5482 else if (lnbitsize < rnbitsize)
5484 rhs = fold_convert_loc (loc, lntype, rhs);
5485 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5486 type = lntype;
5490 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5491 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5493 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5494 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5496 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5499 return 0;
5502 /* Handle the case of comparisons with constants. If there is something in
5503 common between the masks, those bits of the constants must be the same.
5504 If not, the condition is always false. Test for this to avoid generating
5505 incorrect code below. */
5506 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5507 if (! integer_zerop (result)
5508 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5509 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5511 if (wanted_code == NE_EXPR)
5513 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5514 return constant_boolean_node (true, truth_type);
5516 else
5518 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5519 return constant_boolean_node (false, truth_type);
5523 /* Construct the expression we will return. First get the component
5524 reference we will make. Unless the mask is all ones the width of
5525 that field, perform the mask operation. Then compare with the
5526 merged constant. */
5527 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5528 ll_unsignedp || rl_unsignedp);
5530 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5531 if (! all_ones_mask_p (ll_mask, lnbitsize))
5532 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5534 return build2_loc (loc, wanted_code, truth_type, result,
5535 const_binop (BIT_IOR_EXPR, l_const, r_const));
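/* For illustration, assuming a little-endian target and a struct with
   two adjacent 4-bit fields (unsigned f1 : 4; unsigned f2 : 4) packed
   into one byte, a test such as

       s.f1 == 3 && s.f2 == 5

   merges into a single load and compare against the merged constant,
   conceptually

       byte == 0x53

   the exact mask, shifts and constant depending on bit position and
   endianness as computed above.  */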
5538 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5539 constant. */
5541 static tree
5542 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5543 tree op0, tree op1)
5545 tree arg0 = op0;
5546 enum tree_code op_code;
5547 tree comp_const;
5548 tree minmax_const;
5549 int consts_equal, consts_lt;
5550 tree inner;
5552 STRIP_SIGN_NOPS (arg0);
5554 op_code = TREE_CODE (arg0);
5555 minmax_const = TREE_OPERAND (arg0, 1);
5556 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5557 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5558 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5559 inner = TREE_OPERAND (arg0, 0);
5561 /* If something does not permit us to optimize, return NULL_TREE. */
5562 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5563 || TREE_CODE (comp_const) != INTEGER_CST
5564 || TREE_OVERFLOW (comp_const)
5565 || TREE_CODE (minmax_const) != INTEGER_CST
5566 || TREE_OVERFLOW (minmax_const))
5567 return NULL_TREE;
5569 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5570 and GT_EXPR, doing the rest with recursive calls using logical
5571 simplifications. */
5572 switch (code)
5574 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5576 tree tem
5577 = optimize_minmax_comparison (loc,
5578 invert_tree_comparison (code, false),
5579 type, op0, op1);
5580 if (tem)
5581 return invert_truthvalue_loc (loc, tem);
5582 return NULL_TREE;
5585 case GE_EXPR:
5586 return
5587 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5588 optimize_minmax_comparison
5589 (loc, EQ_EXPR, type, arg0, comp_const),
5590 optimize_minmax_comparison
5591 (loc, GT_EXPR, type, arg0, comp_const));
5593 case EQ_EXPR:
5594 if (op_code == MAX_EXPR && consts_equal)
5595 /* MAX (X, 0) == 0 -> X <= 0 */
5596 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5598 else if (op_code == MAX_EXPR && consts_lt)
5599 /* MAX (X, 0) == 5 -> X == 5 */
5600 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5602 else if (op_code == MAX_EXPR)
5603 /* MAX (X, 0) == -1 -> false */
5604 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5606 else if (consts_equal)
5607 /* MIN (X, 0) == 0 -> X >= 0 */
5608 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5610 else if (consts_lt)
5611 /* MIN (X, 0) == 5 -> false */
5612 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5614 else
5615 /* MIN (X, 0) == -1 -> X == -1 */
5616 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5618 case GT_EXPR:
5619 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5620 /* MAX (X, 0) > 0 -> X > 0
5621 MAX (X, 0) > 5 -> X > 5 */
5622 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5624 else if (op_code == MAX_EXPR)
5625 /* MAX (X, 0) > -1 -> true */
5626 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5628 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5629 /* MIN (X, 0) > 0 -> false
5630 MIN (X, 0) > 5 -> false */
5631 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5633 else
5634 /* MIN (X, 0) > -1 -> X > -1 */
5635 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5637 default:
5638 return NULL_TREE;
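/* For illustration, with signed X the cases above give, e.g.:

       MAX (X, 4) == 4  ->  X <= 4
       MAX (X, 4) == 7  ->  X == 7
       MAX (X, 4) == 2  ->  false
       MIN (X, 4) >  4  ->  false
       MIN (X, 4) >  2  ->  X > 2

   NE, LT and LE are derived by inverting the result of the inverse
   comparison, and GE as the disjunction of the EQ and GT results.  */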
5642 /* T is an integer expression that is being multiplied, divided, or taken a
5643 modulus (CODE says which and what kind of divide or modulus) by a
5644 constant C. See if we can eliminate that operation by folding it with
5645 other operations already in T. WIDE_TYPE, if non-null, is a type that
5646 should be used for the computation if wider than our type.
5648 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5649 (X * 2) + (Y * 4). We must, however, be assured that either the original
5650 expression would not overflow or that overflow is undefined for the type
5651 in the language in question.
5653 If we return a non-null expression, it is an equivalent form of the
5654 original computation, but need not be in the original type.
5656 We set *STRICT_OVERFLOW_P to true if the return value depends on
5657 signed overflow being undefined. Otherwise we do not change
5658 *STRICT_OVERFLOW_P. */
5660 static tree
5661 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5662 bool *strict_overflow_p)
5664 /* To avoid exponential search depth, refuse to allow recursion past
5665 three levels. Beyond that (1) it's highly unlikely that we'll find
5666 something interesting and (2) we've probably processed it before
5667 when we built the inner expression. */
5669 static int depth;
5670 tree ret;
5672 if (depth > 3)
5673 return NULL;
5675 depth++;
5676 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5677 depth--;
5679 return ret;
5682 static tree
5683 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5684 bool *strict_overflow_p)
5686 tree type = TREE_TYPE (t);
5687 enum tree_code tcode = TREE_CODE (t);
5688 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5689 > GET_MODE_SIZE (TYPE_MODE (type)))
5690 ? wide_type : type);
5691 tree t1, t2;
5692 int same_p = tcode == code;
5693 tree op0 = NULL_TREE, op1 = NULL_TREE;
5694 bool sub_strict_overflow_p;
5696 /* Don't deal with constants of zero here; they confuse the code below. */
5697 if (integer_zerop (c))
5698 return NULL_TREE;
5700 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5701 op0 = TREE_OPERAND (t, 0);
5703 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5704 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5706 /* Note that we need not handle conditional operations here since fold
5707 already handles those cases. So just do arithmetic here. */
5708 switch (tcode)
5710 case INTEGER_CST:
5711 /* For a constant, we can always simplify if we are a multiply
5712 or (for divide and modulus) if it is a multiple of our constant. */
5713 if (code == MULT_EXPR
5714 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5715 return const_binop (code, fold_convert (ctype, t),
5716 fold_convert (ctype, c));
5717 break;
5719 CASE_CONVERT: case NON_LVALUE_EXPR:
5720 /* If op0 is an expression ... */
5721 if ((COMPARISON_CLASS_P (op0)
5722 || UNARY_CLASS_P (op0)
5723 || BINARY_CLASS_P (op0)
5724 || VL_EXP_CLASS_P (op0)
5725 || EXPRESSION_CLASS_P (op0))
5726 /* ... and has wrapping overflow, and its type is smaller
5727 than ctype, then we cannot pass through as widening. */
5728 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5729 && (TYPE_PRECISION (ctype)
5730 > TYPE_PRECISION (TREE_TYPE (op0))))
5731 /* ... or this is a truncation (t is narrower than op0),
5732 then we cannot pass through this narrowing. */
5733 || (TYPE_PRECISION (type)
5734 < TYPE_PRECISION (TREE_TYPE (op0)))
5735 /* ... or signedness changes for division or modulus,
5736 then we cannot pass through this conversion. */
5737 || (code != MULT_EXPR
5738 && (TYPE_UNSIGNED (ctype)
5739 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5740 /* ... or has undefined overflow while the converted to
5741 type has not, we cannot do the operation in the inner type
5742 as that would introduce undefined overflow. */
5743 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5744 && !TYPE_OVERFLOW_UNDEFINED (type))))
5745 break;
5747 /* Pass the constant down and see if we can make a simplification. If
5748 we can, replace this expression with the inner simplification for
5749 possible later conversion to our type or some other type. */
5750 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5751 && TREE_CODE (t2) == INTEGER_CST
5752 && !TREE_OVERFLOW (t2)
5753 && (0 != (t1 = extract_muldiv (op0, t2, code,
5754 code == MULT_EXPR
5755 ? ctype : NULL_TREE,
5756 strict_overflow_p))))
5757 return t1;
5758 break;
5760 case ABS_EXPR:
5761 /* If widening the type changes it from signed to unsigned, then we
5762 must avoid building ABS_EXPR itself as unsigned. */
5763 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5765 tree cstype = (*signed_type_for) (ctype);
5766 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5767 != 0)
5769 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5770 return fold_convert (ctype, t1);
5772 break;
5774 /* If the constant is negative, we cannot simplify this. */
5775 if (tree_int_cst_sgn (c) == -1)
5776 break;
5777 /* FALLTHROUGH */
5778 case NEGATE_EXPR:
5779 /* For division and modulus, type can't be unsigned, as e.g.
5780 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5781 For signed types, even with wrapping overflow, this is fine. */
5782 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5783 break;
5784 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5785 != 0)
5786 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5787 break;
5789 case MIN_EXPR: case MAX_EXPR:
5790 /* If widening the type changes the signedness, then we can't perform
5791 this optimization as that changes the result. */
5792 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5793 break;
5795 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5796 sub_strict_overflow_p = false;
5797 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5798 &sub_strict_overflow_p)) != 0
5799 && (t2 = extract_muldiv (op1, c, code, wide_type,
5800 &sub_strict_overflow_p)) != 0)
5802 if (tree_int_cst_sgn (c) < 0)
5803 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5804 if (sub_strict_overflow_p)
5805 *strict_overflow_p = true;
5806 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5807 fold_convert (ctype, t2));
5809 break;
5811 case LSHIFT_EXPR: case RSHIFT_EXPR:
5812 /* If the second operand is constant, this is a multiplication
5813 or floor division, by a power of two, so we can treat it that
5814 way unless the multiplier or divisor overflows. Signed
5815 left-shift overflow is implementation-defined rather than
5816 undefined in C90, so do not convert signed left shift into
5817 multiplication. */
5818 if (TREE_CODE (op1) == INTEGER_CST
5819 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5820 /* const_binop may not detect overflow correctly,
5821 so check for it explicitly here. */
5822 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5823 && 0 != (t1 = fold_convert (ctype,
5824 const_binop (LSHIFT_EXPR,
5825 size_one_node,
5826 op1)))
5827 && !TREE_OVERFLOW (t1))
5828 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5829 ? MULT_EXPR : FLOOR_DIV_EXPR,
5830 ctype,
5831 fold_convert (ctype, op0),
5832 t1),
5833 c, code, wide_type, strict_overflow_p);
5834 break;
5836 case PLUS_EXPR: case MINUS_EXPR:
5837 /* See if we can eliminate the operation on both sides. If we can, we
5838 can return a new PLUS or MINUS. If we can't, the only remaining
5839 cases where we can do anything are if the second operand is a
5840 constant. */
5841 sub_strict_overflow_p = false;
5842 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5843 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5844 if (t1 != 0 && t2 != 0
5845 && (code == MULT_EXPR
5846 /* If not multiplication, we can only do this if both operands
5847 are divisible by c. */
5848 || (multiple_of_p (ctype, op0, c)
5849 && multiple_of_p (ctype, op1, c))))
5851 if (sub_strict_overflow_p)
5852 *strict_overflow_p = true;
5853 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5854 fold_convert (ctype, t2));
5857 /* If this was a subtraction, negate OP1 and set it to be an addition.
5858 This simplifies the logic below. */
5859 if (tcode == MINUS_EXPR)
5861 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5862 /* If OP1 was not easily negatable, the constant may be OP0. */
5863 if (TREE_CODE (op0) == INTEGER_CST)
5865 tree tem = op0;
5866 op0 = op1;
5867 op1 = tem;
5868 tem = t1;
5869 t1 = t2;
5870 t2 = tem;
5874 if (TREE_CODE (op1) != INTEGER_CST)
5875 break;
5877 /* If either OP1 or C are negative, this optimization is not safe for
5878 some of the division and remainder types while for others we need
5879 to change the code. */
5880 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5882 if (code == CEIL_DIV_EXPR)
5883 code = FLOOR_DIV_EXPR;
5884 else if (code == FLOOR_DIV_EXPR)
5885 code = CEIL_DIV_EXPR;
5886 else if (code != MULT_EXPR
5887 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5888 break;
5891 /* If it's a multiply or a division/modulus operation of a multiple
5892 of our constant, do the operation and verify it doesn't overflow. */
5893 if (code == MULT_EXPR
5894 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5896 op1 = const_binop (code, fold_convert (ctype, op1),
5897 fold_convert (ctype, c));
5898 /* We allow the constant to overflow with wrapping semantics. */
5899 if (op1 == 0
5900 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5901 break;
5903 else
5904 break;
5906 /* If we have an unsigned type, we cannot widen the operation since it
5907 will change the result if the original computation overflowed. */
5908 if (TYPE_UNSIGNED (ctype) && ctype != type)
5909 break;
5911 /* If we were able to eliminate our operation from the first side,
5912 apply our operation to the second side and reform the PLUS. */
5913 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5914 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5916 /* The last case is if we are a multiply. In that case, we can
5917 apply the distributive law to commute the multiply and addition
5918 if the multiplication of the constants doesn't overflow
5919 and overflow is defined. With undefined overflow
5920 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5921 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5922 return fold_build2 (tcode, ctype,
5923 fold_build2 (code, ctype,
5924 fold_convert (ctype, op0),
5925 fold_convert (ctype, c)),
5926 op1);
5928 break;
5930 case MULT_EXPR:
5931 /* We have a special case here if we are doing something like
5932 (C * 8) % 4 since we know that's zero. */
5933 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5934 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5935 /* If the multiplication can overflow we cannot optimize this. */
5936 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5937 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5938 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5940 *strict_overflow_p = true;
5941 return omit_one_operand (type, integer_zero_node, op0);
5944 /* ... fall through ... */
5946 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5947 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5948 /* If we can extract our operation from the LHS, do so and return a
5949 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5950 do something only if the second operand is a constant. */
5951 if (same_p
5952 && (t1 = extract_muldiv (op0, c, code, wide_type,
5953 strict_overflow_p)) != 0)
5954 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5955 fold_convert (ctype, op1));
5956 else if (tcode == MULT_EXPR && code == MULT_EXPR
5957 && (t1 = extract_muldiv (op1, c, code, wide_type,
5958 strict_overflow_p)) != 0)
5959 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5960 fold_convert (ctype, t1));
5961 else if (TREE_CODE (op1) != INTEGER_CST)
5962 return 0;
5964 /* If these are the same operation types, we can associate them
5965 assuming no overflow. */
5966 if (tcode == code)
5968 bool overflow_p = false;
5969 bool overflow_mul_p;
5970 signop sign = TYPE_SIGN (ctype);
5971 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5972 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5973 if (overflow_mul_p
5974 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5975 overflow_p = true;
5976 if (!overflow_p)
5977 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5978 wide_int_to_tree (ctype, mul));
5981 /* If these operations "cancel" each other, we have the main
5982 optimizations of this pass, which occur when either constant is a
5983 multiple of the other, in which case we replace this with either an
5984 operation of CODE or TCODE.
5986 If we have an unsigned type, we cannot do this since it will change
5987 the result if the original computation overflowed. */
5988 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5989 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5990 || (tcode == MULT_EXPR
5991 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5992 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5993 && code != MULT_EXPR)))
5995 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5997 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5998 *strict_overflow_p = true;
5999 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6000 fold_convert (ctype,
6001 const_binop (TRUNC_DIV_EXPR,
6002 op1, c)));
6004 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6006 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6007 *strict_overflow_p = true;
6008 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6009 fold_convert (ctype,
6010 const_binop (TRUNC_DIV_EXPR,
6011 c, op1)));
6014 break;
6016 default:
6017 break;
6020 return 0;
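/* For illustration, assuming signed operands whose overflow is
   undefined, extract_muldiv can rewrite e.g.:

       (X * 8) / 4               ->  X * 2
       MIN (A * 4, B * 8) / 2    ->  MIN (A * 2, B * 4)
       MIN (A * 4, B * 8) * -1   ->  MAX (A * -4, B * -8)

   the last line showing why a negative C swaps MIN and MAX above;
   *STRICT_OVERFLOW_P records that such folds rely on signed overflow
   being undefined.  */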
6023 /* Return a node which has the indicated constant VALUE (either 0 or
6024 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6025 and is of the indicated TYPE. */
6027 tree
6028 constant_boolean_node (bool value, tree type)
6030 if (type == integer_type_node)
6031 return value ? integer_one_node : integer_zero_node;
6032 else if (type == boolean_type_node)
6033 return value ? boolean_true_node : boolean_false_node;
6034 else if (TREE_CODE (type) == VECTOR_TYPE)
6035 return build_vector_from_val (type,
6036 build_int_cst (TREE_TYPE (type),
6037 value ? -1 : 0));
6038 else
6039 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6043 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6044 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6045 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6046 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6047 COND is the first argument to CODE; otherwise (as in the example
6048 given here), it is the second argument. TYPE is the type of the
6049 original expression. Return NULL_TREE if no simplification is
6050 possible. */
6052 static tree
6053 fold_binary_op_with_conditional_arg (location_t loc,
6054 enum tree_code code,
6055 tree type, tree op0, tree op1,
6056 tree cond, tree arg, int cond_first_p)
6058 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6059 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6060 tree test, true_value, false_value;
6061 tree lhs = NULL_TREE;
6062 tree rhs = NULL_TREE;
6063 enum tree_code cond_code = COND_EXPR;
6065 if (TREE_CODE (cond) == COND_EXPR
6066 || TREE_CODE (cond) == VEC_COND_EXPR)
6068 test = TREE_OPERAND (cond, 0);
6069 true_value = TREE_OPERAND (cond, 1);
6070 false_value = TREE_OPERAND (cond, 2);
6071 /* If this operand is an expression that throws, then it does not
6072 make sense to try to perform a logical or arithmetic operation
6073 involving it. */
6074 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6075 lhs = true_value;
6076 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6077 rhs = false_value;
6079 else
6081 tree testtype = TREE_TYPE (cond);
6082 test = cond;
6083 true_value = constant_boolean_node (true, testtype);
6084 false_value = constant_boolean_node (false, testtype);
6087 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6088 cond_code = VEC_COND_EXPR;
6090 /* This transformation is only worthwhile if we don't have to wrap ARG
6091 in a SAVE_EXPR and the operation can be simplified without recursing
6092 on at least one of the branches once it's pushed inside the COND_EXPR. */
6093 if (!TREE_CONSTANT (arg)
6094 && (TREE_SIDE_EFFECTS (arg)
6095 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6096 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6097 return NULL_TREE;
6099 arg = fold_convert_loc (loc, arg_type, arg);
6100 if (lhs == 0)
6102 true_value = fold_convert_loc (loc, cond_type, true_value);
6103 if (cond_first_p)
6104 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6105 else
6106 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6108 if (rhs == 0)
6110 false_value = fold_convert_loc (loc, cond_type, false_value);
6111 if (cond_first_p)
6112 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6113 else
6114 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6117 /* Check that we have simplified at least one of the branches. */
6118 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6119 return NULL_TREE;
6121 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6125 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6127 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6128 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6129 ADDEND is the same as X.
6131 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6132 and finite. The problematic cases are when X is zero, and its mode
6133 has signed zeros. In the case of rounding towards -infinity,
6134 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6135 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6137 bool
6138 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6140 if (!real_zerop (addend))
6141 return false;
6143 /* Don't allow the fold with -fsignaling-nans. */
6144 if (HONOR_SNANS (TYPE_MODE (type)))
6145 return false;
6147 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6148 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6149 return true;
6151 /* In a vector or complex, we would need to check the sign of all zeros. */
6152 if (TREE_CODE (addend) != REAL_CST)
6153 return false;
6155 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6156 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6157 negate = !negate;
6159 /* The mode has signed zeros, and we have to honor their sign.
6160 In this situation, there is only one case we can return true for.
6161 X - 0 is the same as X unless rounding towards -infinity is
6162 supported. */
6163 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
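/* For illustration, assuming IEEE doubles:

       X + 0.0     folds to X only if signed zeros need not be
                   honored, since -0.0 + 0.0 is +0.0;
       X - 0.0     folds to X even with signed zeros, unless rounding
                   towards -infinity is in effect (there 0.0 - 0.0
                   yields -0.0);
       X - (-0.0)  is handled as X + 0.0 via the sign flip above.  */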
6166 /* Subroutine of fold() that checks comparisons of built-in math
6167 functions against real constants.
6169 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6170 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6171 is the type of the result and ARG0 and ARG1 are the operands of the
6172 comparison. ARG1 must be a TREE_REAL_CST.
6174 The function returns the constant folded tree if a simplification
6175 can be made, and NULL_TREE otherwise. */
6177 static tree
6178 fold_mathfn_compare (location_t loc,
6179 enum built_in_function fcode, enum tree_code code,
6180 tree type, tree arg0, tree arg1)
6182 REAL_VALUE_TYPE c;
6184 if (BUILTIN_SQRT_P (fcode))
6186 tree arg = CALL_EXPR_ARG (arg0, 0);
6187 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6189 c = TREE_REAL_CST (arg1);
6190 if (REAL_VALUE_NEGATIVE (c))
6192 /* sqrt(x) < y is always false, if y is negative. */
6193 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6194 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6196 /* sqrt(x) > y is always true, if y is negative and we
6197 don't care about NaNs, i.e. negative values of x. */
6198 if (code == NE_EXPR || !HONOR_NANS (mode))
6199 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6201 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6202 return fold_build2_loc (loc, GE_EXPR, type, arg,
6203 build_real (TREE_TYPE (arg), dconst0));
6205 else if (code == GT_EXPR || code == GE_EXPR)
6207 REAL_VALUE_TYPE c2;
6209 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6210 real_convert (&c2, mode, &c2);
6212 if (REAL_VALUE_ISINF (c2))
6214 /* sqrt(x) > y is x == +Inf, when y is very large. */
6215 if (HONOR_INFINITIES (mode))
6216 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6217 build_real (TREE_TYPE (arg), c2));
6219 /* sqrt(x) > y is always false, when y is very large
6220 and we don't care about infinities. */
6221 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6224 /* sqrt(x) > c is the same as x > c*c. */
6225 return fold_build2_loc (loc, code, type, arg,
6226 build_real (TREE_TYPE (arg), c2));
6228 else if (code == LT_EXPR || code == LE_EXPR)
6230 REAL_VALUE_TYPE c2;
6232 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6233 real_convert (&c2, mode, &c2);
6235 if (REAL_VALUE_ISINF (c2))
6237 /* sqrt(x) < y is always true, when y is a very large
6238 value and we don't care about NaNs or Infinities. */
6239 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6240 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6242 /* sqrt(x) < y is x != +Inf when y is very large and we
6243 don't care about NaNs. */
6244 if (! HONOR_NANS (mode))
6245 return fold_build2_loc (loc, NE_EXPR, type, arg,
6246 build_real (TREE_TYPE (arg), c2));
6248 /* sqrt(x) < y is x >= 0 when y is very large and we
6249 don't care about Infinities. */
6250 if (! HONOR_INFINITIES (mode))
6251 return fold_build2_loc (loc, GE_EXPR, type, arg,
6252 build_real (TREE_TYPE (arg), dconst0));
6254 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6255 arg = save_expr (arg);
6256 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6257 fold_build2_loc (loc, GE_EXPR, type, arg,
6258 build_real (TREE_TYPE (arg),
6259 dconst0)),
6260 fold_build2_loc (loc, NE_EXPR, type, arg,
6261 build_real (TREE_TYPE (arg),
6262 c2)));
6265 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6266 if (! HONOR_NANS (mode))
6267 return fold_build2_loc (loc, code, type, arg,
6268 build_real (TREE_TYPE (arg), c2));
6270 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6271 arg = save_expr (arg);
6272 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6273 fold_build2_loc (loc, GE_EXPR, type, arg,
6274 build_real (TREE_TYPE (arg),
6275 dconst0)),
6276 fold_build2_loc (loc, code, type, arg,
6277 build_real (TREE_TYPE (arg),
6278 c2)));
6282 return NULL_TREE;
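/* For illustration, with double X and C2 = C * C computed exactly:

       sqrt (X) >  3.0  ->  X > 9.0
       sqrt (X) <  3.0  ->  X < 9.0                ignoring NaNs,
                            X >= 0.0 && X < 9.0    honoring them
       sqrt (X) > -1.0  ->  true                   ignoring NaNs,
                            X >= 0.0               honoring them  */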
6285 /* Subroutine of fold() that optimizes comparisons against Infinities,
6286 either +Inf or -Inf.
6288 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6289 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6290 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6292 The function returns the constant folded tree if a simplification
6293 can be made, and NULL_TREE otherwise. */
6295 static tree
6296 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6297 tree arg0, tree arg1)
6299 enum machine_mode mode;
6300 REAL_VALUE_TYPE max;
6301 tree temp;
6302 bool neg;
6304 mode = TYPE_MODE (TREE_TYPE (arg0));
6306 /* For negative infinity swap the sense of the comparison. */
6307 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6308 if (neg)
6309 code = swap_tree_comparison (code);
6311 switch (code)
6313 case GT_EXPR:
6314 /* x > +Inf is always false, if we ignore sNaNs. */
6315 if (HONOR_SNANS (mode))
6316 return NULL_TREE;
6317 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6319 case LE_EXPR:
6320 /* x <= +Inf is always true, if we don't care about NaNs. */
6321 if (! HONOR_NANS (mode))
6322 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6324 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6325 arg0 = save_expr (arg0);
6326 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6328 case EQ_EXPR:
6329 case GE_EXPR:
6330 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6331 real_maxval (&max, neg, mode);
6332 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6333 arg0, build_real (TREE_TYPE (arg0), max));
6335 case LT_EXPR:
6336 /* x < +Inf is always equal to x <= DBL_MAX. */
6337 real_maxval (&max, neg, mode);
6338 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6339 arg0, build_real (TREE_TYPE (arg0), max));
6341 case NE_EXPR:
6342 /* x != +Inf is always equal to !(x > DBL_MAX). */
6343 real_maxval (&max, neg, mode);
6344 if (! HONOR_NANS (mode))
6345 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6346 arg0, build_real (TREE_TYPE (arg0), max));
6348 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6349 arg0, build_real (TREE_TYPE (arg0), max));
6350 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6352 default:
6353 break;
6356 return NULL_TREE;
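/* For illustration, for double X (DBL_MAX being the largest finite
   value of the mode):

       X <  +Inf  ->  X <= DBL_MAX
       X >= +Inf  ->  X > DBL_MAX
       X <= +Inf  ->  X == X   when NaNs are honored (true iff X is
                               not a NaN), else true
       X >  -Inf  ->  X >= -DBL_MAX   after the swap for negative Inf.  */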
6359 /* Subroutine of fold() that optimizes comparisons of a division by
6360 a nonzero integer constant against an integer constant, i.e.
6361 X/C1 op C2.
6363 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6364 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6365 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6367 The function returns the constant folded tree if a simplification
6368 can be made, and NULL_TREE otherwise. */
6370 static tree
6371 fold_div_compare (location_t loc,
6372 enum tree_code code, tree type, tree arg0, tree arg1)
6374 tree prod, tmp, hi, lo;
6375 tree arg00 = TREE_OPERAND (arg0, 0);
6376 tree arg01 = TREE_OPERAND (arg0, 1);
6377 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6378 bool neg_overflow = false;
6379 bool overflow;
6381 /* We have to do this the hard way to detect unsigned overflow.
6382 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6383 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6384 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6385 neg_overflow = false;
6387 if (sign == UNSIGNED)
6389 tmp = int_const_binop (MINUS_EXPR, arg01,
6390 build_int_cst (TREE_TYPE (arg01), 1));
6391 lo = prod;
6393 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6394 val = wi::add (prod, tmp, sign, &overflow);
6395 hi = force_fit_type (TREE_TYPE (arg00), val,
6396 -1, overflow | TREE_OVERFLOW (prod));
6398 else if (tree_int_cst_sgn (arg01) >= 0)
6400 tmp = int_const_binop (MINUS_EXPR, arg01,
6401 build_int_cst (TREE_TYPE (arg01), 1));
6402 switch (tree_int_cst_sgn (arg1))
6404 case -1:
6405 neg_overflow = true;
6406 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6407 hi = prod;
6408 break;
6410 case 0:
6411 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6412 hi = tmp;
6413 break;
6415 case 1:
6416 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6417 lo = prod;
6418 break;
6420 default:
6421 gcc_unreachable ();
6424 else
6426 /* A negative divisor reverses the relational operators. */
6427 code = swap_tree_comparison (code);
6429 tmp = int_const_binop (PLUS_EXPR, arg01,
6430 build_int_cst (TREE_TYPE (arg01), 1));
6431 switch (tree_int_cst_sgn (arg1))
6433 case -1:
6434 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6435 lo = prod;
6436 break;
6438 case 0:
6439 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6440 lo = tmp;
6441 break;
6443 case 1:
6444 neg_overflow = true;
6445 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6446 hi = prod;
6447 break;
6449 default:
6450 gcc_unreachable ();
6454 switch (code)
6456 case EQ_EXPR:
6457 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6458 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6459 if (TREE_OVERFLOW (hi))
6460 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6461 if (TREE_OVERFLOW (lo))
6462 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6463 return build_range_check (loc, type, arg00, 1, lo, hi);
6465 case NE_EXPR:
6466 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6467 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6468 if (TREE_OVERFLOW (hi))
6469 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6470 if (TREE_OVERFLOW (lo))
6471 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6472 return build_range_check (loc, type, arg00, 0, lo, hi);
6474 case LT_EXPR:
6475 if (TREE_OVERFLOW (lo))
6477 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6478 return omit_one_operand_loc (loc, type, tmp, arg00);
6480 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6482 case LE_EXPR:
6483 if (TREE_OVERFLOW (hi))
6485 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6486 return omit_one_operand_loc (loc, type, tmp, arg00);
6488 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6490 case GT_EXPR:
6491 if (TREE_OVERFLOW (hi))
6493 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6494 return omit_one_operand_loc (loc, type, tmp, arg00);
6496 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6498 case GE_EXPR:
6499 if (TREE_OVERFLOW (lo))
6501 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6502 return omit_one_operand_loc (loc, type, tmp, arg00);
6504 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6506 default:
6507 break;
6510 return NULL_TREE;
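/* For illustration, assuming 32-bit signed int X with truncating
   division:

       X / 4  == 3  ->  12 <= X && X <= 15
       X / 4  >  3  ->  X > 15
       X / -4 == 3  ->  -15 <= X && X <= -12

   each [lo, hi] bucket holding exactly the values whose quotient is
   the compared constant; overflowed bounds degenerate to one-sided
   or constant results as handled above.  */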
6514 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6515 equality/inequality test, then return a simplified form of the test
6516 using a sign test. Otherwise return NULL. TYPE is the desired
6517 result type. */
6519 static tree
6520 fold_single_bit_test_into_sign_test (location_t loc,
6521 enum tree_code code, tree arg0, tree arg1,
6522 tree result_type)
6524 /* If this is testing a single bit, we can optimize the test. */
6525 if ((code == NE_EXPR || code == EQ_EXPR)
6526 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6527 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6529 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6530 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6531 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6533 if (arg00 != NULL_TREE
6534 /* This is only a win if casting to a signed type is cheap,
6535 i.e. when arg00's type is not a partial mode. */
6536 && TYPE_PRECISION (TREE_TYPE (arg00))
6537 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6539 tree stype = signed_type_for (TREE_TYPE (arg00));
6540 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6541 result_type,
6542 fold_convert_loc (loc, stype, arg00),
6543 build_int_cst (stype, 0));
6547 return NULL_TREE;
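/* For illustration, assuming 32-bit unsigned A:

       (A & 0x80000000) != 0  ->  (int) A < 0
       (A & 0x80000000) == 0  ->  (int) A >= 0

   since the masked bit is exactly the sign bit of the corresponding
   signed type.  */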
6550 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6551 equality/inequality test, then return a simplified form of
6552 the test using shifts and logical operations. Otherwise return
6553 NULL. TYPE is the desired result type. */
6555 tree
6556 fold_single_bit_test (location_t loc, enum tree_code code,
6557 tree arg0, tree arg1, tree result_type)
6559 /* If this is testing a single bit, we can optimize the test. */
6560 if ((code == NE_EXPR || code == EQ_EXPR)
6561 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6562 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6564 tree inner = TREE_OPERAND (arg0, 0);
6565 tree type = TREE_TYPE (arg0);
6566 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6567 enum machine_mode operand_mode = TYPE_MODE (type);
6568 int ops_unsigned;
6569 tree signed_type, unsigned_type, intermediate_type;
6570 tree tem, one;
6572 /* First, see if we can fold the single bit test into a sign-bit
6573 test. */
6574 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6575 result_type);
6576 if (tem)
6577 return tem;
6579 /* Otherwise we have (A & C) != 0 where C is a single bit,
6580 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6581 Similarly for (A & C) == 0. */
6583 /* If INNER is a right shift of a constant and it plus BITNUM does
6584 not overflow, adjust BITNUM and INNER. */
6585 if (TREE_CODE (inner) == RSHIFT_EXPR
6586 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6587 && bitnum < TYPE_PRECISION (type)
6588 && wi::ltu_p (TREE_OPERAND (inner, 1),
6589 TYPE_PRECISION (type) - bitnum))
6591 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6592 inner = TREE_OPERAND (inner, 0);
6595 /* If we are going to be able to omit the AND below, we must do our
6596 operations as unsigned. If we must use the AND, we have a choice.
6597 Normally unsigned is faster, but for some machines signed is. */
6598 #ifdef LOAD_EXTEND_OP
6599 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6600 && !flag_syntax_only) ? 0 : 1;
6601 #else
6602 ops_unsigned = 1;
6603 #endif
6605 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6606 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6607 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6608 inner = fold_convert_loc (loc, intermediate_type, inner);
6610 if (bitnum != 0)
6611 inner = build2 (RSHIFT_EXPR, intermediate_type,
6612 inner, size_int (bitnum));
6614 one = build_int_cst (intermediate_type, 1);
6616 if (code == EQ_EXPR)
6617 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6619 /* Put the AND last so it can combine with more things. */
6620 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6622 /* Make sure to return the proper type. */
6623 inner = fold_convert_loc (loc, result_type, inner);
6625 return inner;
6627 return NULL_TREE;
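/* For illustration, assuming 32-bit unsigned A:

       (A & 8) != 0  ->  (A >> 3) & 1
       (A & 8) == 0  ->  ((A >> 3) ^ 1) & 1

   with the AND emitted last so it can combine with surrounding
   operations.  */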
6630 /* Check whether we are allowed to reorder operands arg0 and arg1,
6631 such that the evaluation of arg1 occurs before arg0. */
6633 static bool
6634 reorder_operands_p (const_tree arg0, const_tree arg1)
6636 if (! flag_evaluation_order)
6637 return true;
6638 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6639 return true;
6640 return ! TREE_SIDE_EFFECTS (arg0)
6641 && ! TREE_SIDE_EFFECTS (arg1);
6644 /* Test whether it is preferable to swap two operands, ARG0 and
6645 ARG1, for example because ARG0 is an integer constant and ARG1
6646 isn't. If REORDER is true, only recommend swapping if we can
6647 evaluate the operands in reverse order. */
6649 bool
6650 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6652 if (CONSTANT_CLASS_P (arg1))
6653 return 0;
6654 if (CONSTANT_CLASS_P (arg0))
6655 return 1;
6657 STRIP_SIGN_NOPS (arg0);
6658 STRIP_SIGN_NOPS (arg1);
6660 if (TREE_CONSTANT (arg1))
6661 return 0;
6662 if (TREE_CONSTANT (arg0))
6663 return 1;
6665 if (reorder && flag_evaluation_order
6666 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6667 return 0;
6669 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6670 for commutative and comparison operators. Ensuring a canonical
6671 form allows the optimizers to find additional redundancies without
6672 having to explicitly check for both orderings. */
6673 if (TREE_CODE (arg0) == SSA_NAME
6674 && TREE_CODE (arg1) == SSA_NAME
6675 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6676 return 1;
6678 /* Put SSA_NAMEs last. */
6679 if (TREE_CODE (arg1) == SSA_NAME)
6680 return 0;
6681 if (TREE_CODE (arg0) == SSA_NAME)
6682 return 1;
6684 /* Put variables last. */
6685 if (DECL_P (arg1))
6686 return 0;
6687 if (DECL_P (arg0))
6688 return 1;
6690 return 0;
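/* For illustration: for a commutative tree such as 5 + b,
   tree_swap_operands_p (5, b, false) is true, so callers canonicalize
   to b + 5, keeping constants, SSA_NAMEs and DECLs in a predictable
   operand position for later matching.  */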
6693 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6694 ARG0 is extended to a wider type. */
6696 static tree
6697 fold_widened_comparison (location_t loc, enum tree_code code,
6698 tree type, tree arg0, tree arg1)
6700 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6701 tree arg1_unw;
6702 tree shorter_type, outer_type;
6703 tree min, max;
6704 bool above, below;
6706 if (arg0_unw == arg0)
6707 return NULL_TREE;
6708 shorter_type = TREE_TYPE (arg0_unw);
6710 #ifdef HAVE_canonicalize_funcptr_for_compare
6711 /* Disable this optimization if we're casting a function pointer
6712 type on targets that require function pointer canonicalization. */
6713 if (HAVE_canonicalize_funcptr_for_compare
6714 && TREE_CODE (shorter_type) == POINTER_TYPE
6715 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6716 return NULL_TREE;
6717 #endif
6719 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6720 return NULL_TREE;
6722 arg1_unw = get_unwidened (arg1, NULL_TREE);
6724 /* If possible, express the comparison in the shorter mode. */
6725 if ((code == EQ_EXPR || code == NE_EXPR
6726 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6727 && (TREE_TYPE (arg1_unw) == shorter_type
6728 || ((TYPE_PRECISION (shorter_type)
6729 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6730 && (TYPE_UNSIGNED (shorter_type)
6731 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6732 || (TREE_CODE (arg1_unw) == INTEGER_CST
6733 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6734 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6735 && int_fits_type_p (arg1_unw, shorter_type))))
6736 return fold_build2_loc (loc, code, type, arg0_unw,
6737 fold_convert_loc (loc, shorter_type, arg1_unw));
6739 if (TREE_CODE (arg1_unw) != INTEGER_CST
6740 || TREE_CODE (shorter_type) != INTEGER_TYPE
6741 || !int_fits_type_p (arg1_unw, shorter_type))
6742 return NULL_TREE;
6744 /* If we are comparing with an integer that does not fit into the range
6745 of the shorter type, the result is known. */
6746 outer_type = TREE_TYPE (arg1_unw);
6747 min = lower_bound_in_type (outer_type, shorter_type);
6748 max = upper_bound_in_type (outer_type, shorter_type);
6750 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6751 max, arg1_unw));
6752 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6753 arg1_unw, min));
6755 switch (code)
6757 case EQ_EXPR:
6758 if (above || below)
6759 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6760 break;
6762 case NE_EXPR:
6763 if (above || below)
6764 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6765 break;
6767 case LT_EXPR:
6768 case LE_EXPR:
6769 if (above)
6770 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6771 else if (below)
6772 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6774 case GT_EXPR:
6775 case GE_EXPR:
6776 if (above)
6777 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6778 else if (below)
6779 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6781 default:
6782 break;
6785 return NULL_TREE;
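/* For illustration, with unsigned char C widened to int for the
   comparison:

       (int) C == 200  ->  C == 200   (200 fits in unsigned char)
       (int) C <  300  ->  true       (every C is below 300)
       (int) C == -1   ->  false      (-1 is outside [0, 255])  */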
6788 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6789 ARG0 just the signedness is changed. */
6791 static tree
6792 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6793 tree arg0, tree arg1)
6795 tree arg0_inner;
6796 tree inner_type, outer_type;
6798 if (!CONVERT_EXPR_P (arg0))
6799 return NULL_TREE;
6801 outer_type = TREE_TYPE (arg0);
6802 arg0_inner = TREE_OPERAND (arg0, 0);
6803 inner_type = TREE_TYPE (arg0_inner);
6805 #ifdef HAVE_canonicalize_funcptr_for_compare
6806 /* Disable this optimization if we're casting a function pointer
6807 type on targets that require function pointer canonicalization. */
6808 if (HAVE_canonicalize_funcptr_for_compare
6809 && TREE_CODE (inner_type) == POINTER_TYPE
6810 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6811 return NULL_TREE;
6812 #endif
6814 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6815 return NULL_TREE;
6817 if (TREE_CODE (arg1) != INTEGER_CST
6818 && !(CONVERT_EXPR_P (arg1)
6819 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6820 return NULL_TREE;
6822 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6823 && code != NE_EXPR
6824 && code != EQ_EXPR)
6825 return NULL_TREE;
6827 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6828 return NULL_TREE;
6830 if (TREE_CODE (arg1) == INTEGER_CST)
6831 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6832 TREE_OVERFLOW (arg1));
6833 else
6834 arg1 = fold_convert_loc (loc, inner_type, arg1);
6836 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
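/* For illustration, with int I converted to the same-precision
   unsigned type:

       (unsigned) I == 5  ->  I == 5

   equality is unaffected by the change of signedness, while ordered
   comparisons such as (unsigned) I < 5 are left alone because they
   are not.  */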
6840 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6841 means A >= Y && A != MAX, but in this case we know that
6842 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6844 static tree
6845 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6847 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6849 if (TREE_CODE (bound) == LT_EXPR)
6850 a = TREE_OPERAND (bound, 0);
6851 else if (TREE_CODE (bound) == GT_EXPR)
6852 a = TREE_OPERAND (bound, 1);
6853 else
6854 return NULL_TREE;
6856 typea = TREE_TYPE (a);
6857 if (!INTEGRAL_TYPE_P (typea)
6858 && !POINTER_TYPE_P (typea))
6859 return NULL_TREE;
6861 if (TREE_CODE (ineq) == LT_EXPR)
6863 a1 = TREE_OPERAND (ineq, 1);
6864 y = TREE_OPERAND (ineq, 0);
6866 else if (TREE_CODE (ineq) == GT_EXPR)
6868 a1 = TREE_OPERAND (ineq, 0);
6869 y = TREE_OPERAND (ineq, 1);
6871 else
6872 return NULL_TREE;
6874 if (TREE_TYPE (a1) != typea)
6875 return NULL_TREE;
6877 if (POINTER_TYPE_P (typea))
6879 /* Convert the pointer types into integer before taking the difference. */
6880 tree ta = fold_convert_loc (loc, ssizetype, a);
6881 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6882 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6884 else
6885 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6887 if (!diff || !integer_onep (diff))
6888 return NULL_TREE;
6890 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6893 /* Fold a sum or difference of at least one multiplication.
6894 Returns the folded tree or NULL if no simplification could be made. */
6896 static tree
6897 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6898 tree arg0, tree arg1)
6900 tree arg00, arg01, arg10, arg11;
6901 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6903 /* (A * C) +- (B * C) -> (A+-B) * C.
6904 (A * C) +- A -> A * (C+-1).
6905 We are most concerned about the case where C is a constant,
6906 but other combinations show up during loop reduction. Since
6907 it is not difficult, try all four possibilities. */
6909 if (TREE_CODE (arg0) == MULT_EXPR)
6911 arg00 = TREE_OPERAND (arg0, 0);
6912 arg01 = TREE_OPERAND (arg0, 1);
6914 else if (TREE_CODE (arg0) == INTEGER_CST)
6916 arg00 = build_one_cst (type);
6917 arg01 = arg0;
6919 else
6921 /* We cannot generate constant 1 for fract. */
6922 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6923 return NULL_TREE;
6924 arg00 = arg0;
6925 arg01 = build_one_cst (type);
6927 if (TREE_CODE (arg1) == MULT_EXPR)
6929 arg10 = TREE_OPERAND (arg1, 0);
6930 arg11 = TREE_OPERAND (arg1, 1);
6932 else if (TREE_CODE (arg1) == INTEGER_CST)
6934 arg10 = build_one_cst (type);
6935 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6936 the purpose of this canonicalization. */
6937 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6938 && negate_expr_p (arg1)
6939 && code == PLUS_EXPR)
6941 arg11 = negate_expr (arg1);
6942 code = MINUS_EXPR;
6944 else
6945 arg11 = arg1;
6947 else
6949 /* We cannot generate constant 1 for fract. */
6950 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6951 return NULL_TREE;
6952 arg10 = arg1;
6953 arg11 = build_one_cst (type);
6955 same = NULL_TREE;
6957 if (operand_equal_p (arg01, arg11, 0))
6958 same = arg01, alt0 = arg00, alt1 = arg10;
6959 else if (operand_equal_p (arg00, arg10, 0))
6960 same = arg00, alt0 = arg01, alt1 = arg11;
6961 else if (operand_equal_p (arg00, arg11, 0))
6962 same = arg00, alt0 = arg01, alt1 = arg10;
6963 else if (operand_equal_p (arg01, arg10, 0))
6964 same = arg01, alt0 = arg00, alt1 = arg11;
6966 /* No identical multiplicands; see if we can find a common
6967 power-of-two factor in non-power-of-two multiplies. This
6968 can help in multi-dimensional array access. */
6969 else if (tree_fits_shwi_p (arg01)
6970 && tree_fits_shwi_p (arg11))
6972 HOST_WIDE_INT int01, int11, tmp;
6973 bool swap = false;
6974 tree maybe_same;
6975 int01 = tree_to_shwi (arg01);
6976 int11 = tree_to_shwi (arg11);
6978 /* Move min of absolute values to int11. */
6979 if (absu_hwi (int01) < absu_hwi (int11))
6981 tmp = int01, int01 = int11, int11 = tmp;
6982 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6983 maybe_same = arg01;
6984 swap = true;
6986 else
6987 maybe_same = arg11;
6989 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6990 /* The remainder should not be a constant, otherwise we
6991 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6992 increased the number of multiplications necessary. */
6993 && TREE_CODE (arg10) != INTEGER_CST)
6995 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6996 build_int_cst (TREE_TYPE (arg00),
6997 int01 / int11));
6998 alt1 = arg10;
6999 same = maybe_same;
7000 if (swap)
7001 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7005 if (same)
7006 return fold_build2_loc (loc, MULT_EXPR, type,
7007 fold_build2_loc (loc, code, type,
7008 fold_convert_loc (loc, type, alt0),
7009 fold_convert_loc (loc, type, alt1)),
7010 fold_convert_loc (loc, type, same));
7012 return NULL_TREE;
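/* For illustration:

       A * 4 + B * 4  ->  (A + B) * 4
       A * 7 - A      ->  A * 6            (A treated as A * 1)
       I * 4 + J * 8  ->  (I + J * 2) * 4

   the last line using the common power-of-two factor found above.  */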
7015 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7016 specified by EXPR into the buffer PTR of length LEN bytes.
7017 Return the number of bytes placed in the buffer, or zero
7018 upon failure. */
7020 static int
7021 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7023 tree type = TREE_TYPE (expr);
7024 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7025 int byte, offset, word, words;
7026 unsigned char value;
7028 if ((off == -1 && total_bytes > len)
7029 || off >= total_bytes)
7030 return 0;
7031 if (off == -1)
7032 off = 0;
7033 words = total_bytes / UNITS_PER_WORD;
7035 for (byte = 0; byte < total_bytes; byte++)
7037 int bitpos = byte * BITS_PER_UNIT;
7038 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7039 number of bytes. */
7040 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7042 if (total_bytes > UNITS_PER_WORD)
7044 word = byte / UNITS_PER_WORD;
7045 if (WORDS_BIG_ENDIAN)
7046 word = (words - 1) - word;
7047 offset = word * UNITS_PER_WORD;
7048 if (BYTES_BIG_ENDIAN)
7049 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7050 else
7051 offset += byte % UNITS_PER_WORD;
7053 else
7054 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7055 if (offset >= off
7056 && offset - off < len)
7057 ptr[offset - off] = value;
7059 return MIN (len, total_bytes - off);
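/* For illustration: encoding a 32-bit INTEGER_CST with value
   0x01020304 for a little-endian target stores the bytes
   04 03 02 01 at ptr[0..3], while a big-endian target layout yields
   01 02 03 04; OFF selects a starting byte within that target
   image.  */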
7063 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7064 specified by EXPR into the buffer PTR of length LEN bytes.
7065 Return the number of bytes placed in the buffer, or zero
7066 upon failure. */
7068 static int
7069 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7071 tree type = TREE_TYPE (expr);
7072 enum machine_mode mode = TYPE_MODE (type);
7073 int total_bytes = GET_MODE_SIZE (mode);
7074 FIXED_VALUE_TYPE value;
7075 tree i_value, i_type;
7077 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7078 return 0;
7080 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7082 if (NULL_TREE == i_type
7083 || TYPE_PRECISION (i_type) != total_bytes)
7084 return 0;
7086 value = TREE_FIXED_CST (expr);
7087 i_value = double_int_to_tree (i_type, value.data);
7089 return native_encode_int (i_value, ptr, len, off);
7093 /* Subroutine of native_encode_expr. Encode the REAL_CST
7094 specified by EXPR into the buffer PTR of length LEN bytes.
7095 Return the number of bytes placed in the buffer, or zero
7096 upon failure. */
7098 static int
7099 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7101 tree type = TREE_TYPE (expr);
7102 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7103 int byte, offset, word, words, bitpos;
7104 unsigned char value;
7106 /* There are always 32 bits in each long, no matter the size of
7107 the host's long. We handle floating point representations with
7108 up to 192 bits. */
7109 long tmp[6];
7111 if ((off == -1 && total_bytes > len)
7112 || off >= total_bytes)
7113 return 0;
7114 if (off == -1)
7115 off = 0;
7116 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7118 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7120 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7121 bitpos += BITS_PER_UNIT)
7123 byte = (bitpos / BITS_PER_UNIT) & 3;
7124 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7126 if (UNITS_PER_WORD < 4)
7128 word = byte / UNITS_PER_WORD;
7129 if (WORDS_BIG_ENDIAN)
7130 word = (words - 1) - word;
7131 offset = word * UNITS_PER_WORD;
7132 if (BYTES_BIG_ENDIAN)
7133 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7134 else
7135 offset += byte % UNITS_PER_WORD;
7137 else
7138 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7139 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7140 if (offset >= off
7141 && offset - off < len)
7142 ptr[offset - off] = value;
7144 return MIN (len, total_bytes - off);
7147 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7148 specified by EXPR into the buffer PTR of length LEN bytes.
7149 Return the number of bytes placed in the buffer, or zero
7150 upon failure. */
7152 static int
7153 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7155 int rsize, isize;
7156 tree part;
7158 part = TREE_REALPART (expr);
7159 rsize = native_encode_expr (part, ptr, len, off);
7160 if (off == -1
7161 && rsize == 0)
7162 return 0;
7163 part = TREE_IMAGPART (expr);
7164 if (off != -1)
7165 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7166 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7167 if (off == -1
7168 && isize != rsize)
7169 return 0;
7170 return rsize + isize;
7174 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7175 specified by EXPR into the buffer PTR of length LEN bytes.
7176 Return the number of bytes placed in the buffer, or zero
7177 upon failure. */
7179 static int
7180 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7182 unsigned i, count;
7183 int size, offset;
7184 tree itype, elem;
7186 offset = 0;
7187 count = VECTOR_CST_NELTS (expr);
7188 itype = TREE_TYPE (TREE_TYPE (expr));
7189 size = GET_MODE_SIZE (TYPE_MODE (itype));
7190 for (i = 0; i < count; i++)
7192 if (off >= size)
7194 off -= size;
7195 continue;
7197 elem = VECTOR_CST_ELT (expr, i);
7198 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7199 if ((off == -1 && res != size)
7200 || res == 0)
7201 return 0;
7202 offset += res;
7203 if (offset >= len)
7204 return offset;
7205 if (off != -1)
7206 off = 0;
7208 return offset;
7212 /* Subroutine of native_encode_expr. Encode the STRING_CST
7213 specified by EXPR into the buffer PTR of length LEN bytes.
7214 Return the number of bytes placed in the buffer, or zero
7215 upon failure. */
7217 static int
7218 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7220 tree type = TREE_TYPE (expr);
7221 HOST_WIDE_INT total_bytes;
7223 if (TREE_CODE (type) != ARRAY_TYPE
7224 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7225 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7226 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7227 return 0;
7228 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7229 if ((off == -1 && total_bytes > len)
7230 || off >= total_bytes)
7231 return 0;
7232 if (off == -1)
7233 off = 0;
7234 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7236 int written = 0;
7237 if (off < TREE_STRING_LENGTH (expr))
7239 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7240 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7242 memset (ptr + written, 0,
7243 MIN (total_bytes - written, len - written));
7245 else
7246 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7247 return MIN (total_bytes - off, len);
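/* Editorial aside (illustrative, not part of the original source):
   only STRING_CSTs that fill a char array one byte per element are
   handled.  The encoded image is padded with zeros between
   TREE_STRING_LENGTH and the full array size, so a char[8] holding
   "hi" (TREE_STRING_LENGTH == 3, counting the NUL) encodes as
   'h' 'i' followed by six zero bytes.  */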
7251 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7252 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7253 buffer PTR of length LEN bytes. If OFF is not -1 then start
7254 the encoding at byte offset OFF and encode at most LEN bytes.
7255 Return the number of bytes placed in the buffer, or zero upon failure. */
7257 int
7258 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7260 switch (TREE_CODE (expr))
7262 case INTEGER_CST:
7263 return native_encode_int (expr, ptr, len, off);
7265 case REAL_CST:
7266 return native_encode_real (expr, ptr, len, off);
7268 case FIXED_CST:
7269 return native_encode_fixed (expr, ptr, len, off);
7271 case COMPLEX_CST:
7272 return native_encode_complex (expr, ptr, len, off);
7274 case VECTOR_CST:
7275 return native_encode_vector (expr, ptr, len, off);
7277 case STRING_CST:
7278 return native_encode_string (expr, ptr, len, off);
7280 default:
7281 return 0;
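/* Editorial sketch (not part of the original source): a minimal,
   hypothetical helper showing how the encode/interpret pair above is
   meant to compose.  The function name and locals are invented for
   illustration only; the buffer size mirrors fold_view_convert_expr
   further below.  */

#if 0
static tree
example_reinterpret_cst (tree new_type, tree cst)
{
  unsigned char buf[64];

  /* Serialize CST into its target byte image; OFF == -1 means
     "encode from the start".  Zero signals failure.  */
  int len = native_encode_expr (cst, buf, sizeof (buf), -1);
  if (len == 0)
    return NULL_TREE;

  /* Re-read the same bytes as a constant of NEW_TYPE, or get
     NULL_TREE back if that type cannot be interpreted.  */
  return native_interpret_expr (new_type, buf, len);
}
#endif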
7286 /* Subroutine of native_interpret_expr. Interpret the contents of
7287 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7288 If the buffer cannot be interpreted, return NULL_TREE. */
7290 static tree
7291 native_interpret_int (tree type, const unsigned char *ptr, int len)
7293 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7295 if (total_bytes > len
7296 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7297 return NULL_TREE;
7299 wide_int result = wi::from_buffer (ptr, total_bytes);
7301 return wide_int_to_tree (type, result);
7305 /* Subroutine of native_interpret_expr. Interpret the contents of
7306 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7307 If the buffer cannot be interpreted, return NULL_TREE. */
7309 static tree
7310 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7312 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7313 double_int result;
7314 FIXED_VALUE_TYPE fixed_value;
7316 if (total_bytes > len
7317 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7318 return NULL_TREE;
7320 result = double_int::from_buffer (ptr, total_bytes);
7321 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7323 return build_fixed (type, fixed_value);
7327 /* Subroutine of native_interpret_expr. Interpret the contents of
7328 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7329 If the buffer cannot be interpreted, return NULL_TREE. */
7331 static tree
7332 native_interpret_real (tree type, const unsigned char *ptr, int len)
7334 enum machine_mode mode = TYPE_MODE (type);
7335 int total_bytes = GET_MODE_SIZE (mode);
7336 int byte, offset, word, words, bitpos;
7337 unsigned char value;
7338 /* There are always 32 bits in each long, no matter the size of
7339 the host's long. We handle floating point representations with
7340 up to 192 bits. */
7341 REAL_VALUE_TYPE r;
7342 long tmp[6];
7344 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7345 if (total_bytes > len || total_bytes > 24)
7346 return NULL_TREE;
7347 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7349 memset (tmp, 0, sizeof (tmp));
7350 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7351 bitpos += BITS_PER_UNIT)
7353 byte = (bitpos / BITS_PER_UNIT) & 3;
7354 if (UNITS_PER_WORD < 4)
7356 word = byte / UNITS_PER_WORD;
7357 if (WORDS_BIG_ENDIAN)
7358 word = (words - 1) - word;
7359 offset = word * UNITS_PER_WORD;
7360 if (BYTES_BIG_ENDIAN)
7361 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7362 else
7363 offset += byte % UNITS_PER_WORD;
7365 else
7366 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7367 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7369 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7372 real_from_target (&r, tmp, mode);
7373 return build_real (type, r);
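/* Editorial aside (illustrative, not part of the original source):
   this is the inverse of native_encode_real above: the same
   byte/word shuffle reassembles the target image into 32-bit chunks,
   and real_from_target then rebuilds the REAL_VALUE_TYPE, so
   interpreting the bytes produced by the encoder round-trips the
   constant.  */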
7377 /* Subroutine of native_interpret_expr. Interpret the contents of
7378 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7379 If the buffer cannot be interpreted, return NULL_TREE. */
7381 static tree
7382 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7384 tree etype, rpart, ipart;
7385 int size;
7387 etype = TREE_TYPE (type);
7388 size = GET_MODE_SIZE (TYPE_MODE (etype));
7389 if (size * 2 > len)
7390 return NULL_TREE;
7391 rpart = native_interpret_expr (etype, ptr, size);
7392 if (!rpart)
7393 return NULL_TREE;
7394 ipart = native_interpret_expr (etype, ptr+size, size);
7395 if (!ipart)
7396 return NULL_TREE;
7397 return build_complex (type, rpart, ipart);
7401 /* Subroutine of native_interpret_expr. Interpret the contents of
7402 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7403 If the buffer cannot be interpreted, return NULL_TREE. */
7405 static tree
7406 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7408 tree etype, elem;
7409 int i, size, count;
7410 tree *elements;
7412 etype = TREE_TYPE (type);
7413 size = GET_MODE_SIZE (TYPE_MODE (etype));
7414 count = TYPE_VECTOR_SUBPARTS (type);
7415 if (size * count > len)
7416 return NULL_TREE;
7418 elements = XALLOCAVEC (tree, count);
7419 for (i = count - 1; i >= 0; i--)
7421 elem = native_interpret_expr (etype, ptr+(i*size), size);
7422 if (!elem)
7423 return NULL_TREE;
7424 elements[i] = elem;
7426 return build_vector (type, elements);
7430 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7431 the buffer PTR of length LEN as a constant of type TYPE. For
7432 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7433 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7434 return NULL_TREE. */
7436 tree
7437 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7439 switch (TREE_CODE (type))
7441 case INTEGER_TYPE:
7442 case ENUMERAL_TYPE:
7443 case BOOLEAN_TYPE:
7444 case POINTER_TYPE:
7445 case REFERENCE_TYPE:
7446 return native_interpret_int (type, ptr, len);
7448 case REAL_TYPE:
7449 return native_interpret_real (type, ptr, len);
7451 case FIXED_POINT_TYPE:
7452 return native_interpret_fixed (type, ptr, len);
7454 case COMPLEX_TYPE:
7455 return native_interpret_complex (type, ptr, len);
7457 case VECTOR_TYPE:
7458 return native_interpret_vector (type, ptr, len);
7460 default:
7461 return NULL_TREE;
7465 /* Returns true if we can interpret the contents of a native encoding
7466 as TYPE. */
7468 static bool
7469 can_native_interpret_type_p (tree type)
7471 switch (TREE_CODE (type))
7473 case INTEGER_TYPE:
7474 case ENUMERAL_TYPE:
7475 case BOOLEAN_TYPE:
7476 case POINTER_TYPE:
7477 case REFERENCE_TYPE:
7478 case FIXED_POINT_TYPE:
7479 case REAL_TYPE:
7480 case COMPLEX_TYPE:
7481 case VECTOR_TYPE:
7482 return true;
7483 default:
7484 return false;
7488 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7489 TYPE at compile-time. If we're unable to perform the conversion
7490 return NULL_TREE. */
7492 static tree
7493 fold_view_convert_expr (tree type, tree expr)
7495 /* We support up to 512-bit values (for V8DFmode). */
7496 unsigned char buffer[64];
7497 int len;
7499 /* Check that the host and target are sane. */
7500 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7501 return NULL_TREE;
7503 len = native_encode_expr (expr, buffer, sizeof (buffer));
7504 if (len == 0)
7505 return NULL_TREE;
7507 return native_interpret_expr (type, buffer, len);
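/* Editorial aside (illustrative, not part of the original source):
   on a target with 32-bit int and IEEE single precision float, this
   routine folds VIEW_CONVERT_EXPR <int> (1.0f) to the INTEGER_CST
   0x3f800000, i.e. the bit image of 1.0f read back as an integer.
   A hypothetical caller:

     tree f = build_real (float_type_node, dconst1);
     tree i = fold_view_convert_expr (integer_type_node, f);

   where I is the folded INTEGER_CST, or NULL_TREE if the encoding
   or interpretation step failed.  */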
7510 /* Build an expression for the address of T. Folds away INDIRECT_REF
7511 to avoid confusing the gimplify process. */
7513 tree
7514 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7516 /* The size of the object is not relevant when talking about its address. */
7517 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7518 t = TREE_OPERAND (t, 0);
7520 if (TREE_CODE (t) == INDIRECT_REF)
7522 t = TREE_OPERAND (t, 0);
7524 if (TREE_TYPE (t) != ptrtype)
7525 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7527 else if (TREE_CODE (t) == MEM_REF
7528 && integer_zerop (TREE_OPERAND (t, 1)))
7529 return TREE_OPERAND (t, 0);
7530 else if (TREE_CODE (t) == MEM_REF
7531 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7532 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7533 TREE_OPERAND (t, 0),
7534 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7535 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7537 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7539 if (TREE_TYPE (t) != ptrtype)
7540 t = fold_convert_loc (loc, ptrtype, t);
7542 else
7543 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7545 return t;
7548 /* Build an expression for the address of T. */
7550 tree
7551 build_fold_addr_expr_loc (location_t loc, tree t)
7553 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7555 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
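/* Editorial aside (illustrative, not part of the original source):
   given T being the INDIRECT_REF *p, build_fold_addr_expr_loc
   simply yields P again (possibly behind a cast to the requested
   pointer type), and &MEM_REF[p, 0] likewise folds back to P, so
   gimplification never sees an ADDR_EXPR wrapped around a plain
   dereference.  */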
7558 static bool vec_cst_ctor_to_array (tree, tree *);
7560 /* Fold a unary expression of code CODE and type TYPE with operand
7561 OP0. Return the folded expression if folding is successful.
7562 Otherwise, return NULL_TREE. */
7564 tree
7565 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7567 tree tem;
7568 tree arg0;
7569 enum tree_code_class kind = TREE_CODE_CLASS (code);
7571 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7572 && TREE_CODE_LENGTH (code) == 1);
7574 tem = generic_simplify (loc, code, type, op0);
7575 if (tem)
7576 return tem;
7578 arg0 = op0;
7579 if (arg0)
7581 if (CONVERT_EXPR_CODE_P (code)
7582 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7584 /* Don't use STRIP_NOPS, because signedness of argument type
7585 matters. */
7586 STRIP_SIGN_NOPS (arg0);
7588 else
7590 /* Strip any conversions that don't change the mode. This
7591 is safe for every expression, except for a comparison
7592 expression because its signedness is derived from its
7593 operands.
7595 Note that this is done as an internal manipulation within
7596 the constant folder, in order to find the simplest
7597 representation of the arguments so that their form can be
7598 studied. In any case, the appropriate type conversions
7599 should be put back in the tree that will get out of the
7600 constant folder. */
7601 STRIP_NOPS (arg0);
7605 if (TREE_CODE_CLASS (code) == tcc_unary)
7607 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7608 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7609 fold_build1_loc (loc, code, type,
7610 fold_convert_loc (loc, TREE_TYPE (op0),
7611 TREE_OPERAND (arg0, 1))));
7612 else if (TREE_CODE (arg0) == COND_EXPR)
7614 tree arg01 = TREE_OPERAND (arg0, 1);
7615 tree arg02 = TREE_OPERAND (arg0, 2);
7616 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7617 arg01 = fold_build1_loc (loc, code, type,
7618 fold_convert_loc (loc,
7619 TREE_TYPE (op0), arg01));
7620 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7621 arg02 = fold_build1_loc (loc, code, type,
7622 fold_convert_loc (loc,
7623 TREE_TYPE (op0), arg02));
7624 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7625 arg01, arg02);
7627 /* If this was a conversion, and all we did was to move it
7628 inside the COND_EXPR, bring it back out. But leave it if
7629 it is a conversion from integer to integer and the
7630 result precision is no wider than a word since such a
7631 conversion is cheap and may be optimized away by combine,
7632 while it couldn't if it were outside the COND_EXPR. Then return
7633 so we don't get into an infinite recursion loop taking the
7634 conversion out and then back in. */
7636 if ((CONVERT_EXPR_CODE_P (code)
7637 || code == NON_LVALUE_EXPR)
7638 && TREE_CODE (tem) == COND_EXPR
7639 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7640 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7641 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7642 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7643 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7644 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7645 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7646 && (INTEGRAL_TYPE_P
7647 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7648 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7649 || flag_syntax_only))
7650 tem = build1_loc (loc, code, type,
7651 build3 (COND_EXPR,
7652 TREE_TYPE (TREE_OPERAND
7653 (TREE_OPERAND (tem, 1), 0)),
7654 TREE_OPERAND (tem, 0),
7655 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7656 TREE_OPERAND (TREE_OPERAND (tem, 2),
7657 0)));
7658 return tem;
7662 switch (code)
7664 case PAREN_EXPR:
7665 /* Re-association barriers around constants and other re-association
7666 barriers can be removed. */
7667 if (CONSTANT_CLASS_P (op0)
7668 || TREE_CODE (op0) == PAREN_EXPR)
7669 return fold_convert_loc (loc, type, op0);
7670 return NULL_TREE;
7672 case NON_LVALUE_EXPR:
7673 if (!maybe_lvalue_p (op0))
7674 return fold_convert_loc (loc, type, op0);
7675 return NULL_TREE;
7677 CASE_CONVERT:
7678 case FLOAT_EXPR:
7679 case FIX_TRUNC_EXPR:
7680 if (TREE_TYPE (op0) == type)
7681 return op0;
7683 if (COMPARISON_CLASS_P (op0))
7685 /* If we have (type) (a CMP b) and type is an integral type, return
7686 new expression involving the new type. Canonicalize
7687 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7688 non-integral type.
7689 Do not fold the result as that would not simplify further;
7690 folding it again would only result in infinite recursion. */
7691 if (TREE_CODE (type) == BOOLEAN_TYPE)
7692 return build2_loc (loc, TREE_CODE (op0), type,
7693 TREE_OPERAND (op0, 0),
7694 TREE_OPERAND (op0, 1));
7695 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7696 && TREE_CODE (type) != VECTOR_TYPE)
7697 return build3_loc (loc, COND_EXPR, type, op0,
7698 constant_boolean_node (true, type),
7699 constant_boolean_node (false, type));
7702 /* Handle cases of two conversions in a row. */
7703 if (CONVERT_EXPR_P (op0))
7705 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7706 tree inter_type = TREE_TYPE (op0);
7707 int inside_int = INTEGRAL_TYPE_P (inside_type);
7708 int inside_ptr = POINTER_TYPE_P (inside_type);
7709 int inside_float = FLOAT_TYPE_P (inside_type);
7710 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7711 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7712 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7713 int inter_int = INTEGRAL_TYPE_P (inter_type);
7714 int inter_ptr = POINTER_TYPE_P (inter_type);
7715 int inter_float = FLOAT_TYPE_P (inter_type);
7716 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7717 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7718 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7719 int final_int = INTEGRAL_TYPE_P (type);
7720 int final_ptr = POINTER_TYPE_P (type);
7721 int final_float = FLOAT_TYPE_P (type);
7722 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7723 unsigned int final_prec = TYPE_PRECISION (type);
7724 int final_unsignedp = TYPE_UNSIGNED (type);
7726 /* In addition to the cases of two conversions in a row
7727 handled below, if we are converting something to its own
7728 type via an object of identical or wider precision, neither
7729 conversion is needed. */
7730 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7731 && (((inter_int || inter_ptr) && final_int)
7732 || (inter_float && final_float))
7733 && inter_prec >= final_prec)
7734 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7736 /* Likewise, if the intermediate and initial types are either both
7737 float or both integer, we don't need the middle conversion if the
7738 former is wider than the latter and doesn't change the signedness
7739 (for integers). Avoid this if the final type is a pointer since
7740 then we sometimes need the middle conversion. Likewise if the
7741 final type has a precision not equal to the size of its mode. */
7742 if (((inter_int && inside_int)
7743 || (inter_float && inside_float)
7744 || (inter_vec && inside_vec))
7745 && inter_prec >= inside_prec
7746 && (inter_float || inter_vec
7747 || inter_unsignedp == inside_unsignedp)
7748 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7749 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7750 && ! final_ptr
7751 && (! final_vec || inter_prec == inside_prec))
7752 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7754 /* If we have a sign-extension of a zero-extended value, we can
7755 replace that by a single zero-extension. Likewise if the
7756 final conversion does not change precision we can drop the
7757 intermediate conversion. */
7758 if (inside_int && inter_int && final_int
7759 && ((inside_prec < inter_prec && inter_prec < final_prec
7760 && inside_unsignedp && !inter_unsignedp)
7761 || final_prec == inter_prec))
7762 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7764 /* Two conversions in a row are not needed unless:
7765 - some conversion is floating-point (overstrict for now), or
7766 - some conversion is a vector (overstrict for now), or
7767 - the intermediate type is narrower than both initial and
7768 final, or
7769 - the intermediate type and innermost type differ in signedness,
7770 and the outermost type is wider than the intermediate, or
7771 - the initial type is a pointer type and the precisions of the
7772 intermediate and final types differ, or
7773 - the final type is a pointer type and the precisions of the
7774 initial and intermediate types differ. */
7775 if (! inside_float && ! inter_float && ! final_float
7776 && ! inside_vec && ! inter_vec && ! final_vec
7777 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7778 && ! (inside_int && inter_int
7779 && inter_unsignedp != inside_unsignedp
7780 && inter_prec < final_prec)
7781 && ((inter_unsignedp && inter_prec > inside_prec)
7782 == (final_unsignedp && final_prec > inter_prec))
7783 && ! (inside_ptr && inter_prec != final_prec)
7784 && ! (final_ptr && inside_prec != inter_prec)
7785 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7786 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7787 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7790 /* Handle (T *)&A.B.C for A being of type T and B and C
7791 living at offset zero. This occurs frequently in
7792 C++ upcasting and then accessing the base. */
7793 if (TREE_CODE (op0) == ADDR_EXPR
7794 && POINTER_TYPE_P (type)
7795 && handled_component_p (TREE_OPERAND (op0, 0)))
7797 HOST_WIDE_INT bitsize, bitpos;
7798 tree offset;
7799 enum machine_mode mode;
7800 int unsignedp, volatilep;
7801 tree base = TREE_OPERAND (op0, 0);
7802 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7803 &mode, &unsignedp, &volatilep, false);
7804 /* If the reference was to a (constant) zero offset, we can use
7805 the address of the base if it has the same base type
7806 as the result type and the pointer type is unqualified. */
7807 if (! offset && bitpos == 0
7808 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7809 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7810 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7811 return fold_convert_loc (loc, type,
7812 build_fold_addr_expr_loc (loc, base));
7815 if (TREE_CODE (op0) == MODIFY_EXPR
7816 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7817 /* Detect assigning a bitfield. */
7818 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7819 && DECL_BIT_FIELD
7820 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7822 /* Don't leave an assignment inside a conversion
7823 unless assigning a bitfield. */
7824 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7825 /* First do the assignment, then return converted constant. */
7826 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7827 TREE_NO_WARNING (tem) = 1;
7828 TREE_USED (tem) = 1;
7829 return tem;
7832 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7833 constant (if x has signed type, the sign bit cannot be set
7834 in c). This folds extension into the BIT_AND_EXPR.
7835 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7836 very likely don't have maximal range for their precision and this
7837 transformation effectively doesn't preserve non-maximal ranges. */
7838 if (TREE_CODE (type) == INTEGER_TYPE
7839 && TREE_CODE (op0) == BIT_AND_EXPR
7840 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7842 tree and_expr = op0;
7843 tree and0 = TREE_OPERAND (and_expr, 0);
7844 tree and1 = TREE_OPERAND (and_expr, 1);
7845 int change = 0;
7847 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7848 || (TYPE_PRECISION (type)
7849 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7850 change = 1;
7851 else if (TYPE_PRECISION (TREE_TYPE (and1))
7852 <= HOST_BITS_PER_WIDE_INT
7853 && tree_fits_uhwi_p (and1))
7855 unsigned HOST_WIDE_INT cst;
7857 cst = tree_to_uhwi (and1);
7858 cst &= HOST_WIDE_INT_M1U
7859 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7860 change = (cst == 0);
7861 #ifdef LOAD_EXTEND_OP
7862 if (change
7863 && !flag_syntax_only
7864 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7865 == ZERO_EXTEND))
7867 tree uns = unsigned_type_for (TREE_TYPE (and0));
7868 and0 = fold_convert_loc (loc, uns, and0);
7869 and1 = fold_convert_loc (loc, uns, and1);
7871 #endif
7873 if (change)
7875 tem = force_fit_type (type, wi::to_widest (and1), 0,
7876 TREE_OVERFLOW (and1));
7877 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7878 fold_convert_loc (loc, type, and0), tem);
7882 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7883 when one of the new casts will fold away. Conservatively we assume
7884 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7885 if (POINTER_TYPE_P (type)
7886 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7887 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7888 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7889 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7890 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7892 tree arg00 = TREE_OPERAND (arg0, 0);
7893 tree arg01 = TREE_OPERAND (arg0, 1);
7895 return fold_build_pointer_plus_loc
7896 (loc, fold_convert_loc (loc, type, arg00), arg01);
7899 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7900 of the same precision, and X is an integer type not narrower than
7901 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7902 if (INTEGRAL_TYPE_P (type)
7903 && TREE_CODE (op0) == BIT_NOT_EXPR
7904 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7905 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7906 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7908 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7909 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7910 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7911 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7912 fold_convert_loc (loc, type, tem));
7915 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7916 type of X and Y (integer types only). */
7917 if (INTEGRAL_TYPE_P (type)
7918 && TREE_CODE (op0) == MULT_EXPR
7919 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7920 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7922 /* Be careful not to introduce new overflows. */
7923 tree mult_type;
7924 if (TYPE_OVERFLOW_WRAPS (type))
7925 mult_type = type;
7926 else
7927 mult_type = unsigned_type_for (type);
7929 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7931 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7932 fold_convert_loc (loc, mult_type,
7933 TREE_OPERAND (op0, 0)),
7934 fold_convert_loc (loc, mult_type,
7935 TREE_OPERAND (op0, 1)));
7936 return fold_convert_loc (loc, type, tem);
7940 tem = fold_convert_const (code, type, arg0);
7941 return tem ? tem : NULL_TREE;
7943 case ADDR_SPACE_CONVERT_EXPR:
7944 if (integer_zerop (arg0))
7945 return fold_convert_const (code, type, arg0);
7946 return NULL_TREE;
7948 case FIXED_CONVERT_EXPR:
7949 tem = fold_convert_const (code, type, arg0);
7950 return tem ? tem : NULL_TREE;
7952 case VIEW_CONVERT_EXPR:
7953 if (TREE_TYPE (op0) == type)
7954 return op0;
7955 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7956 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7957 type, TREE_OPERAND (op0, 0));
7958 if (TREE_CODE (op0) == MEM_REF)
7959 return fold_build2_loc (loc, MEM_REF, type,
7960 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7962 /* For integral conversions with the same precision or pointer
7963 conversions use a NOP_EXPR instead. */
7964 if ((INTEGRAL_TYPE_P (type)
7965 || POINTER_TYPE_P (type))
7966 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7967 || POINTER_TYPE_P (TREE_TYPE (op0)))
7968 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7969 return fold_convert_loc (loc, type, op0);
7971 /* Strip inner integral conversions that do not change the precision. */
7972 if (CONVERT_EXPR_P (op0)
7973 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7974 || POINTER_TYPE_P (TREE_TYPE (op0)))
7975 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7976 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7977 && (TYPE_PRECISION (TREE_TYPE (op0))
7978 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7979 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7980 type, TREE_OPERAND (op0, 0));
7982 return fold_view_convert_expr (type, op0);
7984 case NEGATE_EXPR:
7985 tem = fold_negate_expr (loc, arg0);
7986 if (tem)
7987 return fold_convert_loc (loc, type, tem);
7988 return NULL_TREE;
7990 case ABS_EXPR:
7991 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7992 return fold_abs_const (arg0, type);
7993 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7994 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7995 /* Convert fabs((double)float) into (double)fabsf(float). */
7996 else if (TREE_CODE (arg0) == NOP_EXPR
7997 && TREE_CODE (type) == REAL_TYPE)
7999 tree targ0 = strip_float_extensions (arg0);
8000 if (targ0 != arg0)
8001 return fold_convert_loc (loc, type,
8002 fold_build1_loc (loc, ABS_EXPR,
8003 TREE_TYPE (targ0),
8004 targ0));
8006 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8007 else if (TREE_CODE (arg0) == ABS_EXPR)
8008 return arg0;
8009 else if (tree_expr_nonnegative_p (arg0))
8010 return arg0;
8012 /* Strip sign ops from argument. */
8013 if (TREE_CODE (type) == REAL_TYPE)
8015 tem = fold_strip_sign_ops (arg0);
8016 if (tem)
8017 return fold_build1_loc (loc, ABS_EXPR, type,
8018 fold_convert_loc (loc, type, tem));
8020 return NULL_TREE;
8022 case CONJ_EXPR:
8023 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8024 return fold_convert_loc (loc, type, arg0);
8025 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8027 tree itype = TREE_TYPE (type);
8028 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8029 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8030 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8031 negate_expr (ipart));
8033 if (TREE_CODE (arg0) == COMPLEX_CST)
8035 tree itype = TREE_TYPE (type);
8036 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8037 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8038 return build_complex (type, rpart, negate_expr (ipart));
8040 if (TREE_CODE (arg0) == CONJ_EXPR)
8041 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8042 return NULL_TREE;
8044 case BIT_NOT_EXPR:
8045 if (TREE_CODE (arg0) == INTEGER_CST)
8046 return fold_not_const (arg0, type);
8047 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8048 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8049 /* Convert ~ (-A) to A - 1. */
8050 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8051 return fold_build2_loc (loc, MINUS_EXPR, type,
8052 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8053 build_int_cst (type, 1));
8054 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8055 else if (INTEGRAL_TYPE_P (type)
8056 && ((TREE_CODE (arg0) == MINUS_EXPR
8057 && integer_onep (TREE_OPERAND (arg0, 1)))
8058 || (TREE_CODE (arg0) == PLUS_EXPR
8059 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8060 return fold_build1_loc (loc, NEGATE_EXPR, type,
8061 fold_convert_loc (loc, type,
8062 TREE_OPERAND (arg0, 0)));
8063 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8064 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8065 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8066 fold_convert_loc (loc, type,
8067 TREE_OPERAND (arg0, 0)))))
8068 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8069 fold_convert_loc (loc, type,
8070 TREE_OPERAND (arg0, 1)));
8071 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8072 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8073 fold_convert_loc (loc, type,
8074 TREE_OPERAND (arg0, 1)))))
8075 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8076 fold_convert_loc (loc, type,
8077 TREE_OPERAND (arg0, 0)), tem);
8078 /* Perform BIT_NOT_EXPR on each element individually. */
8079 else if (TREE_CODE (arg0) == VECTOR_CST)
8081 tree *elements;
8082 tree elem;
8083 unsigned count = VECTOR_CST_NELTS (arg0), i;
8085 elements = XALLOCAVEC (tree, count);
8086 for (i = 0; i < count; i++)
8088 elem = VECTOR_CST_ELT (arg0, i);
8089 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8090 if (elem == NULL_TREE)
8091 break;
8092 elements[i] = elem;
8094 if (i == count)
8095 return build_vector (type, elements);
8097 else if (COMPARISON_CLASS_P (arg0)
8098 && (VECTOR_TYPE_P (type)
8099 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8101 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8102 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8103 HONOR_NANS (TYPE_MODE (op_type)));
8104 if (subcode != ERROR_MARK)
8105 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8106 TREE_OPERAND (arg0, 1));
8110 return NULL_TREE;
8112 case TRUTH_NOT_EXPR:
8113 /* Note that the operand of this must be an int
8114 and its values must be 0 or 1.
8115 ("true" is a fixed value perhaps depending on the language,
8116 but we don't handle values other than 1 correctly yet.) */
8117 tem = fold_truth_not_expr (loc, arg0);
8118 if (!tem)
8119 return NULL_TREE;
8120 return fold_convert_loc (loc, type, tem);
8122 case REALPART_EXPR:
8123 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8124 return fold_convert_loc (loc, type, arg0);
8125 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8126 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8127 TREE_OPERAND (arg0, 1));
8128 if (TREE_CODE (arg0) == COMPLEX_CST)
8129 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8130 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8132 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8133 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8134 fold_build1_loc (loc, REALPART_EXPR, itype,
8135 TREE_OPERAND (arg0, 0)),
8136 fold_build1_loc (loc, REALPART_EXPR, itype,
8137 TREE_OPERAND (arg0, 1)));
8138 return fold_convert_loc (loc, type, tem);
8140 if (TREE_CODE (arg0) == CONJ_EXPR)
8142 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8143 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8144 TREE_OPERAND (arg0, 0));
8145 return fold_convert_loc (loc, type, tem);
8147 if (TREE_CODE (arg0) == CALL_EXPR)
8149 tree fn = get_callee_fndecl (arg0);
8150 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8151 switch (DECL_FUNCTION_CODE (fn))
8153 CASE_FLT_FN (BUILT_IN_CEXPI):
8154 fn = mathfn_built_in (type, BUILT_IN_COS);
8155 if (fn)
8156 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8157 break;
8159 default:
8160 break;
8163 return NULL_TREE;
8165 case IMAGPART_EXPR:
8166 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8167 return build_zero_cst (type);
8168 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8169 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8170 TREE_OPERAND (arg0, 0));
8171 if (TREE_CODE (arg0) == COMPLEX_CST)
8172 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8173 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8175 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8176 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8177 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8178 TREE_OPERAND (arg0, 0)),
8179 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8180 TREE_OPERAND (arg0, 1)));
8181 return fold_convert_loc (loc, type, tem);
8183 if (TREE_CODE (arg0) == CONJ_EXPR)
8185 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8186 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8187 return fold_convert_loc (loc, type, negate_expr (tem));
8189 if (TREE_CODE (arg0) == CALL_EXPR)
8191 tree fn = get_callee_fndecl (arg0);
8192 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8193 switch (DECL_FUNCTION_CODE (fn))
8195 CASE_FLT_FN (BUILT_IN_CEXPI):
8196 fn = mathfn_built_in (type, BUILT_IN_SIN);
8197 if (fn)
8198 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8199 break;
8201 default:
8202 break;
8205 return NULL_TREE;
8207 case INDIRECT_REF:
8208 /* Fold *&X to X if X is an lvalue. */
8209 if (TREE_CODE (op0) == ADDR_EXPR)
8211 tree op00 = TREE_OPERAND (op0, 0);
8212 if ((TREE_CODE (op00) == VAR_DECL
8213 || TREE_CODE (op00) == PARM_DECL
8214 || TREE_CODE (op00) == RESULT_DECL)
8215 && !TREE_READONLY (op00))
8216 return op00;
8218 return NULL_TREE;
8220 case VEC_UNPACK_LO_EXPR:
8221 case VEC_UNPACK_HI_EXPR:
8222 case VEC_UNPACK_FLOAT_LO_EXPR:
8223 case VEC_UNPACK_FLOAT_HI_EXPR:
8225 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8226 tree *elts;
8227 enum tree_code subcode;
8229 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8230 if (TREE_CODE (arg0) != VECTOR_CST)
8231 return NULL_TREE;
8233 elts = XALLOCAVEC (tree, nelts * 2);
8234 if (!vec_cst_ctor_to_array (arg0, elts))
8235 return NULL_TREE;
8237 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8238 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8239 elts += nelts;
8241 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8242 subcode = NOP_EXPR;
8243 else
8244 subcode = FLOAT_EXPR;
8246 for (i = 0; i < nelts; i++)
8248 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8249 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8250 return NULL_TREE;
8253 return build_vector (type, elts);
8256 case REDUC_MIN_EXPR:
8257 case REDUC_MAX_EXPR:
8258 case REDUC_PLUS_EXPR:
8260 unsigned int nelts, i;
8261 tree *elts;
8262 enum tree_code subcode;
8264 if (TREE_CODE (op0) != VECTOR_CST)
8265 return NULL_TREE;
8266 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8268 elts = XALLOCAVEC (tree, nelts);
8269 if (!vec_cst_ctor_to_array (op0, elts))
8270 return NULL_TREE;
8272 switch (code)
8274 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8275 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8276 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8277 default: gcc_unreachable ();
8280 for (i = 1; i < nelts; i++)
8282 elts[0] = const_binop (subcode, elts[0], elts[i]);
8283 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8284 return NULL_TREE;
8287 return elts[0];
8290 default:
8291 return NULL_TREE;
8292 } /* switch (code) */
8296 /* If the operation was a conversion, do _not_ mark a resulting constant
8297 with TREE_OVERFLOW if the original constant was not. These conversions
8298 have implementation-defined behavior and retaining the TREE_OVERFLOW
8299 flag here would confuse later passes such as VRP. */
8300 tree
8301 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8302 tree type, tree op0)
8304 tree res = fold_unary_loc (loc, code, type, op0);
8305 if (res
8306 && TREE_CODE (res) == INTEGER_CST
8307 && TREE_CODE (op0) == INTEGER_CST
8308 && CONVERT_EXPR_CODE_P (code))
8309 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8311 return res;
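/* Editorial aside (illustrative, not part of the original source):
   e.g. when (unsigned int) c is folded and C is an INTEGER_CST that
   was marked TREE_OVERFLOW by an earlier narrowing, the plain
   fold_unary_loc result would keep the flag set; this wrapper copies
   the flag from the operand instead, so a conversion by itself never
   introduces a new overflow indication.  */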
8314 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8315 operands OP0 and OP1. LOC is the location of the resulting expression.
8316 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8317 Return the folded expression if folding is successful. Otherwise,
8318 return NULL_TREE. */
8319 static tree
8320 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8321 tree arg0, tree arg1, tree op0, tree op1)
8323 tree tem;
8325 /* We only do these simplifications if we are optimizing. */
8326 if (!optimize)
8327 return NULL_TREE;
8329 /* Check for things like (A || B) && (A || C). We can convert this
8330 to A || (B && C). Note that either operator can be any of the four
8331 truth and/or operations and the transformation will still be
8332 valid. Also note that we only care about order for the
8333 ANDIF and ORIF operators. If B contains side effects, this
8334 might change the truth-value of A. */
8335 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8336 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8337 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8338 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8339 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8340 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8342 tree a00 = TREE_OPERAND (arg0, 0);
8343 tree a01 = TREE_OPERAND (arg0, 1);
8344 tree a10 = TREE_OPERAND (arg1, 0);
8345 tree a11 = TREE_OPERAND (arg1, 1);
8346 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8347 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8348 && (code == TRUTH_AND_EXPR
8349 || code == TRUTH_OR_EXPR));
8351 if (operand_equal_p (a00, a10, 0))
8352 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8353 fold_build2_loc (loc, code, type, a01, a11));
8354 else if (commutative && operand_equal_p (a00, a11, 0))
8355 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8356 fold_build2_loc (loc, code, type, a01, a10));
8357 else if (commutative && operand_equal_p (a01, a10, 0))
8358 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8359 fold_build2_loc (loc, code, type, a00, a11));
8361 /* This case is tricky because we must either have commutative
8362 operators or else A10 must not have side-effects. */
8364 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8365 && operand_equal_p (a01, a11, 0))
8366 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8367 fold_build2_loc (loc, code, type, a00, a10),
8368 a01);
8371 /* See if we can build a range comparison. */
8372 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8373 return tem;
8375 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8376 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8378 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8379 if (tem)
8380 return fold_build2_loc (loc, code, type, tem, arg1);
8383 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8384 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8386 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8387 if (tem)
8388 return fold_build2_loc (loc, code, type, arg0, tem);
8391 /* Check for the possibility of merging component references. If our
8392 lhs is another similar operation, try to merge its rhs with our
8393 rhs. Then try to merge our lhs and rhs. */
8394 if (TREE_CODE (arg0) == code
8395 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8396 TREE_OPERAND (arg0, 1), arg1)))
8397 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8399 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8400 return tem;
8402 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8403 && (code == TRUTH_AND_EXPR
8404 || code == TRUTH_ANDIF_EXPR
8405 || code == TRUTH_OR_EXPR
8406 || code == TRUTH_ORIF_EXPR))
8408 enum tree_code ncode, icode;
8410 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8411 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8412 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8414 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8415 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8416 We don't want to pack more than two leaves into a non-IF AND/OR
8417 expression.
8418 If the tree code of the left-hand operand isn't an AND/OR-IF code
8419 and isn't equal to IF-CODE, then we don't want to add the right-hand
8420 operand. If the inner right-hand side of the left-hand operand has
8421 side-effects, or isn't simple, then we can't add to it,
8422 as otherwise we might destroy the if-sequence. */
8423 if (TREE_CODE (arg0) == icode
8424 && simple_operand_p_2 (arg1)
8425 /* Needed for sequence points to handle trapping and
8426 side-effects. */
8427 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8429 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8430 arg1);
8431 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8432 tem);
8434 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8435 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8436 else if (TREE_CODE (arg1) == icode
8437 && simple_operand_p_2 (arg0)
8438 /* Needed for sequence points to handle trapping and
8439 side-effects. */
8440 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8442 tem = fold_build2_loc (loc, ncode, type,
8443 arg0, TREE_OPERAND (arg1, 0));
8444 return fold_build2_loc (loc, icode, type, tem,
8445 TREE_OPERAND (arg1, 1));
8447 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8448 into (A OR B).
8449 For sequence point consistency, we need to check for trapping,
8450 and side-effects. */
8451 else if (code == icode && simple_operand_p_2 (arg0)
8452 && simple_operand_p_2 (arg1))
8453 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8456 return NULL_TREE;
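/* Editorial aside (illustrative, not part of the original source):
   when LOGICAL_OP_NON_SHORT_CIRCUIT holds, the block above turns,
   for instance, (a != 0 && b != 0) && c != 0 into
   a != 0 && ((b != 0) & (c != 0)): the two inner leaves become a
   plain TRUTH_AND_EXPR, which targets with expensive branches can
   emit branch-free, while the outer short-circuit is preserved.
   This assumes both inner operands pass simple_operand_p_2, i.e.
   they neither trap nor have side-effects.  */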
8459 /* Fold a binary expression of code CODE and type TYPE with operands
8460 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8461 Return the folded expression if folding is successful. Otherwise,
8462 return NULL_TREE. */
8464 static tree
8465 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8467 enum tree_code compl_code;
8469 if (code == MIN_EXPR)
8470 compl_code = MAX_EXPR;
8471 else if (code == MAX_EXPR)
8472 compl_code = MIN_EXPR;
8473 else
8474 gcc_unreachable ();
8476 /* MIN (MAX (a, b), b) == b. */
8477 if (TREE_CODE (op0) == compl_code
8478 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8479 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8481 /* MIN (MAX (b, a), b) == b. */
8482 if (TREE_CODE (op0) == compl_code
8483 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8484 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8485 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8487 /* MIN (a, MAX (a, b)) == a. */
8488 if (TREE_CODE (op1) == compl_code
8489 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8490 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8491 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8493 /* MIN (a, MAX (b, a)) == a. */
8494 if (TREE_CODE (op1) == compl_code
8495 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8496 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8497 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8499 return NULL_TREE;
8502 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8503 by changing CODE to reduce the magnitude of constants involved in
8504 ARG0 of the comparison.
8505 Returns a canonicalized comparison tree if a simplification was
8506 possible, otherwise returns NULL_TREE.
8507 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8508 valid if signed overflow is undefined. */
8510 static tree
8511 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8512 tree arg0, tree arg1,
8513 bool *strict_overflow_p)
8515 enum tree_code code0 = TREE_CODE (arg0);
8516 tree t, cst0 = NULL_TREE;
8517 int sgn0;
8518 bool swap = false;
8520 /* Match A +- CST code arg1 and CST code arg1. We can change the
8521 first form only if overflow is undefined. */
8522 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8523 /* In principle pointers also have undefined overflow behavior,
8524 but that causes problems elsewhere. */
8525 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8526 && (code0 == MINUS_EXPR
8527 || code0 == PLUS_EXPR)
8528 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8529 || code0 == INTEGER_CST))
8530 return NULL_TREE;
8532 /* Identify the constant in arg0 and its sign. */
8533 if (code0 == INTEGER_CST)
8534 cst0 = arg0;
8535 else
8536 cst0 = TREE_OPERAND (arg0, 1);
8537 sgn0 = tree_int_cst_sgn (cst0);
8539 /* Overflowed constants and zero will cause problems. */
8540 if (integer_zerop (cst0)
8541 || TREE_OVERFLOW (cst0))
8542 return NULL_TREE;
8544 /* See if we can reduce the magnitude of the constant in
8545 arg0 by changing the comparison code. */
8546 if (code0 == INTEGER_CST)
8548 /* CST <= arg1 -> CST-1 < arg1. */
8549 if (code == LE_EXPR && sgn0 == 1)
8550 code = LT_EXPR;
8551 /* -CST < arg1 -> -CST-1 <= arg1. */
8552 else if (code == LT_EXPR && sgn0 == -1)
8553 code = LE_EXPR;
8554 /* CST > arg1 -> CST-1 >= arg1. */
8555 else if (code == GT_EXPR && sgn0 == 1)
8556 code = GE_EXPR;
8557 /* -CST >= arg1 -> -CST-1 > arg1. */
8558 else if (code == GE_EXPR && sgn0 == -1)
8559 code = GT_EXPR;
8560 else
8561 return NULL_TREE;
8562 /* arg1 code' CST' might be more canonical. */
8563 swap = true;
8565 else
8567 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8568 if (code == LT_EXPR
8569 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8570 code = LE_EXPR;
8571 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8572 else if (code == GT_EXPR
8573 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8574 code = GE_EXPR;
8575 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8576 else if (code == LE_EXPR
8577 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8578 code = LT_EXPR;
8579 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8580 else if (code == GE_EXPR
8581 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8582 code = GT_EXPR;
8583 else
8584 return NULL_TREE;
8585 *strict_overflow_p = true;
8588 /* Now build the constant reduced in magnitude. But not if that
8589 would produce one outside of its type's range. */
8590 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8591 && ((sgn0 == 1
8592 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8593 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8594 || (sgn0 == -1
8595 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8596 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8597 /* We cannot swap the comparison here as that would cause us to
8598 endlessly recurse. */
8599 return NULL_TREE;
8601 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8602 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8603 if (code0 != INTEGER_CST)
8604 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8605 t = fold_convert (TREE_TYPE (arg1), t);
8607 /* If swapping might yield a more canonical form, do so. */
8608 if (swap)
8609 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8610 else
8611 return fold_build2_loc (loc, code, type, t, arg1);
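/* Editorial aside (illustrative, not part of the original source):
   assuming signed overflow is undefined, the rewrites above turn
   e.g.  a + 2 > b  into  a + 1 >= b,  and  3 <= b  into  2 < b,
   each step shaving one off the magnitude of the constant; repeated
   application drives comparisons towards a canonical form with the
   smallest possible constant.  */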
8614 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8615 overflow further. Try to decrease the magnitude of constants involved
8616 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8617 and put sole constants at the second argument position.
8618 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8620 static tree
8621 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8622 tree arg0, tree arg1)
8624 tree t;
8625 bool strict_overflow_p;
8626 const char * const warnmsg = G_("assuming signed overflow does not occur "
8627 "when reducing constant in comparison");
8629 /* Try canonicalization by simplifying arg0. */
8630 strict_overflow_p = false;
8631 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8632 &strict_overflow_p);
8633 if (t)
8635 if (strict_overflow_p)
8636 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8637 return t;
8640 /* Try canonicalization by simplifying arg1 using the swapped
8641 comparison. */
8642 code = swap_tree_comparison (code);
8643 strict_overflow_p = false;
8644 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8645 &strict_overflow_p);
8646 if (t && strict_overflow_p)
8647 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8648 return t;
8651 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8652 space. This is used to avoid issuing overflow warnings for
8653 expressions like &p->x which cannot wrap. */
8655 static bool
8656 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8658 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8659 return true;
8661 if (bitpos < 0)
8662 return true;
8664 wide_int wi_offset;
8665 int precision = TYPE_PRECISION (TREE_TYPE (base));
8666 if (offset == NULL_TREE)
8667 wi_offset = wi::zero (precision);
8668 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8669 return true;
8670 else
8671 wi_offset = offset;
8673 bool overflow;
8674 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8675 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8676 if (overflow)
8677 return true;
8679 if (!wi::fits_uhwi_p (total))
8680 return true;
8682 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8683 if (size <= 0)
8684 return true;
8686 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8687 array. */
8688 if (TREE_CODE (base) == ADDR_EXPR)
8690 HOST_WIDE_INT base_size;
8692 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8693 if (base_size > 0 && size < base_size)
8694 size = base_size;
8697 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
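/* Editorial aside (illustrative, not part of the original source):
   for &p->x the offset is a small in-bounds constant, so the total
   computed above stays within the object size and the function
   returns false, i.e. the address cannot wrap.  A byte offset that
   overflows the unsigned sum, or exceeds the (possibly enlarged)
   object size, conservatively reports true.  */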
8700 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8701 kind INTEGER_CST. This makes sure to properly sign-extend the
8702 constant. */
8704 static HOST_WIDE_INT
8705 size_low_cst (const_tree t)
8707 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8708 int prec = TYPE_PRECISION (TREE_TYPE (t));
8709 if (prec < HOST_BITS_PER_WIDE_INT)
8710 return sext_hwi (w, prec);
8711 return w;
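/* Editorial aside (illustrative, not part of the original source):
   with a 64-bit HOST_WIDE_INT and a 32-bit sizetype, the element
   0xffffffff is sign-extended to (HOST_WIDE_INT) -1, so callers see
   a negative offset rather than a huge positive one.  */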
8714 /* Subroutine of fold_binary. This routine performs all of the
8715 transformations that are common to the equality/inequality
8716 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8717 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8718 fold_binary should call fold_binary. Fold a comparison with
8719 tree code CODE and type TYPE with operands OP0 and OP1. Return
8720 the folded comparison or NULL_TREE. */
8722 static tree
8723 fold_comparison (location_t loc, enum tree_code code, tree type,
8724 tree op0, tree op1)
8726 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8727 tree arg0, arg1, tem;
8729 arg0 = op0;
8730 arg1 = op1;
8732 STRIP_SIGN_NOPS (arg0);
8733 STRIP_SIGN_NOPS (arg1);
8735 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8736 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8737 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8738 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8739 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8740 && TREE_CODE (arg1) == INTEGER_CST
8741 && !TREE_OVERFLOW (arg1))
8743 const enum tree_code
8744 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8745 tree const1 = TREE_OPERAND (arg0, 1);
8746 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8747 tree variable = TREE_OPERAND (arg0, 0);
8748 tree new_const = int_const_binop (reverse_op, const2, const1);
8750 /* If the constant operation overflowed this can be
8751 simplified as a comparison against INT_MAX/INT_MIN. */
8752 if (TREE_OVERFLOW (new_const)
8753 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8755 int const1_sgn = tree_int_cst_sgn (const1);
8756 enum tree_code code2 = code;
8758 /* Get the sign of the constant on the lhs if the
8759 operation were VARIABLE + CONST1. */
8760 if (TREE_CODE (arg0) == MINUS_EXPR)
8761 const1_sgn = -const1_sgn;
8763 /* The sign of the constant determines if we overflowed
8764 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8765 Canonicalize to the INT_MIN overflow by swapping the comparison
8766 if necessary. */
8767 if (const1_sgn == -1)
8768 code2 = swap_tree_comparison (code);
8770 /* We now can look at the canonicalized case
8771 VARIABLE + 1 CODE2 INT_MIN
8772 and decide on the result. */
8773 switch (code2)
8775 case EQ_EXPR:
8776 case LT_EXPR:
8777 case LE_EXPR:
8778 return
8779 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8781 case NE_EXPR:
8782 case GE_EXPR:
8783 case GT_EXPR:
8784 return
8785 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8787 default:
8788 gcc_unreachable ();
8791 else
8793 if (!equality_code)
8794 fold_overflow_warning ("assuming signed overflow does not occur "
8795 "when changing X +- C1 cmp C2 to "
8796 "X cmp C2 -+ C1",
8797 WARN_STRICT_OVERFLOW_COMPARISON);
8798 return fold_build2_loc (loc, code, type, variable, new_const);
8802 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8803 if (TREE_CODE (arg0) == MINUS_EXPR
8804 && equality_code
8805 && integer_zerop (arg1))
8807 /* ??? The transformation is valid for the other operators if overflow
8808 is undefined for the type, but performing it here badly interacts
8809 with the transformation in fold_cond_expr_with_comparison which
8810 attempts to synthesize ABS_EXPR. */
8811 if (!equality_code)
8812 fold_overflow_warning ("assuming signed overflow does not occur "
8813 "when changing X - Y cmp 0 to X cmp Y",
8814 WARN_STRICT_OVERFLOW_COMPARISON);
8815 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8816 TREE_OPERAND (arg0, 1));
8819 /* For comparisons of pointers we can decompose it to a compile time
8820 comparison of the base objects and the offsets into the object.
8821 This requires at least one operand being an ADDR_EXPR or a
8822 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8823 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8824 && (TREE_CODE (arg0) == ADDR_EXPR
8825 || TREE_CODE (arg1) == ADDR_EXPR
8826 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8827 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8829 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8830 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8831 enum machine_mode mode;
8832 int volatilep, unsignedp;
8833 bool indirect_base0 = false, indirect_base1 = false;
8835 /* Get base and offset for the access. Strip ADDR_EXPR for
8836 get_inner_reference, but put it back by stripping INDIRECT_REF
8837 off the base object if possible. indirect_baseN will be true
8838 if baseN is not an address but refers to the object itself. */
8839 base0 = arg0;
8840 if (TREE_CODE (arg0) == ADDR_EXPR)
8842 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8843 &bitsize, &bitpos0, &offset0, &mode,
8844 &unsignedp, &volatilep, false);
8845 if (TREE_CODE (base0) == INDIRECT_REF)
8846 base0 = TREE_OPERAND (base0, 0);
8847 else
8848 indirect_base0 = true;
8850 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8852 base0 = TREE_OPERAND (arg0, 0);
8853 STRIP_SIGN_NOPS (base0);
8854 if (TREE_CODE (base0) == ADDR_EXPR)
8856 base0 = TREE_OPERAND (base0, 0);
8857 indirect_base0 = true;
8859 offset0 = TREE_OPERAND (arg0, 1);
8860 if (tree_fits_shwi_p (offset0))
8862 HOST_WIDE_INT off = size_low_cst (offset0);
8863 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8864 * BITS_PER_UNIT)
8865 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8867 bitpos0 = off * BITS_PER_UNIT;
8868 offset0 = NULL_TREE;
8873 base1 = arg1;
8874 if (TREE_CODE (arg1) == ADDR_EXPR)
8876 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8877 &bitsize, &bitpos1, &offset1, &mode,
8878 &unsignedp, &volatilep, false);
8879 if (TREE_CODE (base1) == INDIRECT_REF)
8880 base1 = TREE_OPERAND (base1, 0);
8881 else
8882 indirect_base1 = true;
8884 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8886 base1 = TREE_OPERAND (arg1, 0);
8887 STRIP_SIGN_NOPS (base1);
8888 if (TREE_CODE (base1) == ADDR_EXPR)
8890 base1 = TREE_OPERAND (base1, 0);
8891 indirect_base1 = true;
8893 offset1 = TREE_OPERAND (arg1, 1);
8894 if (tree_fits_shwi_p (offset1))
8896 HOST_WIDE_INT off = size_low_cst (offset1);
8897 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8898 * BITS_PER_UNIT)
8899 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8901 bitpos1 = off * BITS_PER_UNIT;
8902 offset1 = NULL_TREE;
8907 /* A local variable can never be pointed to by
8908 the default SSA name of an incoming parameter. */
8909 if ((TREE_CODE (arg0) == ADDR_EXPR
8910 && indirect_base0
8911 && TREE_CODE (base0) == VAR_DECL
8912 && auto_var_in_fn_p (base0, current_function_decl)
8913 && !indirect_base1
8914 && TREE_CODE (base1) == SSA_NAME
8915 && SSA_NAME_IS_DEFAULT_DEF (base1)
8916 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8917 || (TREE_CODE (arg1) == ADDR_EXPR
8918 && indirect_base1
8919 && TREE_CODE (base1) == VAR_DECL
8920 && auto_var_in_fn_p (base1, current_function_decl)
8921 && !indirect_base0
8922 && TREE_CODE (base0) == SSA_NAME
8923 && SSA_NAME_IS_DEFAULT_DEF (base0)
8924 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8926 if (code == NE_EXPR)
8927 return constant_boolean_node (1, type);
8928 else if (code == EQ_EXPR)
8929 return constant_boolean_node (0, type);
8931 /* If we have equivalent bases we might be able to simplify. */
8932 else if (indirect_base0 == indirect_base1
8933 && operand_equal_p (base0, base1, 0))
8935 /* We can fold this expression to a constant if the non-constant
8936 offset parts are equal. */
8937 if ((offset0 == offset1
8938 || (offset0 && offset1
8939 && operand_equal_p (offset0, offset1, 0)))
8940 && (code == EQ_EXPR
8941 || code == NE_EXPR
8942 || (indirect_base0 && DECL_P (base0))
8943 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8946 if (!equality_code
8947 && bitpos0 != bitpos1
8948 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8949 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8950 fold_overflow_warning (("assuming pointer wraparound does not "
8951 "occur when comparing P +- C1 with "
8952 "P +- C2"),
8953 WARN_STRICT_OVERFLOW_CONDITIONAL);
8955 switch (code)
8957 case EQ_EXPR:
8958 return constant_boolean_node (bitpos0 == bitpos1, type);
8959 case NE_EXPR:
8960 return constant_boolean_node (bitpos0 != bitpos1, type);
8961 case LT_EXPR:
8962 return constant_boolean_node (bitpos0 < bitpos1, type);
8963 case LE_EXPR:
8964 return constant_boolean_node (bitpos0 <= bitpos1, type);
8965 case GE_EXPR:
8966 return constant_boolean_node (bitpos0 >= bitpos1, type);
8967 case GT_EXPR:
8968 return constant_boolean_node (bitpos0 > bitpos1, type);
8969 default:;
8972 /* We can simplify the comparison to a comparison of the variable
8973 offset parts if the constant offset parts are equal.
8974 Be careful to use signed sizetype here because otherwise we
8975 mess with array offsets in the wrong way. This is possible
8976 because pointer arithmetic is required to remain within an
8977 object and overflow on pointer differences is undefined as of
8978 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8979 else if (bitpos0 == bitpos1
8980 && (equality_code
8981 || (indirect_base0 && DECL_P (base0))
8982 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8984 /* By converting to signed sizetype we cover middle-end pointer
8985 arithmetic which operates on unsigned pointer types of size
8986 type size and ARRAY_REF offsets which are properly sign or
8987 zero extended from their type in case it is narrower than
8988 sizetype. */
8989 if (offset0 == NULL_TREE)
8990 offset0 = build_int_cst (ssizetype, 0);
8991 else
8992 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8993 if (offset1 == NULL_TREE)
8994 offset1 = build_int_cst (ssizetype, 0);
8995 else
8996 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8998 if (!equality_code
8999 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9000 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9001 fold_overflow_warning (("assuming pointer wraparound does not "
9002 "occur when comparing P +- C1 with "
9003 "P +- C2"),
9004 WARN_STRICT_OVERFLOW_COMPARISON);
9006 return fold_build2_loc (loc, code, type, offset0, offset1);
9009 /* For non-equal bases we can simplify if they are addresses
9010 of local binding decls or constants. */
9011 else if (indirect_base0 && indirect_base1
9012 /* We know that !operand_equal_p (base0, base1, 0)
9013 because the if condition was false. But make
9014 sure the two decls are not the same. */
9015 && base0 != base1
9016 && TREE_CODE (arg0) == ADDR_EXPR
9017 && TREE_CODE (arg1) == ADDR_EXPR
9018 && (((TREE_CODE (base0) == VAR_DECL
9019 || TREE_CODE (base0) == PARM_DECL)
9020 && (targetm.binds_local_p (base0)
9021 || CONSTANT_CLASS_P (base1)))
9022 || CONSTANT_CLASS_P (base0))
9023 && (((TREE_CODE (base1) == VAR_DECL
9024 || TREE_CODE (base1) == PARM_DECL)
9025 && (targetm.binds_local_p (base1)
9026 || CONSTANT_CLASS_P (base0)))
9027 || CONSTANT_CLASS_P (base1)))
9029 if (code == EQ_EXPR)
9030 return omit_two_operands_loc (loc, type, boolean_false_node,
9031 arg0, arg1);
9032 else if (code == NE_EXPR)
9033 return omit_two_operands_loc (loc, type, boolean_true_node,
9034 arg0, arg1);
9036 /* For equal offsets we can simplify to a comparison of the
9037 base addresses. */
9038 else if (bitpos0 == bitpos1
9039 && (indirect_base0
9040 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9041 && (indirect_base1
9042 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9043 && ((offset0 == offset1)
9044 || (offset0 && offset1
9045 && operand_equal_p (offset0, offset1, 0))))
9047 if (indirect_base0)
9048 base0 = build_fold_addr_expr_loc (loc, base0);
9049 if (indirect_base1)
9050 base1 = build_fold_addr_expr_loc (loc, base1);
9051 return fold_build2_loc (loc, code, type, base0, base1);
9055 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9056 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9057 the resulting offset is smaller in absolute value than the
9058 original one and has the same sign. */
9059 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9060 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9061 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9062 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9063 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9064 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9065 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9067 tree const1 = TREE_OPERAND (arg0, 1);
9068 tree const2 = TREE_OPERAND (arg1, 1);
9069 tree variable1 = TREE_OPERAND (arg0, 0);
9070 tree variable2 = TREE_OPERAND (arg1, 0);
9071 tree cst;
9072 const char * const warnmsg = G_("assuming signed overflow does not "
9073 "occur when combining constants around "
9074 "a comparison");
9076 /* Put the constant on the side where it doesn't overflow and is
9077 of lower absolute value and of the same sign as before. */
9078 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9079 ? MINUS_EXPR : PLUS_EXPR,
9080 const2, const1);
9081 if (!TREE_OVERFLOW (cst)
9082 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9083 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9085 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9086 return fold_build2_loc (loc, code, type,
9087 variable1,
9088 fold_build2_loc (loc, TREE_CODE (arg1),
9089 TREE_TYPE (arg1),
9090 variable2, cst));
9093 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9094 ? MINUS_EXPR : PLUS_EXPR,
9095 const1, const2);
9096 if (!TREE_OVERFLOW (cst)
9097 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9098 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9100 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9101 return fold_build2_loc (loc, code, type,
9102 fold_build2_loc (loc, TREE_CODE (arg0),
9103 TREE_TYPE (arg0),
9104 variable1, cst),
9105 variable2);
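      /* E.g. "x + 10 < y + 3" folds to "x + 7 < y": the combined constant 7
	 has the same sign as 10 and smaller magnitude, so no new overflow
	 is introduced.  */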
9109 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9110 signed arithmetic case. That form is created by the compiler
9111 often enough for folding it to be of value. One example is in
9112 computing loop trip counts after Operator Strength Reduction. */
9113 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9114 && TREE_CODE (arg0) == MULT_EXPR
9115 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9116 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9117 && integer_zerop (arg1))
9119 tree const1 = TREE_OPERAND (arg0, 1);
9120 tree const2 = arg1; /* zero */
9121 tree variable1 = TREE_OPERAND (arg0, 0);
9122 enum tree_code cmp_code = code;
9124 /* Handle unfolded multiplication by zero. */
9125 if (integer_zerop (const1))
9126 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9128 fold_overflow_warning (("assuming signed overflow does not occur when "
9129 "eliminating multiplication in comparison "
9130 "with zero"),
9131 WARN_STRICT_OVERFLOW_COMPARISON);
9133 /* If const1 is negative we swap the sense of the comparison. */
9134 if (tree_int_cst_sgn (const1) < 0)
9135 cmp_code = swap_tree_comparison (cmp_code);
9137 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
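      /* E.g. "x * 4 > 0" folds to "x > 0", and "x * -2 > 0" folds to
	 "x < 0", the comparison sense being swapped for a negative
	 factor.  */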
9140 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9141 if (tem)
9142 return tem;
9144 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9146 tree targ0 = strip_float_extensions (arg0);
9147 tree targ1 = strip_float_extensions (arg1);
9148 tree newtype = TREE_TYPE (targ0);
9150 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9151 newtype = TREE_TYPE (targ1);
9153 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9154 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9155 return fold_build2_loc (loc, code, type,
9156 fold_convert_loc (loc, newtype, targ0),
9157 fold_convert_loc (loc, newtype, targ1));
9159 /* (-a) CMP (-b) -> b CMP a */
9160 if (TREE_CODE (arg0) == NEGATE_EXPR
9161 && TREE_CODE (arg1) == NEGATE_EXPR)
9162 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9163 TREE_OPERAND (arg0, 0));
9165 if (TREE_CODE (arg1) == REAL_CST)
9167 REAL_VALUE_TYPE cst;
9168 cst = TREE_REAL_CST (arg1);
9170 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9171 if (TREE_CODE (arg0) == NEGATE_EXPR)
9172 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9173 TREE_OPERAND (arg0, 0),
9174 build_real (TREE_TYPE (arg1),
9175 real_value_negate (&cst)));
9177 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9178 /* a CMP (-0) -> a CMP 0 */
9179 if (REAL_VALUE_MINUS_ZERO (cst))
9180 return fold_build2_loc (loc, code, type, arg0,
9181 build_real (TREE_TYPE (arg1), dconst0));
9183 /* x != NaN is always true, other ops are always false. */
9184 if (REAL_VALUE_ISNAN (cst)
9185 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9187 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9188 return omit_one_operand_loc (loc, type, tem, arg0);
9191 /* Fold comparisons against infinity. */
9192 if (REAL_VALUE_ISINF (cst)
9193 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9195 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9196 if (tem != NULL_TREE)
9197 return tem;
9201 /* If this is a comparison of a real constant with a PLUS_EXPR
9202 or a MINUS_EXPR of a real constant, we can convert it into a
9203 comparison with a revised real constant as long as no overflow
9204 occurs when unsafe_math_optimizations are enabled. */
9205 if (flag_unsafe_math_optimizations
9206 && TREE_CODE (arg1) == REAL_CST
9207 && (TREE_CODE (arg0) == PLUS_EXPR
9208 || TREE_CODE (arg0) == MINUS_EXPR)
9209 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9210 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9211 ? MINUS_EXPR : PLUS_EXPR,
9212 arg1, TREE_OPERAND (arg0, 1)))
9213 && !TREE_OVERFLOW (tem))
9214 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
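	 /* E.g. "x + 2.0 > 5.0" folds to "x > 3.0" under these flags.  */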
9216 /* Likewise, we can simplify a comparison of a real constant with
9217 a MINUS_EXPR whose first operand is also a real constant, i.e.
9218 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9219 floating-point types only if -fassociative-math is set. */
9220 if (flag_associative_math
9221 && TREE_CODE (arg1) == REAL_CST
9222 && TREE_CODE (arg0) == MINUS_EXPR
9223 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9224 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9225 arg1))
9226 && !TREE_OVERFLOW (tem))
9227 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9228 TREE_OPERAND (arg0, 1), tem);
9230 /* Fold comparisons against built-in math functions. */
9231 if (TREE_CODE (arg1) == REAL_CST
9232 && flag_unsafe_math_optimizations
9233 && ! flag_errno_math)
9235 enum built_in_function fcode = builtin_mathfn_code (arg0);
9237 if (fcode != END_BUILTINS)
9239 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9240 if (tem != NULL_TREE)
9241 return tem;
9246 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9247 && CONVERT_EXPR_P (arg0))
9249 /* If we are widening one operand of an integer comparison,
9250 see if the other operand is similarly being widened. Perhaps we
9251 can do the comparison in the narrower type. */
9252 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9253 if (tem)
9254 return tem;
9256 /* Or if we are changing signedness. */
9257 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9258 if (tem)
9259 return tem;
9262 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9263 constant, we can simplify it. */
9264 if (TREE_CODE (arg1) == INTEGER_CST
9265 && (TREE_CODE (arg0) == MIN_EXPR
9266 || TREE_CODE (arg0) == MAX_EXPR)
9267 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9269 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9270 if (tem)
9271 return tem;
9274 /* Simplify comparison of something with itself. (For IEEE
9275 floating-point, we can only do some of these simplifications.) */
9276 if (operand_equal_p (arg0, arg1, 0))
9278 switch (code)
9280 case EQ_EXPR:
9281 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9282 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9283 return constant_boolean_node (1, type);
9284 break;
9286 case GE_EXPR:
9287 case LE_EXPR:
9288 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9289 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9290 return constant_boolean_node (1, type);
9291 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9293 case NE_EXPR:
9294 /* For NE, we can only do this simplification if integer
9295 or we don't honor IEEE floating point NaNs. */
9296 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9297 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9298 break;
9299 /* ... fall through ... */
9300 case GT_EXPR:
9301 case LT_EXPR:
9302 return constant_boolean_node (0, type);
9303 default:
9304 gcc_unreachable ();
9308 /* If we are comparing an expression that just has comparisons
9309 of two integer values, arithmetic expressions of those comparisons,
9310 and constants, we can simplify it. There are only three cases
9311 to check: the two values can either be equal, the first can be
9312 greater, or the second can be greater. Fold the expression for
9313 those three values. Since each value must be 0 or 1, we have
9314 eight possibilities, each of which corresponds to the constant 0
9315 or 1 or one of the six possible comparisons.
9317 This handles common cases like (a > b) == 0 but also handles
9318 expressions like ((x > y) - (y > x)) > 0, which supposedly
9319 occur in macroized code. */
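    /* Worked instance: for ((x > y) - (y > x)) > 0 the three trial
       evaluations below yield 1, 0 and 0, giving the 3-bit mask 4, so the
       expression folds to x > y.  */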
9321 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9323 tree cval1 = 0, cval2 = 0;
9324 int save_p = 0;
9326 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9327 /* Don't handle degenerate cases here; they should already
9328 have been handled anyway. */
9329 && cval1 != 0 && cval2 != 0
9330 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9331 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9332 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9333 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9334 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9335 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9336 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9338 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9339 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9341 /* We can't just pass T to eval_subst in case cval1 or cval2
9342 was the same as ARG1. */
9344 tree high_result
9345 = fold_build2_loc (loc, code, type,
9346 eval_subst (loc, arg0, cval1, maxval,
9347 cval2, minval),
9348 arg1);
9349 tree equal_result
9350 = fold_build2_loc (loc, code, type,
9351 eval_subst (loc, arg0, cval1, maxval,
9352 cval2, maxval),
9353 arg1);
9354 tree low_result
9355 = fold_build2_loc (loc, code, type,
9356 eval_subst (loc, arg0, cval1, minval,
9357 cval2, maxval),
9358 arg1);
9360 /* All three of these results should be 0 or 1. Confirm they are.
9361 Then use those values to select the proper code to use. */
9363 if (TREE_CODE (high_result) == INTEGER_CST
9364 && TREE_CODE (equal_result) == INTEGER_CST
9365 && TREE_CODE (low_result) == INTEGER_CST)
9367 /* Make a 3-bit mask with the high-order bit being the
9368 value for `>', the next for `=', and the low for `<'. */
9369 switch ((integer_onep (high_result) * 4)
9370 + (integer_onep (equal_result) * 2)
9371 + integer_onep (low_result))
9373 case 0:
9374 /* Always false. */
9375 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9376 case 1:
9377 code = LT_EXPR;
9378 break;
9379 case 2:
9380 code = EQ_EXPR;
9381 break;
9382 case 3:
9383 code = LE_EXPR;
9384 break;
9385 case 4:
9386 code = GT_EXPR;
9387 break;
9388 case 5:
9389 code = NE_EXPR;
9390 break;
9391 case 6:
9392 code = GE_EXPR;
9393 break;
9394 case 7:
9395 /* Always true. */
9396 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9399 if (save_p)
9401 tem = save_expr (build2 (code, type, cval1, cval2));
9402 SET_EXPR_LOCATION (tem, loc);
9403 return tem;
9405 return fold_build2_loc (loc, code, type, cval1, cval2);
9410 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9411 into a single range test. */
9412 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9413 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9414 && TREE_CODE (arg1) == INTEGER_CST
9415 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9416 && !integer_zerop (TREE_OPERAND (arg0, 1))
9417 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9418 && !TREE_OVERFLOW (arg1))
9420 tem = fold_div_compare (loc, code, type, arg0, arg1);
9421 if (tem != NULL_TREE)
9422 return tem;
9425 /* Fold ~X op ~Y as Y op X. */
9426 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9427 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9429 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9430 return fold_build2_loc (loc, code, type,
9431 fold_convert_loc (loc, cmp_type,
9432 TREE_OPERAND (arg1, 0)),
9433 TREE_OPERAND (arg0, 0));
9436 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9437 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9438 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9440 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9441 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9442 TREE_OPERAND (arg0, 0),
9443 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9444 fold_convert_loc (loc, cmp_type, arg1)));
9447 return NULL_TREE;
9451 /* Subroutine of fold_binary. Optimize complex multiplications of the
9452 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9453 argument EXPR represents the expression "z" of type TYPE. */
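   /* The identity used: (a + bi) * (a - bi) == a*a + b*b, with a zero
      imaginary part.  */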
9455 static tree
9456 fold_mult_zconjz (location_t loc, tree type, tree expr)
9458 tree itype = TREE_TYPE (type);
9459 tree rpart, ipart, tem;
9461 if (TREE_CODE (expr) == COMPLEX_EXPR)
9463 rpart = TREE_OPERAND (expr, 0);
9464 ipart = TREE_OPERAND (expr, 1);
9466 else if (TREE_CODE (expr) == COMPLEX_CST)
9468 rpart = TREE_REALPART (expr);
9469 ipart = TREE_IMAGPART (expr);
9471 else
9473 expr = save_expr (expr);
9474 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9475 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9478 rpart = save_expr (rpart);
9479 ipart = save_expr (ipart);
9480 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9481 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9482 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9483 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9484 build_zero_cst (itype));
9488 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9489 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9490 guarantees that P and N have the same least significant log2(M) bits.
9491 N is not otherwise constrained. In particular, N is not normalized to
9492 0 <= N < M as is common. In general, the precise value of P is unknown.
9493 M is chosen as large as possible such that constant N can be determined.
9495 Returns M and sets *RESIDUE to N.
9497 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9498 account. This is not always possible due to PR 35705.
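   /* E.g. for a pointer known to be 8-byte aligned plus the constant 3,
      this returns M = 8 and sets *RESIDUE to 3: P is always 3 modulo 8.  */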
9501 static unsigned HOST_WIDE_INT
9502 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9503 bool allow_func_align)
9505 enum tree_code code;
9507 *residue = 0;
9509 code = TREE_CODE (expr);
9510 if (code == ADDR_EXPR)
9512 unsigned int bitalign;
9513 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9514 *residue /= BITS_PER_UNIT;
9515 return bitalign / BITS_PER_UNIT;
9517 else if (code == POINTER_PLUS_EXPR)
9519 tree op0, op1;
9520 unsigned HOST_WIDE_INT modulus;
9521 enum tree_code inner_code;
9523 op0 = TREE_OPERAND (expr, 0);
9524 STRIP_NOPS (op0);
9525 modulus = get_pointer_modulus_and_residue (op0, residue,
9526 allow_func_align);
9528 op1 = TREE_OPERAND (expr, 1);
9529 STRIP_NOPS (op1);
9530 inner_code = TREE_CODE (op1);
9531 if (inner_code == INTEGER_CST)
9533 *residue += TREE_INT_CST_LOW (op1);
9534 return modulus;
9536 else if (inner_code == MULT_EXPR)
9538 op1 = TREE_OPERAND (op1, 1);
9539 if (TREE_CODE (op1) == INTEGER_CST)
9541 unsigned HOST_WIDE_INT align;
9543 /* Compute the greatest power-of-2 divisor of op1. */
9544 align = TREE_INT_CST_LOW (op1);
9545 align &= -align;
9547 /* If align is non-zero and less than *modulus, replace
9548 *modulus with align.  If align is 0, then either op1 is 0
9549 or the greatest power-of-2 divisor of op1 doesn't fit in an
9550 unsigned HOST_WIDE_INT. In either case, no additional
9551 constraint is imposed. */
9552 if (align)
9553 modulus = MIN (modulus, align);
9555 return modulus;
9560 /* If we get here, we were unable to determine anything useful about the
9561 expression. */
9562 return 1;
9565 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9566 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9568 static bool
9569 vec_cst_ctor_to_array (tree arg, tree *elts)
9571 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9573 if (TREE_CODE (arg) == VECTOR_CST)
9575 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9576 elts[i] = VECTOR_CST_ELT (arg, i);
9578 else if (TREE_CODE (arg) == CONSTRUCTOR)
9580 constructor_elt *elt;
9582 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9583 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9584 return false;
9585 else
9586 elts[i] = elt->value;
9588 else
9589 return false;
9590 for (; i < nelts; i++)
9591 elts[i]
9592 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9593 return true;
9596 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9597 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9598 NULL_TREE otherwise. */
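   /* E.g. with four-element vectors, SEL = {0, 5, 2, 7} selects elements
      0 and 2 of ARG0 and elements 1 and 3 of ARG1.  */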
9600 static tree
9601 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9603 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9604 tree *elts;
9605 bool need_ctor = false;
9607 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9608 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9609 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9610 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9611 return NULL_TREE;
9613 elts = XALLOCAVEC (tree, nelts * 3);
9614 if (!vec_cst_ctor_to_array (arg0, elts)
9615 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9616 return NULL_TREE;
9618 for (i = 0; i < nelts; i++)
9620 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9621 need_ctor = true;
9622 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9625 if (need_ctor)
9627 vec<constructor_elt, va_gc> *v;
9628 vec_alloc (v, nelts);
9629 for (i = 0; i < nelts; i++)
9630 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9631 return build_constructor (type, v);
9633 else
9634 return build_vector (type, &elts[2 * nelts]);
9637 /* Try to fold a pointer difference of type TYPE between two address expressions of
9638 array references AREF0 and AREF1 using location LOC. Return a
9639 simplified expression for the difference or NULL_TREE. */
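   /* E.g. "&a[i] - &a[j]" simplifies to "(i - j) * sizeof (a[0])" once the
      common base a is recognized.  */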
9641 static tree
9642 fold_addr_of_array_ref_difference (location_t loc, tree type,
9643 tree aref0, tree aref1)
9645 tree base0 = TREE_OPERAND (aref0, 0);
9646 tree base1 = TREE_OPERAND (aref1, 0);
9647 tree base_offset = build_int_cst (type, 0);
9649 /* If the bases are array references as well, recurse. If the bases
9650 are pointer indirections compute the difference of the pointers.
9651 If the bases are equal, we are set. */
9652 if ((TREE_CODE (base0) == ARRAY_REF
9653 && TREE_CODE (base1) == ARRAY_REF
9654 && (base_offset
9655 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9656 || (INDIRECT_REF_P (base0)
9657 && INDIRECT_REF_P (base1)
9658 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9659 TREE_OPERAND (base0, 0),
9660 TREE_OPERAND (base1, 0))))
9661 || operand_equal_p (base0, base1, 0))
9663 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9664 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9665 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9666 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9667 return fold_build2_loc (loc, PLUS_EXPR, type,
9668 base_offset,
9669 fold_build2_loc (loc, MULT_EXPR, type,
9670 diff, esz));
9672 return NULL_TREE;
9675 /* If the real or vector real constant CST of type TYPE has an exact
9676 inverse, return it, else return NULL. */
9678 static tree
9679 exact_inverse (tree type, tree cst)
9681 REAL_VALUE_TYPE r;
9682 tree unit_type, *elts;
9683 enum machine_mode mode;
9684 unsigned vec_nelts, i;
9686 switch (TREE_CODE (cst))
9688 case REAL_CST:
9689 r = TREE_REAL_CST (cst);
9691 if (exact_real_inverse (TYPE_MODE (type), &r))
9692 return build_real (type, r);
9694 return NULL_TREE;
9696 case VECTOR_CST:
9697 vec_nelts = VECTOR_CST_NELTS (cst);
9698 elts = XALLOCAVEC (tree, vec_nelts);
9699 unit_type = TREE_TYPE (type);
9700 mode = TYPE_MODE (unit_type);
9702 for (i = 0; i < vec_nelts; i++)
9704 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9705 if (!exact_real_inverse (mode, &r))
9706 return NULL_TREE;
9707 elts[i] = build_real (unit_type, r);
9710 return build_vector (type, elts);
9712 default:
9713 return NULL_TREE;
9717 /* Mask out the tz least significant bits of X of type TYPE where
9718 tz is the number of trailing zeroes in Y. */
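/* E.g. if Y is 8 (three trailing zeroes), X is masked with ~7, clearing
   its three low bits.  */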
9719 static wide_int
9720 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9722 int tz = wi::ctz (y);
9723 if (tz > 0)
9724 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9725 return x;
9728 /* Return true when T is an address and is known to be nonzero.
9729 For floating point we further ensure that T is not denormal.
9730 Similar logic is present in nonzero_address in rtlanal.h.
9732 If the return value is based on the assumption that signed overflow
9733 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9734 change *STRICT_OVERFLOW_P. */
9736 static bool
9737 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9739 tree type = TREE_TYPE (t);
9740 enum tree_code code;
9742 /* Doing something useful for floating point would need more work. */
9743 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9744 return false;
9746 code = TREE_CODE (t);
9747 switch (TREE_CODE_CLASS (code))
9749 case tcc_unary:
9750 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9751 strict_overflow_p);
9752 case tcc_binary:
9753 case tcc_comparison:
9754 return tree_binary_nonzero_warnv_p (code, type,
9755 TREE_OPERAND (t, 0),
9756 TREE_OPERAND (t, 1),
9757 strict_overflow_p);
9758 case tcc_constant:
9759 case tcc_declaration:
9760 case tcc_reference:
9761 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9763 default:
9764 break;
9767 switch (code)
9769 case TRUTH_NOT_EXPR:
9770 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9771 strict_overflow_p);
9773 case TRUTH_AND_EXPR:
9774 case TRUTH_OR_EXPR:
9775 case TRUTH_XOR_EXPR:
9776 return tree_binary_nonzero_warnv_p (code, type,
9777 TREE_OPERAND (t, 0),
9778 TREE_OPERAND (t, 1),
9779 strict_overflow_p);
9781 case COND_EXPR:
9782 case CONSTRUCTOR:
9783 case OBJ_TYPE_REF:
9784 case ASSERT_EXPR:
9785 case ADDR_EXPR:
9786 case WITH_SIZE_EXPR:
9787 case SSA_NAME:
9788 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9790 case COMPOUND_EXPR:
9791 case MODIFY_EXPR:
9792 case BIND_EXPR:
9793 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9794 strict_overflow_p);
9796 case SAVE_EXPR:
9797 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9798 strict_overflow_p);
9800 case CALL_EXPR:
9802 tree fndecl = get_callee_fndecl (t);
9803 if (!fndecl) return false;
9804 if (flag_delete_null_pointer_checks && !flag_check_new
9805 && DECL_IS_OPERATOR_NEW (fndecl)
9806 && !TREE_NOTHROW (fndecl))
9807 return true;
9808 if (flag_delete_null_pointer_checks
9809 && lookup_attribute ("returns_nonnull",
9810 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9811 return true;
9812 return alloca_call_p (t);
9815 default:
9816 break;
9818 return false;
9821 /* Return true when T is an address and is known to be nonzero.
9822 Handle warnings about undefined signed overflow. */
9824 static bool
9825 tree_expr_nonzero_p (tree t)
9827 bool ret, strict_overflow_p;
9829 strict_overflow_p = false;
9830 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9831 if (strict_overflow_p)
9832 fold_overflow_warning (("assuming signed overflow does not occur when "
9833 "determining that expression is always "
9834 "non-zero"),
9835 WARN_STRICT_OVERFLOW_MISC);
9836 return ret;
9839 /* Fold a binary expression of code CODE and type TYPE with operands
9840 OP0 and OP1. LOC is the location of the resulting expression.
9841 Return the folded expression if folding is successful. Otherwise,
9842 return NULL_TREE. */
9844 tree
9845 fold_binary_loc (location_t loc,
9846 enum tree_code code, tree type, tree op0, tree op1)
9848 enum tree_code_class kind = TREE_CODE_CLASS (code);
9849 tree arg0, arg1, tem;
9850 tree t1 = NULL_TREE;
9851 bool strict_overflow_p;
9852 unsigned int prec;
9854 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9855 && TREE_CODE_LENGTH (code) == 2
9856 && op0 != NULL_TREE
9857 && op1 != NULL_TREE);
9859 arg0 = op0;
9860 arg1 = op1;
9862 /* Strip any conversions that don't change the mode. This is
9863 safe for every expression, except for a comparison expression
9864 because its signedness is derived from its operands. So, in
9865 the latter case, only strip conversions that don't change the
9866 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9867 preserved.
9869 Note that this is done as an internal manipulation within the
9870 constant folder, in order to find the simplest representation
9871 of the arguments so that their form can be studied. In any
9872 cases, the appropriate type conversions should be put back in
9873 the tree that will get out of the constant folder. */
9875 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9877 STRIP_SIGN_NOPS (arg0);
9878 STRIP_SIGN_NOPS (arg1);
9880 else
9882 STRIP_NOPS (arg0);
9883 STRIP_NOPS (arg1);
9886 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9887 constant but we can't do arithmetic on them. */
9888 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9889 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9890 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9891 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9892 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9893 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9894 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9896 if (kind == tcc_binary)
9898 /* Make sure type and arg0 have the same saturating flag. */
9899 gcc_assert (TYPE_SATURATING (type)
9900 == TYPE_SATURATING (TREE_TYPE (arg0)));
9901 tem = const_binop (code, arg0, arg1);
9903 else if (kind == tcc_comparison)
9904 tem = fold_relational_const (code, type, arg0, arg1);
9905 else
9906 tem = NULL_TREE;
9908 if (tem != NULL_TREE)
9910 if (TREE_TYPE (tem) != type)
9911 tem = fold_convert_loc (loc, type, tem);
9912 return tem;
9916 /* If this is a commutative operation, and ARG0 is a constant, move it
9917 to ARG1 to reduce the number of tests below. */
9918 if (commutative_tree_code (code)
9919 && tree_swap_operands_p (arg0, arg1, true))
9920 return fold_build2_loc (loc, code, type, op1, op0);
9922 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9923 to ARG1 to reduce the number of tests below. */
9924 if (kind == tcc_comparison
9925 && tree_swap_operands_p (arg0, arg1, true))
9926 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9928 tem = generic_simplify (loc, code, type, op0, op1);
9929 if (tem)
9930 return tem;
9932 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9934 First check for cases where an arithmetic operation is applied to a
9935 compound, conditional, or comparison operation. Push the arithmetic
9936 operation inside the compound or conditional to see if any folding
9937 can then be done. Convert comparison to conditional for this purpose.
9938 The also optimizes non-constant cases that used to be done in
9939 expand_expr.
9941 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9942 one of the operands is a comparison and the other is a comparison, a
9943 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9944 code below would make the expression more complex. Change it to a
9945 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9946 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
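     /* E.g. "(a < b) & (c < d)" becomes the TRUTH_AND_EXPR
	"(a < b) && (c < d)", and "(a < b) == (c < d)" becomes the
	inversion of the corresponding TRUTH_XOR_EXPR.  */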
9948 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9949 || code == EQ_EXPR || code == NE_EXPR)
9950 && TREE_CODE (type) != VECTOR_TYPE
9951 && ((truth_value_p (TREE_CODE (arg0))
9952 && (truth_value_p (TREE_CODE (arg1))
9953 || (TREE_CODE (arg1) == BIT_AND_EXPR
9954 && integer_onep (TREE_OPERAND (arg1, 1)))))
9955 || (truth_value_p (TREE_CODE (arg1))
9956 && (truth_value_p (TREE_CODE (arg0))
9957 || (TREE_CODE (arg0) == BIT_AND_EXPR
9958 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9960 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9961 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9962 : TRUTH_XOR_EXPR,
9963 boolean_type_node,
9964 fold_convert_loc (loc, boolean_type_node, arg0),
9965 fold_convert_loc (loc, boolean_type_node, arg1));
9967 if (code == EQ_EXPR)
9968 tem = invert_truthvalue_loc (loc, tem);
9970 return fold_convert_loc (loc, type, tem);
9973 if (TREE_CODE_CLASS (code) == tcc_binary
9974 || TREE_CODE_CLASS (code) == tcc_comparison)
9976 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9978 tem = fold_build2_loc (loc, code, type,
9979 fold_convert_loc (loc, TREE_TYPE (op0),
9980 TREE_OPERAND (arg0, 1)), op1);
9981 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9982 tem);
9984 if (TREE_CODE (arg1) == COMPOUND_EXPR
9985 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9987 tem = fold_build2_loc (loc, code, type, op0,
9988 fold_convert_loc (loc, TREE_TYPE (op1),
9989 TREE_OPERAND (arg1, 1)));
9990 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9991 tem);
9994 if (TREE_CODE (arg0) == COND_EXPR
9995 || TREE_CODE (arg0) == VEC_COND_EXPR
9996 || COMPARISON_CLASS_P (arg0))
9998 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9999 arg0, arg1,
10000 /*cond_first_p=*/1);
10001 if (tem != NULL_TREE)
10002 return tem;
10005 if (TREE_CODE (arg1) == COND_EXPR
10006 || TREE_CODE (arg1) == VEC_COND_EXPR
10007 || COMPARISON_CLASS_P (arg1))
10009 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10010 arg1, arg0,
10011 /*cond_first_p=*/0);
10012 if (tem != NULL_TREE)
10013 return tem;
10017 switch (code)
10019 case MEM_REF:
10020 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10021 if (TREE_CODE (arg0) == ADDR_EXPR
10022 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10024 tree iref = TREE_OPERAND (arg0, 0);
10025 return fold_build2 (MEM_REF, type,
10026 TREE_OPERAND (iref, 0),
10027 int_const_binop (PLUS_EXPR, arg1,
10028 TREE_OPERAND (iref, 1)));
10031 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10032 if (TREE_CODE (arg0) == ADDR_EXPR
10033 && handled_component_p (TREE_OPERAND (arg0, 0)))
10035 tree base;
10036 HOST_WIDE_INT coffset;
10037 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10038 &coffset);
10039 if (!base)
10040 return NULL_TREE;
10041 return fold_build2 (MEM_REF, type,
10042 build_fold_addr_expr (base),
10043 int_const_binop (PLUS_EXPR, arg1,
10044 size_int (coffset)));
10047 return NULL_TREE;
10049 case POINTER_PLUS_EXPR:
10050 /* 0 +p index -> (type)index */
10051 if (integer_zerop (arg0))
10052 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10054 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10055 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10056 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10057 return fold_convert_loc (loc, type,
10058 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10059 fold_convert_loc (loc, sizetype,
10060 arg1),
10061 fold_convert_loc (loc, sizetype,
10062 arg0)));
10064 /* (PTR +p B) +p A -> PTR +p (B + A) */
10065 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10067 tree inner;
10068 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10069 tree arg00 = TREE_OPERAND (arg0, 0);
10070 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10071 arg01, fold_convert_loc (loc, sizetype, arg1));
10072 return fold_convert_loc (loc, type,
10073 fold_build_pointer_plus_loc (loc,
10074 arg00, inner));
10077 /* PTR_CST +p CST -> CST1, i.e. fold the addition of two constants. */
10078 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10079 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10080 fold_convert_loc (loc, type, arg1));
10082 return NULL_TREE;
10084 case PLUS_EXPR:
10085 /* A + (-B) -> A - B */
10086 if (TREE_CODE (arg1) == NEGATE_EXPR
10087 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10088 return fold_build2_loc (loc, MINUS_EXPR, type,
10089 fold_convert_loc (loc, type, arg0),
10090 fold_convert_loc (loc, type,
10091 TREE_OPERAND (arg1, 0)));
10092 /* (-A) + B -> B - A */
10093 if (TREE_CODE (arg0) == NEGATE_EXPR
10094 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10095 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10096 return fold_build2_loc (loc, MINUS_EXPR, type,
10097 fold_convert_loc (loc, type, arg1),
10098 fold_convert_loc (loc, type,
10099 TREE_OPERAND (arg0, 0)));
10101 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10103 /* Convert ~A + 1 to -A. */
10104 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10105 && integer_each_onep (arg1))
10106 return fold_build1_loc (loc, NEGATE_EXPR, type,
10107 fold_convert_loc (loc, type,
10108 TREE_OPERAND (arg0, 0)));
10110 /* ~X + X is -1. */
10111 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10112 && !TYPE_OVERFLOW_TRAPS (type))
10114 tree tem = TREE_OPERAND (arg0, 0);
10116 STRIP_NOPS (tem);
10117 if (operand_equal_p (tem, arg1, 0))
10119 t1 = build_all_ones_cst (type);
10120 return omit_one_operand_loc (loc, type, t1, arg1);
10124 /* X + ~X is -1. */
10125 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10126 && !TYPE_OVERFLOW_TRAPS (type))
10128 tree tem = TREE_OPERAND (arg1, 0);
10130 STRIP_NOPS (tem);
10131 if (operand_equal_p (arg0, tem, 0))
10133 t1 = build_all_ones_cst (type);
10134 return omit_one_operand_loc (loc, type, t1, arg0);
10138 /* X + (X / CST) * -CST is X % CST. */
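	  /* E.g. "x + (x / 8) * -8" folds to "x % 8".  */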
10139 if (TREE_CODE (arg1) == MULT_EXPR
10140 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10141 && operand_equal_p (arg0,
10142 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10144 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10145 tree cst1 = TREE_OPERAND (arg1, 1);
10146 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10147 cst1, cst0);
10148 if (sum && integer_zerop (sum))
10149 return fold_convert_loc (loc, type,
10150 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10151 TREE_TYPE (arg0), arg0,
10152 cst0));
10156 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10157 one. Make sure the type is not saturating and has the signedness of
10158 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10159 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10160 if ((TREE_CODE (arg0) == MULT_EXPR
10161 || TREE_CODE (arg1) == MULT_EXPR)
10162 && !TYPE_SATURATING (type)
10163 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10164 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10165 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10167 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10168 if (tem)
10169 return tem;
10172 if (! FLOAT_TYPE_P (type))
10174 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10175 with a constant, and the two constants have no bits in common,
10176 we should treat this as a BIT_IOR_EXPR since this may produce more
10177 simplifications. */
10178 if (TREE_CODE (arg0) == BIT_AND_EXPR
10179 && TREE_CODE (arg1) == BIT_AND_EXPR
10180 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10181 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10182 && wi::bit_and (TREE_OPERAND (arg0, 1),
10183 TREE_OPERAND (arg1, 1)) == 0)
10185 code = BIT_IOR_EXPR;
10186 goto bit_ior;
10189 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10190 (plus (plus (mult) (mult)) (foo)) so that we can
10191 take advantage of the factoring cases below. */
10192 if (TYPE_OVERFLOW_WRAPS (type)
10193 && (((TREE_CODE (arg0) == PLUS_EXPR
10194 || TREE_CODE (arg0) == MINUS_EXPR)
10195 && TREE_CODE (arg1) == MULT_EXPR)
10196 || ((TREE_CODE (arg1) == PLUS_EXPR
10197 || TREE_CODE (arg1) == MINUS_EXPR)
10198 && TREE_CODE (arg0) == MULT_EXPR)))
10200 tree parg0, parg1, parg, marg;
10201 enum tree_code pcode;
10203 if (TREE_CODE (arg1) == MULT_EXPR)
10204 parg = arg0, marg = arg1;
10205 else
10206 parg = arg1, marg = arg0;
10207 pcode = TREE_CODE (parg);
10208 parg0 = TREE_OPERAND (parg, 0);
10209 parg1 = TREE_OPERAND (parg, 1);
10210 STRIP_NOPS (parg0);
10211 STRIP_NOPS (parg1);
10213 if (TREE_CODE (parg0) == MULT_EXPR
10214 && TREE_CODE (parg1) != MULT_EXPR)
10215 return fold_build2_loc (loc, pcode, type,
10216 fold_build2_loc (loc, PLUS_EXPR, type,
10217 fold_convert_loc (loc, type,
10218 parg0),
10219 fold_convert_loc (loc, type,
10220 marg)),
10221 fold_convert_loc (loc, type, parg1));
10222 if (TREE_CODE (parg0) != MULT_EXPR
10223 && TREE_CODE (parg1) == MULT_EXPR)
10224 return
10225 fold_build2_loc (loc, PLUS_EXPR, type,
10226 fold_convert_loc (loc, type, parg0),
10227 fold_build2_loc (loc, pcode, type,
10228 fold_convert_loc (loc, type, marg),
10229 fold_convert_loc (loc, type,
10230 parg1)));
10233 else
10235 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10236 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10237 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10239 /* Likewise if the operands are reversed. */
10240 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10241 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10243 /* Convert X + -C into X - C. */
10244 if (TREE_CODE (arg1) == REAL_CST
10245 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10247 tem = fold_negate_const (arg1, type);
10248 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10249 return fold_build2_loc (loc, MINUS_EXPR, type,
10250 fold_convert_loc (loc, type, arg0),
10251 fold_convert_loc (loc, type, tem));
10254 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10255 to __complex__ ( x, y ). This is not the same for SNaNs or
10256 if signed zeros are involved. */
10257 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10258 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10259 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10261 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10262 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10263 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10264 bool arg0rz = false, arg0iz = false;
10265 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10266 || (arg0i && (arg0iz = real_zerop (arg0i))))
10268 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10269 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10270 if (arg0rz && arg1i && real_zerop (arg1i))
10272 tree rp = arg1r ? arg1r
10273 : build1 (REALPART_EXPR, rtype, arg1);
10274 tree ip = arg0i ? arg0i
10275 : build1 (IMAGPART_EXPR, rtype, arg0);
10276 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10278 else if (arg0iz && arg1r && real_zerop (arg1r))
10280 tree rp = arg0r ? arg0r
10281 : build1 (REALPART_EXPR, rtype, arg0);
10282 tree ip = arg1i ? arg1i
10283 : build1 (IMAGPART_EXPR, rtype, arg1);
10284 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10289 if (flag_unsafe_math_optimizations
10290 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10291 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10292 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10293 return tem;
10295 /* Convert x+x into x*2.0. */
10296 if (operand_equal_p (arg0, arg1, 0)
10297 && SCALAR_FLOAT_TYPE_P (type))
10298 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10299 build_real (type, dconst2));
10301 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10302 We associate floats only if the user has specified
10303 -fassociative-math. */
10304 if (flag_associative_math
10305 && TREE_CODE (arg1) == PLUS_EXPR
10306 && TREE_CODE (arg0) != MULT_EXPR)
10308 tree tree10 = TREE_OPERAND (arg1, 0);
10309 tree tree11 = TREE_OPERAND (arg1, 1);
10310 if (TREE_CODE (tree11) == MULT_EXPR
10311 && TREE_CODE (tree10) == MULT_EXPR)
10313 tree tree0;
10314 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10315 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10318 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10319 We associate floats only if the user has specified
10320 -fassociative-math. */
10321 if (flag_associative_math
10322 && TREE_CODE (arg0) == PLUS_EXPR
10323 && TREE_CODE (arg1) != MULT_EXPR)
10325 tree tree00 = TREE_OPERAND (arg0, 0);
10326 tree tree01 = TREE_OPERAND (arg0, 1);
10327 if (TREE_CODE (tree01) == MULT_EXPR
10328 && TREE_CODE (tree00) == MULT_EXPR)
10330 tree tree0;
10331 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10332 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10337 bit_rotate:
10338 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10339 is a rotate of A by C1 bits. */
10340 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10341 is a rotate of A by B bits. */
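      /* E.g. for 32-bit unsigned A, "(A << 3) + (A >> 29)" is a left
	 rotate of A by 3, and "(A << B) + (A >> (32 - B))" a left rotate
	 by B.  */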
10343 enum tree_code code0, code1;
10344 tree rtype;
10345 code0 = TREE_CODE (arg0);
10346 code1 = TREE_CODE (arg1);
10347 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10348 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10349 && operand_equal_p (TREE_OPERAND (arg0, 0),
10350 TREE_OPERAND (arg1, 0), 0)
10351 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10352 TYPE_UNSIGNED (rtype))
10353 /* Only create rotates in complete modes. Other cases are not
10354 expanded properly. */
10355 && (element_precision (rtype)
10356 == element_precision (TYPE_MODE (rtype))))
10358 tree tree01, tree11;
10359 enum tree_code code01, code11;
10361 tree01 = TREE_OPERAND (arg0, 1);
10362 tree11 = TREE_OPERAND (arg1, 1);
10363 STRIP_NOPS (tree01);
10364 STRIP_NOPS (tree11);
10365 code01 = TREE_CODE (tree01);
10366 code11 = TREE_CODE (tree11);
10367 if (code01 == INTEGER_CST
10368 && code11 == INTEGER_CST
10369 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10370 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10372 tem = build2_loc (loc, LROTATE_EXPR,
10373 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10374 TREE_OPERAND (arg0, 0),
10375 code0 == LSHIFT_EXPR ? tree01 : tree11);
10376 return fold_convert_loc (loc, type, tem);
10378 else if (code11 == MINUS_EXPR)
10380 tree tree110, tree111;
10381 tree110 = TREE_OPERAND (tree11, 0);
10382 tree111 = TREE_OPERAND (tree11, 1);
10383 STRIP_NOPS (tree110);
10384 STRIP_NOPS (tree111);
10385 if (TREE_CODE (tree110) == INTEGER_CST
10386 && 0 == compare_tree_int (tree110,
10387 element_precision
10388 (TREE_TYPE (TREE_OPERAND
10389 (arg0, 0))))
10390 && operand_equal_p (tree01, tree111, 0))
10391 return
10392 fold_convert_loc (loc, type,
10393 build2 ((code0 == LSHIFT_EXPR
10394 ? LROTATE_EXPR
10395 : RROTATE_EXPR),
10396 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10397 TREE_OPERAND (arg0, 0), tree01));
10399 else if (code01 == MINUS_EXPR)
10401 tree tree010, tree011;
10402 tree010 = TREE_OPERAND (tree01, 0);
10403 tree011 = TREE_OPERAND (tree01, 1);
10404 STRIP_NOPS (tree010);
10405 STRIP_NOPS (tree011);
10406 if (TREE_CODE (tree010) == INTEGER_CST
10407 && 0 == compare_tree_int (tree010,
10408 element_precision
10409 (TREE_TYPE (TREE_OPERAND
10410 (arg0, 0))))
10411 && operand_equal_p (tree11, tree011, 0))
10412 return fold_convert_loc
10413 (loc, type,
10414 build2 ((code0 != LSHIFT_EXPR
10415 ? LROTATE_EXPR
10416 : RROTATE_EXPR),
10417 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10418 TREE_OPERAND (arg0, 0), tree11));
10423 associate:
10424 /* In most languages, we can't associate operations on floats through
10425 parentheses. Rather than remember where the parentheses were, we
10426 don't associate floats at all, unless the user has specified
10427 -fassociative-math.
10428 Also, we need to make sure the type is not saturating. */
10430 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10431 && !TYPE_SATURATING (type))
10433 tree var0, con0, lit0, minus_lit0;
10434 tree var1, con1, lit1, minus_lit1;
10435 tree atype = type;
10436 bool ok = true;
10438 /* Split both trees into variables, constants, and literals. Then
10439 associate each group together, the constants with literals,
10440 then the result with variables. This increases the chances of
10441 literals being recombined later and of generating relocatable
10442 expressions for the sum of a constant and literal. */
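	/* E.g. with unsigned x and y, "(x + 1) + (y + 2)" reassociates to
	   "(x + y) + 3" so that the literals 1 and 2 combine.  */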
10443 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10444 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10445 code == MINUS_EXPR);
10447 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10448 if (code == MINUS_EXPR)
10449 code = PLUS_EXPR;
10451 /* With undefined overflow prefer doing association in a type
10452 which wraps on overflow, if that is one of the operand types. */
10453 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10454 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10456 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10457 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10458 atype = TREE_TYPE (arg0);
10459 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10460 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10461 atype = TREE_TYPE (arg1);
10462 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10465 /* With undefined overflow we can only associate constants with one
10466 variable, and constants whose association doesn't overflow. */
10467 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10468 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10470 if (var0 && var1)
10472 tree tmp0 = var0;
10473 tree tmp1 = var1;
10475 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10476 tmp0 = TREE_OPERAND (tmp0, 0);
10477 if (CONVERT_EXPR_P (tmp0)
10478 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10479 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10480 <= TYPE_PRECISION (atype)))
10481 tmp0 = TREE_OPERAND (tmp0, 0);
10482 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10483 tmp1 = TREE_OPERAND (tmp1, 0);
10484 if (CONVERT_EXPR_P (tmp1)
10485 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10486 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10487 <= TYPE_PRECISION (atype)))
10488 tmp1 = TREE_OPERAND (tmp1, 0);
10489 /* The only case we can still associate with two variables
10490 is if they are the same, modulo negation and bit-pattern
10491 preserving conversions. */
10492 if (!operand_equal_p (tmp0, tmp1, 0))
10493 ok = false;
10497 /* Only do something if we found more than two objects. Otherwise,
10498 nothing has changed and we risk infinite recursion. */
10499 if (ok
10500 && (2 < ((var0 != 0) + (var1 != 0)
10501 + (con0 != 0) + (con1 != 0)
10502 + (lit0 != 0) + (lit1 != 0)
10503 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10505 bool any_overflows = false;
10506 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10507 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10508 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10509 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10510 var0 = associate_trees (loc, var0, var1, code, atype);
10511 con0 = associate_trees (loc, con0, con1, code, atype);
10512 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10513 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10514 code, atype);
10516 /* Preserve the MINUS_EXPR if the negative part of the literal is
10517 greater than the positive part. Otherwise, the multiplicative
10518 folding code (i.e. extract_muldiv) may be fooled in case
10519 unsigned constants are subtracted, like in the following
10520 example: ((X*2 + 4) - 8U)/2. */
10521 if (minus_lit0 && lit0)
10523 if (TREE_CODE (lit0) == INTEGER_CST
10524 && TREE_CODE (minus_lit0) == INTEGER_CST
10525 && tree_int_cst_lt (lit0, minus_lit0))
10527 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10528 MINUS_EXPR, atype);
10529 lit0 = 0;
10531 else
10533 lit0 = associate_trees (loc, lit0, minus_lit0,
10534 MINUS_EXPR, atype);
10535 minus_lit0 = 0;
10539 /* Don't introduce overflows through reassociation. */
10540 if (!any_overflows
10541 && ((lit0 && TREE_OVERFLOW (lit0))
10542 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10543 return NULL_TREE;
10545 if (minus_lit0)
10547 if (con0 == 0)
10548 return
10549 fold_convert_loc (loc, type,
10550 associate_trees (loc, var0, minus_lit0,
10551 MINUS_EXPR, atype));
10552 else
10554 con0 = associate_trees (loc, con0, minus_lit0,
10555 MINUS_EXPR, atype);
10556 return
10557 fold_convert_loc (loc, type,
10558 associate_trees (loc, var0, con0,
10559 PLUS_EXPR, atype));
10563 con0 = associate_trees (loc, con0, lit0, code, atype);
10564 return
10565 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10566 code, atype));
10570 return NULL_TREE;
10572 case MINUS_EXPR:
10573 /* Pointer simplifications for subtraction, simple reassociations. */
10574 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10576 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10577 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10578 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10580 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10581 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10582 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10583 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10584 return fold_build2_loc (loc, PLUS_EXPR, type,
10585 fold_build2_loc (loc, MINUS_EXPR, type,
10586 arg00, arg10),
10587 fold_build2_loc (loc, MINUS_EXPR, type,
10588 arg01, arg11));
10590 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10591 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10593 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10594 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10595 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10596 fold_convert_loc (loc, type, arg1));
10597 if (tmp)
10598 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10600 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10601 simplifies. */
10602 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10604 tree arg10 = fold_convert_loc (loc, type,
10605 TREE_OPERAND (arg1, 0));
10606 tree arg11 = fold_convert_loc (loc, type,
10607 TREE_OPERAND (arg1, 1));
10608 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10609 fold_convert_loc (loc, type, arg0),
10610 arg10);
10611 if (tmp)
10612 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10615 /* A - (-B) -> A + B */
10616 if (TREE_CODE (arg1) == NEGATE_EXPR)
10617 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10618 fold_convert_loc (loc, type,
10619 TREE_OPERAND (arg1, 0)));
10620 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10621 if (TREE_CODE (arg0) == NEGATE_EXPR
10622 && negate_expr_p (arg1)
10623 && reorder_operands_p (arg0, arg1))
10624 return fold_build2_loc (loc, MINUS_EXPR, type,
10625 fold_convert_loc (loc, type,
10626 negate_expr (arg1)),
10627 fold_convert_loc (loc, type,
10628 TREE_OPERAND (arg0, 0)));
10629 /* Convert -A - 1 to ~A. */
10630 if (TREE_CODE (arg0) == NEGATE_EXPR
10631 && integer_each_onep (arg1)
10632 && !TYPE_OVERFLOW_TRAPS (type))
10633 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10634 fold_convert_loc (loc, type,
10635 TREE_OPERAND (arg0, 0)));
10637 /* Convert -1 - A to ~A. */
10638 if (TREE_CODE (type) != COMPLEX_TYPE
10639 && integer_all_onesp (arg0))
10640 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10643 /* X - (X / Y) * Y is X % Y. */
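/* E.g. for X == 7 and Y == 3: 7 - (7/3)*3 == 7 - 6 == 1 == 7 % 3. */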
10644 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10645 && TREE_CODE (arg1) == MULT_EXPR
10646 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10647 && operand_equal_p (arg0,
10648 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10649 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10650 TREE_OPERAND (arg1, 1), 0))
10651 return
10652 fold_convert_loc (loc, type,
10653 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10654 arg0, TREE_OPERAND (arg1, 1)));
10656 if (! FLOAT_TYPE_P (type))
10658 if (integer_zerop (arg0))
10659 return negate_expr (fold_convert_loc (loc, type, arg1));
10661 /* Fold A - (A & B) into ~B & A. */
10662 if (!TREE_SIDE_EFFECTS (arg0)
10663 && TREE_CODE (arg1) == BIT_AND_EXPR)
10665 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10667 tree arg10 = fold_convert_loc (loc, type,
10668 TREE_OPERAND (arg1, 0));
10669 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10670 fold_build1_loc (loc, BIT_NOT_EXPR,
10671 type, arg10),
10672 fold_convert_loc (loc, type, arg0));
10674 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10676 tree arg11 = fold_convert_loc (loc,
10677 type, TREE_OPERAND (arg1, 1));
10678 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10679 fold_build1_loc (loc, BIT_NOT_EXPR,
10680 type, arg11),
10681 fold_convert_loc (loc, type, arg0));
10685 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10686 any power of 2 minus 1. */
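/* E.g. for A == 13 (0b1101) and B == 3 (0b0011):
   (A & ~B) - (A & B) == 12 - 1 == 11, and (A ^ B) - B == 14 - 3 == 11. */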
10687 if (TREE_CODE (arg0) == BIT_AND_EXPR
10688 && TREE_CODE (arg1) == BIT_AND_EXPR
10689 && operand_equal_p (TREE_OPERAND (arg0, 0),
10690 TREE_OPERAND (arg1, 0), 0))
10692 tree mask0 = TREE_OPERAND (arg0, 1);
10693 tree mask1 = TREE_OPERAND (arg1, 1);
10694 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10696 if (operand_equal_p (tem, mask1, 0))
10698 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10699 TREE_OPERAND (arg0, 0), mask1);
10700 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10705 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10706 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10707 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10709 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10710 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10711 (-ARG1 + ARG0) reduces to -ARG1. */
10712 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10713 return negate_expr (fold_convert_loc (loc, type, arg1));
10715 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10716 __complex__ ( x, -y ). This is not the same for SNaNs or if
10717 signed zeros are involved. */
10718 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10719 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10720 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10722 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10723 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10724 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10725 bool arg0rz = false, arg0iz = false;
10726 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10727 || (arg0i && (arg0iz = real_zerop (arg0i))))
10729 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10730 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10731 if (arg0rz && arg1i && real_zerop (arg1i))
10733 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10734 arg1r ? arg1r
10735 : build1 (REALPART_EXPR, rtype, arg1));
10736 tree ip = arg0i ? arg0i
10737 : build1 (IMAGPART_EXPR, rtype, arg0);
10738 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10740 else if (arg0iz && arg1r && real_zerop (arg1r))
10742 tree rp = arg0r ? arg0r
10743 : build1 (REALPART_EXPR, rtype, arg0);
10744 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10745 arg1i ? arg1i
10746 : build1 (IMAGPART_EXPR, rtype, arg1));
10747 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10752 /* A - B -> A + (-B) if B is easily negatable. */
10753 if (negate_expr_p (arg1)
10754 && ((FLOAT_TYPE_P (type)
10755 /* Avoid this transformation if B is a positive REAL_CST. */
10756 && (TREE_CODE (arg1) != REAL_CST
10757 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10758 || INTEGRAL_TYPE_P (type)))
10759 return fold_build2_loc (loc, PLUS_EXPR, type,
10760 fold_convert_loc (loc, type, arg0),
10761 fold_convert_loc (loc, type,
10762 negate_expr (arg1)));
10764 /* Try folding difference of addresses. */
10766 HOST_WIDE_INT diff;
10768 if ((TREE_CODE (arg0) == ADDR_EXPR
10769 || TREE_CODE (arg1) == ADDR_EXPR)
10770 && ptr_difference_const (arg0, arg1, &diff))
10771 return build_int_cst_type (type, diff);
10774 /* Fold &a[i] - &a[j] to i-j. */
10775 if (TREE_CODE (arg0) == ADDR_EXPR
10776 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10777 && TREE_CODE (arg1) == ADDR_EXPR
10778 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10780 tree tem = fold_addr_of_array_ref_difference (loc, type,
10781 TREE_OPERAND (arg0, 0),
10782 TREE_OPERAND (arg1, 0));
10783 if (tem)
10784 return tem;
10787 if (FLOAT_TYPE_P (type)
10788 && flag_unsafe_math_optimizations
10789 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10790 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10791 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10792 return tem;
10794 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10795 one. Make sure the type is not saturating and has the signedness of
10796 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10797 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10798 if ((TREE_CODE (arg0) == MULT_EXPR
10799 || TREE_CODE (arg1) == MULT_EXPR)
10800 && !TYPE_SATURATING (type)
10801 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10802 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10803 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10805 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10806 if (tem)
10807 return tem;
10810 goto associate;
10812 case MULT_EXPR:
10813 /* (-A) * (-B) -> A * B */
10814 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10815 return fold_build2_loc (loc, MULT_EXPR, type,
10816 fold_convert_loc (loc, type,
10817 TREE_OPERAND (arg0, 0)),
10818 fold_convert_loc (loc, type,
10819 negate_expr (arg1)));
10820 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10821 return fold_build2_loc (loc, MULT_EXPR, type,
10822 fold_convert_loc (loc, type,
10823 negate_expr (arg0)),
10824 fold_convert_loc (loc, type,
10825 TREE_OPERAND (arg1, 0)));
10827 if (! FLOAT_TYPE_P (type))
10829 /* Transform x * -1 into -x. Make sure to do the negation
10830 on the original operand with conversions not stripped
10831 because we can only strip non-sign-changing conversions. */
10832 if (integer_minus_onep (arg1))
10833 return fold_convert_loc (loc, type, negate_expr (op0));
10834 /* Transform x * -C into -x * C if x is easily negatable. */
10835 if (TREE_CODE (arg1) == INTEGER_CST
10836 && tree_int_cst_sgn (arg1) == -1
10837 && negate_expr_p (arg0)
10838 && (tem = negate_expr (arg1)) != arg1
10839 && !TREE_OVERFLOW (tem))
10840 return fold_build2_loc (loc, MULT_EXPR, type,
10841 fold_convert_loc (loc, type,
10842 negate_expr (arg0)),
10843 tem);
10845 /* (a * (1 << b)) is (a << b) */
10846 if (TREE_CODE (arg1) == LSHIFT_EXPR
10847 && integer_onep (TREE_OPERAND (arg1, 0)))
10848 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10849 TREE_OPERAND (arg1, 1));
10850 if (TREE_CODE (arg0) == LSHIFT_EXPR
10851 && integer_onep (TREE_OPERAND (arg0, 0)))
10852 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10853 TREE_OPERAND (arg0, 1));
10855 /* (A + A) * C -> A * 2 * C */
10856 if (TREE_CODE (arg0) == PLUS_EXPR
10857 && TREE_CODE (arg1) == INTEGER_CST
10858 && operand_equal_p (TREE_OPERAND (arg0, 0),
10859 TREE_OPERAND (arg0, 1), 0))
10860 return fold_build2_loc (loc, MULT_EXPR, type,
10861 omit_one_operand_loc (loc, type,
10862 TREE_OPERAND (arg0, 0),
10863 TREE_OPERAND (arg0, 1)),
10864 fold_build2_loc (loc, MULT_EXPR, type,
10865 build_int_cst (type, 2) , arg1));
10867 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10868 sign-changing only. */
10869 if (TREE_CODE (arg1) == INTEGER_CST
10870 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10871 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10872 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10874 strict_overflow_p = false;
10875 if (TREE_CODE (arg1) == INTEGER_CST
10876 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10877 &strict_overflow_p)))
10879 if (strict_overflow_p)
10880 fold_overflow_warning (("assuming signed overflow does not "
10881 "occur when simplifying "
10882 "multiplication"),
10883 WARN_STRICT_OVERFLOW_MISC);
10884 return fold_convert_loc (loc, type, tem);
10887 /* Optimize z * conj(z) for integer complex numbers. */
10888 if (TREE_CODE (arg0) == CONJ_EXPR
10889 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10890 return fold_mult_zconjz (loc, type, arg1);
10891 if (TREE_CODE (arg1) == CONJ_EXPR
10892 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10893 return fold_mult_zconjz (loc, type, arg0);
10895 else
10897 /* Maybe fold x * 0 to 0. The expressions aren't the same
10898 when x is NaN, since x * 0 is also NaN. Nor are they the
10899 same in modes with signed zeros, since multiplying a
10900 negative value by 0 gives -0, not +0. */
10901 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10902 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10903 && real_zerop (arg1))
10904 return omit_one_operand_loc (loc, type, arg1, arg0);
10905 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
10906 Likewise for complex arithmetic with signed zeros. */
10907 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10908 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10909 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10910 && real_onep (arg1))
10911 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10913 /* Transform x * -1.0 into -x. */
10914 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10915 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10916 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10917 && real_minus_onep (arg1))
10918 return fold_convert_loc (loc, type, negate_expr (arg0));
10920 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10921 the result for floating-point types due to rounding, so it is applied
10922 only if -fassociative-math was specified. */
10923 if (flag_associative_math
10924 && TREE_CODE (arg0) == RDIV_EXPR
10925 && TREE_CODE (arg1) == REAL_CST
10926 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10928 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10929 arg1);
10930 if (tem)
10931 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10932 TREE_OPERAND (arg0, 1));
10935 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10936 if (operand_equal_p (arg0, arg1, 0))
10938 tree tem = fold_strip_sign_ops (arg0);
10939 if (tem != NULL_TREE)
10941 tem = fold_convert_loc (loc, type, tem);
10942 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10946 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10947 This is not the same for NaNs or if signed zeros are
10948 involved. */
10949 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10950 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10951 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10952 && TREE_CODE (arg1) == COMPLEX_CST
10953 && real_zerop (TREE_REALPART (arg1)))
10955 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10956 if (real_onep (TREE_IMAGPART (arg1)))
10957 return
10958 fold_build2_loc (loc, COMPLEX_EXPR, type,
10959 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10960 rtype, arg0)),
10961 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10962 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10963 return
10964 fold_build2_loc (loc, COMPLEX_EXPR, type,
10965 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10966 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10967 rtype, arg0)));
10970 /* Optimize z * conj(z) for floating point complex numbers.
10971 Guarded by flag_unsafe_math_optimizations as non-finite
10972 imaginary components don't produce scalar results. */
10973 if (flag_unsafe_math_optimizations
10974 && TREE_CODE (arg0) == CONJ_EXPR
10975 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10976 return fold_mult_zconjz (loc, type, arg1);
10977 if (flag_unsafe_math_optimizations
10978 && TREE_CODE (arg1) == CONJ_EXPR
10979 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10980 return fold_mult_zconjz (loc, type, arg0);
10982 if (flag_unsafe_math_optimizations)
10984 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10985 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10987 /* Optimizations of root(...)*root(...). */
10988 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10990 tree rootfn, arg;
10991 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10992 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10994 /* Optimize sqrt(x)*sqrt(x) as x. */
10995 if (BUILTIN_SQRT_P (fcode0)
10996 && operand_equal_p (arg00, arg10, 0)
10997 && ! HONOR_SNANS (TYPE_MODE (type)))
10998 return arg00;
11000 /* Optimize root(x)*root(y) as root(x*y). */
11001 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11002 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11003 return build_call_expr_loc (loc, rootfn, 1, arg);
11006 /* Optimize expN(x)*expN(y) as expN(x+y). */
11007 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11009 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11010 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11011 CALL_EXPR_ARG (arg0, 0),
11012 CALL_EXPR_ARG (arg1, 0));
11013 return build_call_expr_loc (loc, expfn, 1, arg);
11016 /* Optimizations of pow(...)*pow(...). */
11017 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11018 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11019 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11021 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11022 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11023 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11024 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11026 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11027 if (operand_equal_p (arg01, arg11, 0))
11029 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11030 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11031 arg00, arg10);
11032 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11035 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11036 if (operand_equal_p (arg00, arg10, 0))
11038 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11039 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11040 arg01, arg11);
11041 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11045 /* Optimize tan(x)*cos(x) as sin(x). */
11046 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11047 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11048 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11049 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11050 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11051 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11052 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11053 CALL_EXPR_ARG (arg1, 0), 0))
11055 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11057 if (sinfn != NULL_TREE)
11058 return build_call_expr_loc (loc, sinfn, 1,
11059 CALL_EXPR_ARG (arg0, 0));
11062 /* Optimize x*pow(x,c) as pow(x,c+1). */
11063 if (fcode1 == BUILT_IN_POW
11064 || fcode1 == BUILT_IN_POWF
11065 || fcode1 == BUILT_IN_POWL)
11067 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11068 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11069 if (TREE_CODE (arg11) == REAL_CST
11070 && !TREE_OVERFLOW (arg11)
11071 && operand_equal_p (arg0, arg10, 0))
11073 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11074 REAL_VALUE_TYPE c;
11075 tree arg;
11077 c = TREE_REAL_CST (arg11);
11078 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11079 arg = build_real (type, c);
11080 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11084 /* Optimize pow(x,c)*x as pow(x,c+1). */
11085 if (fcode0 == BUILT_IN_POW
11086 || fcode0 == BUILT_IN_POWF
11087 || fcode0 == BUILT_IN_POWL)
11089 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11090 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11091 if (TREE_CODE (arg01) == REAL_CST
11092 && !TREE_OVERFLOW (arg01)
11093 && operand_equal_p (arg1, arg00, 0))
11095 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11096 REAL_VALUE_TYPE c;
11097 tree arg;
11099 c = TREE_REAL_CST (arg01);
11100 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11101 arg = build_real (type, c);
11102 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11106 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11107 if (!in_gimple_form
11108 && optimize
11109 && operand_equal_p (arg0, arg1, 0))
11111 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11113 if (powfn)
11115 tree arg = build_real (type, dconst2);
11116 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11121 goto associate;
11123 case BIT_IOR_EXPR:
11124 bit_ior:
11125 if (operand_equal_p (arg0, arg1, 0))
11126 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11128 /* ~X | X is -1. */
11129 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11130 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11132 t1 = build_zero_cst (type);
11133 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11134 return omit_one_operand_loc (loc, type, t1, arg1);
11137 /* X | ~X is -1. */
11138 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11139 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11141 t1 = build_zero_cst (type);
11142 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11143 return omit_one_operand_loc (loc, type, t1, arg0);
11146 /* Canonicalize (X & C1) | C2. */
11147 if (TREE_CODE (arg0) == BIT_AND_EXPR
11148 && TREE_CODE (arg1) == INTEGER_CST
11149 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11151 int width = TYPE_PRECISION (type), w;
11152 wide_int c1 = TREE_OPERAND (arg0, 1);
11153 wide_int c2 = arg1;
11155 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11156 if ((c1 & c2) == c1)
11157 return omit_one_operand_loc (loc, type, arg1,
11158 TREE_OPERAND (arg0, 0));
11160 wide_int msk = wi::mask (width, false,
11161 TYPE_PRECISION (TREE_TYPE (arg1)));
11163 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11164 if (msk.and_not (c1 | c2) == 0)
11165 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11166 TREE_OPERAND (arg0, 0), arg1);
11168 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11169 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11170 mode which allows further optimizations. */
11171 c1 &= msk;
11172 c2 &= msk;
11173 wide_int c3 = c1.and_not (c2);
11174 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11176 wide_int mask = wi::mask (w, false,
11177 TYPE_PRECISION (type));
11178 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11180 c3 = mask;
11181 break;
11185 if (c3 != c1)
11186 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11187 fold_build2_loc (loc, BIT_AND_EXPR, type,
11188 TREE_OPERAND (arg0, 0),
11189 wide_int_to_tree (type,
11190 c3)),
11191 arg1);
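/* E.g. (X & 0x0F) | 0x05 is canonicalized to (X & 0x0A) | 0x05,
   while (X & 0xFF) | 0x0F is left alone because 0xFF is already
   the mask of a byte mode. */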
11194 /* (X & Y) | Y is (X, Y). */
11195 if (TREE_CODE (arg0) == BIT_AND_EXPR
11196 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11197 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11198 /* (X & Y) | X is (Y, X). */
11199 if (TREE_CODE (arg0) == BIT_AND_EXPR
11200 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11201 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11202 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11203 /* X | (X & Y) is (Y, X). */
11204 if (TREE_CODE (arg1) == BIT_AND_EXPR
11205 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11206 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11207 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11208 /* X | (Y & X) is (Y, X). */
11209 if (TREE_CODE (arg1) == BIT_AND_EXPR
11210 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11211 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11212 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11214 /* (X & ~Y) | (~X & Y) is X ^ Y */
11215 if (TREE_CODE (arg0) == BIT_AND_EXPR
11216 && TREE_CODE (arg1) == BIT_AND_EXPR)
11218 tree a0, a1, l0, l1, n0, n1;
11220 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11221 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11223 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11224 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11226 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11227 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11229 if ((operand_equal_p (n0, a0, 0)
11230 && operand_equal_p (n1, a1, 0))
11231 || (operand_equal_p (n0, a1, 0)
11232 && operand_equal_p (n1, a0, 0)))
11233 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11236 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11237 if (t1 != NULL_TREE)
11238 return t1;
11240 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11242 This results in more efficient code for machines without a NAND
11243 instruction. Combine will canonicalize to the first form
11244 which will allow use of NAND instructions provided by the
11245 backend if they exist. */
11246 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11247 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11249 return
11250 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11251 build2 (BIT_AND_EXPR, type,
11252 fold_convert_loc (loc, type,
11253 TREE_OPERAND (arg0, 0)),
11254 fold_convert_loc (loc, type,
11255 TREE_OPERAND (arg1, 0))));
11258 /* See if this can be simplified into a rotate first. If that
11259 is unsuccessful, continue in the association code. */
11260 goto bit_rotate;
11262 case BIT_XOR_EXPR:
11263 if (integer_all_onesp (arg1))
11264 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11266 /* ~X ^ X is -1. */
11267 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11268 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11270 t1 = build_zero_cst (type);
11271 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11272 return omit_one_operand_loc (loc, type, t1, arg1);
11275 /* X ^ ~X is -1. */
11276 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11277 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11279 t1 = build_zero_cst (type);
11280 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11281 return omit_one_operand_loc (loc, type, t1, arg0);
11284 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11285 with a constant, and the two constants have no bits in common,
11286 we should treat this as a BIT_IOR_EXPR since this may produce more
11287 simplifications. */
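/* E.g. (X & 0x0F) ^ (Y & 0xF0): the masked operands have no set bits
   in common, so the XOR is equivalent to (X & 0x0F) | (Y & 0xF0). */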
11288 if (TREE_CODE (arg0) == BIT_AND_EXPR
11289 && TREE_CODE (arg1) == BIT_AND_EXPR
11290 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11291 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11292 && wi::bit_and (TREE_OPERAND (arg0, 1),
11293 TREE_OPERAND (arg1, 1)) == 0)
11295 code = BIT_IOR_EXPR;
11296 goto bit_ior;
11299 /* (X | Y) ^ X -> Y & ~X. */
11300 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11301 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11303 tree t2 = TREE_OPERAND (arg0, 1);
11304 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11305 arg1);
11306 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11307 fold_convert_loc (loc, type, t2),
11308 fold_convert_loc (loc, type, t1));
11309 return t1;
11312 /* (Y | X) ^ X -> Y & ~X. */
11313 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11314 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11316 tree t2 = TREE_OPERAND (arg0, 0);
11317 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11318 arg1);
11319 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11320 fold_convert_loc (loc, type, t2),
11321 fold_convert_loc (loc, type, t1));
11322 return t1;
11325 /* X ^ (X | Y) -> Y & ~X. */
11326 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11327 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11329 tree t2 = TREE_OPERAND (arg1, 1);
11330 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11331 arg0);
11332 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11333 fold_convert_loc (loc, type, t2),
11334 fold_convert_loc (loc, type, t1));
11335 return t1;
11338 /* X ^ (Y | X) -> Y & ~X. */
11339 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11340 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11342 tree t2 = TREE_OPERAND (arg1, 0);
11343 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11344 arg0);
11345 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11346 fold_convert_loc (loc, type, t2),
11347 fold_convert_loc (loc, type, t1));
11348 return t1;
11351 /* Convert ~X ^ ~Y to X ^ Y. */
11352 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11353 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11354 return fold_build2_loc (loc, code, type,
11355 fold_convert_loc (loc, type,
11356 TREE_OPERAND (arg0, 0)),
11357 fold_convert_loc (loc, type,
11358 TREE_OPERAND (arg1, 0)));
11360 /* Convert ~X ^ C to X ^ ~C. */
11361 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11362 && TREE_CODE (arg1) == INTEGER_CST)
11363 return fold_build2_loc (loc, code, type,
11364 fold_convert_loc (loc, type,
11365 TREE_OPERAND (arg0, 0)),
11366 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11368 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11369 if (TREE_CODE (arg0) == BIT_AND_EXPR
11370 && INTEGRAL_TYPE_P (type)
11371 && integer_onep (TREE_OPERAND (arg0, 1))
11372 && integer_onep (arg1))
11373 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11374 build_zero_cst (TREE_TYPE (arg0)));
11376 /* Fold (X & Y) ^ Y as ~X & Y. */
11377 if (TREE_CODE (arg0) == BIT_AND_EXPR
11378 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11380 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11381 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11382 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11383 fold_convert_loc (loc, type, arg1));
11385 /* Fold (X & Y) ^ X as ~Y & X. */
11386 if (TREE_CODE (arg0) == BIT_AND_EXPR
11387 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11388 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11390 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11391 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11392 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11393 fold_convert_loc (loc, type, arg1));
11395 /* Fold X ^ (X & Y) as X & ~Y. */
11396 if (TREE_CODE (arg1) == BIT_AND_EXPR
11397 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11399 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11400 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11401 fold_convert_loc (loc, type, arg0),
11402 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11404 /* Fold X ^ (Y & X) as ~Y & X. */
11405 if (TREE_CODE (arg1) == BIT_AND_EXPR
11406 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11407 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11409 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11410 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11411 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11412 fold_convert_loc (loc, type, arg0));
11415 /* See if this can be simplified into a rotate first. If that
11416 is unsuccessful, continue in the association code. */
11417 goto bit_rotate;
11419 case BIT_AND_EXPR:
11420 if (integer_all_onesp (arg1))
11421 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11422 if (operand_equal_p (arg0, arg1, 0))
11423 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11425 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11426 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11427 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11428 || (TREE_CODE (arg0) == EQ_EXPR
11429 && integer_zerop (TREE_OPERAND (arg0, 1))))
11430 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11431 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11433 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11434 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11435 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11436 || (TREE_CODE (arg1) == EQ_EXPR
11437 && integer_zerop (TREE_OPERAND (arg1, 1))))
11438 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11439 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11441 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11442 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11443 && TREE_CODE (arg1) == INTEGER_CST
11444 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11446 tree tmp1 = fold_convert_loc (loc, type, arg1);
11447 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11448 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11449 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11450 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11451 return
11452 fold_convert_loc (loc, type,
11453 fold_build2_loc (loc, BIT_IOR_EXPR,
11454 type, tmp2, tmp3));
11457 /* (X | Y) & Y is (X, Y). */
11458 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11459 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11460 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11461 /* (X | Y) & X is (Y, X). */
11462 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11463 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11464 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11465 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11466 /* X & (X | Y) is (Y, X). */
11467 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11468 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11469 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11470 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11471 /* X & (Y | X) is (Y, X). */
11472 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11473 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11474 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11475 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11477 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11478 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11479 && INTEGRAL_TYPE_P (type)
11480 && integer_onep (TREE_OPERAND (arg0, 1))
11481 && integer_onep (arg1))
11483 tree tem2;
11484 tem = TREE_OPERAND (arg0, 0);
11485 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11486 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11487 tem, tem2);
11488 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11489 build_zero_cst (TREE_TYPE (tem)));
11491 /* Fold ~X & 1 as (X & 1) == 0. */
11492 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11493 && INTEGRAL_TYPE_P (type)
11494 && integer_onep (arg1))
11496 tree tem2;
11497 tem = TREE_OPERAND (arg0, 0);
11498 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11499 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11500 tem, tem2);
11501 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11502 build_zero_cst (TREE_TYPE (tem)));
11504 /* Fold !X & 1 as X == 0. */
11505 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11506 && integer_onep (arg1))
11508 tem = TREE_OPERAND (arg0, 0);
11509 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11510 build_zero_cst (TREE_TYPE (tem)));
11513 /* Fold (X ^ Y) & Y as ~X & Y. */
11514 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11515 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11517 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11518 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11519 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11520 fold_convert_loc (loc, type, arg1));
11522 /* Fold (X ^ Y) & X as ~Y & X. */
11523 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11524 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11525 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11527 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11528 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11529 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11530 fold_convert_loc (loc, type, arg1));
11532 /* Fold X & (X ^ Y) as X & ~Y. */
11533 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11534 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11536 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11537 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11538 fold_convert_loc (loc, type, arg0),
11539 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11541 /* Fold X & (Y ^ X) as ~Y & X. */
11542 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11543 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11544 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11546 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11547 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11548 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11549 fold_convert_loc (loc, type, arg0));
11552 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11553 multiple of 1 << CST. */
11554 if (TREE_CODE (arg1) == INTEGER_CST)
11556 wide_int cst1 = arg1;
11557 wide_int ncst1 = -cst1;
11558 if ((cst1 & ncst1) == ncst1
11559 && multiple_of_p (type, arg0,
11560 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11561 return fold_convert_loc (loc, type, arg0);
11564 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11565 bits from CST2. */
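/* E.g. (X * 8) & 7 folds to 0 since the low three bits of X * 8 are
   always clear, and (X * 4) & 7 drops the known-zero bits to become
   (X * 4) & 4. */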
11566 if (TREE_CODE (arg1) == INTEGER_CST
11567 && TREE_CODE (arg0) == MULT_EXPR
11568 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11570 wide_int warg1 = arg1;
11571 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11573 if (masked == 0)
11574 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11575 arg0, arg1);
11576 else if (masked != warg1)
11578 /* Avoid the transform if arg1 is a mask of some
11579 mode which allows further optimizations. */
11580 int pop = wi::popcount (warg1);
11581 if (!(pop >= BITS_PER_UNIT
11582 && exact_log2 (pop) != -1
11583 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11584 return fold_build2_loc (loc, code, type, op0,
11585 wide_int_to_tree (type, masked));
11589 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11590 ((A & N) + B) & M -> (A + B) & M
11591 Similarly if (N & M) == 0,
11592 ((A | N) + B) & M -> (A + B) & M
11593 and for - instead of + (or unary - instead of +)
11594 and/or ^ instead of |.
11595 If B is constant and (B & M) == 0, fold into A & M. */
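/* E.g. with M == 0x0F: ((A & 0xFF) + B) & 0x0F -> (A + B) & 0x0F
   because (0xFF & 0x0F) == 0x0F, and ((A | 0x10) + B) & 0x0F
   -> (A + B) & 0x0F because (0x10 & 0x0F) == 0. */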
11596 if (TREE_CODE (arg1) == INTEGER_CST)
11598 wide_int cst1 = arg1;
11599 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11600 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11601 && (TREE_CODE (arg0) == PLUS_EXPR
11602 || TREE_CODE (arg0) == MINUS_EXPR
11603 || TREE_CODE (arg0) == NEGATE_EXPR)
11604 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11605 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11607 tree pmop[2];
11608 int which = 0;
11609 wide_int cst0;
11611 /* Now we know that arg0 is (C + D) or (C - D) or
11612 -C, and arg1 (M) is (1LL << cst) - 1.
11613 Store C into PMOP[0] and D into PMOP[1]. */
11614 pmop[0] = TREE_OPERAND (arg0, 0);
11615 pmop[1] = NULL;
11616 if (TREE_CODE (arg0) != NEGATE_EXPR)
11618 pmop[1] = TREE_OPERAND (arg0, 1);
11619 which = 1;
11622 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11623 which = -1;
11625 for (; which >= 0; which--)
11626 switch (TREE_CODE (pmop[which]))
11628 case BIT_AND_EXPR:
11629 case BIT_IOR_EXPR:
11630 case BIT_XOR_EXPR:
11631 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11632 != INTEGER_CST)
11633 break;
11634 cst0 = TREE_OPERAND (pmop[which], 1);
11635 cst0 &= cst1;
11636 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11638 if (cst0 != cst1)
11639 break;
11641 else if (cst0 != 0)
11642 break;
11643 /* If C or D is of the form (A & N) where
11644 (N & M) == M, or of the form (A | N) or
11645 (A ^ N) where (N & M) == 0, replace it with A. */
11646 pmop[which] = TREE_OPERAND (pmop[which], 0);
11647 break;
11648 case INTEGER_CST:
11649 /* If C or D is an N where (N & M) == 0, it can be
11650 omitted (assumed 0). */
11651 if ((TREE_CODE (arg0) == PLUS_EXPR
11652 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11653 && (cst1 & pmop[which]) == 0)
11654 pmop[which] = NULL;
11655 break;
11656 default:
11657 break;
11660 /* Only build anything new if we optimized one or both arguments
11661 above. */
11662 if (pmop[0] != TREE_OPERAND (arg0, 0)
11663 || (TREE_CODE (arg0) != NEGATE_EXPR
11664 && pmop[1] != TREE_OPERAND (arg0, 1)))
11666 tree utype = TREE_TYPE (arg0);
11667 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11669 /* Perform the operations in a type that has defined
11670 overflow behavior. */
11671 utype = unsigned_type_for (TREE_TYPE (arg0));
11672 if (pmop[0] != NULL)
11673 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11674 if (pmop[1] != NULL)
11675 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11678 if (TREE_CODE (arg0) == NEGATE_EXPR)
11679 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11680 else if (TREE_CODE (arg0) == PLUS_EXPR)
11682 if (pmop[0] != NULL && pmop[1] != NULL)
11683 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11684 pmop[0], pmop[1]);
11685 else if (pmop[0] != NULL)
11686 tem = pmop[0];
11687 else if (pmop[1] != NULL)
11688 tem = pmop[1];
11689 else
11690 return build_int_cst (type, 0);
11692 else if (pmop[0] == NULL)
11693 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11694 else
11695 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11696 pmop[0], pmop[1]);
11697 /* TEM is now the new binary +, - or unary - replacement. */
11698 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11699 fold_convert_loc (loc, utype, arg1));
11700 return fold_convert_loc (loc, type, tem);
11705 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11706 if (t1 != NULL_TREE)
11707 return t1;
11708 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11709 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11710 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11712 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11714 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11715 if (mask == -1)
11716 return
11717 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11720 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11722 This results in more efficient code for machines without a NOR
11723 instruction. Combine will canonicalize to the first form
11724 which will allow use of NOR instructions provided by the
11725 backend if they exist. */
11726 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11727 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11729 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11730 build2 (BIT_IOR_EXPR, type,
11731 fold_convert_loc (loc, type,
11732 TREE_OPERAND (arg0, 0)),
11733 fold_convert_loc (loc, type,
11734 TREE_OPERAND (arg1, 0))));
11737 /* If arg0 is derived from the address of an object or function, we may
11738 be able to fold this expression using the object or function's
11739 alignment. */
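/* E.g. if arg0 is the address of an object with 8-byte alignment
   (modulus 8, residue 0), then arg0 & 7 folds to 0. */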
11740 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11742 unsigned HOST_WIDE_INT modulus, residue;
11743 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11745 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11746 integer_onep (arg1));
11748 /* This works because modulus is a power of 2. If this weren't the
11749 case, we'd have to replace it by its greatest power-of-2
11750 divisor: modulus & -modulus. */
11751 if (low < modulus)
11752 return build_int_cst (type, residue & low);
11755 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11756 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11757 if the new mask might be further optimized. */
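/* E.g. in a type wider than 8 bits, (X << 4) & 0xF5 becomes
   (X << 4) & 0xFF: the low four bits of X << 4 are zero anyway, and
   0xFF is a byte-mode mask that later folds can exploit. */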
11758 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11759 || TREE_CODE (arg0) == RSHIFT_EXPR)
11760 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11761 && TREE_CODE (arg1) == INTEGER_CST
11762 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11763 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11764 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11765 < TYPE_PRECISION (TREE_TYPE (arg0))))
11767 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11768 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11769 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11770 tree shift_type = TREE_TYPE (arg0);
11772 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11773 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11774 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11775 && TYPE_PRECISION (TREE_TYPE (arg0))
11776 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11778 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11779 tree arg00 = TREE_OPERAND (arg0, 0);
11780 /* See if more bits can be proven as zero because of
11781 zero extension. */
11782 if (TREE_CODE (arg00) == NOP_EXPR
11783 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11785 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11786 if (TYPE_PRECISION (inner_type)
11787 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11788 && TYPE_PRECISION (inner_type) < prec)
11790 prec = TYPE_PRECISION (inner_type);
11791 /* See if we can shorten the right shift. */
11792 if (shiftc < prec)
11793 shift_type = inner_type;
11794 /* Otherwise X >> C1 is all zeros, so we'll optimize
11795 it into (X, 0) later on by making sure zerobits
11796 is all ones. */
11799 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11800 if (shiftc < prec)
11802 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11803 zerobits <<= prec - shiftc;
11805 /* For an arithmetic shift, if the sign bit could be set, zerobits
11806 may actually contain sign bits, so no transformation is
11807 possible unless MASK masks them all away. In that
11808 case the shift needs to be converted into a logical shift. */
11809 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11810 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11812 if ((mask & zerobits) == 0)
11813 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11814 else
11815 zerobits = 0;
11819 /* ((X << 16) & 0xff00) is (X, 0). */
11820 if ((mask & zerobits) == mask)
11821 return omit_one_operand_loc (loc, type,
11822 build_int_cst (type, 0), arg0);
11824 newmask = mask | zerobits;
11825 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11827 /* Only do the transformation if NEWMASK is some integer
11828 mode's mask. */
11829 for (prec = BITS_PER_UNIT;
11830 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11831 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11832 break;
11833 if (prec < HOST_BITS_PER_WIDE_INT
11834 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11836 tree newmaskt;
11838 if (shift_type != TREE_TYPE (arg0))
11840 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11841 fold_convert_loc (loc, shift_type,
11842 TREE_OPERAND (arg0, 0)),
11843 TREE_OPERAND (arg0, 1));
11844 tem = fold_convert_loc (loc, type, tem);
11846 else
11847 tem = op0;
11848 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11849 if (!tree_int_cst_equal (newmaskt, arg1))
11850 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11855 goto associate;
11857 case RDIV_EXPR:
11858 /* Don't touch a floating-point divide by zero unless the mode
11859 of the constant can represent infinity. */
11860 if (TREE_CODE (arg1) == REAL_CST
11861 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11862 && real_zerop (arg1))
11863 return NULL_TREE;
11865 /* Optimize A / A to 1.0 if we don't care about
11866 NaNs or Infinities. Skip the transformation
11867 for non-real operands. */
11868 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11869 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11870 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11871 && operand_equal_p (arg0, arg1, 0))
11873 tree r = build_real (TREE_TYPE (arg0), dconst1);
11875 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11878 /* The complex version of the above A / A optimization. */
11879 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11880 && operand_equal_p (arg0, arg1, 0))
11882 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11883 if (! HONOR_NANS (TYPE_MODE (elem_type))
11884 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11886 tree r = build_real (elem_type, dconst1);
11887 /* omit_two_operands will call fold_convert for us. */
11888 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11892 /* (-A) / (-B) -> A / B */
11893 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11894 return fold_build2_loc (loc, RDIV_EXPR, type,
11895 TREE_OPERAND (arg0, 0),
11896 negate_expr (arg1));
11897 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11898 return fold_build2_loc (loc, RDIV_EXPR, type,
11899 negate_expr (arg0),
11900 TREE_OPERAND (arg1, 0));
11902 /* In IEEE floating point, x/1 is not equivalent to x for SNaNs. */
11903 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11904 && real_onep (arg1))
11905 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11907 /* In IEEE floating point, x/-1 is not equivalent to -x for SNaNs. */
11908 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11909 && real_minus_onep (arg1))
11910 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11911 negate_expr (arg0)));
11913 /* If ARG1 is a constant, we can convert this to a multiply by the
11914 reciprocal. This does not have the same rounding properties,
11915 so only do this if -freciprocal-math. We can actually
11916 always safely do it if ARG1 is a power of two, but it's hard to
11917 tell if it is or not in a portable manner. */
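/* E.g. x / 5.0 becomes x * 0.2 only under -freciprocal-math, while
   x / 4.0 may always become x * 0.25 because the reciprocal of a
   power of two is exact. */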
11918 if (optimize
11919 && (TREE_CODE (arg1) == REAL_CST
11920 || (TREE_CODE (arg1) == COMPLEX_CST
11921 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11922 || (TREE_CODE (arg1) == VECTOR_CST
11923 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11925 if (flag_reciprocal_math
11926 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11927 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11928 /* Find the reciprocal if optimizing and the result is exact.
11929 TODO: Complex reciprocal not implemented. */
11930 if (TREE_CODE (arg1) != COMPLEX_CST)
11932 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11934 if (inverse)
11935 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11938 /* Convert A/B/C to A/(B*C). */
11939 if (flag_reciprocal_math
11940 && TREE_CODE (arg0) == RDIV_EXPR)
11941 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11942 fold_build2_loc (loc, MULT_EXPR, type,
11943 TREE_OPERAND (arg0, 1), arg1));
11945 /* Convert A/(B/C) to (A/B)*C. */
11946 if (flag_reciprocal_math
11947 && TREE_CODE (arg1) == RDIV_EXPR)
11948 return fold_build2_loc (loc, MULT_EXPR, type,
11949 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11950 TREE_OPERAND (arg1, 0)),
11951 TREE_OPERAND (arg1, 1));
11953 /* Convert C1/(X*C2) into (C1/C2)/X. */
11954 if (flag_reciprocal_math
11955 && TREE_CODE (arg1) == MULT_EXPR
11956 && TREE_CODE (arg0) == REAL_CST
11957 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11959 tree tem = const_binop (RDIV_EXPR, arg0,
11960 TREE_OPERAND (arg1, 1));
11961 if (tem)
11962 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11963 TREE_OPERAND (arg1, 0));
11966 if (flag_unsafe_math_optimizations)
11968 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11969 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11971 /* Optimize sin(x)/cos(x) as tan(x). */
11972 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11973 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11974 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11975 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11976 CALL_EXPR_ARG (arg1, 0), 0))
11978 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11980 if (tanfn != NULL_TREE)
11981 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11984 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11985 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11986 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11987 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11988 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11989 CALL_EXPR_ARG (arg1, 0), 0))
11991 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11993 if (tanfn != NULL_TREE)
11995 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11996 CALL_EXPR_ARG (arg0, 0));
11997 return fold_build2_loc (loc, RDIV_EXPR, type,
11998 build_real (type, dconst1), tmp);
12002 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12003 NaNs or Infinities. */
12004 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12005 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12006 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12008 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12009 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12011 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12012 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12013 && operand_equal_p (arg00, arg01, 0))
12015 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12017 if (cosfn != NULL_TREE)
12018 return build_call_expr_loc (loc, cosfn, 1, arg00);
12022 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12023 NaNs or Infinities. */
12024 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12025 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12026 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12028 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12029 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12031 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12032 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12033 && operand_equal_p (arg00, arg01, 0))
12035 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12037 if (cosfn != NULL_TREE)
12039 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12040 return fold_build2_loc (loc, RDIV_EXPR, type,
12041 build_real (type, dconst1),
12042 tmp);
12047 /* Optimize pow(x,c)/x as pow(x,c-1). */
12048 if (fcode0 == BUILT_IN_POW
12049 || fcode0 == BUILT_IN_POWF
12050 || fcode0 == BUILT_IN_POWL)
12052 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12053 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12054 if (TREE_CODE (arg01) == REAL_CST
12055 && !TREE_OVERFLOW (arg01)
12056 && operand_equal_p (arg1, arg00, 0))
12058 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12059 REAL_VALUE_TYPE c;
12060 tree arg;
12062 c = TREE_REAL_CST (arg01);
12063 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12064 arg = build_real (type, c);
12065 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12069 /* Optimize a/root(b/c) into a*root(c/b). */
12070 if (BUILTIN_ROOT_P (fcode1))
12072 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12074 if (TREE_CODE (rootarg) == RDIV_EXPR)
12076 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12077 tree b = TREE_OPERAND (rootarg, 0);
12078 tree c = TREE_OPERAND (rootarg, 1);
12080 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12082 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12083 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12087 /* Optimize x/expN(y) into x*expN(-y). */
12088 if (BUILTIN_EXPONENT_P (fcode1))
12090 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12091 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12092 arg1 = build_call_expr_loc (loc,
12093 expfn, 1,
12094 fold_convert_loc (loc, type, arg));
12095 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12098 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12099 if (fcode1 == BUILT_IN_POW
12100 || fcode1 == BUILT_IN_POWF
12101 || fcode1 == BUILT_IN_POWL)
12103 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12104 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12105 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12106 tree neg11 = fold_convert_loc (loc, type,
12107 negate_expr (arg11));
12108 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12109 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12112 return NULL_TREE;
12114 case TRUNC_DIV_EXPR:
12115 /* Optimize (X & (-A)) / A where A is a power of 2,
12116 to X >> log2(A). */
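/* E.g. (X & -8) / 8 becomes X >> 3: X & -8 is a multiple of 8, so
   the shift loses no bits. */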
12117 if (TREE_CODE (arg0) == BIT_AND_EXPR
12118 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12119 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12121 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12122 arg1, TREE_OPERAND (arg0, 1));
12123 if (sum && integer_zerop (sum)) {
12124 tree pow2 = build_int_cst (integer_type_node,
12125 wi::exact_log2 (arg1));
12126 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12127 TREE_OPERAND (arg0, 0), pow2);
12131 /* Fall through */
12133 case FLOOR_DIV_EXPR:
12134 /* Simplify A / (B << N) where A and B are positive and B is
12135 a power of 2, to A >> (N + log2(B)). */
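/* E.g. A / (2 << N) becomes A >> (N + 1) when A is known
   nonnegative, since log2(2) == 1. */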
12136 strict_overflow_p = false;
12137 if (TREE_CODE (arg1) == LSHIFT_EXPR
12138 && (TYPE_UNSIGNED (type)
12139 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12141 tree sval = TREE_OPERAND (arg1, 0);
12142 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12144 tree sh_cnt = TREE_OPERAND (arg1, 1);
12145 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12146 wi::exact_log2 (sval));
12148 if (strict_overflow_p)
12149 fold_overflow_warning (("assuming signed overflow does not "
12150 "occur when simplifying A / (B << N)"),
12151 WARN_STRICT_OVERFLOW_MISC);
12153 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12154 sh_cnt, pow2);
12155 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12156 fold_convert_loc (loc, type, arg0), sh_cnt);
12160 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12161 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12162 if (INTEGRAL_TYPE_P (type)
12163 && TYPE_UNSIGNED (type)
12164 && code == FLOOR_DIV_EXPR)
12165 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12167 /* Fall through */
12169 case ROUND_DIV_EXPR:
12170 case CEIL_DIV_EXPR:
12171 case EXACT_DIV_EXPR:
12172 if (integer_zerop (arg1))
12173 return NULL_TREE;
12174 /* X / -1 is -X. */
12175 if (!TYPE_UNSIGNED (type)
12176 && TREE_CODE (arg1) == INTEGER_CST
12177 && wi::eq_p (arg1, -1))
12178 return fold_convert_loc (loc, type, negate_expr (arg0));
12180 /* Convert -A / -B to A / B when the type is signed and overflow is
12181 undefined. */
12182 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12183 && TREE_CODE (arg0) == NEGATE_EXPR
12184 && negate_expr_p (arg1))
12186 if (INTEGRAL_TYPE_P (type))
12187 fold_overflow_warning (("assuming signed overflow does not occur "
12188 "when distributing negation across "
12189 "division"),
12190 WARN_STRICT_OVERFLOW_MISC);
12191 return fold_build2_loc (loc, code, type,
12192 fold_convert_loc (loc, type,
12193 TREE_OPERAND (arg0, 0)),
12194 fold_convert_loc (loc, type,
12195 negate_expr (arg1)));
12197 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12198 && TREE_CODE (arg1) == NEGATE_EXPR
12199 && negate_expr_p (arg0))
12201 if (INTEGRAL_TYPE_P (type))
12202 fold_overflow_warning (("assuming signed overflow does not occur "
12203 "when distributing negation across "
12204 "division"),
12205 WARN_STRICT_OVERFLOW_MISC);
12206 return fold_build2_loc (loc, code, type,
12207 fold_convert_loc (loc, type,
12208 negate_expr (arg0)),
12209 fold_convert_loc (loc, type,
12210 TREE_OPERAND (arg1, 0)));
12213 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12214 operation, EXACT_DIV_EXPR.
12216 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12217 At one time others generated faster code; it's not clear whether they
12218 still do after the last round of changes to the DIV code in expmed.c. */
12219 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12220 && multiple_of_p (type, arg0, arg1))
12221 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12223 strict_overflow_p = false;
12224 if (TREE_CODE (arg1) == INTEGER_CST
12225 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12226 &strict_overflow_p)))
12228 if (strict_overflow_p)
12229 fold_overflow_warning (("assuming signed overflow does not occur "
12230 "when simplifying division"),
12231 WARN_STRICT_OVERFLOW_MISC);
12232 return fold_convert_loc (loc, type, tem);
12235 return NULL_TREE;
12237 case CEIL_MOD_EXPR:
12238 case FLOOR_MOD_EXPR:
12239 case ROUND_MOD_EXPR:
12240 case TRUNC_MOD_EXPR:
12241 /* X % -1 is zero. */
12242 if (!TYPE_UNSIGNED (type)
12243 && TREE_CODE (arg1) == INTEGER_CST
12244 && wi::eq_p (arg1, -1))
12245 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12247 /* X % -C is the same as X % C. */
12248 if (code == TRUNC_MOD_EXPR
12249 && TYPE_SIGN (type) == SIGNED
12250 && TREE_CODE (arg1) == INTEGER_CST
12251 && !TREE_OVERFLOW (arg1)
12252 && wi::neg_p (arg1)
12253 && !TYPE_OVERFLOW_TRAPS (type)
12254 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12255 && !sign_bit_p (arg1, arg1))
12256 return fold_build2_loc (loc, code, type,
12257 fold_convert_loc (loc, type, arg0),
12258 fold_convert_loc (loc, type,
12259 negate_expr (arg1)));
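/* E.g. for signed x, "x % -16" folds to "x % 16"; TRUNC_MOD takes its
   sign from X, not from the divisor. C == INT_MIN is excluded by the
   sign_bit_p check above because there C == -C. */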
12261 /* X % -Y is the same as X % Y. */
12262 if (code == TRUNC_MOD_EXPR
12263 && !TYPE_UNSIGNED (type)
12264 && TREE_CODE (arg1) == NEGATE_EXPR
12265 && !TYPE_OVERFLOW_TRAPS (type))
12266 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12267 fold_convert_loc (loc, type,
12268 TREE_OPERAND (arg1, 0)));
12270 strict_overflow_p = false;
12271 if (TREE_CODE (arg1) == INTEGER_CST
12272 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12273 &strict_overflow_p)))
12275 if (strict_overflow_p)
12276 fold_overflow_warning (("assuming signed overflow does not occur "
12277 "when simplifying modulus"),
12278 WARN_STRICT_OVERFLOW_MISC);
12279 return fold_convert_loc (loc, type, tem);
12282 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12283 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12284 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12285 && (TYPE_UNSIGNED (type)
12286 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12288 tree c = arg1;
12289 /* Also optimize A % (C << N) where C is a power of 2,
12290 to A & ((C << N) - 1). */
12291 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12292 c = TREE_OPERAND (arg1, 0);
12294 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12296 tree mask
12297 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12298 build_int_cst (TREE_TYPE (arg1), 1));
12299 if (strict_overflow_p)
12300 fold_overflow_warning (("assuming signed overflow does not "
12301 "occur when simplifying "
12302 "X % (power of two)"),
12303 WARN_STRICT_OVERFLOW_MISC);
12304 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12305 fold_convert_loc (loc, type, arg0),
12306 fold_convert_loc (loc, type, mask));
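/* E.g. unsigned "x % 8" folds to "x & 7", and "x % (1 << n)" to
   "x & ((1 << n) - 1)" (hypothetical operands x and n). */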
12310 return NULL_TREE;
12312 case LROTATE_EXPR:
12313 case RROTATE_EXPR:
12314 if (integer_all_onesp (arg0))
12315 return omit_one_operand_loc (loc, type, arg0, arg1);
12316 goto shift;
12318 case RSHIFT_EXPR:
12319 /* Optimize -1 >> x for arithmetic right shifts. */
12320 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12321 && tree_expr_nonnegative_p (arg1))
12322 return omit_one_operand_loc (loc, type, arg0, arg1);
12323 /* ... fall through ... */
12325 case LSHIFT_EXPR:
12326 shift:
12327 if (integer_zerop (arg1))
12328 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12329 if (integer_zerop (arg0))
12330 return omit_one_operand_loc (loc, type, arg0, arg1);
12332 /* Prefer vector1 << scalar to vector1 << vector2
12333 if vector2 is uniform. */
12334 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12335 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12336 return fold_build2_loc (loc, code, type, op0, tem);
12338 /* Since negative shift count is not well-defined,
12339 don't try to compute it in the compiler. */
12340 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12341 return NULL_TREE;
12343 prec = element_precision (type);
12345 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12346 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12347 && tree_to_uhwi (arg1) < prec
12348 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12349 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12351 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12352 + tree_to_uhwi (arg1));
12354 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12355 being well defined. */
12356 if (low >= prec)
12358 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12359 low = low % prec;
12360 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12361 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12362 TREE_OPERAND (arg0, 0));
12363 else
12364 low = prec - 1;
12367 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12368 build_int_cst (TREE_TYPE (arg1), low));
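/* E.g. "(x >> 3) >> 2" folds to "x >> 5". If the combined count
   reaches the precision, left and unsigned shifts fold to zero,
   rotates reduce the count modulo the precision, and a signed right
   shift is clamped to precision - 1. */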
12371 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12372 into x & ((unsigned)-1 >> c) for unsigned types. */
12373 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12374 || (TYPE_UNSIGNED (type)
12375 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12376 && tree_fits_uhwi_p (arg1)
12377 && tree_to_uhwi (arg1) < prec
12378 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12379 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12381 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12382 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12383 tree lshift;
12384 tree arg00;
12386 if (low0 == low1)
12388 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12390 lshift = build_minus_one_cst (type);
12391 lshift = const_binop (code, lshift, arg1);
12393 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
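/* E.g. "(x >> 4) << 4" folds to "x & (-1 << 4)", i.e. "x & -16", and
   for unsigned x, "(x << 4) >> 4" folds to "x & (~0u >> 4)". */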
12397 /* Rewrite an LROTATE_EXPR by a constant into an
12398 RROTATE_EXPR by a new constant. */
12399 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12401 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12402 tem = const_binop (MINUS_EXPR, tem, arg1);
12403 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
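/* E.g. in a 32-bit type, a rotate left by 5 is rewritten as a rotate
   right by 27, keeping a single canonical rotate direction. */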
12406 /* If we have a rotate of a bit operation with the rotate count and
12407 the second operand of the bit operation both constant,
12408 permute the two operations. */
12409 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12410 && (TREE_CODE (arg0) == BIT_AND_EXPR
12411 || TREE_CODE (arg0) == BIT_IOR_EXPR
12412 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12413 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12414 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12415 fold_build2_loc (loc, code, type,
12416 TREE_OPERAND (arg0, 0), arg1),
12417 fold_build2_loc (loc, code, type,
12418 TREE_OPERAND (arg0, 1), arg1));
12420 /* Two consecutive rotates adding up to an integer
12421 multiple of the precision of the type can be ignored. */
12422 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12423 && TREE_CODE (arg0) == RROTATE_EXPR
12424 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12425 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12426 prec) == 0)
12427 return TREE_OPERAND (arg0, 0);
12429 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12430 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12431 if the latter can be further optimized. */
12432 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12433 && TREE_CODE (arg0) == BIT_AND_EXPR
12434 && TREE_CODE (arg1) == INTEGER_CST
12435 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12437 tree mask = fold_build2_loc (loc, code, type,
12438 fold_convert_loc (loc, type,
12439 TREE_OPERAND (arg0, 1)),
12440 arg1);
12441 tree shift = fold_build2_loc (loc, code, type,
12442 fold_convert_loc (loc, type,
12443 TREE_OPERAND (arg0, 0)),
12444 arg1);
12445 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12446 if (tem)
12447 return tem;
12450 return NULL_TREE;
12452 case MIN_EXPR:
12453 if (operand_equal_p (arg0, arg1, 0))
12454 return omit_one_operand_loc (loc, type, arg0, arg1);
12455 if (INTEGRAL_TYPE_P (type)
12456 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12457 return omit_one_operand_loc (loc, type, arg1, arg0);
12458 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12459 if (tem)
12460 return tem;
12461 goto associate;
12463 case MAX_EXPR:
12464 if (operand_equal_p (arg0, arg1, 0))
12465 return omit_one_operand_loc (loc, type, arg0, arg1);
12466 if (INTEGRAL_TYPE_P (type)
12467 && TYPE_MAX_VALUE (type)
12468 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12469 return omit_one_operand_loc (loc, type, arg1, arg0);
12470 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12471 if (tem)
12472 return tem;
12473 goto associate;
12475 case TRUTH_ANDIF_EXPR:
12476 /* Note that the operands of this must be ints
12477 and their values must be 0 or 1.
12478 ("true" is a fixed value perhaps depending on the language.) */
12479 /* If first arg is constant zero, return it. */
12480 if (integer_zerop (arg0))
12481 return fold_convert_loc (loc, type, arg0);
12482 case TRUTH_AND_EXPR:
12483 /* If either arg is constant true, drop it. */
12484 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12485 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12486 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12487 /* Preserve sequence points. */
12488 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12489 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12490 /* If second arg is constant zero, result is zero, but first arg
12491 must be evaluated. */
12492 if (integer_zerop (arg1))
12493 return omit_one_operand_loc (loc, type, arg1, arg0);
12494 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12495 case will be handled here. */
12496 if (integer_zerop (arg0))
12497 return omit_one_operand_loc (loc, type, arg0, arg1);
12499 /* !X && X is always false. */
12500 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12501 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12502 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12503 /* X && !X is always false. */
12504 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12505 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12506 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12508 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12509 means A >= Y && A != MAX, but in this case we know that
12510 A < X <= MAX. */
12512 if (!TREE_SIDE_EFFECTS (arg0)
12513 && !TREE_SIDE_EFFECTS (arg1))
12515 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12516 if (tem && !operand_equal_p (tem, arg0, 0))
12517 return fold_build2_loc (loc, code, type, tem, arg1);
12519 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12520 if (tem && !operand_equal_p (tem, arg1, 0))
12521 return fold_build2_loc (loc, code, type, arg0, tem);
12524 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12525 != NULL_TREE)
12526 return tem;
12528 return NULL_TREE;
12530 case TRUTH_ORIF_EXPR:
12531 /* Note that the operands of this must be ints
12532 and their values must be 0 or true.
12533 ("true" is a fixed value perhaps depending on the language.) */
12534 /* If first arg is constant true, return it. */
12535 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12536 return fold_convert_loc (loc, type, arg0);
12537 case TRUTH_OR_EXPR:
12538 /* If either arg is constant zero, drop it. */
12539 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12540 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12541 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12542 /* Preserve sequence points. */
12543 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12544 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12545 /* If second arg is constant true, result is true, but we must
12546 evaluate first arg. */
12547 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12548 return omit_one_operand_loc (loc, type, arg1, arg0);
12549 /* Likewise for first arg, but note this only occurs here for
12550 TRUTH_OR_EXPR. */
12551 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12552 return omit_one_operand_loc (loc, type, arg0, arg1);
12554 /* !X || X is always true. */
12555 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12556 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12557 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12558 /* X || !X is always true. */
12559 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12560 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12561 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12563 /* (X && !Y) || (!X && Y) is X ^ Y */
12564 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12565 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12567 tree a0, a1, l0, l1, n0, n1;
12569 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12570 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12572 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12573 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12575 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12576 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12578 if ((operand_equal_p (n0, a0, 0)
12579 && operand_equal_p (n1, a1, 0))
12580 || (operand_equal_p (n0, a1, 0)
12581 && operand_equal_p (n1, a0, 0)))
12582 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
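/* E.g. "(a && !b) || (!a && b)" folds to "a ^ b" (hypothetical
   truth-valued operands a and b). */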
12585 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12586 != NULL_TREE)
12587 return tem;
12589 return NULL_TREE;
12591 case TRUTH_XOR_EXPR:
12592 /* If the second arg is constant zero, drop it. */
12593 if (integer_zerop (arg1))
12594 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12595 /* If the second arg is constant true, this is a logical inversion. */
12596 if (integer_onep (arg1))
12598 tem = invert_truthvalue_loc (loc, arg0);
12599 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12601 /* Identical arguments cancel to zero. */
12602 if (operand_equal_p (arg0, arg1, 0))
12603 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12605 /* !X ^ X is always true. */
12606 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12607 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12608 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12610 /* X ^ !X is always true. */
12611 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12612 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12613 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12615 return NULL_TREE;
12617 case EQ_EXPR:
12618 case NE_EXPR:
12619 STRIP_NOPS (arg0);
12620 STRIP_NOPS (arg1);
12622 tem = fold_comparison (loc, code, type, op0, op1);
12623 if (tem != NULL_TREE)
12624 return tem;
12626 /* bool_var != 0 becomes bool_var. */
12627 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12628 && code == NE_EXPR)
12629 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12631 /* bool_var == 1 becomes bool_var. */
12632 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12633 && code == EQ_EXPR)
12634 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12636 /* bool_var != 1 becomes !bool_var. */
12637 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12638 && code == NE_EXPR)
12639 return fold_convert_loc (loc, type,
12640 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12641 TREE_TYPE (arg0), arg0));
12643 /* bool_var == 0 becomes !bool_var. */
12644 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12645 && code == EQ_EXPR)
12646 return fold_convert_loc (loc, type,
12647 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12648 TREE_TYPE (arg0), arg0));
12650 /* !exp != 0 becomes !exp */
12651 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12652 && code == NE_EXPR)
12653 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12655 /* If this is an equality comparison of the address of two non-weak,
12656 unaliased symbols neither of which are extern (since we do not
12657 have access to attributes for externs), then we know the result. */
12658 if (TREE_CODE (arg0) == ADDR_EXPR
12659 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12660 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12661 && ! lookup_attribute ("alias",
12662 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12663 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12664 && TREE_CODE (arg1) == ADDR_EXPR
12665 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12666 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12667 && ! lookup_attribute ("alias",
12668 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12669 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12671 /* We know that we're looking at the address of two
12672 non-weak, unaliased, static _DECL nodes.
12674 It is both wasteful and incorrect to call operand_equal_p
12675 to compare the two ADDR_EXPR nodes. It is wasteful in that
12676 all we need to do is test pointer equality for the arguments
12677 to the two ADDR_EXPR nodes. It is incorrect to use
12678 operand_equal_p as that function is NOT equivalent to a
12679 C equality test. It can in fact return false for two
12680 objects which would test as equal using the C equality
12681 operator. */
12682 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12683 return constant_boolean_node (equal
12684 ? code == EQ_EXPR : code != EQ_EXPR,
12685 type);
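/* E.g. given two distinct file-scope "static int a, b;", the
   comparison "&a == &b" folds to false and "&a != &b" to true,
   since the two _DECL nodes differ by pointer identity. */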
12688 /* Similarly for a NEGATE_EXPR. */
12689 if (TREE_CODE (arg0) == NEGATE_EXPR
12690 && TREE_CODE (arg1) == INTEGER_CST
12691 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12692 arg1)))
12693 && TREE_CODE (tem) == INTEGER_CST
12694 && !TREE_OVERFLOW (tem))
12695 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12697 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12698 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12699 && TREE_CODE (arg1) == INTEGER_CST
12700 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12701 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12702 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12703 fold_convert_loc (loc,
12704 TREE_TYPE (arg0),
12705 arg1),
12706 TREE_OPERAND (arg0, 1)));
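/* E.g. "(x ^ 5) == 3" folds to "x == 6", because 5 ^ 3 == 6. */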
12708 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12709 if ((TREE_CODE (arg0) == PLUS_EXPR
12710 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12711 || TREE_CODE (arg0) == MINUS_EXPR)
12712 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12713 0)),
12714 arg1, 0)
12715 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12716 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12718 tree val = TREE_OPERAND (arg0, 1);
12719 return omit_two_operands_loc (loc, type,
12720 fold_build2_loc (loc, code, type,
12721 val,
12722 build_int_cst (TREE_TYPE (val),
12723 0)),
12724 TREE_OPERAND (arg0, 0), arg1);
12727 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12728 if (TREE_CODE (arg0) == MINUS_EXPR
12729 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12730 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12731 1)),
12732 arg1, 0)
12733 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12735 return omit_two_operands_loc (loc, type,
12736 code == NE_EXPR
12737 ? boolean_true_node : boolean_false_node,
12738 TREE_OPERAND (arg0, 1), arg1);
12741 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12742 if (TREE_CODE (arg0) == ABS_EXPR
12743 && (integer_zerop (arg1) || real_zerop (arg1)))
12744 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12746 /* If this is an EQ or NE comparison with zero and ARG0 is
12747 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12748 two operations, but the latter can be done in one less insn
12749 on machines that have only two-operand insns or on which a
12750 constant cannot be the first operand. */
12751 if (TREE_CODE (arg0) == BIT_AND_EXPR
12752 && integer_zerop (arg1))
12754 tree arg00 = TREE_OPERAND (arg0, 0);
12755 tree arg01 = TREE_OPERAND (arg0, 1);
12756 if (TREE_CODE (arg00) == LSHIFT_EXPR
12757 && integer_onep (TREE_OPERAND (arg00, 0)))
12759 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12760 arg01, TREE_OPERAND (arg00, 1));
12761 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12762 build_int_cst (TREE_TYPE (arg0), 1));
12763 return fold_build2_loc (loc, code, type,
12764 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12765 arg1);
12767 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12768 && integer_onep (TREE_OPERAND (arg01, 0)))
12770 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12771 arg00, TREE_OPERAND (arg01, 1));
12772 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12773 build_int_cst (TREE_TYPE (arg0), 1));
12774 return fold_build2_loc (loc, code, type,
12775 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12776 arg1);
12780 /* If this is an NE or EQ comparison of zero against the result of a
12781 signed MOD operation whose second operand is a power of 2, make
12782 the MOD operation unsigned since it is simpler and equivalent. */
12783 if (integer_zerop (arg1)
12784 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12785 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12786 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12787 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12788 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12789 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12791 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12792 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12793 fold_convert_loc (loc, newtype,
12794 TREE_OPERAND (arg0, 0)),
12795 fold_convert_loc (loc, newtype,
12796 TREE_OPERAND (arg0, 1)));
12798 return fold_build2_loc (loc, code, type, newmod,
12799 fold_convert_loc (loc, newtype, arg1));
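/* E.g. for signed x, "x % 4 == 0" becomes "(unsigned) x % 4u == 0",
   which the power-of-two modulus rule can then reduce to a mask
   test, "((unsigned) x & 3u) == 0". */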
12802 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12803 C1 is a valid shift constant, and C2 is a power of two, i.e.
12804 a single bit. */
12805 if (TREE_CODE (arg0) == BIT_AND_EXPR
12806 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12807 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12808 == INTEGER_CST
12809 && integer_pow2p (TREE_OPERAND (arg0, 1))
12810 && integer_zerop (arg1))
12812 tree itype = TREE_TYPE (arg0);
12813 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12814 prec = TYPE_PRECISION (itype);
12816 /* Check for a valid shift count. */
12817 if (wi::ltu_p (arg001, prec))
12819 tree arg01 = TREE_OPERAND (arg0, 1);
12820 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12821 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12822 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12823 can be rewritten as (X & (C2 << C1)) != 0. */
12824 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12826 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12827 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12828 return fold_build2_loc (loc, code, type, tem,
12829 fold_convert_loc (loc, itype, arg1));
12831 /* Otherwise, for signed (arithmetic) shifts,
12832 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12833 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12834 else if (!TYPE_UNSIGNED (itype))
12835 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12836 arg000, build_int_cst (itype, 0));
12837 /* Otherwise, for unsigned (logical) shifts,
12838 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12839 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12840 else
12841 return omit_one_operand_loc (loc, type,
12842 code == EQ_EXPR ? integer_one_node
12843 : integer_zero_node,
12844 arg000);
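/* E.g. "((x >> 3) & 4) != 0" folds to "(x & 32) != 0", since
   4 << 3 == 32 fits in the precision; when C2 << C1 would overflow,
   a signed shift leaves a plain sign test and an unsigned shift a
   constant result. */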
12848 /* If we have (A & C) == C where C is a power of 2, convert this into
12849 (A & C) != 0. Similarly for NE_EXPR. */
12850 if (TREE_CODE (arg0) == BIT_AND_EXPR
12851 && integer_pow2p (TREE_OPERAND (arg0, 1))
12852 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12853 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12854 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12855 integer_zero_node));
12857 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12858 bit, then fold the expression into A < 0 or A >= 0. */
12859 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12860 if (tem)
12861 return tem;
12863 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12864 Similarly for NE_EXPR. */
12865 if (TREE_CODE (arg0) == BIT_AND_EXPR
12866 && TREE_CODE (arg1) == INTEGER_CST
12867 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12869 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12870 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12871 TREE_OPERAND (arg0, 1));
12872 tree dandnotc
12873 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12874 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12875 notc);
12876 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12877 if (integer_nonzerop (dandnotc))
12878 return omit_one_operand_loc (loc, type, rslt, arg0);
12881 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12882 Similarly for NE_EXPR. */
12883 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12884 && TREE_CODE (arg1) == INTEGER_CST
12885 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12887 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12888 tree candnotd
12889 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12890 TREE_OPERAND (arg0, 1),
12891 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12892 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12893 if (integer_nonzerop (candnotd))
12894 return omit_one_operand_loc (loc, type, rslt, arg0);
12897 /* If this is a comparison of a field, we may be able to simplify it. */
12898 if ((TREE_CODE (arg0) == COMPONENT_REF
12899 || TREE_CODE (arg0) == BIT_FIELD_REF)
12900 /* Handle the constant case even without -O
12901 to make sure the warnings are given. */
12902 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12904 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12905 if (t1)
12906 return t1;
12909 /* Optimize comparisons of strlen vs zero to a compare of the
12910 first character of the string vs zero. To wit,
12911 strlen(ptr) == 0 => *ptr == 0
12912 strlen(ptr) != 0 => *ptr != 0
12913 Other cases should reduce to one of these two (or a constant)
12914 due to the return value of strlen being unsigned. */
12915 if (TREE_CODE (arg0) == CALL_EXPR
12916 && integer_zerop (arg1))
12918 tree fndecl = get_callee_fndecl (arg0);
12920 if (fndecl
12921 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12922 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12923 && call_expr_nargs (arg0) == 1
12924 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12926 tree iref = build_fold_indirect_ref_loc (loc,
12927 CALL_EXPR_ARG (arg0, 0));
12928 return fold_build2_loc (loc, code, type, iref,
12929 build_int_cst (TREE_TYPE (iref), 0));
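/* E.g. "strlen (s) == 0" folds to "*s == 0", replacing a call and a
   full traversal with a single character load (hypothetical s). */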
12933 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12934 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12935 if (TREE_CODE (arg0) == RSHIFT_EXPR
12936 && integer_zerop (arg1)
12937 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12939 tree arg00 = TREE_OPERAND (arg0, 0);
12940 tree arg01 = TREE_OPERAND (arg0, 1);
12941 tree itype = TREE_TYPE (arg00);
12942 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
12944 if (TYPE_UNSIGNED (itype))
12946 itype = signed_type_for (itype);
12947 arg00 = fold_convert_loc (loc, itype, arg00);
12949 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12950 type, arg00, build_zero_cst (itype));
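/* E.g. for 32-bit int x, "(x >> 31) != 0" folds to "x < 0" and
   "(x >> 31) == 0" to "x >= 0"; an unsigned X is first converted
   to the corresponding signed type. */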
12954 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12955 if (integer_zerop (arg1)
12956 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12957 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12958 TREE_OPERAND (arg0, 1));
12960 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12961 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12962 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12963 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12964 build_zero_cst (TREE_TYPE (arg0)));
12965 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12966 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12967 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12968 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12969 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12970 build_zero_cst (TREE_TYPE (arg0)));
12972 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12973 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12974 && TREE_CODE (arg1) == INTEGER_CST
12975 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12976 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12977 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12978 TREE_OPERAND (arg0, 1), arg1));
12980 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12981 (X & C) == 0 when C is a single bit. */
12982 if (TREE_CODE (arg0) == BIT_AND_EXPR
12983 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12984 && integer_zerop (arg1)
12985 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12987 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12988 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12989 TREE_OPERAND (arg0, 1));
12990 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12991 type, tem,
12992 fold_convert_loc (loc, TREE_TYPE (arg0),
12993 arg1));
12996 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12997 constant C is a power of two, i.e. a single bit. */
12998 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12999 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13000 && integer_zerop (arg1)
13001 && integer_pow2p (TREE_OPERAND (arg0, 1))
13002 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13003 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13005 tree arg00 = TREE_OPERAND (arg0, 0);
13006 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13007 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13010 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13011 when C is a power of two, i.e. a single bit. */
13012 if (TREE_CODE (arg0) == BIT_AND_EXPR
13013 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13014 && integer_zerop (arg1)
13015 && integer_pow2p (TREE_OPERAND (arg0, 1))
13016 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13017 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13019 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13020 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13021 arg000, TREE_OPERAND (arg0, 1));
13022 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13023 tem, build_int_cst (TREE_TYPE (tem), 0));
13026 if (integer_zerop (arg1)
13027 && tree_expr_nonzero_p (arg0))
13029 tree res = constant_boolean_node (code==NE_EXPR, type);
13030 return omit_one_operand_loc (loc, type, res, arg0);
13033 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13034 if (TREE_CODE (arg0) == NEGATE_EXPR
13035 && TREE_CODE (arg1) == NEGATE_EXPR)
13036 return fold_build2_loc (loc, code, type,
13037 TREE_OPERAND (arg0, 0),
13038 fold_convert_loc (loc, TREE_TYPE (arg0),
13039 TREE_OPERAND (arg1, 0)));
13041 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13042 if (TREE_CODE (arg0) == BIT_AND_EXPR
13043 && TREE_CODE (arg1) == BIT_AND_EXPR)
13045 tree arg00 = TREE_OPERAND (arg0, 0);
13046 tree arg01 = TREE_OPERAND (arg0, 1);
13047 tree arg10 = TREE_OPERAND (arg1, 0);
13048 tree arg11 = TREE_OPERAND (arg1, 1);
13049 tree itype = TREE_TYPE (arg0);
13051 if (operand_equal_p (arg01, arg11, 0))
13052 return fold_build2_loc (loc, code, type,
13053 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13054 fold_build2_loc (loc,
13055 BIT_XOR_EXPR, itype,
13056 arg00, arg10),
13057 arg01),
13058 build_zero_cst (itype));
13060 if (operand_equal_p (arg01, arg10, 0))
13061 return fold_build2_loc (loc, code, type,
13062 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13063 fold_build2_loc (loc,
13064 BIT_XOR_EXPR, itype,
13065 arg00, arg11),
13066 arg01),
13067 build_zero_cst (itype));
13069 if (operand_equal_p (arg00, arg11, 0))
13070 return fold_build2_loc (loc, code, type,
13071 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13072 fold_build2_loc (loc,
13073 BIT_XOR_EXPR, itype,
13074 arg01, arg10),
13075 arg00),
13076 build_zero_cst (itype));
13078 if (operand_equal_p (arg00, arg10, 0))
13079 return fold_build2_loc (loc, code, type,
13080 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13081 fold_build2_loc (loc,
13082 BIT_XOR_EXPR, itype,
13083 arg01, arg11),
13084 arg00),
13085 build_zero_cst (itype));
13088 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13089 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13091 tree arg00 = TREE_OPERAND (arg0, 0);
13092 tree arg01 = TREE_OPERAND (arg0, 1);
13093 tree arg10 = TREE_OPERAND (arg1, 0);
13094 tree arg11 = TREE_OPERAND (arg1, 1);
13095 tree itype = TREE_TYPE (arg0);
13097 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13098 operand_equal_p guarantees no side-effects so we don't need
13099 to use omit_one_operand on Z. */
13100 if (operand_equal_p (arg01, arg11, 0))
13101 return fold_build2_loc (loc, code, type, arg00,
13102 fold_convert_loc (loc, TREE_TYPE (arg00),
13103 arg10));
13104 if (operand_equal_p (arg01, arg10, 0))
13105 return fold_build2_loc (loc, code, type, arg00,
13106 fold_convert_loc (loc, TREE_TYPE (arg00),
13107 arg11));
13108 if (operand_equal_p (arg00, arg11, 0))
13109 return fold_build2_loc (loc, code, type, arg01,
13110 fold_convert_loc (loc, TREE_TYPE (arg01),
13111 arg10));
13112 if (operand_equal_p (arg00, arg10, 0))
13113 return fold_build2_loc (loc, code, type, arg01,
13114 fold_convert_loc (loc, TREE_TYPE (arg01),
13115 arg11));
13117 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13118 if (TREE_CODE (arg01) == INTEGER_CST
13119 && TREE_CODE (arg11) == INTEGER_CST)
13121 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13122 fold_convert_loc (loc, itype, arg11));
13123 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13124 return fold_build2_loc (loc, code, type, tem,
13125 fold_convert_loc (loc, itype, arg10));
13129 /* Attempt to simplify equality/inequality comparisons of complex
13130 values. Only lower the comparison if the result is known or
13131 can be simplified to a single scalar comparison. */
13132 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13133 || TREE_CODE (arg0) == COMPLEX_CST)
13134 && (TREE_CODE (arg1) == COMPLEX_EXPR
13135 || TREE_CODE (arg1) == COMPLEX_CST))
13137 tree real0, imag0, real1, imag1;
13138 tree rcond, icond;
13140 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13142 real0 = TREE_OPERAND (arg0, 0);
13143 imag0 = TREE_OPERAND (arg0, 1);
13145 else
13147 real0 = TREE_REALPART (arg0);
13148 imag0 = TREE_IMAGPART (arg0);
13151 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13153 real1 = TREE_OPERAND (arg1, 0);
13154 imag1 = TREE_OPERAND (arg1, 1);
13156 else
13158 real1 = TREE_REALPART (arg1);
13159 imag1 = TREE_IMAGPART (arg1);
13162 rcond = fold_binary_loc (loc, code, type, real0, real1);
13163 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13165 if (integer_zerop (rcond))
13167 if (code == EQ_EXPR)
13168 return omit_two_operands_loc (loc, type, boolean_false_node,
13169 imag0, imag1);
13170 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13172 else
13174 if (code == NE_EXPR)
13175 return omit_two_operands_loc (loc, type, boolean_true_node,
13176 imag0, imag1);
13177 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13181 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13182 if (icond && TREE_CODE (icond) == INTEGER_CST)
13184 if (integer_zerop (icond))
13186 if (code == EQ_EXPR)
13187 return omit_two_operands_loc (loc, type, boolean_false_node,
13188 real0, real1);
13189 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13191 else
13193 if (code == NE_EXPR)
13194 return omit_two_operands_loc (loc, type, boolean_true_node,
13195 real0, real1);
13196 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13201 return NULL_TREE;
13203 case LT_EXPR:
13204 case GT_EXPR:
13205 case LE_EXPR:
13206 case GE_EXPR:
13207 tem = fold_comparison (loc, code, type, op0, op1);
13208 if (tem != NULL_TREE)
13209 return tem;
13211 /* Transform comparisons of the form X +- C CMP X. */
13212 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13213 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13214 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13215 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13216 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13217 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13219 tree arg01 = TREE_OPERAND (arg0, 1);
13220 enum tree_code code0 = TREE_CODE (arg0);
13221 int is_positive;
13223 if (TREE_CODE (arg01) == REAL_CST)
13224 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13225 else
13226 is_positive = tree_int_cst_sgn (arg01);
13228 /* (X - c) > X becomes false. */
13229 if (code == GT_EXPR
13230 && ((code0 == MINUS_EXPR && is_positive >= 0)
13231 || (code0 == PLUS_EXPR && is_positive <= 0)))
13233 if (TREE_CODE (arg01) == INTEGER_CST
13234 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13235 fold_overflow_warning (("assuming signed overflow does not "
13236 "occur when assuming that (X - c) > X "
13237 "is always false"),
13238 WARN_STRICT_OVERFLOW_ALL);
13239 return constant_boolean_node (0, type);
13242 /* Likewise (X + c) < X becomes false. */
13243 if (code == LT_EXPR
13244 && ((code0 == PLUS_EXPR && is_positive >= 0)
13245 || (code0 == MINUS_EXPR && is_positive <= 0)))
13247 if (TREE_CODE (arg01) == INTEGER_CST
13248 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13249 fold_overflow_warning (("assuming signed overflow does not "
13250 "occur when assuming that "
13251 "(X + c) < X is always false"),
13252 WARN_STRICT_OVERFLOW_ALL);
13253 return constant_boolean_node (0, type);
13256 /* Convert (X - c) <= X to true. */
13257 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13258 && code == LE_EXPR
13259 && ((code0 == MINUS_EXPR && is_positive >= 0)
13260 || (code0 == PLUS_EXPR && is_positive <= 0)))
13262 if (TREE_CODE (arg01) == INTEGER_CST
13263 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13264 fold_overflow_warning (("assuming signed overflow does not "
13265 "occur when assuming that "
13266 "(X - c) <= X is always true"),
13267 WARN_STRICT_OVERFLOW_ALL);
13268 return constant_boolean_node (1, type);
13271 /* Convert (X + c) >= X to true. */
13272 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13273 && code == GE_EXPR
13274 && ((code0 == PLUS_EXPR && is_positive >= 0)
13275 || (code0 == MINUS_EXPR && is_positive <= 0)))
13277 if (TREE_CODE (arg01) == INTEGER_CST
13278 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13279 fold_overflow_warning (("assuming signed overflow does not "
13280 "occur when assuming that "
13281 "(X + c) >= X is always true"),
13282 WARN_STRICT_OVERFLOW_ALL);
13283 return constant_boolean_node (1, type);
13286 if (TREE_CODE (arg01) == INTEGER_CST)
13288 /* Convert X + c > X and X - c < X to true for integers. */
13289 if (code == GT_EXPR
13290 && ((code0 == PLUS_EXPR && is_positive > 0)
13291 || (code0 == MINUS_EXPR && is_positive < 0)))
13293 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13294 fold_overflow_warning (("assuming signed overflow does "
13295 "not occur when assuming that "
13296 "(X + c) > X is always true"),
13297 WARN_STRICT_OVERFLOW_ALL);
13298 return constant_boolean_node (1, type);
13301 if (code == LT_EXPR
13302 && ((code0 == MINUS_EXPR && is_positive > 0)
13303 || (code0 == PLUS_EXPR && is_positive < 0)))
13305 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13306 fold_overflow_warning (("assuming signed overflow does "
13307 "not occur when assuming that "
13308 "(X - c) < X is always true"),
13309 WARN_STRICT_OVERFLOW_ALL);
13310 return constant_boolean_node (1, type);
13313 /* Convert X + c <= X and X - c >= X to false for integers. */
13314 if (code == LE_EXPR
13315 && ((code0 == PLUS_EXPR && is_positive > 0)
13316 || (code0 == MINUS_EXPR && is_positive < 0)))
13318 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13319 fold_overflow_warning (("assuming signed overflow does "
13320 "not occur when assuming that "
13321 "(X + c) <= X is always false"),
13322 WARN_STRICT_OVERFLOW_ALL);
13323 return constant_boolean_node (0, type);
13326 if (code == GE_EXPR
13327 && ((code0 == MINUS_EXPR && is_positive > 0)
13328 || (code0 == PLUS_EXPR && is_positive < 0)))
13330 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13331 fold_overflow_warning (("assuming signed overflow does "
13332 "not occur when assuming that "
13333 "(X - c) >= X is always false"),
13334 WARN_STRICT_OVERFLOW_ALL);
13335 return constant_boolean_node (0, type);
13340 /* Comparisons with the highest or lowest possible integer of
13341 the specified precision will have known values. */
13343 tree arg1_type = TREE_TYPE (arg1);
13344 unsigned int prec = TYPE_PRECISION (arg1_type);
13346 if (TREE_CODE (arg1) == INTEGER_CST
13347 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13349 wide_int max = wi::max_value (arg1_type);
13350 wide_int signed_max = wi::max_value (prec, SIGNED);
13351 wide_int min = wi::min_value (arg1_type);
13353 if (wi::eq_p (arg1, max))
13354 switch (code)
13356 case GT_EXPR:
13357 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13359 case GE_EXPR:
13360 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13362 case LE_EXPR:
13363 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13365 case LT_EXPR:
13366 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13368 /* The GE_EXPR and LT_EXPR cases above are not normally
13369 reached because of previous transformations. */
13371 default:
13372 break;
13374 else if (wi::eq_p (arg1, max - 1))
13375 switch (code)
13377 case GT_EXPR:
13378 arg1 = const_binop (PLUS_EXPR, arg1,
13379 build_int_cst (TREE_TYPE (arg1), 1));
13380 return fold_build2_loc (loc, EQ_EXPR, type,
13381 fold_convert_loc (loc,
13382 TREE_TYPE (arg1), arg0),
13383 arg1);
13384 case LE_EXPR:
13385 arg1 = const_binop (PLUS_EXPR, arg1,
13386 build_int_cst (TREE_TYPE (arg1), 1));
13387 return fold_build2_loc (loc, NE_EXPR, type,
13388 fold_convert_loc (loc, TREE_TYPE (arg1),
13389 arg0),
13390 arg1);
13391 default:
13392 break;
13394 else if (wi::eq_p (arg1, min))
13395 switch (code)
13397 case LT_EXPR:
13398 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13400 case LE_EXPR:
13401 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13403 case GE_EXPR:
13404 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13406 case GT_EXPR:
13407 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13409 default:
13410 break;
13412 else if (wi::eq_p (arg1, min + 1))
13413 switch (code)
13415 case GE_EXPR:
13416 arg1 = const_binop (MINUS_EXPR, arg1,
13417 build_int_cst (TREE_TYPE (arg1), 1));
13418 return fold_build2_loc (loc, NE_EXPR, type,
13419 fold_convert_loc (loc,
13420 TREE_TYPE (arg1), arg0),
13421 arg1);
13422 case LT_EXPR:
13423 arg1 = const_binop (MINUS_EXPR, arg1,
13424 build_int_cst (TREE_TYPE (arg1), 1));
13425 return fold_build2_loc (loc, EQ_EXPR, type,
13426 fold_convert_loc (loc, TREE_TYPE (arg1),
13427 arg0),
13428 arg1);
13429 default:
13430 break;
13433 else if (wi::eq_p (arg1, signed_max)
13434 && TYPE_UNSIGNED (arg1_type)
13435 /* We will flip the signedness of the comparison operator
13436 associated with the mode of arg1, so the sign bit is
13437 specified by this mode. Check that arg1 is the signed
13438 max associated with this sign bit. */
13439 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13440 /* signed_type does not work on pointer types. */
13441 && INTEGRAL_TYPE_P (arg1_type))
13443 /* The following case also applies to X < signed_max+1
13444 and X >= signed_max+1 because of previous transformations. */
13445 if (code == LE_EXPR || code == GT_EXPR)
13447 tree st = signed_type_for (arg1_type);
13448 return fold_build2_loc (loc,
13449 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13450 type, fold_convert_loc (loc, st, arg0),
13451 build_int_cst (st, 0));
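/* E.g. for unsigned 32-bit x, "x > 0x7fffffff" folds to
   "(int) x < 0" and "x <= 0x7fffffff" to "(int) x >= 0", turning
   the comparison into a sign-bit test. */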
13457 /* If we are comparing an ABS_EXPR with a constant, we can
13458 convert all the cases into explicit comparisons, but they may
13459 well not be faster than doing the ABS and one comparison.
13460 But ABS (X) <= C is a range comparison, which becomes a subtraction
13461 and a comparison, and is probably faster. */
13462 if (code == LE_EXPR
13463 && TREE_CODE (arg1) == INTEGER_CST
13464 && TREE_CODE (arg0) == ABS_EXPR
13465 && ! TREE_SIDE_EFFECTS (arg0)
13466 && (0 != (tem = negate_expr (arg1)))
13467 && TREE_CODE (tem) == INTEGER_CST
13468 && !TREE_OVERFLOW (tem))
13469 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13470 build2 (GE_EXPR, type,
13471 TREE_OPERAND (arg0, 0), tem),
13472 build2 (LE_EXPR, type,
13473 TREE_OPERAND (arg0, 0), arg1));
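/* E.g. "abs (x) <= 7" folds to "x >= -7 && x <= 7" (as a
   TRUTH_ANDIF_EXPR), provided negating the bound does not
   overflow. */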
13475 /* Convert ABS_EXPR<x> >= 0 to true. */
13476 strict_overflow_p = false;
13477 if (code == GE_EXPR
13478 && (integer_zerop (arg1)
13479 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13480 && real_zerop (arg1)))
13481 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13483 if (strict_overflow_p)
13484 fold_overflow_warning (("assuming signed overflow does not occur "
13485 "when simplifying comparison of "
13486 "absolute value and zero"),
13487 WARN_STRICT_OVERFLOW_CONDITIONAL);
13488 return omit_one_operand_loc (loc, type,
13489 constant_boolean_node (true, type),
13490 arg0);
13493 /* Convert ABS_EXPR<x> < 0 to false. */
13494 strict_overflow_p = false;
13495 if (code == LT_EXPR
13496 && (integer_zerop (arg1) || real_zerop (arg1))
13497 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13499 if (strict_overflow_p)
13500 fold_overflow_warning (("assuming signed overflow does not occur "
13501 "when simplifying comparison of "
13502 "absolute value and zero"),
13503 WARN_STRICT_OVERFLOW_CONDITIONAL);
13504 return omit_one_operand_loc (loc, type,
13505 constant_boolean_node (false, type),
13506 arg0);
13509 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13510 and similarly for >= into !=. */
13511 if ((code == LT_EXPR || code == GE_EXPR)
13512 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13513 && TREE_CODE (arg1) == LSHIFT_EXPR
13514 && integer_onep (TREE_OPERAND (arg1, 0)))
13515 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13516 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13517 TREE_OPERAND (arg1, 1)),
13518 build_zero_cst (TREE_TYPE (arg0)));
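/* E.g. for unsigned x, "x < (1 << y)" folds to "(x >> y) == 0" and
   "x >= (1 << y)" to "(x >> y) != 0" (hypothetical x and y). */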
13520 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing;
13521 otherwise Y might be >= # of bits in X's type and thus e.g.
13522 (unsigned char) (1 << Y) for Y == 15 might be 0.
13523 If the cast is widening, then 1 << Y should have unsigned type;
13524 otherwise, if Y is the number of bits in the signed shift type minus 1,
13525 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y == 31
13526 might be 0xffffffff80000000. */
13527 if ((code == LT_EXPR || code == GE_EXPR)
13528 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13529 && CONVERT_EXPR_P (arg1)
13530 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13531 && (TYPE_PRECISION (TREE_TYPE (arg1))
13532 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13533 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13534 || (TYPE_PRECISION (TREE_TYPE (arg1))
13535 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13536 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13538 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13539 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13540 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13541 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13542 build_zero_cst (TREE_TYPE (arg0)));
13545 return NULL_TREE;
13547 case UNORDERED_EXPR:
13548 case ORDERED_EXPR:
13549 case UNLT_EXPR:
13550 case UNLE_EXPR:
13551 case UNGT_EXPR:
13552 case UNGE_EXPR:
13553 case UNEQ_EXPR:
13554 case LTGT_EXPR:
13555 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13557 t1 = fold_relational_const (code, type, arg0, arg1);
13558 if (t1 != NULL_TREE)
13559 return t1;
13562 /* If the first operand is NaN, the result is constant. */
13563 if (TREE_CODE (arg0) == REAL_CST
13564 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13565 && (code != LTGT_EXPR || ! flag_trapping_math))
13567 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13568 ? integer_zero_node
13569 : integer_one_node;
13570 return omit_one_operand_loc (loc, type, t1, arg1);
13573 /* If the second operand is NaN, the result is constant. */
13574 if (TREE_CODE (arg1) == REAL_CST
13575 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13576 && (code != LTGT_EXPR || ! flag_trapping_math))
13578 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13579 ? integer_zero_node
13580 : integer_one_node;
13581 return omit_one_operand_loc (loc, type, t1, arg0);
13584 /* Simplify unordered comparison of something with itself. */
13585 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13586 && operand_equal_p (arg0, arg1, 0))
13587 return constant_boolean_node (1, type);
13589 if (code == LTGT_EXPR
13590 && !flag_trapping_math
13591 && operand_equal_p (arg0, arg1, 0))
13592 return constant_boolean_node (0, type);
13594 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13596 tree targ0 = strip_float_extensions (arg0);
13597 tree targ1 = strip_float_extensions (arg1);
13598 tree newtype = TREE_TYPE (targ0);
13600 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13601 newtype = TREE_TYPE (targ1);
13603 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13604 return fold_build2_loc (loc, code, type,
13605 fold_convert_loc (loc, newtype, targ0),
13606 fold_convert_loc (loc, newtype, targ1));
13609 return NULL_TREE;
13611 case COMPOUND_EXPR:
13612 /* When pedantic, a compound expression can be neither an lvalue
13613 nor an integer constant expression. */
13614 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13615 return NULL_TREE;
13616 /* Don't let (0, 0) be a null pointer constant. */
13617 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13618 : fold_convert_loc (loc, type, arg1);
13619 return pedantic_non_lvalue_loc (loc, tem);
13621 case COMPLEX_EXPR:
13622 if ((TREE_CODE (arg0) == REAL_CST
13623 && TREE_CODE (arg1) == REAL_CST)
13624 || (TREE_CODE (arg0) == INTEGER_CST
13625 && TREE_CODE (arg1) == INTEGER_CST))
13626 return build_complex (type, arg0, arg1);
13627 if (TREE_CODE (arg0) == REALPART_EXPR
13628 && TREE_CODE (arg1) == IMAGPART_EXPR
13629 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13630 && operand_equal_p (TREE_OPERAND (arg0, 0),
13631 TREE_OPERAND (arg1, 0), 0))
13632 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13633 TREE_OPERAND (arg1, 0));
13634 return NULL_TREE;
13636 case ASSERT_EXPR:
13637 /* An ASSERT_EXPR should never be passed to fold_binary. */
13638 gcc_unreachable ();
13640 case VEC_PACK_TRUNC_EXPR:
13641 case VEC_PACK_FIX_TRUNC_EXPR:
13643 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13644 tree *elts;
13646 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13647 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13648 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13649 return NULL_TREE;
13651 elts = XALLOCAVEC (tree, nelts);
13652 if (!vec_cst_ctor_to_array (arg0, elts)
13653 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13654 return NULL_TREE;
13656 for (i = 0; i < nelts; i++)
13658 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13659 ? NOP_EXPR : FIX_TRUNC_EXPR,
13660 TREE_TYPE (type), elts[i]);
13661 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13662 return NULL_TREE;
13665 return build_vector (type, elts);
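/* E.g. packing two constant V2SI vectors {1, 2} and {3, 4} with
   VEC_PACK_TRUNC_EXPR yields the constant V4HI vector {1, 2, 3, 4},
   each element converted to the narrower element type. */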
13668 case VEC_WIDEN_MULT_LO_EXPR:
13669 case VEC_WIDEN_MULT_HI_EXPR:
13670 case VEC_WIDEN_MULT_EVEN_EXPR:
13671 case VEC_WIDEN_MULT_ODD_EXPR:
13673 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13674 unsigned int out, ofs, scale;
13675 tree *elts;
13677 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13678 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13679 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13680 return NULL_TREE;
13682 elts = XALLOCAVEC (tree, nelts * 4);
13683 if (!vec_cst_ctor_to_array (arg0, elts)
13684 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13685 return NULL_TREE;
13687 if (code == VEC_WIDEN_MULT_LO_EXPR)
13688 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13689 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13690 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13691 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13692 scale = 1, ofs = 0;
13693 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13694 scale = 1, ofs = 1;
13696 for (out = 0; out < nelts; out++)
13698 unsigned int in1 = (out << scale) + ofs;
13699 unsigned int in2 = in1 + nelts * 2;
13700 tree t1, t2;
13702 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13703 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13705 if (t1 == NULL_TREE || t2 == NULL_TREE)
13706 return NULL_TREE;
13707 elts[out] = const_binop (MULT_EXPR, t1, t2);
13708 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13709 return NULL_TREE;
13712 return build_vector (type, elts);
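/* E.g. VEC_WIDEN_MULT_EVEN_EXPR on constant V4HI vectors
   {1, 2, 3, 4} and {5, 6, 7, 8} yields the constant V2SI vector
   {1 * 5, 3 * 7} == {5, 21}, multiplying even-indexed element pairs
   in the wider type. */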
13715 default:
13716 return NULL_TREE;
13717 } /* switch (code) */
13720 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13721 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13722 of GOTO_EXPR. */
13724 static tree
13725 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13727 switch (TREE_CODE (*tp))
13729 case LABEL_EXPR:
13730 return *tp;
13732 case GOTO_EXPR:
13733 *walk_subtrees = 0;
13735 /* ... fall through ... */
13737 default:
13738 return NULL_TREE;
13742 /* Return whether the sub-tree ST contains a label which is accessible from
13743 outside the sub-tree. */
13745 static bool
13746 contains_label_p (tree st)
13748 return
13749 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13752 /* Fold a ternary expression of code CODE and type TYPE with operands
13753 OP0, OP1, and OP2. Return the folded expression if folding is
13754 successful. Otherwise, return NULL_TREE. */
13756 tree
13757 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13758 tree op0, tree op1, tree op2)
13760 tree tem;
13761 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13762 enum tree_code_class kind = TREE_CODE_CLASS (code);
13764 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13765 && TREE_CODE_LENGTH (code) == 3);
13767 /* If this is a commutative operation, and OP0 is a constant, move it
13768 to OP1 to reduce the number of tests below. */
13769 if (commutative_ternary_tree_code (code)
13770 && tree_swap_operands_p (op0, op1, true))
13771 return fold_build3_loc (loc, code, type, op1, op0, op2);
13773 tem = generic_simplify (loc, code, type, op0, op1, op2);
13774 if (tem)
13775 return tem;
13777 /* Strip any conversions that don't change the mode. This is safe
13778 for every expression, except for a comparison expression because
13779 its signedness is derived from its operands. So, in the latter
13780 case, only strip conversions that don't change the signedness.
13782 Note that this is done as an internal manipulation within the
13783 constant folder, in order to find the simplest representation of
13784 the arguments so that their form can be studied. In any case,
13785 the appropriate type conversions should be put back in the tree
13786 that will get out of the constant folder. */
13787 if (op0)
13789 arg0 = op0;
13790 STRIP_NOPS (arg0);
13793 if (op1)
13795 arg1 = op1;
13796 STRIP_NOPS (arg1);
13799 if (op2)
13801 arg2 = op2;
13802 STRIP_NOPS (arg2);
13805 switch (code)
13807 case COMPONENT_REF:
13808 if (TREE_CODE (arg0) == CONSTRUCTOR
13809 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13811 unsigned HOST_WIDE_INT idx;
13812 tree field, value;
13813 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13814 if (field == arg1)
13815 return value;
13817 return NULL_TREE;
13819 case COND_EXPR:
13820 case VEC_COND_EXPR:
13821 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13822 so all simple results must be passed through pedantic_non_lvalue. */
13823 if (TREE_CODE (arg0) == INTEGER_CST)
13825 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13826 tem = integer_zerop (arg0) ? op2 : op1;
13827 /* Only optimize constant conditions when the selected branch
13828 has the same type as the COND_EXPR. This avoids optimizing
13829 away "c ? x : throw", where the throw has a void type.
13830 Avoid throwing away an operand that contains a label. */
13831 if ((!TREE_SIDE_EFFECTS (unused_op)
13832 || !contains_label_p (unused_op))
13833 && (! VOID_TYPE_P (TREE_TYPE (tem))
13834 || VOID_TYPE_P (type)))
13835 return pedantic_non_lvalue_loc (loc, tem);
13836 return NULL_TREE;
13838 else if (TREE_CODE (arg0) == VECTOR_CST)
13840 if (integer_all_onesp (arg0))
13841 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13842 if (integer_zerop (arg0))
13843 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13845 if ((TREE_CODE (arg1) == VECTOR_CST
13846 || TREE_CODE (arg1) == CONSTRUCTOR)
13847 && (TREE_CODE (arg2) == VECTOR_CST
13848 || TREE_CODE (arg2) == CONSTRUCTOR))
13850 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13851 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13852 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13853 for (i = 0; i < nelts; i++)
13855 tree val = VECTOR_CST_ELT (arg0, i);
13856 if (integer_all_onesp (val))
13857 sel[i] = i;
13858 else if (integer_zerop (val))
13859 sel[i] = nelts + i;
13860 else /* Currently unreachable. */
13861 return NULL_TREE;
13863 tree t = fold_vec_perm (type, arg1, arg2, sel);
13864 if (t != NULL_TREE)
13865 return t;
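/* A worked example of the selector construction above, assuming a
   four-element vector: a constant mask of { -1, 0, -1, 0 } yields
   sel = { 0, 5, 2, 7 }, i.e. elements 0 and 2 are taken from ARG1
   and elements 1 and 3 from ARG2 (indices >= nelts address the
   second input), which fold_vec_perm then applies to the constant
   operands.  */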
13869 if (operand_equal_p (arg1, op2, 0))
13870 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13872 /* If we have A op B ? A : C, we may be able to convert this to a
13873 simpler expression, depending on the operation and the values
13874 of B and C. Signed zeros prevent all of these transformations,
13875 for reasons given above each one.
13877 Also try swapping the arguments and inverting the conditional. */
13878 if (COMPARISON_CLASS_P (arg0)
13879 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13880 arg1, TREE_OPERAND (arg0, 1))
13881 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13883 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13884 if (tem)
13885 return tem;
13888 if (COMPARISON_CLASS_P (arg0)
13889 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13890 op2,
13891 TREE_OPERAND (arg0, 1))
13892 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13894 location_t loc0 = expr_location_or (arg0, loc);
13895 tem = fold_invert_truthvalue (loc0, arg0);
13896 if (tem && COMPARISON_CLASS_P (tem))
13898 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13899 if (tem)
13900 return tem;
13904 /* If the second operand is simpler than the third, swap them
13905 since that produces better jump optimization results. */
13906 if (truth_value_p (TREE_CODE (arg0))
13907 && tree_swap_operands_p (op1, op2, false))
13909 location_t loc0 = expr_location_or (arg0, loc);
13910 /* See if this can be inverted. If it can't, possibly because
13911 it was a floating-point inequality comparison, don't do
13912 anything. */
13913 tem = fold_invert_truthvalue (loc0, arg0);
13914 if (tem)
13915 return fold_build3_loc (loc, code, type, tem, op2, op1);
13918 /* Convert A ? 1 : 0 to simply A. */
13919 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13920 : (integer_onep (op1)
13921 && !VECTOR_TYPE_P (type)))
13922 && integer_zerop (op2)
13923 /* If we try to convert OP0 to our type, the
13924 call to fold will try to move the conversion inside
13925 a COND, which will recurse. In that case, the COND_EXPR
13926 is probably the best choice, so leave it alone. */
13927 && type == TREE_TYPE (arg0))
13928 return pedantic_non_lvalue_loc (loc, arg0);
13930 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13931 over COND_EXPR in cases such as floating point comparisons. */
13932 if (integer_zerop (op1)
13933 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13934 : (integer_onep (op2)
13935 && !VECTOR_TYPE_P (type)))
13936 && truth_value_p (TREE_CODE (arg0)))
13937 return pedantic_non_lvalue_loc (loc,
13938 fold_convert_loc (loc, type,
13939 invert_truthvalue_loc (loc,
13940 arg0)));
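/* For example, "x < y ? 0 : 1" becomes "(int) !(x < y)" rather than
   a comparison with inverted code: when NaNs are honored, !(x < y)
   is not equivalent to x >= y, so the TRUTH_NOT form is the safe
   one.  */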
13942 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13943 if (TREE_CODE (arg0) == LT_EXPR
13944 && integer_zerop (TREE_OPERAND (arg0, 1))
13945 && integer_zerop (op2)
13946 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13948 /* sign_bit_p looks through both zero and sign extensions,
13949 but for this optimization only sign extensions are
13950 usable. */
13951 tree tem2 = TREE_OPERAND (arg0, 0);
13952 while (tem != tem2)
13954 if (TREE_CODE (tem2) != NOP_EXPR
13955 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13957 tem = NULL_TREE;
13958 break;
13960 tem2 = TREE_OPERAND (tem2, 0);
13962 /* sign_bit_p only checks ARG1 bits within A's precision.
13963 If <sign bit of A> has wider type than A, bits outside
13964 of A's precision in <sign bit of A> need to be checked.
13965 If they are all 0, this optimization needs to be done
13966 in unsigned A's type, if they are all 1 in signed A's type,
13967 otherwise this can't be done. */
13968 if (tem
13969 && TYPE_PRECISION (TREE_TYPE (tem))
13970 < TYPE_PRECISION (TREE_TYPE (arg1))
13971 && TYPE_PRECISION (TREE_TYPE (tem))
13972 < TYPE_PRECISION (type))
13974 int inner_width, outer_width;
13975 tree tem_type;
13977 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13978 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13979 if (outer_width > TYPE_PRECISION (type))
13980 outer_width = TYPE_PRECISION (type);
13982 wide_int mask = wi::shifted_mask
13983 (inner_width, outer_width - inner_width, false,
13984 TYPE_PRECISION (TREE_TYPE (arg1)));
13986 wide_int common = mask & arg1;
13987 if (common == mask)
13989 tem_type = signed_type_for (TREE_TYPE (tem));
13990 tem = fold_convert_loc (loc, tem_type, tem);
13992 else if (common == 0)
13994 tem_type = unsigned_type_for (TREE_TYPE (tem));
13995 tem = fold_convert_loc (loc, tem_type, tem);
13997 else
13998 tem = NULL;
14001 if (tem)
14002 return
14003 fold_convert_loc (loc, type,
14004 fold_build2_loc (loc, BIT_AND_EXPR,
14005 TREE_TYPE (tem), tem,
14006 fold_convert_loc (loc,
14007 TREE_TYPE (tem),
14008 arg1)));
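/* Two illustrative instances of the widening rule above, with A a
   signed char promoted to int: for "A < 0 ? -128 : 0" the mask bits
   above A's precision (0xffffff80) are all ones, so the AND is done
   in the signed type; for "A < 0 ? 0x80 : 0" they are all zeros, so
   A is first converted to unsigned char to keep the sign extension
   from setting bits above bit 7.  */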
14011 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14012 already handled above. */
14013 if (TREE_CODE (arg0) == BIT_AND_EXPR
14014 && integer_onep (TREE_OPERAND (arg0, 1))
14015 && integer_zerop (op2)
14016 && integer_pow2p (arg1))
14018 tree tem = TREE_OPERAND (arg0, 0);
14019 STRIP_NOPS (tem);
14020 if (TREE_CODE (tem) == RSHIFT_EXPR
14021 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14022 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14023 tree_to_uhwi (TREE_OPERAND (tem, 1)))
14024 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14025 TREE_OPERAND (tem, 0), arg1);
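/* For instance, "((a >> 3) & 1) ? 8 : 0" folds to "a & 8": the
   tested bit and the selected power of two are the same bit of A,
   so both the shift and the conditional disappear.  */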
14028 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14029 is probably obsolete because the first operand should be a
14030 truth value (that's why we have the two cases above), but let's
14031 leave it in until we can confirm this for all front-ends. */
14032 if (integer_zerop (op2)
14033 && TREE_CODE (arg0) == NE_EXPR
14034 && integer_zerop (TREE_OPERAND (arg0, 1))
14035 && integer_pow2p (arg1)
14036 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14037 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14038 arg1, OEP_ONLY_CONST))
14039 return pedantic_non_lvalue_loc (loc,
14040 fold_convert_loc (loc, type,
14041 TREE_OPERAND (arg0, 0)));
14043 /* Disable the transformations below for vectors, since
14044 fold_binary_op_with_conditional_arg may undo them immediately,
14045 yielding an infinite loop. */
14046 if (code == VEC_COND_EXPR)
14047 return NULL_TREE;
14049 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14050 if (integer_zerop (op2)
14051 && truth_value_p (TREE_CODE (arg0))
14052 && truth_value_p (TREE_CODE (arg1))
14053 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14054 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14055 : TRUTH_ANDIF_EXPR,
14056 type, fold_convert_loc (loc, type, arg0), arg1);
14058 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14059 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14060 && truth_value_p (TREE_CODE (arg0))
14061 && truth_value_p (TREE_CODE (arg1))
14062 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14064 location_t loc0 = expr_location_or (arg0, loc);
14065 /* Only perform transformation if ARG0 is easily inverted. */
14066 tem = fold_invert_truthvalue (loc0, arg0);
14067 if (tem)
14068 return fold_build2_loc (loc, code == VEC_COND_EXPR
14069 ? BIT_IOR_EXPR
14070 : TRUTH_ORIF_EXPR,
14071 type, fold_convert_loc (loc, type, tem),
14072 arg1);
14075 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14076 if (integer_zerop (arg1)
14077 && truth_value_p (TREE_CODE (arg0))
14078 && truth_value_p (TREE_CODE (op2))
14079 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14081 location_t loc0 = expr_location_or (arg0, loc);
14082 /* Only perform transformation if ARG0 is easily inverted. */
14083 tem = fold_invert_truthvalue (loc0, arg0);
14084 if (tem)
14085 return fold_build2_loc (loc, code == VEC_COND_EXPR
14086 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14087 type, fold_convert_loc (loc, type, tem),
14088 op2);
14091 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14092 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14093 && truth_value_p (TREE_CODE (arg0))
14094 && truth_value_p (TREE_CODE (op2))
14095 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14096 return fold_build2_loc (loc, code == VEC_COND_EXPR
14097 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14098 type, fold_convert_loc (loc, type, arg0), op2);
14100 return NULL_TREE;
14102 case CALL_EXPR:
14103 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14104 of fold_ternary on them. */
14105 gcc_unreachable ();
14107 case BIT_FIELD_REF:
14108 if ((TREE_CODE (arg0) == VECTOR_CST
14109 || (TREE_CODE (arg0) == CONSTRUCTOR
14110 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14111 && (type == TREE_TYPE (TREE_TYPE (arg0))
14112 || (TREE_CODE (type) == VECTOR_TYPE
14113 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14115 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14116 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14117 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14118 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14120 if (n != 0
14121 && (idx % width) == 0
14122 && (n % width) == 0
14123 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14125 idx = idx / width;
14126 n = n / width;
14128 if (TREE_CODE (arg0) == VECTOR_CST)
14130 if (n == 1)
14131 return VECTOR_CST_ELT (arg0, idx);
14133 tree *vals = XALLOCAVEC (tree, n);
14134 for (unsigned i = 0; i < n; ++i)
14135 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14136 return build_vector (type, vals);
14139 /* Constructor elements can be subvectors. */
14140 unsigned HOST_WIDE_INT k = 1;
14141 if (CONSTRUCTOR_NELTS (arg0) != 0)
14143 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14144 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14145 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14148 /* We keep an exact subset of the constructor elements. */
14149 if ((idx % k) == 0 && (n % k) == 0)
14151 if (CONSTRUCTOR_NELTS (arg0) == 0)
14152 return build_constructor (type, NULL);
14153 idx /= k;
14154 n /= k;
14155 if (n == 1)
14157 if (idx < CONSTRUCTOR_NELTS (arg0))
14158 return CONSTRUCTOR_ELT (arg0, idx)->value;
14159 return build_zero_cst (type);
14162 vec<constructor_elt, va_gc> *vals;
14163 vec_alloc (vals, n);
14164 for (unsigned i = 0;
14165 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14166 ++i)
14167 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14168 CONSTRUCTOR_ELT
14169 (arg0, idx + i)->value);
14170 return build_constructor (type, vals);
14172 /* The bitfield references a single constructor element. */
14173 else if (idx + n <= (idx / k + 1) * k)
14175 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14176 return build_zero_cst (type);
14177 else if (n == k)
14178 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14179 else
14180 return fold_build3_loc (loc, code, type,
14181 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14182 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
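/* A worked example, assuming ARG0 is a V8SI CONSTRUCTOR built from
   two V4SI elements (K = 4, WIDTH = 32): extracting bits [128, 256)
   gives IDX = 4, N = 4, so after dividing by K the whole second
   element is returned directly, while a 64-bit extract at bit 160
   (IDX = 5, N = 2) stays within that element and becomes a
   BIT_FIELD_REF of it at bit position (5 % 4) * 32 = 32.  */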
14187 /* A bit-field-ref that references the full argument can be stripped. */
14188 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14189 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14190 && integer_zerop (op2))
14191 return fold_convert_loc (loc, type, arg0);
14193 /* On constants we can use native encode/interpret to constant
14194 fold (nearly) all BIT_FIELD_REFs. */
14195 if (CONSTANT_CLASS_P (arg0)
14196 && can_native_interpret_type_p (type)
14197 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14198 /* This limitation should not be necessary, we just need to
14199 round this up to mode size. */
14200 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14201 /* Need bit-shifting of the buffer to relax the following. */
14202 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14204 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14205 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14206 unsigned HOST_WIDE_INT clen;
14207 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14208 /* ??? We cannot tell native_encode_expr to start at
14209 some random byte only. So limit us to a reasonable amount
14210 of work. */
14211 if (clen <= 4096)
14213 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14214 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14215 if (len > 0
14216 && len * BITS_PER_UNIT >= bitpos + bitsize)
14218 tree v = native_interpret_expr (type,
14219 b + bitpos / BITS_PER_UNIT,
14220 bitsize / BITS_PER_UNIT);
14221 if (v)
14222 return v;
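/* E.g. (illustrative) a BIT_FIELD_REF of 32 bits at bit offset 32 of
   a 64-bit INTEGER_CST: the constant is serialized into an 8-byte
   buffer by native_encode_expr and the result re-read from bytes
   4..7 by native_interpret_expr, so this works uniformly for any
   constant kind with a target byte encoding.  */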
14227 return NULL_TREE;
14229 case FMA_EXPR:
14230 /* For integers we can decompose the FMA if possible. */
14231 if (TREE_CODE (arg0) == INTEGER_CST
14232 && TREE_CODE (arg1) == INTEGER_CST)
14233 return fold_build2_loc (loc, PLUS_EXPR, type,
14234 const_binop (MULT_EXPR, arg0, arg1), arg2);
14235 if (integer_zerop (arg2))
14236 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14238 return fold_fma (loc, type, arg0, arg1, arg2);
14240 case VEC_PERM_EXPR:
14241 if (TREE_CODE (arg2) == VECTOR_CST)
14243 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14244 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14245 bool need_mask_canon = false;
14246 bool all_in_vec0 = true;
14247 bool all_in_vec1 = true;
14248 bool maybe_identity = true;
14249 bool single_arg = (op0 == op1);
14250 bool changed = false;
14252 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14253 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14254 for (i = 0; i < nelts; i++)
14256 tree val = VECTOR_CST_ELT (arg2, i);
14257 if (TREE_CODE (val) != INTEGER_CST)
14258 return NULL_TREE;
14260 /* Make sure that the perm value is in an acceptable
14261 range. */
14262 wide_int t = val;
14263 if (wi::gtu_p (t, mask))
14265 need_mask_canon = true;
14266 sel[i] = t.to_uhwi () & mask;
14268 else
14269 sel[i] = t.to_uhwi ();
14271 if (sel[i] < nelts)
14272 all_in_vec1 = false;
14273 else
14274 all_in_vec0 = false;
14276 if ((sel[i] & (nelts-1)) != i)
14277 maybe_identity = false;
14280 if (maybe_identity)
14282 if (all_in_vec0)
14283 return op0;
14284 if (all_in_vec1)
14285 return op1;
14288 if (all_in_vec0)
14289 op1 = op0;
14290 else if (all_in_vec1)
14292 op0 = op1;
14293 for (i = 0; i < nelts; i++)
14294 sel[i] -= nelts;
14295 need_mask_canon = true;
14298 if ((TREE_CODE (op0) == VECTOR_CST
14299 || TREE_CODE (op0) == CONSTRUCTOR)
14300 && (TREE_CODE (op1) == VECTOR_CST
14301 || TREE_CODE (op1) == CONSTRUCTOR))
14303 tree t = fold_vec_perm (type, op0, op1, sel);
14304 if (t != NULL_TREE)
14305 return t;
14308 if (op0 == op1 && !single_arg)
14309 changed = true;
14311 if (need_mask_canon && arg2 == op2)
14313 tree *tsel = XALLOCAVEC (tree, nelts);
14314 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14315 for (i = 0; i < nelts; i++)
14316 tsel[i] = build_int_cst (eltype, sel[i]);
14317 op2 = build_vector (TREE_TYPE (arg2), tsel);
14318 changed = true;
14321 if (changed)
14322 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14324 return NULL_TREE;
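/* An illustrative run with NELTS = 4: for a two-input permutation
   the valid lane indices are 0..7 (MASK = 7), so a constant selector
   element of 9 is reduced to 9 & 7 = 1 and the selector vector is
   rebuilt (need_mask_canon).  If every index selects the first
   input, OP1 is replaced by OP0; if every index selects the second,
   OP0 is replaced by OP1 and the indices are rebased by -NELTS.  */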
14326 default:
14327 return NULL_TREE;
14328 } /* switch (code) */
14331 /* Perform constant folding and related simplification of EXPR.
14332 The related simplifications include x*1 => x, x*0 => 0, etc.,
14333 and application of the associative law.
14334 NOP_EXPR conversions may be removed freely (as long as we
14335 are careful not to change the type of the overall expression).
14336 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14337 but we can constant-fold them if they have constant operands. */
14339 #ifdef ENABLE_FOLD_CHECKING
14340 # define fold(x) fold_1 (x)
14341 static tree fold_1 (tree);
14342 static
14343 #endif
14344 tree
14345 fold (tree expr)
14347 const tree t = expr;
14348 enum tree_code code = TREE_CODE (t);
14349 enum tree_code_class kind = TREE_CODE_CLASS (code);
14350 tree tem;
14351 location_t loc = EXPR_LOCATION (expr);
14353 /* Return right away if a constant. */
14354 if (kind == tcc_constant)
14355 return t;
14357 /* CALL_EXPR-like objects with variable numbers of operands are
14358 treated specially. */
14359 if (kind == tcc_vl_exp)
14361 if (code == CALL_EXPR)
14363 tem = fold_call_expr (loc, expr, false);
14364 return tem ? tem : expr;
14366 return expr;
14369 if (IS_EXPR_CODE_CLASS (kind))
14371 tree type = TREE_TYPE (t);
14372 tree op0, op1, op2;
14374 switch (TREE_CODE_LENGTH (code))
14376 case 1:
14377 op0 = TREE_OPERAND (t, 0);
14378 tem = fold_unary_loc (loc, code, type, op0);
14379 return tem ? tem : expr;
14380 case 2:
14381 op0 = TREE_OPERAND (t, 0);
14382 op1 = TREE_OPERAND (t, 1);
14383 tem = fold_binary_loc (loc, code, type, op0, op1);
14384 return tem ? tem : expr;
14385 case 3:
14386 op0 = TREE_OPERAND (t, 0);
14387 op1 = TREE_OPERAND (t, 1);
14388 op2 = TREE_OPERAND (t, 2);
14389 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14390 return tem ? tem : expr;
14391 default:
14392 break;
14396 switch (code)
14398 case ARRAY_REF:
14400 tree op0 = TREE_OPERAND (t, 0);
14401 tree op1 = TREE_OPERAND (t, 1);
14403 if (TREE_CODE (op1) == INTEGER_CST
14404 && TREE_CODE (op0) == CONSTRUCTOR
14405 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14407 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14408 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14409 unsigned HOST_WIDE_INT begin = 0;
14411 /* Find a matching index by means of a binary search. */
14412 while (begin != end)
14414 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14415 tree index = (*elts)[middle].index;
14417 if (TREE_CODE (index) == INTEGER_CST
14418 && tree_int_cst_lt (index, op1))
14419 begin = middle + 1;
14420 else if (TREE_CODE (index) == INTEGER_CST
14421 && tree_int_cst_lt (op1, index))
14422 end = middle;
14423 else if (TREE_CODE (index) == RANGE_EXPR
14424 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14425 begin = middle + 1;
14426 else if (TREE_CODE (index) == RANGE_EXPR
14427 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14428 end = middle;
14429 else
14430 return (*elts)[middle].value;
14434 return t;
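/* For example (illustrative), with OP0 = { [0] = a, [1 ... 8] = b,
   [9] = c } and OP1 = 5, the search probes the middle entry, sees
   the RANGE_EXPR [1 ... 8] whose bounds bracket 5, and returns b
   without scanning the whole constructor.  */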
14437 /* Return a VECTOR_CST if possible. */
14438 case CONSTRUCTOR:
14440 tree type = TREE_TYPE (t);
14441 if (TREE_CODE (type) != VECTOR_TYPE)
14442 return t;
14444 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14445 unsigned HOST_WIDE_INT idx, pos = 0;
14446 tree value;
14448 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14450 if (!CONSTANT_CLASS_P (value))
14451 return t;
14452 if (TREE_CODE (value) == VECTOR_CST)
14454 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14455 vec[pos++] = VECTOR_CST_ELT (value, i);
14457 else
14458 vec[pos++] = value;
14460 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14461 vec[pos] = build_zero_cst (TREE_TYPE (type));
14463 return build_vector (type, vec);
14466 case CONST_DECL:
14467 return fold (DECL_INITIAL (t));
14469 default:
14470 return t;
14471 } /* switch (code) */
14474 #ifdef ENABLE_FOLD_CHECKING
14475 #undef fold
14477 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14478 hash_table<pointer_hash<const tree_node> > *);
14479 static void fold_check_failed (const_tree, const_tree);
14480 void print_fold_checksum (const_tree);
14482 /* When --enable-checking=fold, compute a digest of expr before
14483 and after the actual fold call to verify that fold did not
14484 accidentally change the original expr. */
14486 tree
14487 fold (tree expr)
14489 tree ret;
14490 struct md5_ctx ctx;
14491 unsigned char checksum_before[16], checksum_after[16];
14492 hash_table<pointer_hash<const tree_node> > ht (32);
14494 md5_init_ctx (&ctx);
14495 fold_checksum_tree (expr, &ctx, &ht);
14496 md5_finish_ctx (&ctx, checksum_before);
14497 ht.empty ();
14499 ret = fold_1 (expr);
14501 md5_init_ctx (&ctx);
14502 fold_checksum_tree (expr, &ctx, &ht);
14503 md5_finish_ctx (&ctx, checksum_after);
14505 if (memcmp (checksum_before, checksum_after, 16))
14506 fold_check_failed (expr, ret);
14508 return ret;
14511 void
14512 print_fold_checksum (const_tree expr)
14514 struct md5_ctx ctx;
14515 unsigned char checksum[16], cnt;
14516 hash_table<pointer_hash<const tree_node> > ht (32);
14518 md5_init_ctx (&ctx);
14519 fold_checksum_tree (expr, &ctx, &ht);
14520 md5_finish_ctx (&ctx, checksum);
14521 for (cnt = 0; cnt < 16; ++cnt)
14522 fprintf (stderr, "%02x", checksum[cnt]);
14523 putc ('\n', stderr);
14526 static void
14527 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14529 internal_error ("fold check: original tree changed by fold");
14532 static void
14533 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14534 hash_table<pointer_hash <const tree_node> > *ht)
14536 const tree_node **slot;
14537 enum tree_code code;
14538 union tree_node buf;
14539 int i, len;
14541 recursive_label:
14542 if (expr == NULL)
14543 return;
14544 slot = ht->find_slot (expr, INSERT);
14545 if (*slot != NULL)
14546 return;
14547 *slot = expr;
14548 code = TREE_CODE (expr);
14549 if (TREE_CODE_CLASS (code) == tcc_declaration
14550 && DECL_ASSEMBLER_NAME_SET_P (expr))
14552 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14553 memcpy ((char *) &buf, expr, tree_size (expr));
14554 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14555 expr = (tree) &buf;
14557 else if (TREE_CODE_CLASS (code) == tcc_type
14558 && (TYPE_POINTER_TO (expr)
14559 || TYPE_REFERENCE_TO (expr)
14560 || TYPE_CACHED_VALUES_P (expr)
14561 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14562 || TYPE_NEXT_VARIANT (expr)))
14564 /* Allow these fields to be modified. */
14565 tree tmp;
14566 memcpy ((char *) &buf, expr, tree_size (expr));
14567 expr = tmp = (tree) &buf;
14568 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14569 TYPE_POINTER_TO (tmp) = NULL;
14570 TYPE_REFERENCE_TO (tmp) = NULL;
14571 TYPE_NEXT_VARIANT (tmp) = NULL;
14572 if (TYPE_CACHED_VALUES_P (tmp))
14574 TYPE_CACHED_VALUES_P (tmp) = 0;
14575 TYPE_CACHED_VALUES (tmp) = NULL;
14578 md5_process_bytes (expr, tree_size (expr), ctx);
14579 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14580 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14581 if (TREE_CODE_CLASS (code) != tcc_type
14582 && TREE_CODE_CLASS (code) != tcc_declaration
14583 && code != TREE_LIST
14584 && code != SSA_NAME
14585 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14586 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14587 switch (TREE_CODE_CLASS (code))
14589 case tcc_constant:
14590 switch (code)
14592 case STRING_CST:
14593 md5_process_bytes (TREE_STRING_POINTER (expr),
14594 TREE_STRING_LENGTH (expr), ctx);
14595 break;
14596 case COMPLEX_CST:
14597 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14598 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14599 break;
14600 case VECTOR_CST:
14601 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14602 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14603 break;
14604 default:
14605 break;
14607 break;
14608 case tcc_exceptional:
14609 switch (code)
14611 case TREE_LIST:
14612 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14613 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14614 expr = TREE_CHAIN (expr);
14615 goto recursive_label;
14616 break;
14617 case TREE_VEC:
14618 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14619 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14620 break;
14621 default:
14622 break;
14624 break;
14625 case tcc_expression:
14626 case tcc_reference:
14627 case tcc_comparison:
14628 case tcc_unary:
14629 case tcc_binary:
14630 case tcc_statement:
14631 case tcc_vl_exp:
14632 len = TREE_OPERAND_LENGTH (expr);
14633 for (i = 0; i < len; ++i)
14634 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14635 break;
14636 case tcc_declaration:
14637 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14638 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14639 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14641 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14642 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14643 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14644 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14645 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14648 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14650 if (TREE_CODE (expr) == FUNCTION_DECL)
14652 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14653 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14655 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14657 break;
14658 case tcc_type:
14659 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14660 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14661 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14662 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14663 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14664 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14665 if (INTEGRAL_TYPE_P (expr)
14666 || SCALAR_FLOAT_TYPE_P (expr))
14668 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14669 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14671 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14672 if (TREE_CODE (expr) == RECORD_TYPE
14673 || TREE_CODE (expr) == UNION_TYPE
14674 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14675 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14676 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14677 break;
14678 default:
14679 break;
14683 /* Helper function for outputting the checksum of a tree T. When
14684 debugging with gdb, you can "define mynext" to be "next" followed
14685 by "call debug_fold_checksum (op0)", then just trace down till the
14686 outputs differ. */
14688 DEBUG_FUNCTION void
14689 debug_fold_checksum (const_tree t)
14691 int i;
14692 unsigned char checksum[16];
14693 struct md5_ctx ctx;
14694 hash_table<pointer_hash<const tree_node> > ht (32);
14696 md5_init_ctx (&ctx);
14697 fold_checksum_tree (t, &ctx, &ht);
14698 md5_finish_ctx (&ctx, checksum);
14699 ht.empty ();
14701 for (i = 0; i < 16; i++)
14702 fprintf (stderr, "%d ", checksum[i]);
14704 fprintf (stderr, "\n");
14707 #endif
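/* A minimal, self-contained sketch (for exposition only, not part of
   GCC) of the verify-by-digest pattern used above: hash the input
   before and after calling a supposedly non-destructive routine and
   abort if the two digests differ.  toy_hash is a hypothetical
   stand-in for the md5 routines; any stable digest would do.  */

#include <assert.h>
#include <stddef.h>

static unsigned long
toy_hash (const unsigned char *p, size_t n)
{
  unsigned long h = 5381;	/* djb2-style accumulator.  */
  for (size_t i = 0; i < n; i++)
    h = h * 33 + p[i];
  return h;
}

/* Call FN on BUF and verify that BUF is unchanged, mirroring what
   the ENABLE_FOLD_CHECKING variant of fold does with md5 digests.  */

static void
call_checked (void (*fn) (unsigned char *, size_t),
	      unsigned char *buf, size_t n)
{
  unsigned long before = toy_hash (buf, n);
  fn (buf, n);
  assert (toy_hash (buf, n) == before);
}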
14709 /* Fold a unary tree expression with code CODE of type TYPE with an
14710 operand OP0. LOC is the location of the resulting expression.
14711 Return a folded expression if successful. Otherwise, return a tree
14712 expression with code CODE of type TYPE with an operand OP0. */
14714 tree
14715 fold_build1_stat_loc (location_t loc,
14716 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14718 tree tem;
14719 #ifdef ENABLE_FOLD_CHECKING
14720 unsigned char checksum_before[16], checksum_after[16];
14721 struct md5_ctx ctx;
14722 hash_table<pointer_hash<const tree_node> > ht (32);
14724 md5_init_ctx (&ctx);
14725 fold_checksum_tree (op0, &ctx, &ht);
14726 md5_finish_ctx (&ctx, checksum_before);
14727 ht.empty ();
14728 #endif
14730 tem = fold_unary_loc (loc, code, type, op0);
14731 if (!tem)
14732 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14734 #ifdef ENABLE_FOLD_CHECKING
14735 md5_init_ctx (&ctx);
14736 fold_checksum_tree (op0, &ctx, &ht);
14737 md5_finish_ctx (&ctx, checksum_after);
14739 if (memcmp (checksum_before, checksum_after, 16))
14740 fold_check_failed (op0, tem);
14741 #endif
14742 return tem;
14745 /* Fold a binary tree expression with code CODE of type TYPE with
14746 operands OP0 and OP1. LOC is the location of the resulting
14747 expression. Return a folded expression if successful. Otherwise,
14748 return a tree expression with code CODE of type TYPE with operands
14749 OP0 and OP1. */
14751 tree
14752 fold_build2_stat_loc (location_t loc,
14753 enum tree_code code, tree type, tree op0, tree op1
14754 MEM_STAT_DECL)
14756 tree tem;
14757 #ifdef ENABLE_FOLD_CHECKING
14758 unsigned char checksum_before_op0[16],
14759 checksum_before_op1[16],
14760 checksum_after_op0[16],
14761 checksum_after_op1[16];
14762 struct md5_ctx ctx;
14763 hash_table<pointer_hash<const tree_node> > ht (32);
14765 md5_init_ctx (&ctx);
14766 fold_checksum_tree (op0, &ctx, &ht);
14767 md5_finish_ctx (&ctx, checksum_before_op0);
14768 ht.empty ();
14770 md5_init_ctx (&ctx);
14771 fold_checksum_tree (op1, &ctx, &ht);
14772 md5_finish_ctx (&ctx, checksum_before_op1);
14773 ht.empty ();
14774 #endif
14776 tem = fold_binary_loc (loc, code, type, op0, op1);
14777 if (!tem)
14778 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14780 #ifdef ENABLE_FOLD_CHECKING
14781 md5_init_ctx (&ctx);
14782 fold_checksum_tree (op0, &ctx, &ht);
14783 md5_finish_ctx (&ctx, checksum_after_op0);
14784 ht.empty ();
14786 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14787 fold_check_failed (op0, tem);
14789 md5_init_ctx (&ctx);
14790 fold_checksum_tree (op1, &ctx, &ht);
14791 md5_finish_ctx (&ctx, checksum_after_op1);
14793 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14794 fold_check_failed (op1, tem);
14795 #endif
14796 return tem;
14799 /* Fold a ternary tree expression with code CODE of type TYPE with
14800 operands OP0, OP1, and OP2. Return a folded expression if
14801 successful. Otherwise, return a tree expression with code CODE of
14802 type TYPE with operands OP0, OP1, and OP2. */
14804 tree
14805 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14806 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14808 tree tem;
14809 #ifdef ENABLE_FOLD_CHECKING
14810 unsigned char checksum_before_op0[16],
14811 checksum_before_op1[16],
14812 checksum_before_op2[16],
14813 checksum_after_op0[16],
14814 checksum_after_op1[16],
14815 checksum_after_op2[16];
14816 struct md5_ctx ctx;
14817 hash_table<pointer_hash<const tree_node> > ht (32);
14819 md5_init_ctx (&ctx);
14820 fold_checksum_tree (op0, &ctx, &ht);
14821 md5_finish_ctx (&ctx, checksum_before_op0);
14822 ht.empty ();
14824 md5_init_ctx (&ctx);
14825 fold_checksum_tree (op1, &ctx, &ht);
14826 md5_finish_ctx (&ctx, checksum_before_op1);
14827 ht.empty ();
14829 md5_init_ctx (&ctx);
14830 fold_checksum_tree (op2, &ctx, &ht);
14831 md5_finish_ctx (&ctx, checksum_before_op2);
14832 ht.empty ();
14833 #endif
14835 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14836 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14837 if (!tem)
14838 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14840 #ifdef ENABLE_FOLD_CHECKING
14841 md5_init_ctx (&ctx);
14842 fold_checksum_tree (op0, &ctx, &ht);
14843 md5_finish_ctx (&ctx, checksum_after_op0);
14844 ht.empty ();
14846 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14847 fold_check_failed (op0, tem);
14849 md5_init_ctx (&ctx);
14850 fold_checksum_tree (op1, &ctx, &ht);
14851 md5_finish_ctx (&ctx, checksum_after_op1);
14852 ht.empty ();
14854 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14855 fold_check_failed (op1, tem);
14857 md5_init_ctx (&ctx);
14858 fold_checksum_tree (op2, &ctx, &ht);
14859 md5_finish_ctx (&ctx, checksum_after_op2);
14861 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14862 fold_check_failed (op2, tem);
14863 #endif
14864 return tem;
14867 /* Fold a CALL_EXPR expression of type TYPE with function FN and the
14868 NARGS arguments in ARGARRAY, and a null static chain.
14869 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14870 of type TYPE from the given operands as constructed by build_call_array. */
14872 tree
14873 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14874 int nargs, tree *argarray)
14876 tree tem;
14877 #ifdef ENABLE_FOLD_CHECKING
14878 unsigned char checksum_before_fn[16],
14879 checksum_before_arglist[16],
14880 checksum_after_fn[16],
14881 checksum_after_arglist[16];
14882 struct md5_ctx ctx;
14883 hash_table<pointer_hash<const tree_node> > ht (32);
14884 int i;
14886 md5_init_ctx (&ctx);
14887 fold_checksum_tree (fn, &ctx, &ht);
14888 md5_finish_ctx (&ctx, checksum_before_fn);
14889 ht.empty ();
14891 md5_init_ctx (&ctx);
14892 for (i = 0; i < nargs; i++)
14893 fold_checksum_tree (argarray[i], &ctx, &ht);
14894 md5_finish_ctx (&ctx, checksum_before_arglist);
14895 ht.empty ();
14896 #endif
14898 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14900 #ifdef ENABLE_FOLD_CHECKING
14901 md5_init_ctx (&ctx);
14902 fold_checksum_tree (fn, &ctx, &ht);
14903 md5_finish_ctx (&ctx, checksum_after_fn);
14904 ht.empty ();
14906 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14907 fold_check_failed (fn, tem);
14909 md5_init_ctx (&ctx);
14910 for (i = 0; i < nargs; i++)
14911 fold_checksum_tree (argarray[i], &ctx, &ht);
14912 md5_finish_ctx (&ctx, checksum_after_arglist);
14914 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14915 fold_check_failed (NULL_TREE, tem);
14916 #endif
14917 return tem;
14920 /* Perform constant folding and related simplification of initializer
14921 expression EXPR. These behave identically to "fold_buildN" but ignore
14922 potential run-time traps and exceptions that fold must preserve. */
14924 #define START_FOLD_INIT \
14925 int saved_signaling_nans = flag_signaling_nans;\
14926 int saved_trapping_math = flag_trapping_math;\
14927 int saved_rounding_math = flag_rounding_math;\
14928 int saved_trapv = flag_trapv;\
14929 int saved_folding_initializer = folding_initializer;\
14930 flag_signaling_nans = 0;\
14931 flag_trapping_math = 0;\
14932 flag_rounding_math = 0;\
14933 flag_trapv = 0;\
14934 folding_initializer = 1;
14936 #define END_FOLD_INIT \
14937 flag_signaling_nans = saved_signaling_nans;\
14938 flag_trapping_math = saved_trapping_math;\
14939 flag_rounding_math = saved_rounding_math;\
14940 flag_trapv = saved_trapv;\
14941 folding_initializer = saved_folding_initializer;
14943 tree
14944 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14945 tree type, tree op)
14947 tree result;
14948 START_FOLD_INIT;
14950 result = fold_build1_loc (loc, code, type, op);
14952 END_FOLD_INIT;
14953 return result;
14956 tree
14957 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14958 tree type, tree op0, tree op1)
14960 tree result;
14961 START_FOLD_INIT;
14963 result = fold_build2_loc (loc, code, type, op0, op1);
14965 END_FOLD_INIT;
14966 return result;
14969 tree
14970 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14971 int nargs, tree *argarray)
14973 tree result;
14974 START_FOLD_INIT;
14976 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14978 END_FOLD_INIT;
14979 return result;
14982 #undef START_FOLD_INIT
14983 #undef END_FOLD_INIT
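/* For example (illustrative), when a static initializer such as
   "static double d = 1.0 / 0.0;" is folded through these wrappers,
   flag_trapping_math is temporarily cleared, so the division folds
   to +Inf even though an ordinary fold would have to preserve the
   potentially trapping operation.  */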
14985 /* Determine if first argument is a multiple of second argument. Return 0 if
14986 it is not, or we cannot easily determine it to be.
14988 An example of the sort of thing we care about (at this point; this routine
14989 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14990 fold cases do now) is discovering that
14992 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14994 is a multiple of
14996 SAVE_EXPR (J * 8)
14998 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15000 This code also handles discovering that
15002 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15004 is a multiple of 8 so we don't have to worry about dealing with a
15005 possible remainder.
15007 Note that we *look* inside a SAVE_EXPR only to determine how it was
15008 calculated; it is not safe for fold to do much of anything else with the
15009 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15010 at run time. For example, the latter example above *cannot* be implemented
15011 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15012 evaluation time of the original SAVE_EXPR is not necessarily the same at
15013 the time the new expression is evaluated. The only optimization of this
15014 sort that would be valid is changing
15016 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15018 divided by 8 to
15020 SAVE_EXPR (I) * SAVE_EXPR (J)
15022 (where the same SAVE_EXPR (J) is used in the original and the
15023 transformed version). */
15025 int
15026 multiple_of_p (tree type, const_tree top, const_tree bottom)
15028 if (operand_equal_p (top, bottom, 0))
15029 return 1;
15031 if (TREE_CODE (type) != INTEGER_TYPE)
15032 return 0;
15034 switch (TREE_CODE (top))
15036 case BIT_AND_EXPR:
15037 /* Bitwise and provides a power of two multiple. If the mask is
15038 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15039 if (!integer_pow2p (bottom))
15040 return 0;
15041 /* FALLTHRU */
15043 case MULT_EXPR:
15044 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15045 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15047 case PLUS_EXPR:
15048 case MINUS_EXPR:
15049 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15050 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15052 case LSHIFT_EXPR:
15053 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15055 tree op1, t1;
15057 op1 = TREE_OPERAND (top, 1);
15058 /* const_binop may not detect overflow correctly,
15059 so check for it explicitly here. */
15060 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15061 && 0 != (t1 = fold_convert (type,
15062 const_binop (LSHIFT_EXPR,
15063 size_one_node,
15064 op1)))
15065 && !TREE_OVERFLOW (t1))
15066 return multiple_of_p (type, t1, bottom);
15068 return 0;
15070 case NOP_EXPR:
15071 /* Can't handle conversions from non-integral or wider integral type. */
15072 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15073 || (TYPE_PRECISION (type)
15074 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15075 return 0;
15077 /* ... fall through ... */
15079 case SAVE_EXPR:
15080 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15082 case COND_EXPR:
15083 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15084 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15086 case INTEGER_CST:
15087 if (TREE_CODE (bottom) != INTEGER_CST
15088 || integer_zerop (bottom)
15089 || (TYPE_UNSIGNED (type)
15090 && (tree_int_cst_sgn (top) < 0
15091 || tree_int_cst_sgn (bottom) < 0)))
15092 return 0;
15093 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15094 SIGNED);
15096 default:
15097 return 0;
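/* A minimal standalone sketch of the recursion shape above, written
   over plain integers for exposition (the struct and names here are
   hypothetical, not the tree API): a product is a multiple of BOTTOM
   if either factor is, a sum only if both addends are, and a
   constant is tested by direct division.  */

enum toy_code { TOY_CST, TOY_MULT, TOY_PLUS };

struct toy_expr
{
  enum toy_code code;
  long cst;			/* Valid when code == TOY_CST.  */
  const struct toy_expr *op0, *op1;
};

static int
toy_multiple_of_p (const struct toy_expr *top, long bottom)
{
  switch (top->code)
    {
    case TOY_CST:
      return bottom != 0 && top->cst % bottom == 0;
    case TOY_MULT:
      return (toy_multiple_of_p (top->op0, bottom)
	      || toy_multiple_of_p (top->op1, bottom));
    case TOY_PLUS:
      return (toy_multiple_of_p (top->op0, bottom)
	      && toy_multiple_of_p (top->op1, bottom));
    }
  return 0;
}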
15101 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
15103 static bool
15104 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15106 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15107 && truth_value_p (code))
15108 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15109 have a signed:1 type (where the values are -1 and 0). */
15110 return true;
15111 return false;
15114 /* Return true if (CODE OP0) is known to be non-negative. If the return
15115 value is based on the assumption that signed overflow is undefined,
15116 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15117 *STRICT_OVERFLOW_P. */
15119 bool
15120 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15121 bool *strict_overflow_p)
15123 if (TYPE_UNSIGNED (type))
15124 return true;
15126 switch (code)
15128 case ABS_EXPR:
15129 /* We can't return 1 if flag_wrapv is set because
15130 ABS_EXPR<INT_MIN> = INT_MIN. */
15131 if (!INTEGRAL_TYPE_P (type))
15132 return true;
15133 if (TYPE_OVERFLOW_UNDEFINED (type))
15135 *strict_overflow_p = true;
15136 return true;
15138 break;
15140 case NON_LVALUE_EXPR:
15141 case FLOAT_EXPR:
15142 case FIX_TRUNC_EXPR:
15143 return tree_expr_nonnegative_warnv_p (op0,
15144 strict_overflow_p);
15146 case NOP_EXPR:
15148 tree inner_type = TREE_TYPE (op0);
15149 tree outer_type = type;
15151 if (TREE_CODE (outer_type) == REAL_TYPE)
15153 if (TREE_CODE (inner_type) == REAL_TYPE)
15154 return tree_expr_nonnegative_warnv_p (op0,
15155 strict_overflow_p);
15156 if (INTEGRAL_TYPE_P (inner_type))
15158 if (TYPE_UNSIGNED (inner_type))
15159 return true;
15160 return tree_expr_nonnegative_warnv_p (op0,
15161 strict_overflow_p);
15164 else if (INTEGRAL_TYPE_P (outer_type))
15166 if (TREE_CODE (inner_type) == REAL_TYPE)
15167 return tree_expr_nonnegative_warnv_p (op0,
15168 strict_overflow_p);
15169 if (INTEGRAL_TYPE_P (inner_type))
15170 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15171 && TYPE_UNSIGNED (inner_type);
15174 break;
15176 default:
15177 return tree_simple_nonnegative_warnv_p (code, type);
15180 /* We don't know sign of `t', so be conservative and return false. */
15181 return false;
15184 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15185 value is based on the assumption that signed overflow is undefined,
15186 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15187 *STRICT_OVERFLOW_P. */
15189 bool
15190 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15191 tree op1, bool *strict_overflow_p)
15193 if (TYPE_UNSIGNED (type))
15194 return true;
15196 switch (code)
15198 case POINTER_PLUS_EXPR:
15199 case PLUS_EXPR:
15200 if (FLOAT_TYPE_P (type))
15201 return (tree_expr_nonnegative_warnv_p (op0,
15202 strict_overflow_p)
15203 && tree_expr_nonnegative_warnv_p (op1,
15204 strict_overflow_p));
15206 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15207 both unsigned and at least 2 bits shorter than the result. */
15208 if (TREE_CODE (type) == INTEGER_TYPE
15209 && TREE_CODE (op0) == NOP_EXPR
15210 && TREE_CODE (op1) == NOP_EXPR)
15212 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15213 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15214 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15215 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15217 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15218 TYPE_PRECISION (inner2)) + 1;
15219 return prec < TYPE_PRECISION (type);
15222 break;
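/* A concrete instance of the rule above:
   (int) (unsigned short) x + (int) (unsigned short) y
   gives prec = MAX (16, 16) + 1 = 17 < 32, so the sum fits in 17
   bits and can never reach the sign bit.  */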
15224 case MULT_EXPR:
15225 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15227 /* x * x is always non-negative for floating point x,
15228 or when signed overflow is undefined. */
15229 if (operand_equal_p (op0, op1, 0)
15230 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15231 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15233 if (TYPE_OVERFLOW_UNDEFINED (type))
15234 *strict_overflow_p = true;
15235 return true;
15239 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15240 both unsigned and their combined width is less than that of the result. */
15241 if (TREE_CODE (type) == INTEGER_TYPE
15242 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15243 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15245 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15246 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15247 : TREE_TYPE (op0);
15248 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15249 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15250 : TREE_TYPE (op1);
15252 bool unsigned0 = TYPE_UNSIGNED (inner0);
15253 bool unsigned1 = TYPE_UNSIGNED (inner1);
15255 if (TREE_CODE (op0) == INTEGER_CST)
15256 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15258 if (TREE_CODE (op1) == INTEGER_CST)
15259 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15261 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15262 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15264 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15265 ? tree_int_cst_min_precision (op0, UNSIGNED)
15266 : TYPE_PRECISION (inner0);
15268 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15269 ? tree_int_cst_min_precision (op1, UNSIGNED)
15270 : TYPE_PRECISION (inner1);
15272 return precision0 + precision1 < TYPE_PRECISION (type);
15275 return false;
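/* Likewise for the multiplication rule: (int) (unsigned char) x * 100
   needs at most 8 + 7 = 15 bits (100 fits in 7 unsigned bits), which
   is less than 32, so the product cannot wrap into the sign bit.  */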
15277 case BIT_AND_EXPR:
15278 case MAX_EXPR:
15279 return (tree_expr_nonnegative_warnv_p (op0,
15280 strict_overflow_p)
15281 || tree_expr_nonnegative_warnv_p (op1,
15282 strict_overflow_p));
15284 case BIT_IOR_EXPR:
15285 case BIT_XOR_EXPR:
15286 case MIN_EXPR:
15287 case RDIV_EXPR:
15288 case TRUNC_DIV_EXPR:
15289 case CEIL_DIV_EXPR:
15290 case FLOOR_DIV_EXPR:
15291 case ROUND_DIV_EXPR:
15292 return (tree_expr_nonnegative_warnv_p (op0,
15293 strict_overflow_p)
15294 && tree_expr_nonnegative_warnv_p (op1,
15295 strict_overflow_p));
15297 case TRUNC_MOD_EXPR:
15298 case CEIL_MOD_EXPR:
15299 case FLOOR_MOD_EXPR:
15300 case ROUND_MOD_EXPR:
15301 return tree_expr_nonnegative_warnv_p (op0,
15302 strict_overflow_p);
15303 default:
15304 return tree_simple_nonnegative_warnv_p (code, type);
15307 /* We don't know sign of `t', so be conservative and return false. */
15308 return false;
15311 /* Return true if T is known to be non-negative. If the return
15312 value is based on the assumption that signed overflow is undefined,
15313 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15314 *STRICT_OVERFLOW_P. */
15316 bool
15317 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15319 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15320 return true;
15322 switch (TREE_CODE (t))
15324 case INTEGER_CST:
15325 return tree_int_cst_sgn (t) >= 0;
15327 case REAL_CST:
15328 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15330 case FIXED_CST:
15331 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15333 case COND_EXPR:
15334 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15335 strict_overflow_p)
15336 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15337 strict_overflow_p));
15338 default:
15339 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15340 TREE_TYPE (t));
15342 /* We don't know sign of `t', so be conservative and return false. */
15343 return false;
15346 /* Return true if T is known to be non-negative. If the return
15347 value is based on the assumption that signed overflow is undefined,
15348 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15349 *STRICT_OVERFLOW_P. */
15351 bool
15352 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15353 tree arg0, tree arg1, bool *strict_overflow_p)
15355 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15356 switch (DECL_FUNCTION_CODE (fndecl))
15358 CASE_FLT_FN (BUILT_IN_ACOS):
15359 CASE_FLT_FN (BUILT_IN_ACOSH):
15360 CASE_FLT_FN (BUILT_IN_CABS):
15361 CASE_FLT_FN (BUILT_IN_COSH):
15362 CASE_FLT_FN (BUILT_IN_ERFC):
15363 CASE_FLT_FN (BUILT_IN_EXP):
15364 CASE_FLT_FN (BUILT_IN_EXP10):
15365 CASE_FLT_FN (BUILT_IN_EXP2):
15366 CASE_FLT_FN (BUILT_IN_FABS):
15367 CASE_FLT_FN (BUILT_IN_FDIM):
15368 CASE_FLT_FN (BUILT_IN_HYPOT):
15369 CASE_FLT_FN (BUILT_IN_POW10):
15370 CASE_INT_FN (BUILT_IN_FFS):
15371 CASE_INT_FN (BUILT_IN_PARITY):
15372 CASE_INT_FN (BUILT_IN_POPCOUNT):
15373 CASE_INT_FN (BUILT_IN_CLZ):
15374 CASE_INT_FN (BUILT_IN_CLRSB):
15375 case BUILT_IN_BSWAP32:
15376 case BUILT_IN_BSWAP64:
15377 /* Always true. */
15378 return true;
15380 CASE_FLT_FN (BUILT_IN_SQRT):
15381 /* sqrt(-0.0) is -0.0. */
15382 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15383 return true;
15384 return tree_expr_nonnegative_warnv_p (arg0,
15385 strict_overflow_p);
15387 CASE_FLT_FN (BUILT_IN_ASINH):
15388 CASE_FLT_FN (BUILT_IN_ATAN):
15389 CASE_FLT_FN (BUILT_IN_ATANH):
15390 CASE_FLT_FN (BUILT_IN_CBRT):
15391 CASE_FLT_FN (BUILT_IN_CEIL):
15392 CASE_FLT_FN (BUILT_IN_ERF):
15393 CASE_FLT_FN (BUILT_IN_EXPM1):
15394 CASE_FLT_FN (BUILT_IN_FLOOR):
15395 CASE_FLT_FN (BUILT_IN_FMOD):
15396 CASE_FLT_FN (BUILT_IN_FREXP):
15397 CASE_FLT_FN (BUILT_IN_ICEIL):
15398 CASE_FLT_FN (BUILT_IN_IFLOOR):
15399 CASE_FLT_FN (BUILT_IN_IRINT):
15400 CASE_FLT_FN (BUILT_IN_IROUND):
15401 CASE_FLT_FN (BUILT_IN_LCEIL):
15402 CASE_FLT_FN (BUILT_IN_LDEXP):
15403 CASE_FLT_FN (BUILT_IN_LFLOOR):
15404 CASE_FLT_FN (BUILT_IN_LLCEIL):
15405 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15406 CASE_FLT_FN (BUILT_IN_LLRINT):
15407 CASE_FLT_FN (BUILT_IN_LLROUND):
15408 CASE_FLT_FN (BUILT_IN_LRINT):
15409 CASE_FLT_FN (BUILT_IN_LROUND):
15410 CASE_FLT_FN (BUILT_IN_MODF):
15411 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15412 CASE_FLT_FN (BUILT_IN_RINT):
15413 CASE_FLT_FN (BUILT_IN_ROUND):
15414 CASE_FLT_FN (BUILT_IN_SCALB):
15415 CASE_FLT_FN (BUILT_IN_SCALBLN):
15416 CASE_FLT_FN (BUILT_IN_SCALBN):
15417 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15418 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15419 CASE_FLT_FN (BUILT_IN_SINH):
15420 CASE_FLT_FN (BUILT_IN_TANH):
15421 CASE_FLT_FN (BUILT_IN_TRUNC):
15422 /* True if the 1st argument is nonnegative. */
15423 return tree_expr_nonnegative_warnv_p (arg0,
15424 strict_overflow_p);
15426 CASE_FLT_FN (BUILT_IN_FMAX):
15427 /* True if the 1st OR 2nd arguments are nonnegative. */
15428 return (tree_expr_nonnegative_warnv_p (arg0,
15429 strict_overflow_p)
15430 || (tree_expr_nonnegative_warnv_p (arg1,
15431 strict_overflow_p)));
15433 CASE_FLT_FN (BUILT_IN_FMIN):
15434 /* True if the 1st AND 2nd arguments are nonnegative. */
15435 return (tree_expr_nonnegative_warnv_p (arg0,
15436 strict_overflow_p)
15437 && (tree_expr_nonnegative_warnv_p (arg1,
15438 strict_overflow_p)));
15440 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15441 /* True if the 2nd argument is nonnegative. */
15442 return tree_expr_nonnegative_warnv_p (arg1,
15443 strict_overflow_p);
15445 CASE_FLT_FN (BUILT_IN_POWI):
15446 /* True if the 1st argument is nonnegative or the second
15447 argument is an even integer. */
15448 if (TREE_CODE (arg1) == INTEGER_CST
15449 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15450 return true;
15451 return tree_expr_nonnegative_warnv_p (arg0,
15452 strict_overflow_p);
15454 CASE_FLT_FN (BUILT_IN_POW):
15455 /* True if the 1st argument is nonnegative or the second
15456 argument is an even integer valued real. */
15457 if (TREE_CODE (arg1) == REAL_CST)
15459 REAL_VALUE_TYPE c;
15460 HOST_WIDE_INT n;
15462 c = TREE_REAL_CST (arg1);
15463 n = real_to_integer (&c);
15464 if ((n & 1) == 0)
15466 REAL_VALUE_TYPE cint;
15467 real_from_integer (&cint, VOIDmode, n, SIGNED);
15468 if (real_identical (&c, &cint))
15469 return true;
15472 return tree_expr_nonnegative_warnv_p (arg0,
15473 strict_overflow_p);
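/* E.g. pow (x, 2.0) is known non-negative for any x because 2.0 is
   an even integer-valued real, while pow (x, 3.0) is non-negative
   only when x itself is.  */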
15475 default:
15476 break;
15478 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15479 type);
15482 /* Return true if T is known to be non-negative. If the return
15483 value is based on the assumption that signed overflow is undefined,
15484 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15485 *STRICT_OVERFLOW_P. */
15487 static bool
15488 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15490 enum tree_code code = TREE_CODE (t);
15491 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15492 return true;
15494 switch (code)
15496 case TARGET_EXPR:
15498 tree temp = TARGET_EXPR_SLOT (t);
15499 t = TARGET_EXPR_INITIAL (t);
15501 /* If the initializer is non-void, then it's a normal expression
15502 that will be assigned to the slot. */
15503 if (!VOID_TYPE_P (t))
15504 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15506 /* Otherwise, the initializer sets the slot in some way. One common
15507 way is an assignment statement at the end of the initializer. */
15508 while (1)
15510 if (TREE_CODE (t) == BIND_EXPR)
15511 t = expr_last (BIND_EXPR_BODY (t));
15512 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15513 || TREE_CODE (t) == TRY_CATCH_EXPR)
15514 t = expr_last (TREE_OPERAND (t, 0));
15515 else if (TREE_CODE (t) == STATEMENT_LIST)
15516 t = expr_last (t);
15517 else
15518 break;
15520 if (TREE_CODE (t) == MODIFY_EXPR
15521 && TREE_OPERAND (t, 0) == temp)
15522 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15523 strict_overflow_p);
15525 return false;
15528 case CALL_EXPR:
15530 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15531 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15533 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15534 get_callee_fndecl (t),
15535 arg0,
15536 arg1,
15537 strict_overflow_p);
15539 case COMPOUND_EXPR:
15540 case MODIFY_EXPR:
15541 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15542 strict_overflow_p);
15543 case BIND_EXPR:
15544 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15545 strict_overflow_p);
15546 case SAVE_EXPR:
15547 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15548 strict_overflow_p);
15550 default:
15551 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15552 TREE_TYPE (t));
15555 /* We don't know sign of `t', so be conservative and return false. */
15556 return false;
15559 /* Return true if T is known to be non-negative. If the return
15560 value is based on the assumption that signed overflow is undefined,
15561 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15562 *STRICT_OVERFLOW_P. */
15564 bool
15565 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15567 enum tree_code code;
15568 if (t == error_mark_node)
15569 return false;
15571 code = TREE_CODE (t);
15572 switch (TREE_CODE_CLASS (code))
15574 case tcc_binary:
15575 case tcc_comparison:
15576 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15577 TREE_TYPE (t),
15578 TREE_OPERAND (t, 0),
15579 TREE_OPERAND (t, 1),
15580 strict_overflow_p);
15582 case tcc_unary:
15583 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15584 TREE_TYPE (t),
15585 TREE_OPERAND (t, 0),
15586 strict_overflow_p);
15588 case tcc_constant:
15589 case tcc_declaration:
15590 case tcc_reference:
15591 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15593 default:
15594 break;
15597 switch (code)
15599 case TRUTH_AND_EXPR:
15600 case TRUTH_OR_EXPR:
15601 case TRUTH_XOR_EXPR:
15602 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15603 TREE_TYPE (t),
15604 TREE_OPERAND (t, 0),
15605 TREE_OPERAND (t, 1),
15606 strict_overflow_p);
15607 case TRUTH_NOT_EXPR:
15608 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15609 TREE_TYPE (t),
15610 TREE_OPERAND (t, 0),
15611 strict_overflow_p);
15613 case COND_EXPR:
15614 case CONSTRUCTOR:
15615 case OBJ_TYPE_REF:
15616 case ASSERT_EXPR:
15617 case ADDR_EXPR:
15618 case WITH_SIZE_EXPR:
15619 case SSA_NAME:
15620 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15622 default:
15623 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15627 /* Return true if `t' is known to be non-negative. Handle warnings
15628 about undefined signed overflow. */
15630 bool
15631 tree_expr_nonnegative_p (tree t)
15633 bool ret, strict_overflow_p;
15635 strict_overflow_p = false;
15636 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15637 if (strict_overflow_p)
15638 fold_overflow_warning (("assuming signed overflow does not occur when "
15639 "determining that expression is always "
15640 "non-negative"),
15641 WARN_STRICT_OVERFLOW_MISC);
15642 return ret;
15646 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15647 For floating point we further ensure that T is not denormal.
15648 Similar logic is present in nonzero_address in rtlanal.h.
15650 If the return value is based on the assumption that signed overflow
15651 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15652 change *STRICT_OVERFLOW_P. */
15654 bool
15655 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15656 bool *strict_overflow_p)
15658 switch (code)
15660 case ABS_EXPR:
15661 return tree_expr_nonzero_warnv_p (op0,
15662 strict_overflow_p);
15664 case NOP_EXPR:
15666 tree inner_type = TREE_TYPE (op0);
15667 tree outer_type = type;
15669 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15670 && tree_expr_nonzero_warnv_p (op0,
15671 strict_overflow_p));
15673 break;
15675 case NON_LVALUE_EXPR:
15676 return tree_expr_nonzero_warnv_p (op0,
15677 strict_overflow_p);
15679 default:
15680 break;
15683 return false;

/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* In the presence of negative values it is hard
	     to say anything.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of the operands must be positive and the other
	     non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a two's-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
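
/* Worked example (illustrative only): for signed X, Y with undefined
   overflow, if both X and Y are known non-negative and at least one is
   known nonzero, then X + Y is nonzero: on a two's-complement machine
   two non-negative values can only sum to zero when both are zero.
   For an unsigned type TYPE_OVERFLOW_UNDEFINED is false, the sum may
   wrap to zero, and the PLUS_EXPR case conservatively gives up.  */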

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* For objects in symbol table check if we know they are non-zero.
	   Don't do anything for variables and functions before symtab is
	   built; it is quite possible that they will be declared weak
	   later.  */
	if (DECL_P (base) && decl_in_symtab_p (base))
	  {
	    struct symtab_node *symbol;

	    symbol = symtab_node::get_create (base);
	    if (symbol)
	      return symbol->nonzero_address ();
	    else
	      return false;
	  }

	/* Function local objects are never NULL.  */
	if (DECL_P (base)
	    && (DECL_CONTEXT (base)
		&& TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		&& auto_var_in_fn_p (base, DECL_CONTEXT (base))))
	  return true;

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }
  return false;
}
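
/* Usage sketch (illustrative only): given

     void f (void) { int local; ... &local ... }

   the ADDR_EXPR &local is nonzero via the auto_var_in_fn_p test, so a
   comparison such as &local != 0 can fold to true.  For file-scope
   symbols the decision is delegated to symtab_node::nonzero_address,
   which correctly refuses to answer for symbols that may later be
   declared weak.  */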

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
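
/* Usage sketch (illustrative only, not verbatim GCC code):

     tree six = build_int_cst (integer_type_node, 6);
     tree seven = build_int_cst (integer_type_node, 7);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					 six, seven);

   Here SUM is the INTEGER_CST 13.  Had either operand been, say, a
   VAR_DECL, fold_binary would have produced a non-constant tree (or
   nothing at all) and the result would be NULL_TREE.  */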

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
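
/* Example (illustrative only): for the C expression "abc"[1], EXP is
   an ARRAY_REF of the STRING_CST "abc" with index 1, and the function
   returns the character constant 'b' built with build_int_cst_type.
   The pointer form *("abc" + 2) is handled via string_constant and
   yields 'c'.  */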

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	bool overflow;
	wide_int val = wi::neg (arg0, &overflow);
	t = force_fit_type (type, val, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
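
/* Worked example (illustrative only): negating the 32-bit INTEGER_CST
   INT_MIN (-2147483648) overflows in wi::neg, so force_fit_type hands
   back INT_MIN again but with TREE_OVERFLOW set, letting callers warn
   about or reject the fold.  Negating an unsigned constant never sets
   the flag because of the !TYPE_UNSIGNED (type) guard above.  */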

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (!wi::neg_p (arg0, TYPE_SIGN (type)))
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    wide_int val = wi::neg (arg0, &overflow);
	    t = force_fit_type (type, val, -1,
				overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
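
/* Example (illustrative only): for a 16-bit unsigned TYPE,
   fold_not_const applied to the constant 0x00ff yields 0xff00:
   wi::bit_not complements every bit and force_fit_type truncates the
   result back to the precision of TYPE.  */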

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
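
/* Worked example (illustrative only): folding 2.0 > NAN.  GT_EXPR is
   one of the comparisons that may raise an exception on unordered
   operands, so when flag_trapping_math is set (the default) the fold
   is declined and NULL_TREE is returned; with trapping math disabled
   the result is the constant false.  By contrast 2.0 != NAN always
   folds to true, since NE_EXPR is a quiet comparison.  */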

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the
     return: if it has no side effects, or if it is a MODIFY_EXPR whose
     right-hand side has no side effects, we don't need to wrap EXPR in
     a cleanup point expression.  Note we don't check the left-hand
     side of the modify because it should always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
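
/* Example (illustrative only): "return x;" is represented as a
   RETURN_EXPR of MODIFY_EXPR <retval, x>; the right-hand side X has
   no side effects, so the expression is returned unwrapped.  For
   "return f ();" the right-hand side is a call with side effects, so
   a CLEANUP_POINT_EXPR is built around the return.  */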

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_to_shwi (part_width) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
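
/* Example folds (illustrative only), writing "p+ N" for a
   POINTER_PLUS_EXPR with constant byte offset N and assuming 4-byte
   int and float:

     *(int *)&iarr          => iarr[0]        (int iarr[])
     *(float *)&fc          => __real__ fc    (_Complex float fc)
     *(float *)(&fc p+ 4)   => __imag__ fc
     *(int *)(&iarr p+ 8)   => iarr[2]

   In the last case the byte offset is divided by TYPE_SIZE_UNIT of
   the element type to recover the array index.  */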

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
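
/* Example (illustrative only): for the discarded expression statement
   "x + f ();", the binary case keeps only the operand with side
   effects, so the result is the call f () alone and the pure read of
   X is dropped.  An expression with no side effects at all collapses
   to integer_zero_node.  */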

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val &= ~(divisor - 1);
	  val += divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
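
/* Worked example (illustrative only): round_up_loc (loc, 13, 8).
   8 is a power of two and 13 & 7 != 0, so the constant path clears
   the low bits (13 & ~7 == 8) and adds the divisor, giving 16.  For a
   non-constant VALUE the same result is computed as (VALUE + 7) & -8;
   a non-power-of-two divisor falls back to CEIL_DIV_EXPR followed by
   MULT_EXPR.  */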

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
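
/* Example (illustrative only): for int A[10], E1 = &A[3] and
   E2 = &A[1] share the core &A, so with a 4-byte int the function
   stores (3 - 1) * 4 = 8 in *DIFF and returns true.  If one address
   had a variable index, e.g. &A[i], only one side would have a
   non-constant offset and the function would return false.  */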

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
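
/* Example (illustrative only): cos is an even function, so when
   folding cos (X) the builtin folders may strip sign operations from
   X; cos (-x * y) becomes cos (x * y), and cos (copysign (x, y))
   becomes cos (x) (keeping Y only if it has side effects).  The
   negate_mathfn_p branch handles nested odd functions: in such a
   context sin (-x) may be rewritten to sin (x), since flipping the
   sign of an odd function's argument only flips the sign of its
   result.  */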