/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
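
/* Illustrative sketch, not part of the original file: the encoding
   above assigns one bit each to LT, EQ, GT and UNORD, so combining
   two comparisons of the same operands is plain bit arithmetic.  */

static inline enum comparison_code
compcode_or_example (enum comparison_code c1, enum comparison_code c2)
{
  /* E.g. (A < B) || (A == B): COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE.
     ANDing works the same way for conjunctions:
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ.  */
  return (enum comparison_code) (c1 | c2);
}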
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
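
/* Usage sketch, not part of the original file (hypothetical values):
   an exact division folds, an inexact one does not.  */

static tree
div_if_zero_remainder_example (void)
{
  tree twelve = build_int_cst (integer_type_node, 12);
  tree four = build_int_cst (integer_type_node, 4);
  /* Returns an INTEGER_CST of value 3; a 13 / 4 query would return
     NULL_TREE because the remainder is nonzero.  */
  return div_if_zero_remainder (twelve, four);
}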
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
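
/* Usage sketch, not part of the original file: callers bracket
   speculative folding in a defer/undefer pair so a -Wstrict-overflow
   warning is only issued when the folded result is actually used.  */

static tree
fold_deferred_example (tree expr, bool result_used)
{
  fold_defer_overflow_warnings ();
  tree res = fold (expr);
  /* NULL statement, code 0: keep the deferred warning level.  */
  fold_undefer_overflow_warnings (result_used, NULL, 0);
  return res;
}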
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
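
/* Worked example, not part of the original file: for a 32-bit signed
   int the only constant this rejects is INT_MIN, whose negation does
   not fit; every unsigned constant is rejected outright.  */

static bool
may_negate_example (void)
{
  tree int_min = TYPE_MIN_VALUE (integer_type_node);
  return may_negate_without_overflow_p (int_min);  /* false  */
}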
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (!TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
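
/* Usage sketch, not part of the original file: negating A - B goes
   through the MINUS_EXPR case of fold_negate_expr and yields B - A
   when signed zeros and sign-dependent rounding are not honored.  */

static tree
negate_expr_example (tree a, tree b)
{
  tree diff = fold_build2 (MINUS_EXPR, TREE_TYPE (a), a, b);
  return negate_expr (diff);  /* Typically folds to b - a.  */
}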
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
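
/* Usage sketch, not part of the original file: splitting x + 4 with
   CODE == PLUS_EXPR stores the literal 4 in *LITP, leaves *CONP and
   *MINUS_LITP null, and returns x; for x - 4 the literal would land
   in *MINUS_LITP instead.  */

static tree
split_tree_example (tree x)
{
  tree con, lit, minus_lit;
  tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x,
                          build_int_cst (TREE_TYPE (x), 4));
  return split_tree (sum, PLUS_EXPR, &con, &lit, &minus_lit, 0);
}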
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
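
/* Usage sketch, not part of the original file (hypothetical values):
   folding 6 * 7 at compile time.  */

static tree
int_const_binop_example (void)
{
  tree six = build_int_cst (integer_type_node, 6);
  tree seven = build_int_cst (integer_type_node, 7);
  /* Returns an INTEGER_CST of value 42; returns NULL_TREE for tree
     codes int_const_binop_1 does not handle, or for division by 0.  */
  return int_const_binop (MULT_EXPR, six, seven);
}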
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
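
/* Usage sketch, not part of the original file: size arithmetic on
   sizetype constants folds immediately through the fast paths above.  */

static tree
size_binop_example (void)
{
  /* size_int wraps size_int_kind; 4 + 8 folds to a sizetype 12.  */
  return size_binop (PLUS_EXPR, size_int (4), size_int (8));
}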
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
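
/* Worked example, not part of the original file: converting the
   REAL_CST 1.0e30 to a 32-bit signed type saturates to
   TYPE_MAX_VALUE and sets TREE_OVERFLOW on the result; a NaN input
   converts to zero, likewise flagged as overflow.  */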
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are nonzero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                              fold_convert_loc (loc, TREE_TYPE (type), arg),
                              fold_convert_loc (loc, TREE_TYPE (type),
                                                integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
2069 /* Return false if X can be assumed not to be an lvalue, true
2070 otherwise. */
2072 static bool
2073 maybe_lvalue_p (const_tree x)
2075 /* We only need to wrap lvalue tree codes. */
2076 switch (TREE_CODE (x))
2078 case VAR_DECL:
2079 case PARM_DECL:
2080 case RESULT_DECL:
2081 case LABEL_DECL:
2082 case FUNCTION_DECL:
2083 case SSA_NAME:
2085 case COMPONENT_REF:
2086 case MEM_REF:
2087 case INDIRECT_REF:
2088 case ARRAY_REF:
2089 case ARRAY_RANGE_REF:
2090 case BIT_FIELD_REF:
2091 case OBJ_TYPE_REF:
2093 case REALPART_EXPR:
2094 case IMAGPART_EXPR:
2095 case PREINCREMENT_EXPR:
2096 case PREDECREMENT_EXPR:
2097 case SAVE_EXPR:
2098 case TRY_CATCH_EXPR:
2099 case WITH_CLEANUP_EXPR:
2100 case COMPOUND_EXPR:
2101 case MODIFY_EXPR:
2102 case TARGET_EXPR:
2103 case COND_EXPR:
2104 case BIND_EXPR:
2105 break;
2107 default:
2108 /* Assume the worst for front-end tree codes. */
2109 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2110 break;
2111 return false;
2114 return true;
2117 /* Return an expr equal to X but certainly not valid as an lvalue. */
2119 tree
2120 non_lvalue_loc (location_t loc, tree x)
2122 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2123 us. */
2124 if (in_gimple_form)
2125 return x;
2127 if (! maybe_lvalue_p (x))
2128 return x;
2129 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2132 /* When pedantic, return an expr equal to X but certainly not valid as a
2133 pedantic lvalue. Otherwise, return X. */
2135 static tree
2136 pedantic_non_lvalue_loc (location_t loc, tree x)
2138 return protected_set_expr_location_unshare (x, loc);
2141 /* Given a tree comparison code, return the code that is the logical inverse.
2142 It is generally not safe to do this for floating-point comparisons, except
2143 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2144 ERROR_MARK in this case. */
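/* A sketch of the contract (illustrative, not a specification):

     invert_tree_comparison (LT_EXPR, false) == GE_EXPR
     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR

   and with flag_trapping_math set, inverting LT_EXPR while honoring
   NaNs yields ERROR_MARK, because UNGE would not trap on unordered
   operands where LT does.  */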
2146 enum tree_code
2147 invert_tree_comparison (enum tree_code code, bool honor_nans)
2149 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2150 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2151 return ERROR_MARK;
2153 switch (code)
2155 case EQ_EXPR:
2156 return NE_EXPR;
2157 case NE_EXPR:
2158 return EQ_EXPR;
2159 case GT_EXPR:
2160 return honor_nans ? UNLE_EXPR : LE_EXPR;
2161 case GE_EXPR:
2162 return honor_nans ? UNLT_EXPR : LT_EXPR;
2163 case LT_EXPR:
2164 return honor_nans ? UNGE_EXPR : GE_EXPR;
2165 case LE_EXPR:
2166 return honor_nans ? UNGT_EXPR : GT_EXPR;
2167 case LTGT_EXPR:
2168 return UNEQ_EXPR;
2169 case UNEQ_EXPR:
2170 return LTGT_EXPR;
2171 case UNGT_EXPR:
2172 return LE_EXPR;
2173 case UNGE_EXPR:
2174 return LT_EXPR;
2175 case UNLT_EXPR:
2176 return GE_EXPR;
2177 case UNLE_EXPR:
2178 return GT_EXPR;
2179 case ORDERED_EXPR:
2180 return UNORDERED_EXPR;
2181 case UNORDERED_EXPR:
2182 return ORDERED_EXPR;
2183 default:
2184 gcc_unreachable ();
2188 /* Similar, but return the comparison that results if the operands are
2189 swapped. This is safe for floating-point. */
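/* E.g. "a < b" and "b > a" are the same test on any operands,
   including IEEE floats, so swap_tree_comparison (LT_EXPR) is GT_EXPR
   and the unordered variants swap pairwise (UNLT_EXPR <-> UNGT_EXPR),
   while symmetric codes such as EQ_EXPR map to themselves.  */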
2191 enum tree_code
2192 swap_tree_comparison (enum tree_code code)
2194 switch (code)
2196 case EQ_EXPR:
2197 case NE_EXPR:
2198 case ORDERED_EXPR:
2199 case UNORDERED_EXPR:
2200 case LTGT_EXPR:
2201 case UNEQ_EXPR:
2202 return code;
2203 case GT_EXPR:
2204 return LT_EXPR;
2205 case GE_EXPR:
2206 return LE_EXPR;
2207 case LT_EXPR:
2208 return GT_EXPR;
2209 case LE_EXPR:
2210 return GE_EXPR;
2211 case UNGT_EXPR:
2212 return UNLT_EXPR;
2213 case UNGE_EXPR:
2214 return UNLE_EXPR;
2215 case UNLT_EXPR:
2216 return UNGT_EXPR;
2217 case UNLE_EXPR:
2218 return UNGE_EXPR;
2219 default:
2220 gcc_unreachable ();
2225 /* Convert a comparison tree code from an enum tree_code representation
2226 into a compcode bit-based encoding. This function is the inverse of
2227 compcode_to_comparison. */
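/* The encoding is chosen so that combining predicates is plain bitwise
   arithmetic, e.g. (a sketch using the enum values above):

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE   (1 | 2 == 3)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ   (3 & 6 == 2)  */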
2229 static enum comparison_code
2230 comparison_to_compcode (enum tree_code code)
2232 switch (code)
2234 case LT_EXPR:
2235 return COMPCODE_LT;
2236 case EQ_EXPR:
2237 return COMPCODE_EQ;
2238 case LE_EXPR:
2239 return COMPCODE_LE;
2240 case GT_EXPR:
2241 return COMPCODE_GT;
2242 case NE_EXPR:
2243 return COMPCODE_NE;
2244 case GE_EXPR:
2245 return COMPCODE_GE;
2246 case ORDERED_EXPR:
2247 return COMPCODE_ORD;
2248 case UNORDERED_EXPR:
2249 return COMPCODE_UNORD;
2250 case UNLT_EXPR:
2251 return COMPCODE_UNLT;
2252 case UNEQ_EXPR:
2253 return COMPCODE_UNEQ;
2254 case UNLE_EXPR:
2255 return COMPCODE_UNLE;
2256 case UNGT_EXPR:
2257 return COMPCODE_UNGT;
2258 case LTGT_EXPR:
2259 return COMPCODE_LTGT;
2260 case UNGE_EXPR:
2261 return COMPCODE_UNGE;
2262 default:
2263 gcc_unreachable ();
2267 /* Convert a compcode bit-based encoding of a comparison operator back
2268 to GCC's enum tree_code representation. This function is the
2269 inverse of comparison_to_compcode. */
2271 static enum tree_code
2272 compcode_to_comparison (enum comparison_code code)
2274 switch (code)
2276 case COMPCODE_LT:
2277 return LT_EXPR;
2278 case COMPCODE_EQ:
2279 return EQ_EXPR;
2280 case COMPCODE_LE:
2281 return LE_EXPR;
2282 case COMPCODE_GT:
2283 return GT_EXPR;
2284 case COMPCODE_NE:
2285 return NE_EXPR;
2286 case COMPCODE_GE:
2287 return GE_EXPR;
2288 case COMPCODE_ORD:
2289 return ORDERED_EXPR;
2290 case COMPCODE_UNORD:
2291 return UNORDERED_EXPR;
2292 case COMPCODE_UNLT:
2293 return UNLT_EXPR;
2294 case COMPCODE_UNEQ:
2295 return UNEQ_EXPR;
2296 case COMPCODE_UNLE:
2297 return UNLE_EXPR;
2298 case COMPCODE_UNGT:
2299 return UNGT_EXPR;
2300 case COMPCODE_LTGT:
2301 return LTGT_EXPR;
2302 case COMPCODE_UNGE:
2303 return UNGE_EXPR;
2304 default:
2305 gcc_unreachable ();
2309 /* Return a tree for the comparison which is the combination of
2310 doing the AND or OR (depending on CODE) of the two operations LCODE
2311 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2312 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2313 if this makes the transformation invalid. */
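/* A sketch of the effect (illustrative): for "x < y || x == y" the
   compcodes COMPCODE_LT and COMPCODE_EQ are ORed into COMPCODE_LE, so
   the whole expression folds to the single comparison "x <= y",
   provided the rewrite does not change which operands can trap.  */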
2315 tree
2316 combine_comparisons (location_t loc,
2317 enum tree_code code, enum tree_code lcode,
2318 enum tree_code rcode, tree truth_type,
2319 tree ll_arg, tree lr_arg)
2321 bool honor_nans = HONOR_NANS (element_mode (ll_arg));
2322 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2323 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2324 int compcode;
2326 switch (code)
2328 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2329 compcode = lcompcode & rcompcode;
2330 break;
2332 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2333 compcode = lcompcode | rcompcode;
2334 break;
2336 default:
2337 return NULL_TREE;
2340 if (!honor_nans)
2342 /* Eliminate unordered comparisons, as well as LTGT and ORD
2343 which are not used unless the mode has NaNs. */
2344 compcode &= ~COMPCODE_UNORD;
2345 if (compcode == COMPCODE_LTGT)
2346 compcode = COMPCODE_NE;
2347 else if (compcode == COMPCODE_ORD)
2348 compcode = COMPCODE_TRUE;
2350 else if (flag_trapping_math)
2352 /* Check that the original operation and the optimized ones will trap
2353 under the same condition. */
2354 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2355 && (lcompcode != COMPCODE_EQ)
2356 && (lcompcode != COMPCODE_ORD);
2357 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2358 && (rcompcode != COMPCODE_EQ)
2359 && (rcompcode != COMPCODE_ORD);
2360 bool trap = (compcode & COMPCODE_UNORD) == 0
2361 && (compcode != COMPCODE_EQ)
2362 && (compcode != COMPCODE_ORD);
2364 /* In a short-circuited boolean expression the LHS might be
2365 such that the RHS, if evaluated, will never trap. For
2366 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2367 if neither x nor y is NaN. (This is a mixed blessing: for
2368 example, the expression above will never trap, hence
2369 optimizing it to x < y would be invalid). */
2370 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2371 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2372 rtrap = false;
2374 /* If the comparison was short-circuited, and only the RHS
2375 trapped, we may now generate a spurious trap. */
2376 if (rtrap && !ltrap
2377 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2378 return NULL_TREE;
2380 /* If we changed the conditions that cause a trap, we lose. */
2381 if ((ltrap || rtrap) != trap)
2382 return NULL_TREE;
2385 if (compcode == COMPCODE_TRUE)
2386 return constant_boolean_node (true, truth_type);
2387 else if (compcode == COMPCODE_FALSE)
2388 return constant_boolean_node (false, truth_type);
2389 else
2391 enum tree_code tcode;
2393 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2394 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2398 /* Return nonzero if two operands (typically of the same tree node)
2399 are necessarily equal. If either argument has side-effects, this
2400 function returns zero. FLAGS modifies behavior as follows:
2402 If OEP_ONLY_CONST is set, only return nonzero for constants.
2403 This function tests whether the operands are indistinguishable;
2404 it does not test whether they are equal using C's == operation.
2405 The distinction is important for IEEE floating point, because
2406 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2407 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2409 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2410 even though it may hold multiple values during a function.
2411 This is because a GCC tree node guarantees that nothing else is
2412 executed between the evaluation of its "operands" (which may often
2413 be evaluated in arbitrary order). Hence if the operands themselves
2414 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2415 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2416 unset means assuming isochronic (or instantaneous) tree equivalence.
2417 Unless comparing arbitrary expression trees, such as from different
2418 statements, this flag can usually be left unset.
2420 If OEP_PURE_SAME is set, then pure functions with identical arguments
2421 are considered the same. It is used when the caller has other ways
2422 to ensure that global memory is unchanged in between. */
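/* Illustrative behavior (a sketch, not a specification): for trees
   representing "a + b" and "b + a" this returns nonzero, since
   PLUS_EXPR is commutative; for the REAL_CSTs -0.0 and 0.0 it returns
   zero when signed zeros are honored, even though the two values
   compare equal under the C == operator.  */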
2424 int
2425 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2427 /* If either is ERROR_MARK, they aren't equal. */
2428 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2429 || TREE_TYPE (arg0) == error_mark_node
2430 || TREE_TYPE (arg1) == error_mark_node)
2431 return 0;
2433 /* Similarly, if either does not have a type (like a released SSA name),
2434 they aren't equal. */
2435 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2436 return 0;
2438 /* Check equality of integer constants before bailing out due to
2439 precision differences. */
2440 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2441 return tree_int_cst_equal (arg0, arg1);
2443 /* If both types don't have the same signedness, then we can't consider
2444 them equal. We must check this before the STRIP_NOPS calls
2445 because they may change the signedness of the arguments. As pointers
2446 strictly don't have a signedness, require either two pointers or
2447 two non-pointers as well. */
2448 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2449 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2450 return 0;
2452 /* We cannot consider pointers to different address space equal. */
2453 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2454 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2455 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2456 return 0;
2458 /* If both types don't have the same precision, then it is not safe
2459 to strip NOPs. */
2460 if (element_precision (TREE_TYPE (arg0))
2461 != element_precision (TREE_TYPE (arg1)))
2462 return 0;
2464 STRIP_NOPS (arg0);
2465 STRIP_NOPS (arg1);
2467 /* In case both args are comparisons but with different comparison
2468 code, try to swap the comparison operands of one arg to produce
2469 a match and compare that variant. */
2470 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2471 && COMPARISON_CLASS_P (arg0)
2472 && COMPARISON_CLASS_P (arg1))
2474 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2476 if (TREE_CODE (arg0) == swap_code)
2477 return operand_equal_p (TREE_OPERAND (arg0, 0),
2478 TREE_OPERAND (arg1, 1), flags)
2479 && operand_equal_p (TREE_OPERAND (arg0, 1),
2480 TREE_OPERAND (arg1, 0), flags);
2483 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2484 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2485 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2486 return 0;
2488 /* This is needed for conversions and for COMPONENT_REF.
2489 Might as well play it safe and always test this. */
2490 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2491 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2492 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2493 return 0;
2495 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2496 We don't care about side effects in that case because the SAVE_EXPR
2497 takes care of that for us. In all other cases, two expressions are
2498 equal if they have no side effects. If we have two identical
2499 expressions with side effects that should be treated the same due
2500 to the only side effects being identical SAVE_EXPR's, that will
2501 be detected in the recursive calls below.
2502 If we are taking an invariant address of two identical objects
2503 they are necessarily equal as well. */
2504 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2505 && (TREE_CODE (arg0) == SAVE_EXPR
2506 || (flags & OEP_CONSTANT_ADDRESS_OF)
2507 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2508 return 1;
2510 /* Next handle constant cases, those for which we can return 1 even
2511 if ONLY_CONST is set. */
2512 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2513 switch (TREE_CODE (arg0))
2515 case INTEGER_CST:
2516 return tree_int_cst_equal (arg0, arg1);
2518 case FIXED_CST:
2519 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2520 TREE_FIXED_CST (arg1));
2522 case REAL_CST:
2523 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2524 TREE_REAL_CST (arg1)))
2525 return 1;
2528 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2530 /* If we do not distinguish between signed and unsigned zero,
2531 consider them equal. */
2532 if (real_zerop (arg0) && real_zerop (arg1))
2533 return 1;
2535 return 0;
2537 case VECTOR_CST:
2539 unsigned i;
2541 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2542 return 0;
2544 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2546 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2547 VECTOR_CST_ELT (arg1, i), flags))
2548 return 0;
2550 return 1;
2553 case COMPLEX_CST:
2554 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2555 flags)
2556 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2557 flags));
2559 case STRING_CST:
2560 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2561 && ! memcmp (TREE_STRING_POINTER (arg0),
2562 TREE_STRING_POINTER (arg1),
2563 TREE_STRING_LENGTH (arg0)));
2565 case ADDR_EXPR:
2566 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2567 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2568 ? OEP_CONSTANT_ADDRESS_OF : 0);
2569 default:
2570 break;
2573 if (flags & OEP_ONLY_CONST)
2574 return 0;
2576 /* Define macros to test an operand from arg0 and arg1 for equality and a
2577 variant that allows null and views null as being different from any
2578 non-null value. In the latter case, if either is null, then both
2579 must be; otherwise, do the normal comparison. */
2580 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2581 TREE_OPERAND (arg1, N), flags)
2583 #define OP_SAME_WITH_NULL(N) \
2584 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2585 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2587 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2589 case tcc_unary:
2590 /* Two conversions are equal only if signedness and modes match. */
2591 switch (TREE_CODE (arg0))
2593 CASE_CONVERT:
2594 case FIX_TRUNC_EXPR:
2595 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2596 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2597 return 0;
2598 break;
2599 default:
2600 break;
2603 return OP_SAME (0);
2606 case tcc_comparison:
2607 case tcc_binary:
2608 if (OP_SAME (0) && OP_SAME (1))
2609 return 1;
2611 /* For commutative ops, allow the other order. */
2612 return (commutative_tree_code (TREE_CODE (arg0))
2613 && operand_equal_p (TREE_OPERAND (arg0, 0),
2614 TREE_OPERAND (arg1, 1), flags)
2615 && operand_equal_p (TREE_OPERAND (arg0, 1),
2616 TREE_OPERAND (arg1, 0), flags));
2618 case tcc_reference:
2619 /* If either of the pointer (or reference) expressions we are
2620 dereferencing contain a side effect, these cannot be equal,
2621 but their addresses can be. */
2622 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2623 && (TREE_SIDE_EFFECTS (arg0)
2624 || TREE_SIDE_EFFECTS (arg1)))
2625 return 0;
2627 switch (TREE_CODE (arg0))
2629 case INDIRECT_REF:
2630 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2631 return OP_SAME (0);
2633 case REALPART_EXPR:
2634 case IMAGPART_EXPR:
2635 return OP_SAME (0);
2637 case TARGET_MEM_REF:
2638 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2639 /* Require equal extra operands and then fall through to MEM_REF
2640 handling of the two common operands. */
2641 if (!OP_SAME_WITH_NULL (2)
2642 || !OP_SAME_WITH_NULL (3)
2643 || !OP_SAME_WITH_NULL (4))
2644 return 0;
2645 /* Fallthru. */
2646 case MEM_REF:
2647 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2648 /* Require equal access sizes, and similar pointer types.
2649 We can have incomplete types for array references of
2650 variable-sized arrays from the Fortran frontend
2651 though. Also verify the types are compatible. */
2652 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2653 || (TYPE_SIZE (TREE_TYPE (arg0))
2654 && TYPE_SIZE (TREE_TYPE (arg1))
2655 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2656 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2657 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2658 && alias_ptr_types_compatible_p
2659 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2660 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2661 && OP_SAME (0) && OP_SAME (1));
2663 case ARRAY_REF:
2664 case ARRAY_RANGE_REF:
2665 /* Operands 2 and 3 may be null.
2666 Compare the array index by value first if it is constant, as we
2667 may have different types with the same value here. */
2668 if (!OP_SAME (0))
2669 return 0;
2670 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2671 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2672 TREE_OPERAND (arg1, 1))
2673 || OP_SAME (1))
2674 && OP_SAME_WITH_NULL (2)
2675 && OP_SAME_WITH_NULL (3));
2677 case COMPONENT_REF:
2678 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2679 may be NULL when we're called to compare MEM_EXPRs. */
2680 if (!OP_SAME_WITH_NULL (0)
2681 || !OP_SAME (1))
2682 return 0;
2683 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2684 return OP_SAME_WITH_NULL (2);
2686 case BIT_FIELD_REF:
2687 if (!OP_SAME (0))
2688 return 0;
2689 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2690 return OP_SAME (1) && OP_SAME (2);
2692 default:
2693 return 0;
2696 case tcc_expression:
2697 switch (TREE_CODE (arg0))
2699 case ADDR_EXPR:
2700 case TRUTH_NOT_EXPR:
2701 return OP_SAME (0);
2703 case TRUTH_ANDIF_EXPR:
2704 case TRUTH_ORIF_EXPR:
2705 return OP_SAME (0) && OP_SAME (1);
2707 case FMA_EXPR:
2708 case WIDEN_MULT_PLUS_EXPR:
2709 case WIDEN_MULT_MINUS_EXPR:
2710 if (!OP_SAME (2))
2711 return 0;
2712 /* The multiplication operands are commutative. */
2713 /* FALLTHRU */
2715 case TRUTH_AND_EXPR:
2716 case TRUTH_OR_EXPR:
2717 case TRUTH_XOR_EXPR:
2718 if (OP_SAME (0) && OP_SAME (1))
2719 return 1;
2721 /* Otherwise take into account this is a commutative operation. */
2722 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2723 TREE_OPERAND (arg1, 1), flags)
2724 && operand_equal_p (TREE_OPERAND (arg0, 1),
2725 TREE_OPERAND (arg1, 0), flags));
2727 case COND_EXPR:
2728 case VEC_COND_EXPR:
2729 case DOT_PROD_EXPR:
2730 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2732 default:
2733 return 0;
2736 case tcc_vl_exp:
2737 switch (TREE_CODE (arg0))
2739 case CALL_EXPR:
2740 /* If the CALL_EXPRs call different functions, then they
2741 clearly cannot be equal. */
2742 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2743 flags))
2744 return 0;
2747 unsigned int cef = call_expr_flags (arg0);
2748 if (flags & OEP_PURE_SAME)
2749 cef &= ECF_CONST | ECF_PURE;
2750 else
2751 cef &= ECF_CONST;
2752 if (!cef)
2753 return 0;
2756 /* Now see if all the arguments are the same. */
2758 const_call_expr_arg_iterator iter0, iter1;
2759 const_tree a0, a1;
2760 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2761 a1 = first_const_call_expr_arg (arg1, &iter1);
2762 a0 && a1;
2763 a0 = next_const_call_expr_arg (&iter0),
2764 a1 = next_const_call_expr_arg (&iter1))
2765 if (! operand_equal_p (a0, a1, flags))
2766 return 0;
2768 /* If we get here and both argument lists are exhausted
2769 then the CALL_EXPRs are equal. */
2770 return ! (a0 || a1);
2772 default:
2773 return 0;
2776 case tcc_declaration:
2777 /* Consider __builtin_sqrt equal to sqrt. */
2778 return (TREE_CODE (arg0) == FUNCTION_DECL
2779 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2780 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2781 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2783 default:
2784 return 0;
2787 #undef OP_SAME
2788 #undef OP_SAME_WITH_NULL
2791 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2792 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2794 When in doubt, return 0. */
2796 static int
2797 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2799 int unsignedp1, unsignedpo;
2800 tree primarg0, primarg1, primother;
2801 unsigned int correct_width;
2803 if (operand_equal_p (arg0, arg1, 0))
2804 return 1;
2806 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2807 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2808 return 0;
2810 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2811 and see if the inner values are the same. This removes any
2812 signedness comparison, which doesn't matter here. */
2813 primarg0 = arg0, primarg1 = arg1;
2814 STRIP_NOPS (primarg0);
2815 STRIP_NOPS (primarg1);
2816 if (operand_equal_p (primarg0, primarg1, 0))
2817 return 1;
2819 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2820 actual comparison operand, ARG0.
2822 First throw away any conversions to wider types
2823 already present in the operands. */
2825 primarg1 = get_narrower (arg1, &unsignedp1);
2826 primother = get_narrower (other, &unsignedpo);
2828 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2829 if (unsignedp1 == unsignedpo
2830 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2831 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2833 tree type = TREE_TYPE (arg0);
2835 /* Make sure shorter operand is extended the right way
2836 to match the longer operand. */
2837 primarg1 = fold_convert (signed_or_unsigned_type_for
2838 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2840 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2841 return 1;
2844 return 0;
2847 /* See if ARG is an expression that is either a comparison or is performing
2848 arithmetic on comparisons. The comparisons must only be comparing
2849 two different values, which will be stored in *CVAL1 and *CVAL2; if
2850 they are nonzero it means that some operands have already been found.
2851 No variables may be used anywhere else in the expression except in the
2852 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2853 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2855 If this is true, return 1. Otherwise, return zero. */
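/* For example (a sketch): given ARG for "(a < b) | (a == b)", the walk
   records A in *CVAL1 and B in *CVAL2 and returns 1; an expression
   such as "(a < b) | c", which uses C outside any comparison, fails
   because tcc_binary operands must themselves satisfy this test.  */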
2857 static int
2858 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2860 enum tree_code code = TREE_CODE (arg);
2861 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2863 /* We can handle some of the tcc_expression cases here. */
2864 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2865 tclass = tcc_unary;
2866 else if (tclass == tcc_expression
2867 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2868 || code == COMPOUND_EXPR))
2869 tclass = tcc_binary;
2871 else if (tclass == tcc_expression && code == SAVE_EXPR
2872 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2874 /* If we've already found a CVAL1 or CVAL2, this expression is
2875 too complex to handle. */
2876 if (*cval1 || *cval2)
2877 return 0;
2879 tclass = tcc_unary;
2880 *save_p = 1;
2883 switch (tclass)
2885 case tcc_unary:
2886 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2888 case tcc_binary:
2889 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2890 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2891 cval1, cval2, save_p));
2893 case tcc_constant:
2894 return 1;
2896 case tcc_expression:
2897 if (code == COND_EXPR)
2898 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2899 cval1, cval2, save_p)
2900 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2901 cval1, cval2, save_p)
2902 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2903 cval1, cval2, save_p));
2904 return 0;
2906 case tcc_comparison:
2907 /* First see if we can handle the first operand, then the second. For
2908 the second operand, we know *CVAL1 can't be zero. It must be that
2909 one side of the comparison is each of the values; test for the
2910 case where this isn't true by failing if the two operands
2911 are the same. */
2913 if (operand_equal_p (TREE_OPERAND (arg, 0),
2914 TREE_OPERAND (arg, 1), 0))
2915 return 0;
2917 if (*cval1 == 0)
2918 *cval1 = TREE_OPERAND (arg, 0);
2919 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2920 ;
2921 else if (*cval2 == 0)
2922 *cval2 = TREE_OPERAND (arg, 0);
2923 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2924 ;
2925 else
2926 return 0;
2928 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2929 ;
2930 else if (*cval2 == 0)
2931 *cval2 = TREE_OPERAND (arg, 1);
2932 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2933 ;
2934 else
2935 return 0;
2937 return 1;
2939 default:
2940 return 0;
2944 /* ARG is a tree that is known to contain just arithmetic operations and
2945 comparisons. Evaluate the operations in the tree substituting NEW0 for
2946 any occurrence of OLD0 as an operand of a comparison and likewise for
2947 NEW1 and OLD1. */
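/* E.g. (a sketch): eval_subst on "(a < b) ? (a == b) : 0" with
   OLD0 == a, NEW0 == x, OLD1 == b, NEW1 == y rebuilds the tree as
   "(x < y) ? (x == y) : 0", substituting only inside comparisons.  */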
2949 static tree
2950 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2951 tree old1, tree new1)
2953 tree type = TREE_TYPE (arg);
2954 enum tree_code code = TREE_CODE (arg);
2955 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2957 /* We can handle some of the tcc_expression cases here. */
2958 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2959 tclass = tcc_unary;
2960 else if (tclass == tcc_expression
2961 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2962 tclass = tcc_binary;
2964 switch (tclass)
2966 case tcc_unary:
2967 return fold_build1_loc (loc, code, type,
2968 eval_subst (loc, TREE_OPERAND (arg, 0),
2969 old0, new0, old1, new1));
2971 case tcc_binary:
2972 return fold_build2_loc (loc, code, type,
2973 eval_subst (loc, TREE_OPERAND (arg, 0),
2974 old0, new0, old1, new1),
2975 eval_subst (loc, TREE_OPERAND (arg, 1),
2976 old0, new0, old1, new1));
2978 case tcc_expression:
2979 switch (code)
2981 case SAVE_EXPR:
2982 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2983 old1, new1);
2985 case COMPOUND_EXPR:
2986 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2987 old1, new1);
2989 case COND_EXPR:
2990 return fold_build3_loc (loc, code, type,
2991 eval_subst (loc, TREE_OPERAND (arg, 0),
2992 old0, new0, old1, new1),
2993 eval_subst (loc, TREE_OPERAND (arg, 1),
2994 old0, new0, old1, new1),
2995 eval_subst (loc, TREE_OPERAND (arg, 2),
2996 old0, new0, old1, new1));
2997 default:
2998 break;
3000 /* Fall through - ??? */
3002 case tcc_comparison:
3004 tree arg0 = TREE_OPERAND (arg, 0);
3005 tree arg1 = TREE_OPERAND (arg, 1);
3007 /* We need to check both for exact equality and tree equality. The
3008 former will be true if the operand has a side-effect. In that
3009 case, we know the operand occurred exactly once. */
3011 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3012 arg0 = new0;
3013 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3014 arg0 = new1;
3016 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3017 arg1 = new0;
3018 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3019 arg1 = new1;
3021 return fold_build2_loc (loc, code, type, arg0, arg1);
3024 default:
3025 return arg;
3029 /* Return a tree for the case when the result of an expression is RESULT
3030 converted to TYPE and OMITTED was previously an operand of the expression
3031 but is now not needed (e.g., we folded OMITTED * 0).
3033 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3034 the conversion of RESULT to TYPE. */
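/* E.g. (a sketch): when "f () * 0" is folded to 0, the call is the
   omitted operand, so the result is the COMPOUND_EXPR "(f (), 0)",
   preserving the call's side effects while yielding the constant.  */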
3036 tree
3037 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3039 tree t = fold_convert_loc (loc, type, result);
3041 /* If the resulting operand is an empty statement, just return the omitted
3042 statement cast to void. */
3043 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3044 return build1_loc (loc, NOP_EXPR, void_type_node,
3045 fold_ignored_result (omitted));
3047 if (TREE_SIDE_EFFECTS (omitted))
3048 return build2_loc (loc, COMPOUND_EXPR, type,
3049 fold_ignored_result (omitted), t);
3051 return non_lvalue_loc (loc, t);
3054 /* Return a tree for the case when the result of an expression is RESULT
3055 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3056 of the expression but are now not needed.
3058 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3059 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3060 evaluated before OMITTED2. Otherwise, if neither has side effects,
3061 just do the conversion of RESULT to TYPE. */
3063 tree
3064 omit_two_operands_loc (location_t loc, tree type, tree result,
3065 tree omitted1, tree omitted2)
3067 tree t = fold_convert_loc (loc, type, result);
3069 if (TREE_SIDE_EFFECTS (omitted2))
3070 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3071 if (TREE_SIDE_EFFECTS (omitted1))
3072 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3074 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3078 /* Return a simplified tree node for the truth-negation of ARG. This
3079 never alters ARG itself. We assume that ARG is an operation that
3080 returns a truth value (0 or 1).
3082 FIXME: one would think we would fold the result, but it causes
3083 problems with the dominator optimizer. */
3085 static tree
3086 fold_truth_not_expr (location_t loc, tree arg)
3088 tree type = TREE_TYPE (arg);
3089 enum tree_code code = TREE_CODE (arg);
3090 location_t loc1, loc2;
3092 /* If this is a comparison, we can simply invert it, except for
3093 floating-point non-equality comparisons, in which case we just
3094 enclose a TRUTH_NOT_EXPR around what we have. */
3096 if (TREE_CODE_CLASS (code) == tcc_comparison)
3098 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3099 if (FLOAT_TYPE_P (op_type)
3100 && flag_trapping_math
3101 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3102 && code != NE_EXPR && code != EQ_EXPR)
3103 return NULL_TREE;
3105 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3106 if (code == ERROR_MARK)
3107 return NULL_TREE;
3109 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3110 TREE_OPERAND (arg, 1));
3113 switch (code)
3115 case INTEGER_CST:
3116 return constant_boolean_node (integer_zerop (arg), type);
3118 case TRUTH_AND_EXPR:
3119 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3120 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3121 return build2_loc (loc, TRUTH_OR_EXPR, type,
3122 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3123 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3125 case TRUTH_OR_EXPR:
3126 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3127 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3128 return build2_loc (loc, TRUTH_AND_EXPR, type,
3129 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3130 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3132 case TRUTH_XOR_EXPR:
3133 /* Here we can invert either operand. We invert the first operand
3134 unless the second operand is a TRUTH_NOT_EXPR in which case our
3135 result is the XOR of the first operand with the inside of the
3136 negation of the second operand. */
3138 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3139 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3140 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3141 else
3142 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3143 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3144 TREE_OPERAND (arg, 1));
3146 case TRUTH_ANDIF_EXPR:
3147 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3148 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3149 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3150 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3151 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3153 case TRUTH_ORIF_EXPR:
3154 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3155 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3156 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3157 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3158 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3160 case TRUTH_NOT_EXPR:
3161 return TREE_OPERAND (arg, 0);
3163 case COND_EXPR:
3165 tree arg1 = TREE_OPERAND (arg, 1);
3166 tree arg2 = TREE_OPERAND (arg, 2);
3168 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3169 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3171 /* A COND_EXPR may have a throw as one operand, which
3172 then has void type. Just leave void operands
3173 as they are. */
3174 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3175 VOID_TYPE_P (TREE_TYPE (arg1))
3176 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3177 VOID_TYPE_P (TREE_TYPE (arg2))
3178 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3181 case COMPOUND_EXPR:
3182 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3183 return build2_loc (loc, COMPOUND_EXPR, type,
3184 TREE_OPERAND (arg, 0),
3185 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3187 case NON_LVALUE_EXPR:
3188 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3189 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3191 CASE_CONVERT:
3192 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3193 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3195 /* ... fall through ... */
3197 case FLOAT_EXPR:
3198 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3199 return build1_loc (loc, TREE_CODE (arg), type,
3200 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3202 case BIT_AND_EXPR:
3203 if (!integer_onep (TREE_OPERAND (arg, 1)))
3204 return NULL_TREE;
3205 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3207 case SAVE_EXPR:
3208 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3210 case CLEANUP_POINT_EXPR:
3211 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3212 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3213 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3215 default:
3216 return NULL_TREE;
3220 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3221 assume that ARG is an operation that returns a truth value (0 or 1
3222 for scalars, 0 or -1 for vectors). Return the folded expression if
3223 folding is successful. Otherwise, return NULL_TREE. */
3225 static tree
3226 fold_invert_truthvalue (location_t loc, tree arg)
3228 tree type = TREE_TYPE (arg);
3229 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3230 ? BIT_NOT_EXPR
3231 : TRUTH_NOT_EXPR,
3232 type, arg);
3235 /* Return a simplified tree node for the truth-negation of ARG. This
3236 never alters ARG itself. We assume that ARG is an operation that
3237 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3239 tree
3240 invert_truthvalue_loc (location_t loc, tree arg)
3242 if (TREE_CODE (arg) == ERROR_MARK)
3243 return arg;
3245 tree type = TREE_TYPE (arg);
3246 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3247 ? BIT_NOT_EXPR
3248 : TRUTH_NOT_EXPR,
3249 type, arg);
3252 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3253 operands are another bit-wise operation with a common input. If so,
3254 distribute the bit operations to save an operation and possibly two if
3255 constants are involved. For example, convert
3256 (A | B) & (A | C) into A | (B & C)
3257 Further simplification will occur if B and C are constants.
3259 If this optimization cannot be done, 0 will be returned. */
3261 static tree
3262 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3263 tree arg0, tree arg1)
3265 tree common;
3266 tree left, right;
3268 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3269 || TREE_CODE (arg0) == code
3270 || (TREE_CODE (arg0) != BIT_AND_EXPR
3271 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3272 return 0;
3274 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3276 common = TREE_OPERAND (arg0, 0);
3277 left = TREE_OPERAND (arg0, 1);
3278 right = TREE_OPERAND (arg1, 1);
3280 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3282 common = TREE_OPERAND (arg0, 0);
3283 left = TREE_OPERAND (arg0, 1);
3284 right = TREE_OPERAND (arg1, 0);
3286 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3288 common = TREE_OPERAND (arg0, 1);
3289 left = TREE_OPERAND (arg0, 0);
3290 right = TREE_OPERAND (arg1, 1);
3292 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3294 common = TREE_OPERAND (arg0, 1);
3295 left = TREE_OPERAND (arg0, 0);
3296 right = TREE_OPERAND (arg1, 0);
3298 else
3299 return 0;
3301 common = fold_convert_loc (loc, type, common);
3302 left = fold_convert_loc (loc, type, left);
3303 right = fold_convert_loc (loc, type, right);
3304 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3305 fold_build2_loc (loc, code, type, left, right));
3308 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3309 with code CODE. This optimization is unsafe. */
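/* E.g. (a sketch, only valid under unsafe math optimizations):
   "a / c + b / c" becomes "(a + b) / c", and "a / 2.0 - a / 4.0"
   becomes "a * 0.25" via constant 1/C arithmetic.  */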
3310 static tree
3311 distribute_real_division (location_t loc, enum tree_code code, tree type,
3312 tree arg0, tree arg1)
3314 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3315 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3317 /* (A / C) +- (B / C) -> (A +- B) / C. */
3318 if (mul0 == mul1
3319 && operand_equal_p (TREE_OPERAND (arg0, 1),
3320 TREE_OPERAND (arg1, 1), 0))
3321 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3322 fold_build2_loc (loc, code, type,
3323 TREE_OPERAND (arg0, 0),
3324 TREE_OPERAND (arg1, 0)),
3325 TREE_OPERAND (arg0, 1));
3327 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3328 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3329 TREE_OPERAND (arg1, 0), 0)
3330 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3331 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3333 REAL_VALUE_TYPE r0, r1;
3334 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3335 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3336 if (!mul0)
3337 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3338 if (!mul1)
3339 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3340 real_arithmetic (&r0, code, &r0, &r1);
3341 return fold_build2_loc (loc, MULT_EXPR, type,
3342 TREE_OPERAND (arg0, 0),
3343 build_real (type, r0));
3346 return NULL_TREE;
3349 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3350 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3352 static tree
3353 make_bit_field_ref (location_t loc, tree inner, tree type,
3354 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3356 tree result, bftype;
3358 if (bitpos == 0)
3360 tree size = TYPE_SIZE (TREE_TYPE (inner));
3361 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3362 || POINTER_TYPE_P (TREE_TYPE (inner)))
3363 && tree_fits_shwi_p (size)
3364 && tree_to_shwi (size) == bitsize)
3365 return fold_convert_loc (loc, type, inner);
3368 bftype = type;
3369 if (TYPE_PRECISION (bftype) != bitsize
3370 || TYPE_UNSIGNED (bftype) == !unsignedp)
3371 bftype = build_nonstandard_integer_type (bitsize, 0);
3373 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3374 size_int (bitsize), bitsize_int (bitpos));
3376 if (bftype != type)
3377 result = fold_convert_loc (loc, type, result);
3379 return result;
3382 /* Optimize a bit-field compare.
3384 There are two cases: First is a compare against a constant and the
3385 second is a comparison of two items where the fields are at the same
3386 bit position relative to the start of a chunk (byte, halfword, word)
3387 large enough to contain it. In these cases we can avoid the shift
3388 implicit in bitfield extractions.
3390 For constants, we emit a compare of the shifted constant with the
3391 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3392 compared. For two fields at the same position, we do the ANDs with the
3393 similar mask and compare the result of the ANDs.
3395 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3396 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3397 are the left and right operands of the comparison, respectively.
3399 If the optimization described above can be done, we return the resulting
3400 tree. Otherwise we return zero. */
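/* E.g. (a sketch): given "struct s { int a : 3; int b : 5; } x", the
   test "x.b == 2" can be rewritten to mask a whole byte or word load,
   roughly "((unsigned) word & mask) == (2 << shift)", where MASK and
   SHIFT are hypothetical names for the values computed below.  */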
3402 static tree
3403 optimize_bit_field_compare (location_t loc, enum tree_code code,
3404 tree compare_type, tree lhs, tree rhs)
3406 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3407 tree type = TREE_TYPE (lhs);
3408 tree unsigned_type;
3409 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3410 machine_mode lmode, rmode, nmode;
3411 int lunsignedp, runsignedp;
3412 int lvolatilep = 0, rvolatilep = 0;
3413 tree linner, rinner = NULL_TREE;
3414 tree mask;
3415 tree offset;
3417 /* Get all the information about the extractions being done. If the bit size
3418 is the same as the size of the underlying object, we aren't doing an
3419 extraction at all and so can do nothing. We also don't want to
3420 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3421 then will no longer be able to replace it. */
3422 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3423 &lunsignedp, &lvolatilep, false);
3424 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3425 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3426 return 0;
3428 if (!const_p)
3430 /* If this is not a constant, we can only do something if bit positions,
3431 sizes, and signedness are the same. */
3432 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3433 &runsignedp, &rvolatilep, false);
3435 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3436 || lunsignedp != runsignedp || offset != 0
3437 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3438 return 0;
3441 /* See if we can find a mode to refer to this field. We should be able to,
3442 but fail if we can't. */
3443 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3444 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3445 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3446 TYPE_ALIGN (TREE_TYPE (rinner))),
3447 word_mode, false);
3448 if (nmode == VOIDmode)
3449 return 0;
3451 /* Set signed and unsigned types of the precision of this mode for the
3452 shifts below. */
3453 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3455 /* Compute the bit position and size for the new reference and our offset
3456 within it. If the new reference is the same size as the original, we
3457 won't optimize anything, so return zero. */
3458 nbitsize = GET_MODE_BITSIZE (nmode);
3459 nbitpos = lbitpos & ~ (nbitsize - 1);
3460 lbitpos -= nbitpos;
3461 if (nbitsize == lbitsize)
3462 return 0;
3464 if (BYTES_BIG_ENDIAN)
3465 lbitpos = nbitsize - lbitsize - lbitpos;
3467 /* Make the mask to be used against the extracted field. */
3468 mask = build_int_cst_type (unsigned_type, -1);
3469 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3470 mask = const_binop (RSHIFT_EXPR, mask,
3471 size_int (nbitsize - lbitsize - lbitpos));
3473 if (! const_p)
3474 /* If not comparing with constant, just rework the comparison
3475 and return. */
3476 return fold_build2_loc (loc, code, compare_type,
3477 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3478 make_bit_field_ref (loc, linner,
3479 unsigned_type,
3480 nbitsize, nbitpos,
3481 1),
3482 mask),
3483 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3484 make_bit_field_ref (loc, rinner,
3485 unsigned_type,
3486 nbitsize, nbitpos,
3487 1),
3488 mask));
3490 /* Otherwise, we are handling the constant case. See if the constant is too
3491 big for the field. Warn and return a tree for 0 (false) if so. We do
3492 this not only for its own sake, but to avoid having to test for this
3493 error case below. If we didn't, we might generate wrong code.
3495 For unsigned fields, the constant shifted right by the field length should
3496 be all zero. For signed fields, the high-order bits should agree with
3497 the sign bit. */
3499 if (lunsignedp)
3501 if (wi::lrshift (rhs, lbitsize) != 0)
3503 warning (0, "comparison is always %d due to width of bit-field",
3504 code == NE_EXPR);
3505 return constant_boolean_node (code == NE_EXPR, compare_type);
3508 else
3510 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3511 if (tem != 0 && tem != -1)
3513 warning (0, "comparison is always %d due to width of bit-field",
3514 code == NE_EXPR);
3515 return constant_boolean_node (code == NE_EXPR, compare_type);
3519 /* Single-bit compares should always be against zero. */
3520 if (lbitsize == 1 && ! integer_zerop (rhs))
3522 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3523 rhs = build_int_cst (type, 0);
3526 /* Make a new bitfield reference, shift the constant over the
3527 appropriate number of bits and mask it with the computed mask
3528 (in case this was a signed field). If we changed it, make a new one. */
3529 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3531 rhs = const_binop (BIT_AND_EXPR,
3532 const_binop (LSHIFT_EXPR,
3533 fold_convert_loc (loc, unsigned_type, rhs),
3534 size_int (lbitpos)),
3535 mask);
3537 lhs = build2_loc (loc, code, compare_type,
3538 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3539 return lhs;
3542 /* Subroutine for fold_truth_andor_1: decode a field reference.
3544 If EXP is a comparison reference, we return the innermost reference.
3546 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3547 set to the starting bit number.
3549 If the innermost field can be completely contained in a mode-sized
3550 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3552 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3553 otherwise it is not changed.
3555 *PUNSIGNEDP is set to the signedness of the field.
3557 *PMASK is set to the mask used. This is either contained in a
3558 BIT_AND_EXPR or derived from the width of the field.
3560 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3562 Return 0 if this is not a component reference or is one that we can't
3563 do anything with. */
3565 static tree
3566 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3567 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3568 int *punsignedp, int *pvolatilep,
3569 tree *pmask, tree *pand_mask)
3571 tree outer_type = 0;
3572 tree and_mask = 0;
3573 tree mask, inner, offset;
3574 tree unsigned_type;
3575 unsigned int precision;
3577 /* All the optimizations using this function assume integer fields.
3578 There are problems with FP fields since the type_for_size call
3579 below can fail for, e.g., XFmode. */
3580 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3581 return 0;
3583 /* We are interested in the bare arrangement of bits, so strip everything
3584 that doesn't affect the machine mode. However, record the type of the
3585 outermost expression if it may matter below. */
3586 if (CONVERT_EXPR_P (exp)
3587 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3588 outer_type = TREE_TYPE (exp);
3589 STRIP_NOPS (exp);
3591 if (TREE_CODE (exp) == BIT_AND_EXPR)
3593 and_mask = TREE_OPERAND (exp, 1);
3594 exp = TREE_OPERAND (exp, 0);
3595 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3596 if (TREE_CODE (and_mask) != INTEGER_CST)
3597 return 0;
3600 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3601 punsignedp, pvolatilep, false);
3602 if ((inner == exp && and_mask == 0)
3603 || *pbitsize < 0 || offset != 0
3604 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3605 return 0;
3607 /* If the number of bits in the reference is the same as the bitsize of
3608 the outer type, then the outer type gives the signedness. Otherwise
3609 (in case of a small bitfield) the signedness is unchanged. */
3610 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3611 *punsignedp = TYPE_UNSIGNED (outer_type);
3613 /* Compute the mask to access the bitfield. */
3614 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3615 precision = TYPE_PRECISION (unsigned_type);
3617 mask = build_int_cst_type (unsigned_type, -1);
3619 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3620 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3622 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3623 if (and_mask != 0)
3624 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3625 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3627 *pmask = mask;
3628 *pand_mask = and_mask;
3629 return inner;
3632 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3633 bit positions and the type of MASK is signed. */
3635 static int
3636 all_ones_mask_p (const_tree mask, unsigned int size)
3638 tree type = TREE_TYPE (mask);
3639 unsigned int precision = TYPE_PRECISION (type);
3641 /* If this function returns true when the type of the mask is
3642 UNSIGNED, then there will be errors. In particular see
3643 gcc.c-torture/execute/990326-1.c. There does not appear to be
3644 any documentation paper trail as to why this is so. But the
3645 pre-wide-int code worked with that restriction and it has been preserved
3646 here. */
3647 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3648 return false;
3650 return wi::mask (size, false, precision) == mask;
3653 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3654 represents the sign bit of EXP's type. If EXP represents a sign
3655 or zero extension, also test VAL against the unextended type.
3656 The return value is the (sub)expression whose sign bit is VAL,
3657 or NULL_TREE otherwise. */
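/* E.g. (a sketch): for EXP of type int (assuming 32-bit precision),
   sign_bit_p (exp, val) returns EXP when VAL is the INTEGER_CST
   0x80000000 -- only the sign bit set; other values yield NULL_TREE,
   unless EXP is an extension from a narrower type, in which case the
   test is retried at the narrower width.  */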
3659 tree
3660 sign_bit_p (tree exp, const_tree val)
3662 int width;
3663 tree t;
3665 /* Tree EXP must have an integral type. */
3666 t = TREE_TYPE (exp);
3667 if (! INTEGRAL_TYPE_P (t))
3668 return NULL_TREE;
3670 /* Tree VAL must be an integer constant. */
3671 if (TREE_CODE (val) != INTEGER_CST
3672 || TREE_OVERFLOW (val))
3673 return NULL_TREE;
3675 width = TYPE_PRECISION (t);
3676 if (wi::only_sign_bit_p (val, width))
3677 return exp;
3679 /* Handle extension from a narrower type. */
3680 if (TREE_CODE (exp) == NOP_EXPR
3681 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3682 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3684 return NULL_TREE;
3687 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3688 to be evaluated unconditionally. */
3690 static int
3691 simple_operand_p (const_tree exp)
3693 /* Strip any conversions that don't change the machine mode. */
3694 STRIP_NOPS (exp);
3696 return (CONSTANT_CLASS_P (exp)
3697 || TREE_CODE (exp) == SSA_NAME
3698 || (DECL_P (exp)
3699 && ! TREE_ADDRESSABLE (exp)
3700 && ! TREE_THIS_VOLATILE (exp)
3701 && ! DECL_NONLOCAL (exp)
3702 /* Don't regard global variables as simple. They may be
3703 allocated in ways unknown to the compiler (shared memory,
3704 #pragma weak, etc). */
3705 && ! TREE_PUBLIC (exp)
3706 && ! DECL_EXTERNAL (exp)
3707 /* Weakrefs are not safe to be read, since they can be NULL.
3708 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3709 have DECL_WEAK flag set. */
3710 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3711 /* Loading a static variable is unduly expensive, but global
3712 registers aren't expensive. */
3713 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3716 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3717 to be evaluated unconditionally.
3718 In addition to simple_operand_p, we assume that comparisons, conversions,
3719 and logic-not operations are simple, if their operands are simple, too. */
3721 static bool
3722 simple_operand_p_2 (tree exp)
3724 enum tree_code code;
3726 if (TREE_SIDE_EFFECTS (exp)
3727 || tree_could_trap_p (exp))
3728 return false;
3730 while (CONVERT_EXPR_P (exp))
3731 exp = TREE_OPERAND (exp, 0);
3733 code = TREE_CODE (exp);
3735 if (TREE_CODE_CLASS (code) == tcc_comparison)
3736 return (simple_operand_p (TREE_OPERAND (exp, 0))
3737 && simple_operand_p (TREE_OPERAND (exp, 1)));
3739 if (code == TRUTH_NOT_EXPR)
3740 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3742 return simple_operand_p (exp);
3746 /* The following functions are subroutines to fold_range_test and allow it to
3747 try to change a logical combination of comparisons into a range test.
3749 For example, both
3750 X == 2 || X == 3 || X == 4 || X == 5
3751 and
3752 X >= 2 && X <= 5
3753 are converted to
3754 (unsigned) (X - 2) <= 3
3756 We describe each set of comparisons as being either inside or outside
3757 a range, using a variable named like IN_P, and then describe the
3758 range with a lower and upper bound. If one of the bounds is omitted,
3759 it represents either the highest or lowest value of the type.
3761 In the comments below, we represent a range by two numbers in brackets
3762 preceded by a "+" to designate being inside that range, or a "-" to
3763 designate being outside that range, so the condition can be inverted by
3764 flipping the prefix. An omitted bound is represented by a "-". For
3765 example, "- [-, 10]" means being outside the range starting at the lowest
3766 possible value and ending at 10, in other words, being greater than 10.
3767 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3768 always false.
3770 We set things up so that the missing bounds are handled in a consistent
3771 manner so neither a missing bound nor "true" and "false" need to be
3772 handled using a special case. */
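/* Illustrative sketch (not part of GCC): the transformation described
   above, spelled out on plain unsigned arithmetic.  The two functions
   below compute the same truth value for every X: the subtraction
   relies on unsigned wrap-around, so values below 2 become very large
   and fail the <= test.  Names are hypothetical.  */

static int
example_range_test_naive (unsigned int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
example_range_test_folded (unsigned int x)
{
  return x - 2u <= 3u;
}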
3774 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3775 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3776 and UPPER1_P are nonzero if the respective argument is an upper bound
3777 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3778 must be specified for a comparison. ARG1 will be converted to ARG0's
3779 type if both are specified. */
3781 static tree
3782 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3783 tree arg1, int upper1_p)
3785 tree tem;
3786 int result;
3787 int sgn0, sgn1;
3789 /* If neither arg represents infinity, do the normal operation.
3790 Else, if not a comparison, return infinity. Else handle the special
3791 comparison rules. Note that most of the cases below won't occur, but
3792 are handled for consistency. */
3794 if (arg0 != 0 && arg1 != 0)
3796 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3797 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3798 STRIP_NOPS (tem);
3799 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3802 if (TREE_CODE_CLASS (code) != tcc_comparison)
3803 return 0;
3805 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3806 for neither. In real maths, we cannot assume open ended ranges are
3807 the same. But, this is computer arithmetic, where numbers are finite.
3808 We can therefore replace any missing bound with a value Z lying
3809 beyond any representable number. This permits
3810 us to treat unbounded ranges as equal. */
3811 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3812 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3813 switch (code)
3815 case EQ_EXPR:
3816 result = sgn0 == sgn1;
3817 break;
3818 case NE_EXPR:
3819 result = sgn0 != sgn1;
3820 break;
3821 case LT_EXPR:
3822 result = sgn0 < sgn1;
3823 break;
3824 case LE_EXPR:
3825 result = sgn0 <= sgn1;
3826 break;
3827 case GT_EXPR:
3828 result = sgn0 > sgn1;
3829 break;
3830 case GE_EXPR:
3831 result = sgn0 >= sgn1;
3832 break;
3833 default:
3834 gcc_unreachable ();
3837 return constant_boolean_node (result, type);
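/* Illustrative sketch (not part of GCC): the SGN encoding used above.
   A missing lower bound acts as minus infinity (-1), a missing upper
   bound as plus infinity (+1), and any finite bound as 0, so comparing
   two missing bounds reduces to comparing their codes: -inf < +inf
   because -1 < 1.  The helper name and PRESENT flag are hypothetical.  */

static int
example_bound_sgn (int present, int upper_p)
{
  return present ? 0 : (upper_p ? 1 : -1);
}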
3840 /* Helper routine for make_range. Perform one step for it, return
3841 new expression if the loop should continue or NULL_TREE if it should
3842 stop. */
3844 tree
3845 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3846 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3847 bool *strict_overflow_p)
3849 tree arg0_type = TREE_TYPE (arg0);
3850 tree n_low, n_high, low = *p_low, high = *p_high;
3851 int in_p = *p_in_p, n_in_p;
3853 switch (code)
3855 case TRUTH_NOT_EXPR:
3856 /* We can only do something if the range is testing for zero. */
3857 if (low == NULL_TREE || high == NULL_TREE
3858 || ! integer_zerop (low) || ! integer_zerop (high))
3859 return NULL_TREE;
3860 *p_in_p = ! in_p;
3861 return arg0;
3863 case EQ_EXPR: case NE_EXPR:
3864 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3865 /* We can only do something if the range is testing for zero
3866 and if the second operand is an integer constant. Note that
3867 saying something is "in" the range we make is done by
3868 complementing IN_P, since IN_P is set for the initial case of
3869 being not equal to zero; "out" means leaving it alone.
3870 if (low == NULL_TREE || high == NULL_TREE
3871 || ! integer_zerop (low) || ! integer_zerop (high)
3872 || TREE_CODE (arg1) != INTEGER_CST)
3873 return NULL_TREE;
3875 switch (code)
3877 case NE_EXPR: /* - [c, c] */
3878 low = high = arg1;
3879 break;
3880 case EQ_EXPR: /* + [c, c] */
3881 in_p = ! in_p, low = high = arg1;
3882 break;
3883 case GT_EXPR: /* - [-, c] */
3884 low = 0, high = arg1;
3885 break;
3886 case GE_EXPR: /* + [c, -] */
3887 in_p = ! in_p, low = arg1, high = 0;
3888 break;
3889 case LT_EXPR: /* - [c, -] */
3890 low = arg1, high = 0;
3891 break;
3892 case LE_EXPR: /* + [-, c] */
3893 in_p = ! in_p, low = 0, high = arg1;
3894 break;
3895 default:
3896 gcc_unreachable ();
3899 /* If this is an unsigned comparison, we also know that EXP is
3900 greater than or equal to zero. We base the range tests we make
3901 on that fact, so we record it here so we can parse existing
3902 range tests. We test arg0_type since often the return type
3903 of, e.g. EQ_EXPR, is boolean. */
3904 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3906 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3907 in_p, low, high, 1,
3908 build_int_cst (arg0_type, 0),
3909 NULL_TREE))
3910 return NULL_TREE;
3912 in_p = n_in_p, low = n_low, high = n_high;
3914 /* If the high bound is missing, but we have a nonzero low
3915 bound, reverse the range so it goes from zero to the low bound
3916 minus 1. */
3917 if (high == 0 && low && ! integer_zerop (low))
3919 in_p = ! in_p;
3920 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3921 build_int_cst (TREE_TYPE (low), 1), 0);
3922 low = build_int_cst (arg0_type, 0);
3926 *p_low = low;
3927 *p_high = high;
3928 *p_in_p = in_p;
3929 return arg0;
3931 case NEGATE_EXPR:
3932 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3933 low and high are non-NULL, then normalize will do the right thing. */
3934 if (!TYPE_UNSIGNED (arg0_type)
3935 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3937 if (low == NULL_TREE)
3938 low = TYPE_MIN_VALUE (arg0_type);
3939 if (high == NULL_TREE)
3940 high = TYPE_MAX_VALUE (arg0_type);
3943 /* (-x) IN [a,b] -> x in [-b, -a] */
3944 n_low = range_binop (MINUS_EXPR, exp_type,
3945 build_int_cst (exp_type, 0),
3946 0, high, 1);
3947 n_high = range_binop (MINUS_EXPR, exp_type,
3948 build_int_cst (exp_type, 0),
3949 0, low, 0);
3950 if (n_high != 0 && TREE_OVERFLOW (n_high))
3951 return NULL_TREE;
3952 goto normalize;
3954 case BIT_NOT_EXPR:
3955 /* ~ X -> -X - 1 */
3956 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3957 build_int_cst (exp_type, 1));
3959 case PLUS_EXPR:
3960 case MINUS_EXPR:
3961 if (TREE_CODE (arg1) != INTEGER_CST)
3962 return NULL_TREE;
3964 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3965 move a constant to the other side. */
3966 if (!TYPE_UNSIGNED (arg0_type)
3967 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3968 return NULL_TREE;
3970 /* If EXP is signed, any overflow in the computation is undefined,
3971 so we don't worry about it so long as our computations on
3972 the bounds don't overflow. For unsigned, overflow is defined
3973 and this is exactly the right thing. */
3974 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3975 arg0_type, low, 0, arg1, 0);
3976 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3977 arg0_type, high, 1, arg1, 0);
3978 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3979 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3980 return NULL_TREE;
3982 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3983 *strict_overflow_p = true;
3985 normalize:
3986 /* Check for an unsigned range which has wrapped around the maximum
3987 value thus making n_high < n_low, and normalize it. */
3988 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3990 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3991 build_int_cst (TREE_TYPE (n_high), 1), 0);
3992 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3993 build_int_cst (TREE_TYPE (n_low), 1), 0);
3995 /* If the range is of the form +/- [ x+1, x ], we won't
3996 be able to normalize it. But then, it represents the
3997 whole range or the empty set, so make it
3998 +/- [ -, - ]. */
3999 if (tree_int_cst_equal (n_low, low)
4000 && tree_int_cst_equal (n_high, high))
4001 low = high = 0;
4002 else
4003 in_p = ! in_p;
4005 else
4006 low = n_low, high = n_high;
4008 *p_low = low;
4009 *p_high = high;
4010 *p_in_p = in_p;
4011 return arg0;
4013 CASE_CONVERT:
4014 case NON_LVALUE_EXPR:
4015 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4016 return NULL_TREE;
4018 if (! INTEGRAL_TYPE_P (arg0_type)
4019 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4020 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4021 return NULL_TREE;
4023 n_low = low, n_high = high;
4025 if (n_low != 0)
4026 n_low = fold_convert_loc (loc, arg0_type, n_low);
4028 if (n_high != 0)
4029 n_high = fold_convert_loc (loc, arg0_type, n_high);
4031 /* If we're converting arg0 from an unsigned type to exp's
4032 signed type, we will be doing the comparison as unsigned.
4033 The tests above have already verified that LOW and HIGH
4034 are both positive.
4036 So we have to ensure that we will handle large unsigned
4037 values the same way that the current signed bounds treat
4038 negative values. */
4040 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4042 tree high_positive;
4043 tree equiv_type;
4044 /* For fixed-point modes, we need to pass the saturating flag
4045 as the 2nd parameter. */
4046 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4047 equiv_type
4048 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4049 TYPE_SATURATING (arg0_type));
4050 else
4051 equiv_type
4052 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4054 /* A range without an upper bound is, naturally, unbounded.
4055 Since convert would have cropped a very large value, use
4056 the max value for the destination type. */
4057 high_positive
4058 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4059 : TYPE_MAX_VALUE (arg0_type);
4061 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4062 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4063 fold_convert_loc (loc, arg0_type,
4064 high_positive),
4065 build_int_cst (arg0_type, 1));
4067 /* If the low bound is specified, "and" the range with the
4068 range for which the original unsigned value will be
4069 positive. */
4070 if (low != 0)
4072 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4073 1, fold_convert_loc (loc, arg0_type,
4074 integer_zero_node),
4075 high_positive))
4076 return NULL_TREE;
4078 in_p = (n_in_p == in_p);
4080 else
4082 /* Otherwise, "or" the range with the range of the input
4083 that will be interpreted as negative. */
4084 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4085 1, fold_convert_loc (loc, arg0_type,
4086 integer_zero_node),
4087 high_positive))
4088 return NULL_TREE;
4090 in_p = (in_p != n_in_p);
4094 *p_low = n_low;
4095 *p_high = n_high;
4096 *p_in_p = in_p;
4097 return arg0;
4099 default:
4100 return NULL_TREE;
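/* Illustrative sketch (not part of GCC): the PLUS_EXPR and "normalize"
   steps above, on plain unsigned arithmetic.  For unsigned X, the test
   "X + 2 <= 4" puts X + 2 in [0, 4]; moving the constant across gives
   the wrapped range [0u - 2u, 2] (high < low), which normalizes to the
   complement of [3, 0u - 3u].  The two functions below agree for every
   X.  Names are hypothetical.  */

static int
example_plus_range_direct (unsigned int x)
{
  return x + 2u <= 4u;
}

static int
example_plus_range_normalized (unsigned int x)
{
  return ! (x >= 3u && x <= 0u - 3u);
}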
4104 /* Given EXP, a logical expression, set the range it is testing into
4105 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4106 actually being tested. *PLOW and *PHIGH will be made of the same
4107 type as the returned expression. If EXP is not a comparison, we
4108 will most likely not be returning a useful value and range. Set
4109 *STRICT_OVERFLOW_P to true if the return value is only valid
4110 because signed overflow is undefined; otherwise, do not change
4111 *STRICT_OVERFLOW_P. */
4113 tree
4114 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4115 bool *strict_overflow_p)
4117 enum tree_code code;
4118 tree arg0, arg1 = NULL_TREE;
4119 tree exp_type, nexp;
4120 int in_p;
4121 tree low, high;
4122 location_t loc = EXPR_LOCATION (exp);
4124 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4125 and see if we can refine the range. Some of the cases below may not
4126 happen, but it doesn't seem worth worrying about this. We "continue"
4127 the outer loop when we've changed something; otherwise we "break"
4128 the switch, which will "break" the while. */
4130 in_p = 0;
4131 low = high = build_int_cst (TREE_TYPE (exp), 0);
4133 while (1)
4135 code = TREE_CODE (exp);
4136 exp_type = TREE_TYPE (exp);
4137 arg0 = NULL_TREE;
4139 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4141 if (TREE_OPERAND_LENGTH (exp) > 0)
4142 arg0 = TREE_OPERAND (exp, 0);
4143 if (TREE_CODE_CLASS (code) == tcc_binary
4144 || TREE_CODE_CLASS (code) == tcc_comparison
4145 || (TREE_CODE_CLASS (code) == tcc_expression
4146 && TREE_OPERAND_LENGTH (exp) > 1))
4147 arg1 = TREE_OPERAND (exp, 1);
4149 if (arg0 == NULL_TREE)
4150 break;
4152 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4153 &high, &in_p, strict_overflow_p);
4154 if (nexp == NULL_TREE)
4155 break;
4156 exp = nexp;
4159 /* If EXP is a constant, we can evaluate whether this is true or false. */
4160 if (TREE_CODE (exp) == INTEGER_CST)
4162 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4163 exp, 0, low, 0))
4164 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4165 exp, 1, high, 1)));
4166 low = high = 0;
4167 exp = 0;
4170 *pin_p = in_p, *plow = low, *phigh = high;
4171 return exp;
4174 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4175 type, TYPE, return an expression to test if EXP is in (or out of, depending
4176 on IN_P) the range. Return 0 if the test couldn't be created. */
4178 tree
4179 build_range_check (location_t loc, tree type, tree exp, int in_p,
4180 tree low, tree high)
4182 tree etype = TREE_TYPE (exp), value;
4184 #ifdef HAVE_canonicalize_funcptr_for_compare
4185 /* Disable this optimization for function pointer expressions
4186 on targets that require function pointer canonicalization. */
4187 if (HAVE_canonicalize_funcptr_for_compare
4188 && TREE_CODE (etype) == POINTER_TYPE
4189 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4190 return NULL_TREE;
4191 #endif
4193 if (! in_p)
4195 value = build_range_check (loc, type, exp, 1, low, high);
4196 if (value != 0)
4197 return invert_truthvalue_loc (loc, value);
4199 return 0;
4202 if (low == 0 && high == 0)
4203 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4205 if (low == 0)
4206 return fold_build2_loc (loc, LE_EXPR, type, exp,
4207 fold_convert_loc (loc, etype, high));
4209 if (high == 0)
4210 return fold_build2_loc (loc, GE_EXPR, type, exp,
4211 fold_convert_loc (loc, etype, low));
4213 if (operand_equal_p (low, high, 0))
4214 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4215 fold_convert_loc (loc, etype, low));
4217 if (integer_zerop (low))
4219 if (! TYPE_UNSIGNED (etype))
4221 etype = unsigned_type_for (etype);
4222 high = fold_convert_loc (loc, etype, high);
4223 exp = fold_convert_loc (loc, etype, exp);
4225 return build_range_check (loc, type, exp, 1, 0, high);
4228 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4229 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4231 int prec = TYPE_PRECISION (etype);
4233 if (wi::mask (prec - 1, false, prec) == high)
4235 if (TYPE_UNSIGNED (etype))
4237 tree signed_etype = signed_type_for (etype);
4238 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4239 etype
4240 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4241 else
4242 etype = signed_etype;
4243 exp = fold_convert_loc (loc, etype, exp);
4245 return fold_build2_loc (loc, GT_EXPR, type, exp,
4246 build_int_cst (etype, 0));
4250 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4251 This requires wrap-around arithmetic for the type of the expression.
4252 First make sure that arithmetic in this type is valid, then make sure
4253 that it wraps around. */
4254 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4255 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4256 TYPE_UNSIGNED (etype));
4258 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4260 tree utype, minv, maxv;
4262 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4263 for the type in question, as we rely on this here. */
4264 utype = unsigned_type_for (etype);
4265 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4266 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4267 build_int_cst (TREE_TYPE (maxv), 1), 1);
4268 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4270 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4271 minv, 1, maxv, 1)))
4272 etype = utype;
4273 else
4274 return 0;
4277 high = fold_convert_loc (loc, etype, high);
4278 low = fold_convert_loc (loc, etype, low);
4279 exp = fold_convert_loc (loc, etype, exp);
4281 value = const_binop (MINUS_EXPR, high, low);
4284 if (POINTER_TYPE_P (etype))
4286 if (value != 0 && !TREE_OVERFLOW (value))
4288 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4289 return build_range_check (loc, type,
4290 fold_build_pointer_plus_loc (loc, exp, low),
4291 1, build_int_cst (etype, 0), value);
4293 return 0;
4296 if (value != 0 && !TREE_OVERFLOW (value))
4297 return build_range_check (loc, type,
4298 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4299 1, build_int_cst (etype, 0), value);
4301 return 0;
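/* Illustrative sketch (not part of GCC): the closing transformation of
   build_range_check above, on plain C integers.  "LOW <= C && C <= HIGH"
   becomes one unsigned comparison once the range is shifted to start at
   zero; values below LOW wrap around to large numbers and fail.  The
   helper name is hypothetical and assumes LOW <= HIGH.  */

static int
example_build_range_check (unsigned int c, unsigned int low,
			   unsigned int high)
{
  return c - low <= high - low;
}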
4304 /* Return the predecessor of VAL in its type, handling the infinite case. */
4306 static tree
4307 range_predecessor (tree val)
4309 tree type = TREE_TYPE (val);
4311 if (INTEGRAL_TYPE_P (type)
4312 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4313 return 0;
4314 else
4315 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4316 build_int_cst (TREE_TYPE (val), 1), 0);
4319 /* Return the successor of VAL in its type, handling the infinite case. */
4321 static tree
4322 range_successor (tree val)
4324 tree type = TREE_TYPE (val);
4326 if (INTEGRAL_TYPE_P (type)
4327 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4328 return 0;
4329 else
4330 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4331 build_int_cst (TREE_TYPE (val), 1), 0);
4334 /* Given two ranges, see if we can merge them into one. Return 1 if we
4335 can, 0 if we can't. Set the output range into the specified parameters. */
4337 bool
4338 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4339 tree high0, int in1_p, tree low1, tree high1)
4341 int no_overlap;
4342 int subset;
4343 int temp;
4344 tree tem;
4345 int in_p;
4346 tree low, high;
4347 int lowequal = ((low0 == 0 && low1 == 0)
4348 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4349 low0, 0, low1, 0)));
4350 int highequal = ((high0 == 0 && high1 == 0)
4351 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4352 high0, 1, high1, 1)));
4354 /* Make range 0 be the range that starts first, or ends last if they
4355 start at the same value. Swap them if necessary. */
4356 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4357 low0, 0, low1, 0))
4358 || (lowequal
4359 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4360 high1, 1, high0, 1))))
4362 temp = in0_p, in0_p = in1_p, in1_p = temp;
4363 tem = low0, low0 = low1, low1 = tem;
4364 tem = high0, high0 = high1, high1 = tem;
4367 /* Now flag two cases, whether the ranges are disjoint or whether the
4368 second range is totally subsumed in the first. Note that the tests
4369 below are simplified by the ones above. */
4370 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4371 high0, 1, low1, 0));
4372 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4373 high1, 1, high0, 1));
4375 /* We now have four cases, depending on whether we are including or
4376 excluding the two ranges. */
4377 if (in0_p && in1_p)
4379 /* If they don't overlap, the result is false. If the second range
4380 is a subset it is the result. Otherwise, the range is from the start
4381 of the second to the end of the first. */
4382 if (no_overlap)
4383 in_p = 0, low = high = 0;
4384 else if (subset)
4385 in_p = 1, low = low1, high = high1;
4386 else
4387 in_p = 1, low = low1, high = high0;
4390 else if (in0_p && ! in1_p)
4392 /* If they don't overlap, the result is the first range. If they are
4393 equal, the result is false. If the second range is a subset of the
4394 first, and the ranges begin at the same place, we go from just after
4395 the end of the second range to the end of the first. If the second
4396 range is not a subset of the first, or if it is a subset and both
4397 ranges end at the same place, the range starts at the start of the
4398 first range and ends just before the second range.
4399 Otherwise, we can't describe this as a single range. */
4400 if (no_overlap)
4401 in_p = 1, low = low0, high = high0;
4402 else if (lowequal && highequal)
4403 in_p = 0, low = high = 0;
4404 else if (subset && lowequal)
4406 low = range_successor (high1);
4407 high = high0;
4408 in_p = 1;
4409 if (low == 0)
4411 /* We are in the weird situation where high0 > high1 but
4412 high1 has no successor. Punt. */
4413 return 0;
4416 else if (! subset || highequal)
4418 low = low0;
4419 high = range_predecessor (low1);
4420 in_p = 1;
4421 if (high == 0)
4423 /* low0 < low1 but low1 has no predecessor. Punt. */
4424 return 0;
4427 else
4428 return 0;
4431 else if (! in0_p && in1_p)
4433 /* If they don't overlap, the result is the second range. If the second
4434 is a subset of the first, the result is false. Otherwise,
4435 the range starts just after the first range and ends at the
4436 end of the second. */
4437 if (no_overlap)
4438 in_p = 1, low = low1, high = high1;
4439 else if (subset || highequal)
4440 in_p = 0, low = high = 0;
4441 else
4443 low = range_successor (high0);
4444 high = high1;
4445 in_p = 1;
4446 if (low == 0)
4448 /* high1 > high0 but high0 has no successor. Punt. */
4449 return 0;
4454 else
4456 /* The case where we are excluding both ranges. Here the complex case
4457 is if they don't overlap. In that case, the only time we have a
4458 range is if they are adjacent. If the second is a subset of the
4459 first, the result is the first. Otherwise, the range to exclude
4460 starts at the beginning of the first range and ends at the end of the
4461 second. */
4462 if (no_overlap)
4464 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4465 range_successor (high0),
4466 1, low1, 0)))
4467 in_p = 0, low = low0, high = high1;
4468 else
4470 /* Canonicalize - [min, x] into - [-, x]. */
4471 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4472 switch (TREE_CODE (TREE_TYPE (low0)))
4474 case ENUMERAL_TYPE:
4475 if (TYPE_PRECISION (TREE_TYPE (low0))
4476 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4477 break;
4478 /* FALLTHROUGH */
4479 case INTEGER_TYPE:
4480 if (tree_int_cst_equal (low0,
4481 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4482 low0 = 0;
4483 break;
4484 case POINTER_TYPE:
4485 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4486 && integer_zerop (low0))
4487 low0 = 0;
4488 break;
4489 default:
4490 break;
4493 /* Canonicalize - [x, max] into - [x, -]. */
4494 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4495 switch (TREE_CODE (TREE_TYPE (high1)))
4497 case ENUMERAL_TYPE:
4498 if (TYPE_PRECISION (TREE_TYPE (high1))
4499 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4500 break;
4501 /* FALLTHROUGH */
4502 case INTEGER_TYPE:
4503 if (tree_int_cst_equal (high1,
4504 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4505 high1 = 0;
4506 break;
4507 case POINTER_TYPE:
4508 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4509 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4510 high1, 1,
4511 build_int_cst (TREE_TYPE (high1), 1),
4512 1)))
4513 high1 = 0;
4514 break;
4515 default:
4516 break;
4519 /* The ranges might also be adjacent between the maximum and
4520 minimum values of the given type. For
4521 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4522 return + [x + 1, y - 1]. */
4523 if (low0 == 0 && high1 == 0)
4525 low = range_successor (high0);
4526 high = range_predecessor (low1);
4527 if (low == 0 || high == 0)
4528 return 0;
4530 in_p = 1;
4532 else
4533 return 0;
4536 else if (subset)
4537 in_p = 0, low = low0, high = high0;
4538 else
4539 in_p = 0, low = low0, high = high1;
4542 *pin_p = in_p, *plow = low, *phigh = high;
4543 return 1;
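/* Illustrative sketch (not part of GCC): the "in0_p && in1_p" case of
   merge_ranges above.  For X in [2, 5] and X in [4, 9] the ranges
   overlap without either containing the other, so the merged range
   runs from the start of the second to the end of the first: [4, 5].
   Both functions below agree for every X; names are hypothetical.  */

static int
example_and_of_ranges (unsigned int x)
{
  return (x >= 2 && x <= 5) && (x >= 4 && x <= 9);
}

static int
example_merged_range (unsigned int x)
{
  return x >= 4 && x <= 5;
}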
4547 /* Subroutine of fold, looking inside expressions of the form
4548 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4549 of the COND_EXPR. This function is also used to optimize
4550 A op B ? C : A, by reversing the comparison first.
4552 Return a folded expression whose code is not a COND_EXPR
4553 anymore, or NULL_TREE if no folding opportunity is found. */
4555 static tree
4556 fold_cond_expr_with_comparison (location_t loc, tree type,
4557 tree arg0, tree arg1, tree arg2)
4559 enum tree_code comp_code = TREE_CODE (arg0);
4560 tree arg00 = TREE_OPERAND (arg0, 0);
4561 tree arg01 = TREE_OPERAND (arg0, 1);
4562 tree arg1_type = TREE_TYPE (arg1);
4563 tree tem;
4565 STRIP_NOPS (arg1);
4566 STRIP_NOPS (arg2);
4568 /* If we have A op 0 ? A : -A, consider applying the following
4569 transformations:
4571 A == 0? A : -A same as -A
4572 A != 0? A : -A same as A
4573 A >= 0? A : -A same as abs (A)
4574 A > 0? A : -A same as abs (A)
4575 A <= 0? A : -A same as -abs (A)
4576 A < 0? A : -A same as -abs (A)
4578 None of these transformations work for modes with signed
4579 zeros. If A is +/-0, the first two transformations will
4580 change the sign of the result (from +0 to -0, or vice
4581 versa). The last four will fix the sign of the result,
4582 even though the original expressions could be positive or
4583 negative, depending on the sign of A.
4585 Note that all these transformations are correct if A is
4586 NaN, since the two alternatives (A and -A) are also NaNs. */
4587 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4588 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4589 ? real_zerop (arg01)
4590 : integer_zerop (arg01))
4591 && ((TREE_CODE (arg2) == NEGATE_EXPR
4592 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4593 /* In the case that A is of the form X-Y, '-A' (arg2) may
4594 have already been folded to Y-X; check for that. */
4595 || (TREE_CODE (arg1) == MINUS_EXPR
4596 && TREE_CODE (arg2) == MINUS_EXPR
4597 && operand_equal_p (TREE_OPERAND (arg1, 0),
4598 TREE_OPERAND (arg2, 1), 0)
4599 && operand_equal_p (TREE_OPERAND (arg1, 1),
4600 TREE_OPERAND (arg2, 0), 0))))
4601 switch (comp_code)
4603 case EQ_EXPR:
4604 case UNEQ_EXPR:
4605 tem = fold_convert_loc (loc, arg1_type, arg1);
4606 return pedantic_non_lvalue_loc (loc,
4607 fold_convert_loc (loc, type,
4608 negate_expr (tem)));
4609 case NE_EXPR:
4610 case LTGT_EXPR:
4611 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4612 case UNGE_EXPR:
4613 case UNGT_EXPR:
4614 if (flag_trapping_math)
4615 break;
4616 /* Fall through. */
4617 case GE_EXPR:
4618 case GT_EXPR:
4619 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4620 arg1 = fold_convert_loc (loc, signed_type_for
4621 (TREE_TYPE (arg1)), arg1);
4622 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4623 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4624 case UNLE_EXPR:
4625 case UNLT_EXPR:
4626 if (flag_trapping_math)
4627 break;
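/* Fall through.  */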
4628 case LE_EXPR:
4629 case LT_EXPR:
4630 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4631 arg1 = fold_convert_loc (loc, signed_type_for
4632 (TREE_TYPE (arg1)), arg1);
4633 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4634 return negate_expr (fold_convert_loc (loc, type, tem));
4635 default:
4636 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4637 break;
4640 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4641 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4642 both transformations are correct when A is NaN: A != 0
4643 is then true, and A == 0 is false. */
4645 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4646 && integer_zerop (arg01) && integer_zerop (arg2))
4648 if (comp_code == NE_EXPR)
4649 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4650 else if (comp_code == EQ_EXPR)
4651 return build_zero_cst (type);
4654 /* Try some transformations of A op B ? A : B.
4656 A == B? A : B same as B
4657 A != B? A : B same as A
4658 A >= B? A : B same as max (A, B)
4659 A > B? A : B same as max (B, A)
4660 A <= B? A : B same as min (A, B)
4661 A < B? A : B same as min (B, A)
4663 As above, these transformations don't work in the presence
4664 of signed zeros. For example, if A and B are zeros of
4665 opposite sign, the first two transformations will change
4666 the sign of the result. In the last four, the original
4667 expressions give different results for (A=+0, B=-0) and
4668 (A=-0, B=+0), but the transformed expressions do not.
4670 The first two transformations are correct if either A or B
4671 is a NaN. In the first transformation, the condition will
4672 be false, and B will indeed be chosen. In the case of the
4673 second transformation, the condition A != B will be true,
4674 and A will be chosen.
4676 The conversions to max() and min() are not correct if B is
4677 a number and A is not. The conditions in the original
4678 expressions will be false, so all four give B. The min()
4679 and max() versions would give a NaN instead. */
4680 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4681 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4682 /* Avoid these transformations if the COND_EXPR may be used
4683 as an lvalue in the C++ front-end. PR c++/19199. */
4684 && (in_gimple_form
4685 || VECTOR_TYPE_P (type)
4686 || (strcmp (lang_hooks.name, "GNU C++") != 0
4687 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4688 || ! maybe_lvalue_p (arg1)
4689 || ! maybe_lvalue_p (arg2)))
4691 tree comp_op0 = arg00;
4692 tree comp_op1 = arg01;
4693 tree comp_type = TREE_TYPE (comp_op0);
4695 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4696 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4698 comp_type = type;
4699 comp_op0 = arg1;
4700 comp_op1 = arg2;
4703 switch (comp_code)
4705 case EQ_EXPR:
4706 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4707 case NE_EXPR:
4708 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4709 case LE_EXPR:
4710 case LT_EXPR:
4711 case UNLE_EXPR:
4712 case UNLT_EXPR:
4713 /* In C++ a ?: expression can be an lvalue, so put the
4714 operand which will be used if they are equal first
4715 so that we can convert this back to the
4716 corresponding COND_EXPR. */
4717 if (!HONOR_NANS (element_mode (arg1)))
4719 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4720 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4721 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4722 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4723 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4724 comp_op1, comp_op0);
4725 return pedantic_non_lvalue_loc (loc,
4726 fold_convert_loc (loc, type, tem));
4728 break;
4729 case GE_EXPR:
4730 case GT_EXPR:
4731 case UNGE_EXPR:
4732 case UNGT_EXPR:
4733 if (!HONOR_NANS (element_mode (arg1)))
4735 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4736 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4737 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4738 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4739 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4740 comp_op1, comp_op0);
4741 return pedantic_non_lvalue_loc (loc,
4742 fold_convert_loc (loc, type, tem));
4744 break;
4745 case UNEQ_EXPR:
4746 if (!HONOR_NANS (element_mode (arg1)))
4747 return pedantic_non_lvalue_loc (loc,
4748 fold_convert_loc (loc, type, arg2));
4749 break;
4750 case LTGT_EXPR:
4751 if (!HONOR_NANS (element_mode (arg1)))
4752 return pedantic_non_lvalue_loc (loc,
4753 fold_convert_loc (loc, type, arg1));
4754 break;
4755 default:
4756 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4757 break;
4761 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4762 we might still be able to simplify this. For example,
4763 if C1 is one less or one more than C2, this might have started
4764 out as a MIN or MAX and been transformed by this function.
4765 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4767 if (INTEGRAL_TYPE_P (type)
4768 && TREE_CODE (arg01) == INTEGER_CST
4769 && TREE_CODE (arg2) == INTEGER_CST)
4770 switch (comp_code)
4772 case EQ_EXPR:
4773 if (TREE_CODE (arg1) == INTEGER_CST)
4774 break;
4775 /* We can replace A with C1 in this case. */
4776 arg1 = fold_convert_loc (loc, type, arg01);
4777 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4779 case LT_EXPR:
4780 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4781 MIN_EXPR, to preserve the signedness of the comparison. */
4782 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4783 OEP_ONLY_CONST)
4784 && operand_equal_p (arg01,
4785 const_binop (PLUS_EXPR, arg2,
4786 build_int_cst (type, 1)),
4787 OEP_ONLY_CONST))
4789 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4790 fold_convert_loc (loc, TREE_TYPE (arg00),
4791 arg2));
4792 return pedantic_non_lvalue_loc (loc,
4793 fold_convert_loc (loc, type, tem));
4795 break;
4797 case LE_EXPR:
4798 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4799 as above. */
4800 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4801 OEP_ONLY_CONST)
4802 && operand_equal_p (arg01,
4803 const_binop (MINUS_EXPR, arg2,
4804 build_int_cst (type, 1)),
4805 OEP_ONLY_CONST))
4807 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4808 fold_convert_loc (loc, TREE_TYPE (arg00),
4809 arg2));
4810 return pedantic_non_lvalue_loc (loc,
4811 fold_convert_loc (loc, type, tem));
4813 break;
4815 case GT_EXPR:
4816 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4817 MAX_EXPR, to preserve the signedness of the comparison. */
4818 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4819 OEP_ONLY_CONST)
4820 && operand_equal_p (arg01,
4821 const_binop (MINUS_EXPR, arg2,
4822 build_int_cst (type, 1)),
4823 OEP_ONLY_CONST))
4825 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4826 fold_convert_loc (loc, TREE_TYPE (arg00),
4827 arg2));
4828 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4830 break;
4832 case GE_EXPR:
4833 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4834 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4835 OEP_ONLY_CONST)
4836 && operand_equal_p (arg01,
4837 const_binop (PLUS_EXPR, arg2,
4838 build_int_cst (type, 1)),
4839 OEP_ONLY_CONST))
4841 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4842 fold_convert_loc (loc, TREE_TYPE (arg00),
4843 arg2));
4844 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4846 break;
4847 case NE_EXPR:
4848 break;
4849 default:
4850 gcc_unreachable ();
4853 return NULL_TREE;
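/* Illustrative sketch (not part of GCC): two of the COND_EXPR shapes
   recognized above, on plain ints, where there are no NaNs or signed
   zeros to worry about.  Names are hypothetical.  */

static int
example_cond_as_abs (int a)
{
  /* A >= 0 ? A : -A is abs (A).  */
  return a >= 0 ? a : -a;
}

static int
example_cond_as_min (int a, int b)
{
  /* A <= B ? A : B is min (A, B).  */
  return a <= b ? a : b;
}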
4858 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4859 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4860 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4861 false) >= 2)
4862 #endif
4864 /* EXP is some logical combination of boolean tests. See if we can
4865 merge it into some range test. Return the new tree if so. */
4867 static tree
4868 fold_range_test (location_t loc, enum tree_code code, tree type,
4869 tree op0, tree op1)
4871 int or_op = (code == TRUTH_ORIF_EXPR
4872 || code == TRUTH_OR_EXPR);
4873 int in0_p, in1_p, in_p;
4874 tree low0, low1, low, high0, high1, high;
4875 bool strict_overflow_p = false;
4876 tree tem, lhs, rhs;
4877 const char * const warnmsg = G_("assuming signed overflow does not occur "
4878 "when simplifying range test");
4880 if (!INTEGRAL_TYPE_P (type))
4881 return 0;
4883 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4884 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4886 /* If this is an OR operation, invert both sides; we will invert
4887 again at the end. */
4888 if (or_op)
4889 in0_p = ! in0_p, in1_p = ! in1_p;
4891 /* If both expressions are the same, if we can merge the ranges, and we
4892 can build the range test, return it or it inverted. If one of the
4893 ranges is always true or always false, consider it to be the same
4894 expression as the other. */
4895 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4896 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4897 in1_p, low1, high1)
4898 && 0 != (tem = (build_range_check (loc, type,
4899 lhs != 0 ? lhs
4900 : rhs != 0 ? rhs : integer_zero_node,
4901 in_p, low, high))))
4903 if (strict_overflow_p)
4904 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4905 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4908 /* On machines where the branch cost is expensive, if this is a
4909 short-circuited branch and the underlying object on both sides
4910 is the same, make a non-short-circuit operation. */
4911 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4912 && lhs != 0 && rhs != 0
4913 && (code == TRUTH_ANDIF_EXPR
4914 || code == TRUTH_ORIF_EXPR)
4915 && operand_equal_p (lhs, rhs, 0))
4917 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4918 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4919 which cases we can't do this. */
4920 if (simple_operand_p (lhs))
4921 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4922 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4923 type, op0, op1);
4925 else if (!lang_hooks.decls.global_bindings_p ()
4926 && !CONTAINS_PLACEHOLDER_P (lhs))
4928 tree common = save_expr (lhs);
4930 if (0 != (lhs = build_range_check (loc, type, common,
4931 or_op ? ! in0_p : in0_p,
4932 low0, high0))
4933 && (0 != (rhs = build_range_check (loc, type, common,
4934 or_op ? ! in1_p : in1_p,
4935 low1, high1))))
4937 if (strict_overflow_p)
4938 fold_overflow_warning (warnmsg,
4939 WARN_STRICT_OVERFLOW_COMPARISON);
4940 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4941 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4942 type, lhs, rhs);
4947 return 0;
4950 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4951 bit value. Arrange things so the extra bits will be set to zero if and
4952 only if C is sign-extended to its full width. If MASK is nonzero,
4953 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4955 static tree
4956 unextend (tree c, int p, int unsignedp, tree mask)
4958 tree type = TREE_TYPE (c);
4959 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4960 tree temp;
4962 if (p == modesize || unsignedp)
4963 return c;
4965 /* We work by getting just the sign bit into the low-order bit, then
4966 into the high-order bit, then sign-extend. We then XOR that value
4967 with C. */
4968 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
4970 /* We must use a signed type in order to get an arithmetic right shift.
4971 However, we must also avoid introducing accidental overflows, so that
4972 a subsequent call to integer_zerop will work. Hence we must
4973 do the type conversion here. At this point, the constant is either
4974 zero or one, and the conversion to a signed type can never overflow.
4975 We could get an overflow if this conversion is done anywhere else. */
4976 if (TYPE_UNSIGNED (type))
4977 temp = fold_convert (signed_type_for (type), temp);
4979 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4980 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4981 if (mask != 0)
4982 temp = const_binop (BIT_AND_EXPR, temp,
4983 fold_convert (TREE_TYPE (c), mask));
4984 /* If necessary, convert the type back to match the type of C. */
4985 if (TYPE_UNSIGNED (type))
4986 temp = fold_convert (type, temp);
4988 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
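/* Illustrative sketch (not part of GCC): the shift-and-XOR idea behind
   unextend above, reduced to the classic trick for sign-extending a
   P-bit value held in a wider unsigned word whose upper bits are zero.
   Assumes 0 < p < 32; the helper name is hypothetical.  */

static unsigned int
example_sign_extend (unsigned int c, int p)
{
  unsigned int m = 1u << (p - 1);	/* The P-bit sign bit.  */
  return (c ^ m) - m;			/* Upper bits copy that bit.  */
}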
4991 /* For an expression that has the form
4992 (A && B) || ~B
4993 or
4994 (A || B) && ~B,
4995 we can drop one of the inner expressions and simplify to
4996 A || ~B
4997 or
4998 A && ~B
4999 LOC is the location of the resulting expression. OP is the inner
5000 logical operation; the left-hand side in the examples above, while CMPOP
5001 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5002 removing a condition that guards another, as in
5003 (A != NULL && A->...) || A == NULL
5004 which we must not transform. If RHS_ONLY is true, only eliminate the
5005 right-most operand of the inner logical operation. */
5007 static tree
5008 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5009 bool rhs_only)
5011 tree type = TREE_TYPE (cmpop);
5012 enum tree_code code = TREE_CODE (cmpop);
5013 enum tree_code truthop_code = TREE_CODE (op);
5014 tree lhs = TREE_OPERAND (op, 0);
5015 tree rhs = TREE_OPERAND (op, 1);
5016 tree orig_lhs = lhs, orig_rhs = rhs;
5017 enum tree_code rhs_code = TREE_CODE (rhs);
5018 enum tree_code lhs_code = TREE_CODE (lhs);
5019 enum tree_code inv_code;
5021 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5022 return NULL_TREE;
5024 if (TREE_CODE_CLASS (code) != tcc_comparison)
5025 return NULL_TREE;
5027 if (rhs_code == truthop_code)
5029 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5030 if (newrhs != NULL_TREE)
5032 rhs = newrhs;
5033 rhs_code = TREE_CODE (rhs);
5036 if (lhs_code == truthop_code && !rhs_only)
5038 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5039 if (newlhs != NULL_TREE)
5041 lhs = newlhs;
5042 lhs_code = TREE_CODE (lhs);
5046 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5047 if (inv_code == rhs_code
5048 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5049 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5050 return lhs;
5051 if (!rhs_only && inv_code == lhs_code
5052 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5053 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5054 return rhs;
5055 if (rhs != orig_rhs || lhs != orig_lhs)
5056 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5057 lhs, rhs);
5058 return NULL_TREE;
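/* Illustrative sketch (not part of GCC): the simplification described
   above, on plain truth values.  When B is true the ~B arm is false
   and the result is just A; when B is false both forms are true, so
   the inner B can be dropped.  Names are hypothetical.  */

static int
example_with_redundant_arm (int a, int b)
{
  return (a && b) || !b;
}

static int
example_arm_dropped (int a, int b)
{
  return a || !b;
}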
5061 /* Find ways of folding logical expressions of LHS and RHS:
5062 Try to merge two comparisons to the same innermost item.
5063 Look for range tests like "ch >= '0' && ch <= '9'".
5064 Look for combinations of simple terms on machines with expensive branches
5065 and evaluate the RHS unconditionally.
5067 For example, if we have p->a == 2 && p->b == 4 and we can make an
5068 object large enough to span both A and B, we can do this with a comparison
5069 against the object ANDed with a mask.
5071 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5072 operations to do this with one comparison.
5074 We check for both normal comparisons and the BIT_AND_EXPRs made by
5075 this function and the one above.
5077 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5078 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5080 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5081 two operands.
5083 We return the simplified tree or 0 if no optimization is possible. */
5085 static tree
5086 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5087 tree lhs, tree rhs)
5089 /* If this is the "or" of two comparisons, we can do something if
5090 the comparisons are NE_EXPR. If this is the "and", we can do something
5091 if the comparisons are EQ_EXPR. I.e.,
5092 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5094 WANTED_CODE is this operation code. For single bit fields, we can
5095 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5096 comparison for one-bit fields. */
5098 enum tree_code wanted_code;
5099 enum tree_code lcode, rcode;
5100 tree ll_arg, lr_arg, rl_arg, rr_arg;
5101 tree ll_inner, lr_inner, rl_inner, rr_inner;
5102 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5103 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5104 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5105 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5106 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5107 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5108 machine_mode lnmode, rnmode;
5109 tree ll_mask, lr_mask, rl_mask, rr_mask;
5110 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5111 tree l_const, r_const;
5112 tree lntype, rntype, result;
5113 HOST_WIDE_INT first_bit, end_bit;
5114 int volatilep;
5116 /* Start by getting the comparison codes. Fail if anything is volatile.
5117 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5118 it were surrounded with a NE_EXPR. */
5120 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5121 return 0;
5123 lcode = TREE_CODE (lhs);
5124 rcode = TREE_CODE (rhs);
5126 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5128 lhs = build2 (NE_EXPR, truth_type, lhs,
5129 build_int_cst (TREE_TYPE (lhs), 0));
5130 lcode = NE_EXPR;
5133 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5135 rhs = build2 (NE_EXPR, truth_type, rhs,
5136 build_int_cst (TREE_TYPE (rhs), 0));
5137 rcode = NE_EXPR;
5140 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5141 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5142 return 0;
5144 ll_arg = TREE_OPERAND (lhs, 0);
5145 lr_arg = TREE_OPERAND (lhs, 1);
5146 rl_arg = TREE_OPERAND (rhs, 0);
5147 rr_arg = TREE_OPERAND (rhs, 1);
5149 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5150 if (simple_operand_p (ll_arg)
5151 && simple_operand_p (lr_arg))
5153 if (operand_equal_p (ll_arg, rl_arg, 0)
5154 && operand_equal_p (lr_arg, rr_arg, 0))
5156 result = combine_comparisons (loc, code, lcode, rcode,
5157 truth_type, ll_arg, lr_arg);
5158 if (result)
5159 return result;
5161 else if (operand_equal_p (ll_arg, rr_arg, 0)
5162 && operand_equal_p (lr_arg, rl_arg, 0))
5164 result = combine_comparisons (loc, code, lcode,
5165 swap_tree_comparison (rcode),
5166 truth_type, ll_arg, lr_arg);
5167 if (result)
5168 return result;
5172 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5173 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5175 /* If the RHS can be evaluated unconditionally and its operands are
5176 simple, it wins to evaluate the RHS unconditionally on machines
5177 with expensive branches. In this case, this isn't a comparison
5178 that can be merged. */
5180 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5181 false) >= 2
5182 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5183 && simple_operand_p (rl_arg)
5184 && simple_operand_p (rr_arg))
5186 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5187 if (code == TRUTH_OR_EXPR
5188 && lcode == NE_EXPR && integer_zerop (lr_arg)
5189 && rcode == NE_EXPR && integer_zerop (rr_arg)
5190 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5191 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5192 return build2_loc (loc, NE_EXPR, truth_type,
5193 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5194 ll_arg, rl_arg),
5195 build_int_cst (TREE_TYPE (ll_arg), 0));
5197 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5198 if (code == TRUTH_AND_EXPR
5199 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5200 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5201 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5202 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5203 return build2_loc (loc, EQ_EXPR, truth_type,
5204 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5205 ll_arg, rl_arg),
5206 build_int_cst (TREE_TYPE (ll_arg), 0));
5209 /* See if the comparisons can be merged. Then get all the parameters for
5210 each side. */
5212 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5213 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5214 return 0;
5216 volatilep = 0;
5217 ll_inner = decode_field_reference (loc, ll_arg,
5218 &ll_bitsize, &ll_bitpos, &ll_mode,
5219 &ll_unsignedp, &volatilep, &ll_mask,
5220 &ll_and_mask);
5221 lr_inner = decode_field_reference (loc, lr_arg,
5222 &lr_bitsize, &lr_bitpos, &lr_mode,
5223 &lr_unsignedp, &volatilep, &lr_mask,
5224 &lr_and_mask);
5225 rl_inner = decode_field_reference (loc, rl_arg,
5226 &rl_bitsize, &rl_bitpos, &rl_mode,
5227 &rl_unsignedp, &volatilep, &rl_mask,
5228 &rl_and_mask);
5229 rr_inner = decode_field_reference (loc, rr_arg,
5230 &rr_bitsize, &rr_bitpos, &rr_mode,
5231 &rr_unsignedp, &volatilep, &rr_mask,
5232 &rr_and_mask);
5234 /* The inner operation on the lhs of each comparison must be the same
5235 if we are to be able to do anything.
5236 Then see if we have constants. If not, the same must be true for
5237 the rhs's. */
5238 if (volatilep || ll_inner == 0 || rl_inner == 0
5239 || ! operand_equal_p (ll_inner, rl_inner, 0))
5240 return 0;
5242 if (TREE_CODE (lr_arg) == INTEGER_CST
5243 && TREE_CODE (rr_arg) == INTEGER_CST)
5244 l_const = lr_arg, r_const = rr_arg;
5245 else if (lr_inner == 0 || rr_inner == 0
5246 || ! operand_equal_p (lr_inner, rr_inner, 0))
5247 return 0;
5248 else
5249 l_const = r_const = 0;
5251 /* If either comparison code is not correct for our logical operation,
5252 fail. However, we can convert a one-bit comparison against zero into
5253 the opposite comparison against that bit being set in the field. */
5255 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5256 if (lcode != wanted_code)
5258 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5260 /* Make the left operand unsigned, since we are only interested
5261 in the value of one bit. Otherwise we are doing the wrong
5262 thing below. */
5263 ll_unsignedp = 1;
5264 l_const = ll_mask;
5266 else
5267 return 0;
5270 /* This is analogous to the code for l_const above. */
5271 if (rcode != wanted_code)
5273 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5275 rl_unsignedp = 1;
5276 r_const = rl_mask;
5278 else
5279 return 0;
5282 /* See if we can find a mode that contains both fields being compared on
5283 the left. If we can't, fail. Otherwise, update all constants and masks
5284 to be relative to a field of that size. */
5285 first_bit = MIN (ll_bitpos, rl_bitpos);
5286 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5287 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5288 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5289 volatilep);
5290 if (lnmode == VOIDmode)
5291 return 0;
5293 lnbitsize = GET_MODE_BITSIZE (lnmode);
5294 lnbitpos = first_bit & ~ (lnbitsize - 1);
5295 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5296 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5298 if (BYTES_BIG_ENDIAN)
5300 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5301 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5304 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5305 size_int (xll_bitpos));
5306 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5307 size_int (xrl_bitpos));
5309 if (l_const)
5311 l_const = fold_convert_loc (loc, lntype, l_const);
5312 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5313 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5314 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5315 fold_build1_loc (loc, BIT_NOT_EXPR,
5316 lntype, ll_mask))))
5318 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5320 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5323 if (r_const)
5325 r_const = fold_convert_loc (loc, lntype, r_const);
5326 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5327 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5328 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5329 fold_build1_loc (loc, BIT_NOT_EXPR,
5330 lntype, rl_mask))))
5332 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5334 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5338 /* If the right sides are not constant, do the same for them. Also,
5339 disallow this optimization if a size or signedness mismatch occurs
5340 between the left and right sides. */
5341 if (l_const == 0)
5343 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5344 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5345 /* Make sure the two fields on the right
5346 correspond to the left without being swapped. */
5347 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5348 return 0;
5350 first_bit = MIN (lr_bitpos, rr_bitpos);
5351 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5352 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5353 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5354 volatilep);
5355 if (rnmode == VOIDmode)
5356 return 0;
5358 rnbitsize = GET_MODE_BITSIZE (rnmode);
5359 rnbitpos = first_bit & ~ (rnbitsize - 1);
5360 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5361 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5363 if (BYTES_BIG_ENDIAN)
5365 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5366 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5369 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5370 rntype, lr_mask),
5371 size_int (xlr_bitpos));
5372 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5373 rntype, rr_mask),
5374 size_int (xrr_bitpos));
5376 /* Make a mask that corresponds to both fields being compared.
5377 Do this for both items being compared. If the operands are the
5378 same size and the bits being compared are in the same position
5379 then we can do this by masking both and comparing the masked
5380 results. */
5381 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5382 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5383 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5385 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5386 ll_unsignedp || rl_unsignedp);
5387 if (! all_ones_mask_p (ll_mask, lnbitsize))
5388 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5390 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5391 lr_unsignedp || rr_unsignedp);
5392 if (! all_ones_mask_p (lr_mask, rnbitsize))
5393 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5395 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5398 /* There is still another way we can do something: If both pairs of
5399 fields being compared are adjacent, we may be able to make a wider
5400 field containing them both.
5402 Note that we still must mask the lhs/rhs expressions. Furthermore,
5403 the mask must be shifted to account for the shift done by
5404 make_bit_field_ref. */
5405 if ((ll_bitsize + ll_bitpos == rl_bitpos
5406 && lr_bitsize + lr_bitpos == rr_bitpos)
5407 || (ll_bitpos == rl_bitpos + rl_bitsize
5408 && lr_bitpos == rr_bitpos + rr_bitsize))
5410 tree type;
5412 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5413 ll_bitsize + rl_bitsize,
5414 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5415 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5416 lr_bitsize + rr_bitsize,
5417 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5419 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5420 size_int (MIN (xll_bitpos, xrl_bitpos)));
5421 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5422 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5424 /* Convert to the smaller type before masking out unwanted bits. */
5425 type = lntype;
5426 if (lntype != rntype)
5428 if (lnbitsize > rnbitsize)
5430 lhs = fold_convert_loc (loc, rntype, lhs);
5431 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5432 type = rntype;
5434 else if (lnbitsize < rnbitsize)
5436 rhs = fold_convert_loc (loc, lntype, rhs);
5437 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5438 type = lntype;
5442 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5443 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5445 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5446 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5448 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5451 return 0;
5454 /* Handle the case of comparisons with constants. If there is something in
5455 common between the masks, those bits of the constants must be the same.
5456 If not, the condition is always false. Test for this to avoid generating
5457 incorrect code below. */
5458 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5459 if (! integer_zerop (result)
5460 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5461 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5463 if (wanted_code == NE_EXPR)
5465 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5466 return constant_boolean_node (true, truth_type);
5468 else
5470 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5471 return constant_boolean_node (false, truth_type);
5475 /* Construct the expression we will return. First get the component
5476 reference we will make. Unless the mask is all ones the width of
5477 that field, perform the mask operation. Then compare with the
5478 merged constant. */
5479 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5480 ll_unsignedp || rl_unsignedp);
5482 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5483 if (! all_ones_mask_p (ll_mask, lnbitsize))
5484 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5486 return build2_loc (loc, wanted_code, truth_type, result,
5487 const_binop (BIT_IOR_EXPR, l_const, r_const));
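/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   the effect of the merge above, written as standalone C.  Two adjacent
   bit-field tests combine into a single load, one mask, and one compare
   against the merged constant.  The field layout and constants here are
   invented for illustration; compile separately.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  /* Pretend bits 0-3 hold field X, bits 4-7 hold field Y.  */
  for (uint32_t word = 0; word < 0x300; word++)
    {
      int separate = ((word & 0xf) == 1) && (((word >> 4) & 0xf) == 2);
      int merged = (word & 0xff) == 0x21;  /* one mask, one compare */
      assert (separate == merged);
    }
  return 0;
}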
5490 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5491 constant. */
5493 static tree
5494 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5495 tree op0, tree op1)
5497 tree arg0 = op0;
5498 enum tree_code op_code;
5499 tree comp_const;
5500 tree minmax_const;
5501 int consts_equal, consts_lt;
5502 tree inner;
5504 STRIP_SIGN_NOPS (arg0);
5506 op_code = TREE_CODE (arg0);
5507 minmax_const = TREE_OPERAND (arg0, 1);
5508 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5509 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5510 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5511 inner = TREE_OPERAND (arg0, 0);
5513 /* If something does not permit us to optimize, return the original tree. */
5514 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5515 || TREE_CODE (comp_const) != INTEGER_CST
5516 || TREE_OVERFLOW (comp_const)
5517 || TREE_CODE (minmax_const) != INTEGER_CST
5518 || TREE_OVERFLOW (minmax_const))
5519 return NULL_TREE;
5521 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5522 and GT_EXPR, doing the rest with recursive calls using logical
5523 simplifications. */
5524 switch (code)
5526 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5528 tree tem
5529 = optimize_minmax_comparison (loc,
5530 invert_tree_comparison (code, false),
5531 type, op0, op1);
5532 if (tem)
5533 return invert_truthvalue_loc (loc, tem);
5534 return NULL_TREE;
5537 case GE_EXPR:
5538 return
5539 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5540 optimize_minmax_comparison
5541 (loc, EQ_EXPR, type, arg0, comp_const),
5542 optimize_minmax_comparison
5543 (loc, GT_EXPR, type, arg0, comp_const));
5545 case EQ_EXPR:
5546 if (op_code == MAX_EXPR && consts_equal)
5547 /* MAX (X, 0) == 0 -> X <= 0 */
5548 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5550 else if (op_code == MAX_EXPR && consts_lt)
5551 /* MAX (X, 0) == 5 -> X == 5 */
5552 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5554 else if (op_code == MAX_EXPR)
5555 /* MAX (X, 0) == -1 -> false */
5556 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5558 else if (consts_equal)
5559 /* MIN (X, 0) == 0 -> X >= 0 */
5560 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5562 else if (consts_lt)
5563 /* MIN (X, 0) == 5 -> false */
5564 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5566 else
5567 /* MIN (X, 0) == -1 -> X == -1 */
5568 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5570 case GT_EXPR:
5571 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5572 /* MAX (X, 0) > 0 -> X > 0
5573 MAX (X, 0) > 5 -> X > 5 */
5574 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5576 else if (op_code == MAX_EXPR)
5577 /* MAX (X, 0) > -1 -> true */
5578 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5580 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5581 /* MIN (X, 0) > 0 -> false
5582 MIN (X, 0) > 5 -> false */
5583 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5585 else
5586 /* MIN (X, 0) > -1 -> X > -1 */
5587 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5589 default:
5590 return NULL_TREE;
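/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   a standalone check of two of the identities enumerated above,
   MAX (X, 0) == 0 -> X <= 0 and MAX (X, 0) > 0 -> X > 0.  Compile
   separately.  */
#include <assert.h>

static int
max_int (int a, int b)
{
  return a > b ? a : b;
}

int
main (void)
{
  for (int x = -3; x <= 3; x++)
    {
      assert ((max_int (x, 0) == 0) == (x <= 0));
      assert ((max_int (x, 0) > 0) == (x > 0));
    }
  return 0;
}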
5594 /* T is an integer expression that is being multiplied, divided, or taken a
5595 modulus (CODE says which and what kind of divide or modulus) by a
5596 constant C. See if we can eliminate that operation by folding it with
5597 other operations already in T. WIDE_TYPE, if non-null, is a type that
5598 should be used for the computation if wider than our type.
5600 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5601 (X * 2) + (Y * 4). We must, however, be assured that either the original
5602 expression would not overflow or that overflow is undefined for the type
5603 in the language in question.
5605 If we return a non-null expression, it is an equivalent form of the
5606 original computation, but need not be in the original type.
5608 We set *STRICT_OVERFLOW_P to true if the return value depends on
5609 signed overflow being undefined. Otherwise we do not change
5610 *STRICT_OVERFLOW_P. */
5612 static tree
5613 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5614 bool *strict_overflow_p)
5616 /* To avoid exponential search depth, refuse to allow recursion past
5617 three levels. Beyond that (1) it's highly unlikely that we'll find
5618 something interesting and (2) we've probably processed it before
5619 when we built the inner expression. */
5621 static int depth;
5622 tree ret;
5624 if (depth > 3)
5625 return NULL;
5627 depth++;
5628 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5629 depth--;
5631 return ret;
5634 static tree
5635 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5636 bool *strict_overflow_p)
5638 tree type = TREE_TYPE (t);
5639 enum tree_code tcode = TREE_CODE (t);
5640 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5641 > GET_MODE_SIZE (TYPE_MODE (type)))
5642 ? wide_type : type);
5643 tree t1, t2;
5644 int same_p = tcode == code;
5645 tree op0 = NULL_TREE, op1 = NULL_TREE;
5646 bool sub_strict_overflow_p;
5648 /* Don't deal with constants of zero here; they confuse the code below. */
5649 if (integer_zerop (c))
5650 return NULL_TREE;
5652 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5653 op0 = TREE_OPERAND (t, 0);
5655 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5656 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5658 /* Note that we need not handle conditional operations here since fold
5659 already handles those cases. So just do arithmetic here. */
5660 switch (tcode)
5662 case INTEGER_CST:
5663 /* For a constant, we can always simplify if we are a multiply
5664 or (for divide and modulus) if it is a multiple of our constant. */
5665 if (code == MULT_EXPR
5666 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5667 return const_binop (code, fold_convert (ctype, t),
5668 fold_convert (ctype, c));
5669 break;
5671 CASE_CONVERT: case NON_LVALUE_EXPR:
5672 /* If op0 is an expression ... */
5673 if ((COMPARISON_CLASS_P (op0)
5674 || UNARY_CLASS_P (op0)
5675 || BINARY_CLASS_P (op0)
5676 || VL_EXP_CLASS_P (op0)
5677 || EXPRESSION_CLASS_P (op0))
5678 /* ... and has wrapping overflow, and its type is smaller
5679 than ctype, then we cannot pass through as widening. */
5680 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5681 && (TYPE_PRECISION (ctype)
5682 > TYPE_PRECISION (TREE_TYPE (op0))))
5683 /* ... or this is a truncation (t is narrower than op0),
5684 then we cannot pass through this narrowing. */
5685 || (TYPE_PRECISION (type)
5686 < TYPE_PRECISION (TREE_TYPE (op0)))
5687 /* ... or signedness changes for division or modulus,
5688 then we cannot pass through this conversion. */
5689 || (code != MULT_EXPR
5690 && (TYPE_UNSIGNED (ctype)
5691 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5692 /* ... or has undefined overflow while the converted to
5693 type has not, we cannot do the operation in the inner type
5694 as that would introduce undefined overflow. */
5695 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5696 && !TYPE_OVERFLOW_UNDEFINED (type))))
5697 break;
5699 /* Pass the constant down and see if we can make a simplification. If
5700 we can, replace this expression with the inner simplification for
5701 possible later conversion to our or some other type. */
5702 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5703 && TREE_CODE (t2) == INTEGER_CST
5704 && !TREE_OVERFLOW (t2)
5705 && (0 != (t1 = extract_muldiv (op0, t2, code,
5706 code == MULT_EXPR
5707 ? ctype : NULL_TREE,
5708 strict_overflow_p))))
5709 return t1;
5710 break;
5712 case ABS_EXPR:
5713 /* If widening the type changes it from signed to unsigned, then we
5714 must avoid building ABS_EXPR itself as unsigned. */
5715 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5717 tree cstype = (*signed_type_for) (ctype);
5718 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5719 != 0)
5721 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5722 return fold_convert (ctype, t1);
5724 break;
5726 /* If the constant is negative, we cannot simplify this. */
5727 if (tree_int_cst_sgn (c) == -1)
5728 break;
5729 /* FALLTHROUGH */
5730 case NEGATE_EXPR:
5731 /* For division and modulus, type can't be unsigned, as e.g.
5732 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5733 For signed types, even with wrapping overflow, this is fine. */
5734 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5735 break;
5736 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5737 != 0)
5738 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5739 break;
5741 case MIN_EXPR: case MAX_EXPR:
5742 /* If widening the type changes the signedness, then we can't perform
5743 this optimization as that changes the result. */
5744 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5745 break;
5747 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5748 sub_strict_overflow_p = false;
5749 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5750 &sub_strict_overflow_p)) != 0
5751 && (t2 = extract_muldiv (op1, c, code, wide_type,
5752 &sub_strict_overflow_p)) != 0)
5754 if (tree_int_cst_sgn (c) < 0)
5755 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5756 if (sub_strict_overflow_p)
5757 *strict_overflow_p = true;
5758 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5759 fold_convert (ctype, t2));
5761 break;
5763 case LSHIFT_EXPR: case RSHIFT_EXPR:
5764 /* If the second operand is constant, this is a multiplication
5765 or floor division by a power of two, so we can treat it that
5766 way unless the multiplier or divisor overflows. Signed
5767 left-shift overflow is implementation-defined rather than
5768 undefined in C90, so do not convert signed left shift into
5769 multiplication. */
5770 if (TREE_CODE (op1) == INTEGER_CST
5771 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5772 /* const_binop may not detect overflow correctly,
5773 so check for it explicitly here. */
5774 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5775 && 0 != (t1 = fold_convert (ctype,
5776 const_binop (LSHIFT_EXPR,
5777 size_one_node,
5778 op1)))
5779 && !TREE_OVERFLOW (t1))
5780 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5781 ? MULT_EXPR : FLOOR_DIV_EXPR,
5782 ctype,
5783 fold_convert (ctype, op0),
5784 t1),
5785 c, code, wide_type, strict_overflow_p);
5786 break;
5788 case PLUS_EXPR: case MINUS_EXPR:
5789 /* See if we can eliminate the operation on both sides. If we can, we
5790 can return a new PLUS or MINUS. If we can't, the only remaining
5791 cases where we can do anything are if the second operand is a
5792 constant. */
5793 sub_strict_overflow_p = false;
5794 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5795 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5796 if (t1 != 0 && t2 != 0
5797 && (code == MULT_EXPR
5798 /* If not multiplication, we can only do this if both operands
5799 are divisible by c. */
5800 || (multiple_of_p (ctype, op0, c)
5801 && multiple_of_p (ctype, op1, c))))
5803 if (sub_strict_overflow_p)
5804 *strict_overflow_p = true;
5805 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5806 fold_convert (ctype, t2));
5809 /* If this was a subtraction, negate OP1 and set it to be an addition.
5810 This simplifies the logic below. */
5811 if (tcode == MINUS_EXPR)
5813 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5814 /* If OP1 was not easily negatable, the constant may be OP0. */
5815 if (TREE_CODE (op0) == INTEGER_CST)
5817 tree tem = op0;
5818 op0 = op1;
5819 op1 = tem;
5820 tem = t1;
5821 t1 = t2;
5822 t2 = tem;
5826 if (TREE_CODE (op1) != INTEGER_CST)
5827 break;
5829 /* If either OP1 or C is negative, this optimization is not safe for
5830 some of the division and remainder types while for others we need
5831 to change the code. */
5832 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5834 if (code == CEIL_DIV_EXPR)
5835 code = FLOOR_DIV_EXPR;
5836 else if (code == FLOOR_DIV_EXPR)
5837 code = CEIL_DIV_EXPR;
5838 else if (code != MULT_EXPR
5839 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5840 break;
5843 /* If it's a multiply or a division/modulus operation of a multiple
5844 of our constant, do the operation and verify it doesn't overflow. */
5845 if (code == MULT_EXPR
5846 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5848 op1 = const_binop (code, fold_convert (ctype, op1),
5849 fold_convert (ctype, c));
5850 /* We allow the constant to overflow with wrapping semantics. */
5851 if (op1 == 0
5852 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5853 break;
5855 else
5856 break;
5858 /* If we have an unsigned type, we cannot widen the operation since it
5859 will change the result if the original computation overflowed. */
5860 if (TYPE_UNSIGNED (ctype) && ctype != type)
5861 break;
5863 /* If we were able to eliminate our operation from the first side,
5864 apply our operation to the second side and reform the PLUS. */
5865 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5866 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5868 /* The last case is if we are a multiply. In that case, we can
5869 apply the distributive law to commute the multiply and addition
5870 if the multiplication of the constants doesn't overflow
5871 and overflow is defined. With undefined overflow
5872 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5873 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5874 return fold_build2 (tcode, ctype,
5875 fold_build2 (code, ctype,
5876 fold_convert (ctype, op0),
5877 fold_convert (ctype, c)),
5878 op1);
5880 break;
5882 case MULT_EXPR:
5883 /* We have a special case here if we are doing something like
5884 (C * 8) % 4 since we know that's zero. */
5885 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5886 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5887 /* If the multiplication can overflow we cannot optimize this. */
5888 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5889 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5890 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5892 *strict_overflow_p = true;
5893 return omit_one_operand (type, integer_zero_node, op0);
5896 /* ... fall through ... */
5898 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5899 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5900 /* If we can extract our operation from the LHS, do so and return a
5901 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5902 do something only if the second operand is a constant. */
5903 if (same_p
5904 && (t1 = extract_muldiv (op0, c, code, wide_type,
5905 strict_overflow_p)) != 0)
5906 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5907 fold_convert (ctype, op1));
5908 else if (tcode == MULT_EXPR && code == MULT_EXPR
5909 && (t1 = extract_muldiv (op1, c, code, wide_type,
5910 strict_overflow_p)) != 0)
5911 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5912 fold_convert (ctype, t1));
5913 else if (TREE_CODE (op1) != INTEGER_CST)
5914 return 0;
5916 /* If these are the same operation types, we can associate them
5917 assuming no overflow. */
5918 if (tcode == code)
5920 bool overflow_p = false;
5921 bool overflow_mul_p;
5922 signop sign = TYPE_SIGN (ctype);
5923 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5924 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5925 if (overflow_mul_p
5926 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5927 overflow_p = true;
5928 if (!overflow_p)
5929 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5930 wide_int_to_tree (ctype, mul));
5933 /* If these operations "cancel" each other, we have the main
5934 optimizations of this pass, which occur when either constant is a
5935 multiple of the other, in which case we replace this with either an
5936 operation of CODE or TCODE.
5938 If we have an unsigned type, we cannot do this since it will change
5939 the result if the original computation overflowed. */
5940 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5941 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5942 || (tcode == MULT_EXPR
5943 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5944 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5945 && code != MULT_EXPR)))
5947 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5949 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5950 *strict_overflow_p = true;
5951 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5952 fold_convert (ctype,
5953 const_binop (TRUNC_DIV_EXPR,
5954 op1, c)));
5956 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
5958 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5959 *strict_overflow_p = true;
5960 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5961 fold_convert (ctype,
5962 const_binop (TRUNC_DIV_EXPR,
5963 c, op1)));
5966 break;
5968 default:
5969 break;
5972 return 0;
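/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   the rewrite described in the comment before extract_muldiv, on its own
   example: (X * 8) + (Y * 16) divided by 4 becomes (X * 2) + (Y * 4).
   Valid here because the small operands cannot overflow.  Compile
   separately.  */
#include <assert.h>

int
main (void)
{
  for (long x = -4; x <= 4; x++)
    for (long y = -4; y <= 4; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}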
5975 /* Return a node which has the indicated constant VALUE (either 0 or
5976 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5977 and is of the indicated TYPE. */
5979 tree
5980 constant_boolean_node (bool value, tree type)
5982 if (type == integer_type_node)
5983 return value ? integer_one_node : integer_zero_node;
5984 else if (type == boolean_type_node)
5985 return value ? boolean_true_node : boolean_false_node;
5986 else if (TREE_CODE (type) == VECTOR_TYPE)
5987 return build_vector_from_val (type,
5988 build_int_cst (TREE_TYPE (type),
5989 value ? -1 : 0));
5990 else
5991 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5995 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5996 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5997 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5998 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5999 COND is the first argument to CODE; otherwise (as in the example
6000 given here), it is the second argument. TYPE is the type of the
6001 original expression. Return NULL_TREE if no simplification is
6002 possible. */
6004 static tree
6005 fold_binary_op_with_conditional_arg (location_t loc,
6006 enum tree_code code,
6007 tree type, tree op0, tree op1,
6008 tree cond, tree arg, int cond_first_p)
6010 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6011 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6012 tree test, true_value, false_value;
6013 tree lhs = NULL_TREE;
6014 tree rhs = NULL_TREE;
6015 enum tree_code cond_code = COND_EXPR;
6017 if (TREE_CODE (cond) == COND_EXPR
6018 || TREE_CODE (cond) == VEC_COND_EXPR)
6020 test = TREE_OPERAND (cond, 0);
6021 true_value = TREE_OPERAND (cond, 1);
6022 false_value = TREE_OPERAND (cond, 2);
6023 /* If this operand throws an exception, then it does not make
6024 sense to try to perform a logical or arithmetic operation
6025 involving it. */
6026 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6027 lhs = true_value;
6028 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6029 rhs = false_value;
6031 else
6033 tree testtype = TREE_TYPE (cond);
6034 test = cond;
6035 true_value = constant_boolean_node (true, testtype);
6036 false_value = constant_boolean_node (false, testtype);
6039 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6040 cond_code = VEC_COND_EXPR;
6042 /* This transformation is only worthwhile if we don't have to wrap ARG
6043 in a SAVE_EXPR and the operation can be simplified without recursing
6044 on at least one of the branches once it's pushed inside the COND_EXPR. */
6045 if (!TREE_CONSTANT (arg)
6046 && (TREE_SIDE_EFFECTS (arg)
6047 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6048 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6049 return NULL_TREE;
6051 arg = fold_convert_loc (loc, arg_type, arg);
6052 if (lhs == 0)
6054 true_value = fold_convert_loc (loc, cond_type, true_value);
6055 if (cond_first_p)
6056 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6057 else
6058 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6060 if (rhs == 0)
6062 false_value = fold_convert_loc (loc, cond_type, false_value);
6063 if (cond_first_p)
6064 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6065 else
6066 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6069 /* Check that we have simplified at least one of the branches. */
6070 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6071 return NULL_TREE;
6073 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
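/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   the shape of the transformation above, in plain C.  Pushing the `+'
   into both arms pays off when an arm is constant: `a + 7' and `a + 9'
   below are the already-simplified branches.  Compile separately.  */
#include <assert.h>

int
main (void)
{
  for (int b = 0; b <= 1; b++)
    for (int a = -2; a <= 2; a++)
      assert (a + (b ? 7 : 9) == (b ? (a + 7) : (a + 9)));
  return 0;
}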
6077 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6079 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6080 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6081 ADDEND is the same as X.
6083 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6084 and finite. The problematic cases are when X is zero, and its mode
6085 has signed zeros. In the case of rounding towards -infinity,
6086 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6087 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6089 bool
6090 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6092 if (!real_zerop (addend))
6093 return false;
6095 /* Don't allow the fold with -fsignaling-nans. */
6096 if (HONOR_SNANS (element_mode (type)))
6097 return false;
6099 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6100 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6101 return true;
6103 /* In a vector or complex, we would need to check the sign of all zeros. */
6104 if (TREE_CODE (addend) != REAL_CST)
6105 return false;
6107 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6108 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6109 negate = !negate;
6111 /* The mode has signed zeros, and we have to honor their sign.
6112 In this situation, there is only one case we can return true for.
6113 X - 0 is the same as X unless rounding towards -infinity is
6114 supported. */
6115 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
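/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   why X + 0.0 -> X needs signed zeros to be ignorable.  Under the
   default rounding mode, -0.0 + 0.0 yields +0.0, so the fold would flip
   the sign bit, while -0.0 - 0.0 stays -0.0.  Compile separately.  */
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = -0.0;
  printf ("x + 0.0 = %g, signbit %d\n", x + 0.0, signbit (x + 0.0) != 0);
  printf ("x - 0.0 = %g, signbit %d\n", x - 0.0, signbit (x - 0.0) != 0);
  return 0;
}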
6118 /* Subroutine of fold() that checks comparisons of built-in math
6119 functions against real constants.
6121 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6122 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6123 is the type of the result and ARG0 and ARG1 are the operands of the
6124 comparison. ARG1 must be a TREE_REAL_CST.
6126 The function returns the constant folded tree if a simplification
6127 can be made, and NULL_TREE otherwise. */
6129 static tree
6130 fold_mathfn_compare (location_t loc,
6131 enum built_in_function fcode, enum tree_code code,
6132 tree type, tree arg0, tree arg1)
6134 REAL_VALUE_TYPE c;
6136 if (BUILTIN_SQRT_P (fcode))
6138 tree arg = CALL_EXPR_ARG (arg0, 0);
6139 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6141 c = TREE_REAL_CST (arg1);
6142 if (REAL_VALUE_NEGATIVE (c))
6144 /* sqrt(x) < y is always false, if y is negative. */
6145 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6146 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6148 /* sqrt(x) > y is always true, if y is negative and we
6149 don't care about NaNs, i.e. negative values of x. */
6150 if (code == NE_EXPR || !HONOR_NANS (mode))
6151 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6153 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6154 return fold_build2_loc (loc, GE_EXPR, type, arg,
6155 build_real (TREE_TYPE (arg), dconst0));
6157 else if (code == GT_EXPR || code == GE_EXPR)
6159 REAL_VALUE_TYPE c2;
6161 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6162 real_convert (&c2, mode, &c2);
6164 if (REAL_VALUE_ISINF (c2))
6166 /* sqrt(x) > y is x == +Inf, when y is very large. */
6167 if (HONOR_INFINITIES (mode))
6168 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6169 build_real (TREE_TYPE (arg), c2));
6171 /* sqrt(x) > y is always false, when y is very large
6172 and we don't care about infinities. */
6173 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6176 /* sqrt(x) > c is the same as x > c*c. */
6177 return fold_build2_loc (loc, code, type, arg,
6178 build_real (TREE_TYPE (arg), c2));
6180 else if (code == LT_EXPR || code == LE_EXPR)
6182 REAL_VALUE_TYPE c2;
6184 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6185 real_convert (&c2, mode, &c2);
6187 if (REAL_VALUE_ISINF (c2))
6189 /* sqrt(x) < y is always true, when y is a very large
6190 value and we don't care about NaNs or Infinities. */
6191 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6192 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6194 /* sqrt(x) < y is x != +Inf when y is very large and we
6195 don't care about NaNs. */
6196 if (! HONOR_NANS (mode))
6197 return fold_build2_loc (loc, NE_EXPR, type, arg,
6198 build_real (TREE_TYPE (arg), c2));
6200 /* sqrt(x) < y is x >= 0 when y is very large and we
6201 don't care about Infinities. */
6202 if (! HONOR_INFINITIES (mode))
6203 return fold_build2_loc (loc, GE_EXPR, type, arg,
6204 build_real (TREE_TYPE (arg), dconst0));
6206 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6207 arg = save_expr (arg);
6208 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6209 fold_build2_loc (loc, GE_EXPR, type, arg,
6210 build_real (TREE_TYPE (arg),
6211 dconst0)),
6212 fold_build2_loc (loc, NE_EXPR, type, arg,
6213 build_real (TREE_TYPE (arg),
6214 c2)));
6217 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6218 if (! HONOR_NANS (mode))
6219 return fold_build2_loc (loc, code, type, arg,
6220 build_real (TREE_TYPE (arg), c2));
6222 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6223 arg = save_expr (arg);
6224 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6225 fold_build2_loc (loc, GE_EXPR, type, arg,
6226 build_real (TREE_TYPE (arg),
6227 dconst0)),
6228 fold_build2_loc (loc, code, type, arg,
6229 build_real (TREE_TYPE (arg),
6230 c2)));
6234 return NULL_TREE;
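/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   the core identity behind the sqrt cases above.  For c >= 0 and finite
   nonnegative x, sqrt (x) > c is equivalent to x > c*c; the extra
   branches in the code handle NaNs, infinities, and the rounding of
   c*c.  Compile separately with -lm.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  const double c = 3.0;                 /* c*c == 9.0 is exact */
  for (double x = 0.0; x <= 20.0; x += 0.5)
    assert ((sqrt (x) > c) == (x > c * c));
  return 0;
}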
6237 /* Subroutine of fold() that optimizes comparisons against Infinities,
6238 either +Inf or -Inf.
6240 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6241 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6242 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6244 The function returns the constant folded tree if a simplification
6245 can be made, and NULL_TREE otherwise. */
6247 static tree
6248 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6249 tree arg0, tree arg1)
6251 machine_mode mode;
6252 REAL_VALUE_TYPE max;
6253 tree temp;
6254 bool neg;
6256 mode = TYPE_MODE (TREE_TYPE (arg0));
6258 /* For negative infinity swap the sense of the comparison. */
6259 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6260 if (neg)
6261 code = swap_tree_comparison (code);
6263 switch (code)
6265 case GT_EXPR:
6266 /* x > +Inf is always false, if we ignore sNaNs. */
6267 if (HONOR_SNANS (mode))
6268 return NULL_TREE;
6269 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6271 case LE_EXPR:
6272 /* x <= +Inf is always true, if we don't care about NaNs. */
6273 if (! HONOR_NANS (mode))
6274 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6276 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6277 arg0 = save_expr (arg0);
6278 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6280 case EQ_EXPR:
6281 case GE_EXPR:
6282 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6283 real_maxval (&max, neg, mode);
6284 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6285 arg0, build_real (TREE_TYPE (arg0), max));
6287 case LT_EXPR:
6288 /* x < +Inf is always equal to x <= DBL_MAX. */
6289 real_maxval (&max, neg, mode);
6290 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6291 arg0, build_real (TREE_TYPE (arg0), max));
6293 case NE_EXPR:
6294 /* x != +Inf is always equal to !(x > DBL_MAX). */
6295 real_maxval (&max, neg, mode);
6296 if (! HONOR_NANS (mode))
6297 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6298 arg0, build_real (TREE_TYPE (arg0), max));
6300 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6301 arg0, build_real (TREE_TYPE (arg0), max));
6302 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6304 default:
6305 break;
6308 return NULL_TREE;
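/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   two of the rewrites above, checked in standalone C (compiled without
   -ffast-math): x <= +Inf holds exactly when x == x, and for non-NaN x,
   x < +Inf is x <= DBL_MAX.  */
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double vals[] = { -INFINITY, -1.0, 0.0, DBL_MAX, INFINITY, NAN };
  for (unsigned i = 0; i < sizeof vals / sizeof vals[0]; i++)
    {
      double x = vals[i];
      assert ((x <= INFINITY) == (x == x));
      if (x == x)                       /* skip NaN for the second form */
        assert ((x < INFINITY) == (x <= DBL_MAX));
    }
  return 0;
}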
6311 /* Subroutine of fold() that optimizes comparisons of a division by
6312 a nonzero integer constant against an integer constant, i.e.
6313 X/C1 op C2.
6315 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6316 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6317 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6319 The function returns the constant folded tree if a simplification
6320 can be made, and NULL_TREE otherwise. */
6322 static tree
6323 fold_div_compare (location_t loc,
6324 enum tree_code code, tree type, tree arg0, tree arg1)
6326 tree prod, tmp, hi, lo;
6327 tree arg00 = TREE_OPERAND (arg0, 0);
6328 tree arg01 = TREE_OPERAND (arg0, 1);
6329 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6330 bool neg_overflow = false;
6331 bool overflow;
6333 /* We have to do this the hard way to detect unsigned overflow.
6334 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6335 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6336 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6337 neg_overflow = false;
6339 if (sign == UNSIGNED)
6341 tmp = int_const_binop (MINUS_EXPR, arg01,
6342 build_int_cst (TREE_TYPE (arg01), 1));
6343 lo = prod;
6345 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6346 val = wi::add (prod, tmp, sign, &overflow);
6347 hi = force_fit_type (TREE_TYPE (arg00), val,
6348 -1, overflow | TREE_OVERFLOW (prod));
6350 else if (tree_int_cst_sgn (arg01) >= 0)
6352 tmp = int_const_binop (MINUS_EXPR, arg01,
6353 build_int_cst (TREE_TYPE (arg01), 1));
6354 switch (tree_int_cst_sgn (arg1))
6356 case -1:
6357 neg_overflow = true;
6358 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6359 hi = prod;
6360 break;
6362 case 0:
6363 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6364 hi = tmp;
6365 break;
6367 case 1:
6368 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6369 lo = prod;
6370 break;
6372 default:
6373 gcc_unreachable ();
6376 else
6378 /* A negative divisor reverses the relational operators. */
6379 code = swap_tree_comparison (code);
6381 tmp = int_const_binop (PLUS_EXPR, arg01,
6382 build_int_cst (TREE_TYPE (arg01), 1));
6383 switch (tree_int_cst_sgn (arg1))
6385 case -1:
6386 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6387 lo = prod;
6388 break;
6390 case 0:
6391 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6392 lo = tmp;
6393 break;
6395 case 1:
6396 neg_overflow = true;
6397 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6398 hi = prod;
6399 break;
6401 default:
6402 gcc_unreachable ();
6406 switch (code)
6408 case EQ_EXPR:
6409 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6410 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6411 if (TREE_OVERFLOW (hi))
6412 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6413 if (TREE_OVERFLOW (lo))
6414 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6415 return build_range_check (loc, type, arg00, 1, lo, hi);
6417 case NE_EXPR:
6418 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6419 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6420 if (TREE_OVERFLOW (hi))
6421 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6422 if (TREE_OVERFLOW (lo))
6423 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6424 return build_range_check (loc, type, arg00, 0, lo, hi);
6426 case LT_EXPR:
6427 if (TREE_OVERFLOW (lo))
6429 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6430 return omit_one_operand_loc (loc, type, tmp, arg00);
6432 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6434 case LE_EXPR:
6435 if (TREE_OVERFLOW (hi))
6437 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6438 return omit_one_operand_loc (loc, type, tmp, arg00);
6440 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6442 case GT_EXPR:
6443 if (TREE_OVERFLOW (hi))
6445 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6446 return omit_one_operand_loc (loc, type, tmp, arg00);
6448 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6450 case GE_EXPR:
6451 if (TREE_OVERFLOW (lo))
6453 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6454 return omit_one_operand_loc (loc, type, tmp, arg00);
6456 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6458 default:
6459 break;
6462 return NULL_TREE;
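/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   the range check built above, instantiated with C1 = 4 and C2 = +/-2.
   Under C's truncating division, X/4 == 2 is the range test
   8 <= X <= 11, and X/4 == -2 is -11 <= X <= -8.  Compile separately.  */
#include <assert.h>

int
main (void)
{
  for (int x = -50; x <= 50; x++)
    {
      assert ((x / 4 == 2) == (x >= 8 && x <= 11));
      assert ((x / 4 == -2) == (x >= -11 && x <= -8));
    }
  return 0;
}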
6466 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6467 equality/inequality test, then return a simplified form of the test
6468 using a sign test. Otherwise return NULL. TYPE is the desired
6469 result type. */
6471 static tree
6472 fold_single_bit_test_into_sign_test (location_t loc,
6473 enum tree_code code, tree arg0, tree arg1,
6474 tree result_type)
6476 /* If this is testing a single bit, we can optimize the test. */
6477 if ((code == NE_EXPR || code == EQ_EXPR)
6478 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6479 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6481 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6482 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6483 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6485 if (arg00 != NULL_TREE
6486 /* This is only a win if casting to a signed type is cheap,
6487 i.e. when arg00's type is not a partial mode. */
6488 && TYPE_PRECISION (TREE_TYPE (arg00))
6489 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6491 tree stype = signed_type_for (TREE_TYPE (arg00));
6492 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6493 result_type,
6494 fold_convert_loc (loc, stype, arg00),
6495 build_int_cst (stype, 0));
6499 return NULL_TREE;
6502 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6503 equality/inequality test, then return a simplified form of
6504 the test using shifts and logical operations. Otherwise return
6505 NULL. TYPE is the desired result type. */
6507 tree
6508 fold_single_bit_test (location_t loc, enum tree_code code,
6509 tree arg0, tree arg1, tree result_type)
6511 /* If this is testing a single bit, we can optimize the test. */
6512 if ((code == NE_EXPR || code == EQ_EXPR)
6513 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6514 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6516 tree inner = TREE_OPERAND (arg0, 0);
6517 tree type = TREE_TYPE (arg0);
6518 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6519 machine_mode operand_mode = TYPE_MODE (type);
6520 int ops_unsigned;
6521 tree signed_type, unsigned_type, intermediate_type;
6522 tree tem, one;
6524 /* First, see if we can fold the single bit test into a sign-bit
6525 test. */
6526 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6527 result_type);
6528 if (tem)
6529 return tem;
6531 /* Otherwise we have (A & C) != 0 where C is a single bit,
6532 convert that into ((A >> C2) & 1), where C2 = log2(C).
6533 Similarly for (A & C) == 0. */
6535 /* If INNER is a right shift of a constant and it plus BITNUM does
6536 not overflow, adjust BITNUM and INNER. */
6537 if (TREE_CODE (inner) == RSHIFT_EXPR
6538 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6539 && bitnum < TYPE_PRECISION (type)
6540 && wi::ltu_p (TREE_OPERAND (inner, 1),
6541 TYPE_PRECISION (type) - bitnum))
6543 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6544 inner = TREE_OPERAND (inner, 0);
6547 /* If we are going to be able to omit the AND below, we must do our
6548 operations as unsigned. If we must use the AND, we have a choice.
6549 Normally unsigned is faster, but for some machines signed is. */
6550 #ifdef LOAD_EXTEND_OP
6551 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6552 && !flag_syntax_only) ? 0 : 1;
6553 #else
6554 ops_unsigned = 1;
6555 #endif
6557 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6558 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6559 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6560 inner = fold_convert_loc (loc, intermediate_type, inner);
6562 if (bitnum != 0)
6563 inner = build2 (RSHIFT_EXPR, intermediate_type,
6564 inner, size_int (bitnum));
6566 one = build_int_cst (intermediate_type, 1);
6568 if (code == EQ_EXPR)
6569 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6571 /* Put the AND last so it can combine with more things. */
6572 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6574 /* Make sure to return the proper type. */
6575 inner = fold_convert_loc (loc, result_type, inner);
6577 return inner;
6579 return NULL_TREE;
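/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   both single-bit forms above, in standalone C.  (A & 8) != 0 becomes
   ((A >> 3) & 1), and a test of the sign bit becomes a signed
   comparison against zero.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t a = 0; a < 64; a++)
    assert (((a & 8) != 0) == ((a >> 3) & 1));

  /* Sign-bit test; the int32_t conversion assumes the usual
     two's-complement wrap, as on GCC targets.  */
  uint32_t b = 0x80000000u;
  assert (((b & 0x80000000u) != 0) == ((int32_t) b < 0));
  return 0;
}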
6582 /* Check whether we are allowed to reorder operands arg0 and arg1,
6583 such that the evaluation of arg1 occurs before arg0. */
6585 static bool
6586 reorder_operands_p (const_tree arg0, const_tree arg1)
6588 if (! flag_evaluation_order)
6589 return true;
6590 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6591 return true;
6592 return ! TREE_SIDE_EFFECTS (arg0)
6593 && ! TREE_SIDE_EFFECTS (arg1);
6596 /* Test whether it is preferable to swap two operands, ARG0 and
6597 ARG1, for example because ARG0 is an integer constant and ARG1
6598 isn't. If REORDER is true, only recommend swapping if we can
6599 evaluate the operands in reverse order. */
6601 bool
6602 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6604 if (CONSTANT_CLASS_P (arg1))
6605 return 0;
6606 if (CONSTANT_CLASS_P (arg0))
6607 return 1;
6609 STRIP_SIGN_NOPS (arg0);
6610 STRIP_SIGN_NOPS (arg1);
6612 if (TREE_CONSTANT (arg1))
6613 return 0;
6614 if (TREE_CONSTANT (arg0))
6615 return 1;
6617 if (reorder && flag_evaluation_order
6618 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6619 return 0;
6621 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6622 for commutative and comparison operators. Ensuring a canonical
6623 form allows the optimizers to find additional redundancies without
6624 having to explicitly check for both orderings. */
6625 if (TREE_CODE (arg0) == SSA_NAME
6626 && TREE_CODE (arg1) == SSA_NAME
6627 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6628 return 1;
6630 /* Put SSA_NAMEs last. */
6631 if (TREE_CODE (arg1) == SSA_NAME)
6632 return 0;
6633 if (TREE_CODE (arg0) == SSA_NAME)
6634 return 1;
6636 /* Put variables last. */
6637 if (DECL_P (arg1))
6638 return 0;
6639 if (DECL_P (arg0))
6640 return 1;
6642 return 0;
6645 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6646 ARG0 is extended to a wider type. */
6648 static tree
6649 fold_widened_comparison (location_t loc, enum tree_code code,
6650 tree type, tree arg0, tree arg1)
6652 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6653 tree arg1_unw;
6654 tree shorter_type, outer_type;
6655 tree min, max;
6656 bool above, below;
6658 if (arg0_unw == arg0)
6659 return NULL_TREE;
6660 shorter_type = TREE_TYPE (arg0_unw);
6662 #ifdef HAVE_canonicalize_funcptr_for_compare
6663 /* Disable this optimization if we're casting a function pointer
6664 type on targets that require function pointer canonicalization. */
6665 if (HAVE_canonicalize_funcptr_for_compare
6666 && TREE_CODE (shorter_type) == POINTER_TYPE
6667 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6668 return NULL_TREE;
6669 #endif
6671 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6672 return NULL_TREE;
6674 arg1_unw = get_unwidened (arg1, NULL_TREE);
6676 /* If possible, express the comparison in the shorter mode. */
6677 if ((code == EQ_EXPR || code == NE_EXPR
6678 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6679 && (TREE_TYPE (arg1_unw) == shorter_type
6680 || ((TYPE_PRECISION (shorter_type)
6681 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6682 && (TYPE_UNSIGNED (shorter_type)
6683 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6684 || (TREE_CODE (arg1_unw) == INTEGER_CST
6685 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6686 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6687 && int_fits_type_p (arg1_unw, shorter_type))))
6688 return fold_build2_loc (loc, code, type, arg0_unw,
6689 fold_convert_loc (loc, shorter_type, arg1_unw));
6691 if (TREE_CODE (arg1_unw) != INTEGER_CST
6692 || TREE_CODE (shorter_type) != INTEGER_TYPE
6693 || !int_fits_type_p (arg1_unw, shorter_type))
6694 return NULL_TREE;
6696 /* If we are comparing with an integer that does not fit into the range
6697 of the shorter type, the result is known. */
6698 outer_type = TREE_TYPE (arg1_unw);
6699 min = lower_bound_in_type (outer_type, shorter_type);
6700 max = upper_bound_in_type (outer_type, shorter_type);
6702 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6703 max, arg1_unw));
6704 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6705 arg1_unw, min));
6707 switch (code)
6709 case EQ_EXPR:
6710 if (above || below)
6711 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6712 break;
6714 case NE_EXPR:
6715 if (above || below)
6716 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6717 break;
6719 case LT_EXPR:
6720 case LE_EXPR:
6721 if (above)
6722 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6723 else if (below)
6724 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6726 case GT_EXPR:
6727 case GE_EXPR:
6728 if (above)
6729 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6730 else if (below)
6731 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6733 default:
6734 break;
6737 return NULL_TREE;
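/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   the "result is known" cases above.  A constant above the range of the
   narrow type makes <, <=, and != always true and ==, >, >= always
   false once the operand is merely widened.  Compile separately.  */
#include <assert.h>
#include <limits.h>

int
main (void)
{
  for (int i = SHRT_MIN; i <= SHRT_MAX; i++)
    {
      short s = (short) i;
      assert ((int) s < 100000);        /* above SHRT_MAX: always true */
      assert (!((int) s == 100000));    /* never equal */
    }
  return 0;
}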
6740 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6741 ARG0 just the signedness is changed. */
6743 static tree
6744 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6745 tree arg0, tree arg1)
6747 tree arg0_inner;
6748 tree inner_type, outer_type;
6750 if (!CONVERT_EXPR_P (arg0))
6751 return NULL_TREE;
6753 outer_type = TREE_TYPE (arg0);
6754 arg0_inner = TREE_OPERAND (arg0, 0);
6755 inner_type = TREE_TYPE (arg0_inner);
6757 #ifdef HAVE_canonicalize_funcptr_for_compare
6758 /* Disable this optimization if we're casting a function pointer
6759 type on targets that require function pointer canonicalization. */
6760 if (HAVE_canonicalize_funcptr_for_compare
6761 && TREE_CODE (inner_type) == POINTER_TYPE
6762 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6763 return NULL_TREE;
6764 #endif
6766 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6767 return NULL_TREE;
6769 if (TREE_CODE (arg1) != INTEGER_CST
6770 && !(CONVERT_EXPR_P (arg1)
6771 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6772 return NULL_TREE;
6774 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6775 && code != NE_EXPR
6776 && code != EQ_EXPR)
6777 return NULL_TREE;
6779 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6780 return NULL_TREE;
6782 if (TREE_CODE (arg1) == INTEGER_CST)
6783 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6784 TREE_OVERFLOW (arg1));
6785 else
6786 arg1 = fold_convert_loc (loc, inner_type, arg1);
6788 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6792 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6793 means A >= Y && A != MAX, but in this case we know that
6794 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6796 static tree
6797 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6799 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6801 if (TREE_CODE (bound) == LT_EXPR)
6802 a = TREE_OPERAND (bound, 0);
6803 else if (TREE_CODE (bound) == GT_EXPR)
6804 a = TREE_OPERAND (bound, 1);
6805 else
6806 return NULL_TREE;
6808 typea = TREE_TYPE (a);
6809 if (!INTEGRAL_TYPE_P (typea)
6810 && !POINTER_TYPE_P (typea))
6811 return NULL_TREE;
6813 if (TREE_CODE (ineq) == LT_EXPR)
6815 a1 = TREE_OPERAND (ineq, 1);
6816 y = TREE_OPERAND (ineq, 0);
6818 else if (TREE_CODE (ineq) == GT_EXPR)
6820 a1 = TREE_OPERAND (ineq, 0);
6821 y = TREE_OPERAND (ineq, 1);
6823 else
6824 return NULL_TREE;
6826 if (TREE_TYPE (a1) != typea)
6827 return NULL_TREE;
6829 if (POINTER_TYPE_P (typea))
6831 /* Convert the pointer types into integer before taking the difference. */
6832 tree ta = fold_convert_loc (loc, ssizetype, a);
6833 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6834 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6836 else
6837 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6839 if (!diff || !integer_onep (diff))
6840 return NULL_TREE;
6842 return fold_build2_loc (loc, GE_EXPR, type, a, y);
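/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   the fold above as a brute-force check.  Given the bound a < x, the
   sharp form a + 1 > y equals a >= y, and a + 1 cannot overflow because
   a < x <= INT_MAX.  Compile separately.  */
#include <assert.h>

int
main (void)
{
  for (int a = -10; a <= 10; a++)
    for (int x = -10; x <= 10; x++)
      for (int y = -10; y <= 10; y++)
        if (a < x)                      /* BOUND: a + 1 cannot overflow */
          assert ((a + 1 > y) == (a >= y));
  return 0;
}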
6845 /* Fold a sum or difference of at least one multiplication.
6846 Returns the folded tree or NULL if no simplification could be made. */
6848 static tree
6849 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6850 tree arg0, tree arg1)
6852 tree arg00, arg01, arg10, arg11;
6853 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6855 /* (A * C) +- (B * C) -> (A+-B) * C.
6856 (A * C) +- A -> A * (C+-1).
6857 We are most concerned about the case where C is a constant,
6858 but other combinations show up during loop reduction. Since
6859 it is not difficult, try all four possibilities. */
6861 if (TREE_CODE (arg0) == MULT_EXPR)
6863 arg00 = TREE_OPERAND (arg0, 0);
6864 arg01 = TREE_OPERAND (arg0, 1);
6866 else if (TREE_CODE (arg0) == INTEGER_CST)
6868 arg00 = build_one_cst (type);
6869 arg01 = arg0;
6871 else
6873 /* We cannot generate constant 1 for fract. */
6874 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6875 return NULL_TREE;
6876 arg00 = arg0;
6877 arg01 = build_one_cst (type);
6879 if (TREE_CODE (arg1) == MULT_EXPR)
6881 arg10 = TREE_OPERAND (arg1, 0);
6882 arg11 = TREE_OPERAND (arg1, 1);
6884 else if (TREE_CODE (arg1) == INTEGER_CST)
6886 arg10 = build_one_cst (type);
6887 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6888 the purpose of this canonicalization. */
6889 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6890 && negate_expr_p (arg1)
6891 && code == PLUS_EXPR)
6893 arg11 = negate_expr (arg1);
6894 code = MINUS_EXPR;
6896 else
6897 arg11 = arg1;
6899 else
6901 /* We cannot generate constant 1 for fract. */
6902 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6903 return NULL_TREE;
6904 arg10 = arg1;
6905 arg11 = build_one_cst (type);
6907 same = NULL_TREE;
6909 if (operand_equal_p (arg01, arg11, 0))
6910 same = arg01, alt0 = arg00, alt1 = arg10;
6911 else if (operand_equal_p (arg00, arg10, 0))
6912 same = arg00, alt0 = arg01, alt1 = arg11;
6913 else if (operand_equal_p (arg00, arg11, 0))
6914 same = arg00, alt0 = arg01, alt1 = arg10;
6915 else if (operand_equal_p (arg01, arg10, 0))
6916 same = arg01, alt0 = arg00, alt1 = arg11;
6918 /* No identical multiplicands; see if we can find a common
6919 power-of-two factor in non-power-of-two multiplies. This
6920 can help in multi-dimensional array access. */
6921 else if (tree_fits_shwi_p (arg01)
6922 && tree_fits_shwi_p (arg11))
6924 HOST_WIDE_INT int01, int11, tmp;
6925 bool swap = false;
6926 tree maybe_same;
6927 int01 = tree_to_shwi (arg01);
6928 int11 = tree_to_shwi (arg11);
6930 /* Move min of absolute values to int11. */
6931 if (absu_hwi (int01) < absu_hwi (int11))
6933 tmp = int01, int01 = int11, int11 = tmp;
6934 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6935 maybe_same = arg01;
6936 swap = true;
6938 else
6939 maybe_same = arg11;
6941 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6942 /* The remainder should not be a constant, otherwise we
6943 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6944 increase the number of multiplications needed. */
6945 && TREE_CODE (arg10) != INTEGER_CST)
6947 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6948 build_int_cst (TREE_TYPE (arg00),
6949 int01 / int11));
6950 alt1 = arg10;
6951 same = maybe_same;
6952 if (swap)
6953 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6957 if (same)
6958 return fold_build2_loc (loc, MULT_EXPR, type,
6959 fold_build2_loc (loc, code, type,
6960 fold_convert_loc (loc, type, alt0),
6961 fold_convert_loc (loc, type, alt1)),
6962 fold_convert_loc (loc, type, same));
6964 return NULL_TREE;
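/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   the two factorings above.  Identical multiplicands give
   (A*C) + (B*C) -> (A+B)*C; a common power-of-two factor gives
   i*12 + j*4 -> (i*3 + j)*4, the multi-dimensional-indexing case.
   Compile separately.  */
#include <assert.h>

int
main (void)
{
  for (int i = -5; i <= 5; i++)
    for (int j = -5; j <= 5; j++)
      {
        assert (i * 7 + j * 7 == (i + j) * 7);
        assert (i * 12 + j * 4 == (i * 3 + j) * 4);
      }
  return 0;
}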
6967 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6968 specified by EXPR into the buffer PTR of length LEN bytes.
6969 Return the number of bytes placed in the buffer, or zero
6970 upon failure. */
6972 static int
6973 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6975 tree type = TREE_TYPE (expr);
6976 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6977 int byte, offset, word, words;
6978 unsigned char value;
6980 if ((off == -1 && total_bytes > len)
6981 || off >= total_bytes)
6982 return 0;
6983 if (off == -1)
6984 off = 0;
6985 words = total_bytes / UNITS_PER_WORD;
6987 for (byte = 0; byte < total_bytes; byte++)
6989 int bitpos = byte * BITS_PER_UNIT;
6990 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6991 number of bytes. */
6992 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
6994 if (total_bytes > UNITS_PER_WORD)
6996 word = byte / UNITS_PER_WORD;
6997 if (WORDS_BIG_ENDIAN)
6998 word = (words - 1) - word;
6999 offset = word * UNITS_PER_WORD;
7000 if (BYTES_BIG_ENDIAN)
7001 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7002 else
7003 offset += byte % UNITS_PER_WORD;
7005 else
7006 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7007 if (offset >= off
7008 && offset - off < len)
7009 ptr[offset - off] = value;
7011 return MIN (len, total_bytes - off);
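/* Editor's illustrative sketch (an addition, not part of fold-const.c):
   the per-byte extraction pattern above, reduced to the simple case of
   a 32-bit value on a little-endian host with no sub-word shuffling.
   Compile separately.  */
#include <assert.h>
#include <stdint.h>

static void
encode_u32_le (uint32_t v, unsigned char *ptr)
{
  for (int byte = 0; byte < 4; byte++)
    ptr[byte] = (unsigned char) (v >> (byte * 8));  /* one byte per step */
}

int
main (void)
{
  unsigned char buf[4];
  encode_u32_le (0x11223344u, buf);
  assert (buf[0] == 0x44 && buf[1] == 0x33
          && buf[2] == 0x22 && buf[3] == 0x11);
  return 0;
}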
7015 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7016 specified by EXPR into the buffer PTR of length LEN bytes.
7017 Return the number of bytes placed in the buffer, or zero
7018 upon failure. */
7020 static int
7021 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7023 tree type = TREE_TYPE (expr);
7024 machine_mode mode = TYPE_MODE (type);
7025 int total_bytes = GET_MODE_SIZE (mode);
7026 FIXED_VALUE_TYPE value;
7027 tree i_value, i_type;
7029 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7030 return 0;
7032 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7034 if (NULL_TREE == i_type
7035 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7036 return 0;
7038 value = TREE_FIXED_CST (expr);
7039 i_value = double_int_to_tree (i_type, value.data);
7041 return native_encode_int (i_value, ptr, len, off);
7045 /* Subroutine of native_encode_expr. Encode the REAL_CST
7046 specified by EXPR into the buffer PTR of length LEN bytes.
7047 Return the number of bytes placed in the buffer, or zero
7048 upon failure. */
7050 static int
7051 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7053 tree type = TREE_TYPE (expr);
7054 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7055 int byte, offset, word, words, bitpos;
7056 unsigned char value;
7058 /* There are always 32 bits in each long, no matter the size of
7059 the host's long. We handle floating point representations with
7060 up to 192 bits. */
7061 long tmp[6];
7063 if ((off == -1 && total_bytes > len)
7064 || off >= total_bytes)
7065 return 0;
7066 if (off == -1)
7067 off = 0;
7068 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7070 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7072 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7073 bitpos += BITS_PER_UNIT)
7075 byte = (bitpos / BITS_PER_UNIT) & 3;
7076 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7078 if (UNITS_PER_WORD < 4)
7080 word = byte / UNITS_PER_WORD;
7081 if (WORDS_BIG_ENDIAN)
7082 word = (words - 1) - word;
7083 offset = word * UNITS_PER_WORD;
7084 if (BYTES_BIG_ENDIAN)
7085 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7086 else
7087 offset += byte % UNITS_PER_WORD;
7089 else
7090 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7091 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7092 if (offset >= off
7093 && offset - off < len)
7094 ptr[offset - off] = value;
7096 return MIN (len, total_bytes - off);
7099 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7100 specified by EXPR into the buffer PTR of length LEN bytes.
7101 Return the number of bytes placed in the buffer, or zero
7102 upon failure. */
7104 static int
7105 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7107 int rsize, isize;
7108 tree part;
7110 part = TREE_REALPART (expr);
7111 rsize = native_encode_expr (part, ptr, len, off);
7112 if (off == -1
7113 && rsize == 0)
7114 return 0;
7115 part = TREE_IMAGPART (expr);
7116 if (off != -1)
7117 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7118 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7119 if (off == -1
7120 && isize != rsize)
7121 return 0;
7122 return rsize + isize;
7126 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7127 specified by EXPR into the buffer PTR of length LEN bytes.
7128 Return the number of bytes placed in the buffer, or zero
7129 upon failure. */
7131 static int
7132 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7134 unsigned i, count;
7135 int size, offset;
7136 tree itype, elem;
7138 offset = 0;
7139 count = VECTOR_CST_NELTS (expr);
7140 itype = TREE_TYPE (TREE_TYPE (expr));
7141 size = GET_MODE_SIZE (TYPE_MODE (itype));
7142 for (i = 0; i < count; i++)
7144 if (off >= size)
7146 off -= size;
7147 continue;
7149 elem = VECTOR_CST_ELT (expr, i);
7150 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7151 if ((off == -1 && res != size)
7152 || res == 0)
7153 return 0;
7154 offset += res;
7155 if (offset >= len)
7156 return offset;
7157 if (off != -1)
7158 off = 0;
7160 return offset;
7164 /* Subroutine of native_encode_expr. Encode the STRING_CST
7165 specified by EXPR into the buffer PTR of length LEN bytes.
7166 Return the number of bytes placed in the buffer, or zero
7167 upon failure. */
7169 static int
7170 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7172 tree type = TREE_TYPE (expr);
7173 HOST_WIDE_INT total_bytes;
7175 if (TREE_CODE (type) != ARRAY_TYPE
7176 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7177 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7178 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7179 return 0;
7180 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7181 if ((off == -1 && total_bytes > len)
7182 || off >= total_bytes)
7183 return 0;
7184 if (off == -1)
7185 off = 0;
7186 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7188 int written = 0;
7189 if (off < TREE_STRING_LENGTH (expr))
7191 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7192 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7194 memset (ptr + written, 0,
7195 MIN (total_bytes - written, len - written));
7197 else
7198 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7199 return MIN (total_bytes - off, len);
7203 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7204 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7205 buffer PTR of length LEN bytes. If OFF is not -1 then start
7206 the encoding at byte offset OFF and encode at most LEN bytes.
7207 Return the number of bytes placed in the buffer, or zero upon failure. */
7209 int
7210 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7212 switch (TREE_CODE (expr))
7214 case INTEGER_CST:
7215 return native_encode_int (expr, ptr, len, off);
7217 case REAL_CST:
7218 return native_encode_real (expr, ptr, len, off);
7220 case FIXED_CST:
7221 return native_encode_fixed (expr, ptr, len, off);
7223 case COMPLEX_CST:
7224 return native_encode_complex (expr, ptr, len, off);
7226 case VECTOR_CST:
7227 return native_encode_vector (expr, ptr, len, off);
7229 case STRING_CST:
7230 return native_encode_string (expr, ptr, len, off);
7232 default:
7233 return 0;
7238 /* Subroutine of native_interpret_expr. Interpret the contents of
7239 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7240 If the buffer cannot be interpreted, return NULL_TREE. */
7242 static tree
7243 native_interpret_int (tree type, const unsigned char *ptr, int len)
7245 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7247 if (total_bytes > len
7248 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7249 return NULL_TREE;
7251 wide_int result = wi::from_buffer (ptr, total_bytes);
7253 return wide_int_to_tree (type, result);
7257 /* Subroutine of native_interpret_expr. Interpret the contents of
7258 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7259 If the buffer cannot be interpreted, return NULL_TREE. */
7261 static tree
7262 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7264 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7265 double_int result;
7266 FIXED_VALUE_TYPE fixed_value;
7268 if (total_bytes > len
7269 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7270 return NULL_TREE;
7272 result = double_int::from_buffer (ptr, total_bytes);
7273 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7275 return build_fixed (type, fixed_value);
7279 /* Subroutine of native_interpret_expr. Interpret the contents of
7280 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7281 If the buffer cannot be interpreted, return NULL_TREE. */
7283 static tree
7284 native_interpret_real (tree type, const unsigned char *ptr, int len)
7286 machine_mode mode = TYPE_MODE (type);
7287 int total_bytes = GET_MODE_SIZE (mode);
7288 int byte, offset, word, words, bitpos;
7289 unsigned char value;
7290 /* There are always 32 bits in each long, no matter the size of
7291 the host's long. We handle floating point representations with
7292 up to 192 bits. */
7293 REAL_VALUE_TYPE r;
7294 long tmp[6];
7297 if (total_bytes > len || total_bytes > 24)
7298 return NULL_TREE;
7299 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7301 memset (tmp, 0, sizeof (tmp));
7302 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7303 bitpos += BITS_PER_UNIT)
7305 byte = (bitpos / BITS_PER_UNIT) & 3;
7306 if (UNITS_PER_WORD < 4)
7308 word = byte / UNITS_PER_WORD;
7309 if (WORDS_BIG_ENDIAN)
7310 word = (words - 1) - word;
7311 offset = word * UNITS_PER_WORD;
7312 if (BYTES_BIG_ENDIAN)
7313 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7314 else
7315 offset += byte % UNITS_PER_WORD;
7317 else
7318 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7319 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7321 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7324 real_from_target (&r, tmp, mode);
7325 return build_real (type, r);
7329 /* Subroutine of native_interpret_expr. Interpret the contents of
7330 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7331 If the buffer cannot be interpreted, return NULL_TREE. */
7333 static tree
7334 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7336 tree etype, rpart, ipart;
7337 int size;
7339 etype = TREE_TYPE (type);
7340 size = GET_MODE_SIZE (TYPE_MODE (etype));
7341 if (size * 2 > len)
7342 return NULL_TREE;
7343 rpart = native_interpret_expr (etype, ptr, size);
7344 if (!rpart)
7345 return NULL_TREE;
7346 ipart = native_interpret_expr (etype, ptr+size, size);
7347 if (!ipart)
7348 return NULL_TREE;
7349 return build_complex (type, rpart, ipart);
7353 /* Subroutine of native_interpret_expr. Interpret the contents of
7354 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7355 If the buffer cannot be interpreted, return NULL_TREE. */
7357 static tree
7358 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7360 tree etype, elem;
7361 int i, size, count;
7362 tree *elements;
7364 etype = TREE_TYPE (type);
7365 size = GET_MODE_SIZE (TYPE_MODE (etype));
7366 count = TYPE_VECTOR_SUBPARTS (type);
7367 if (size * count > len)
7368 return NULL_TREE;
7370 elements = XALLOCAVEC (tree, count);
7371 for (i = count - 1; i >= 0; i--)
7373 elem = native_interpret_expr (etype, ptr+(i*size), size);
7374 if (!elem)
7375 return NULL_TREE;
7376 elements[i] = elem;
7378 return build_vector (type, elements);
7382 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7383 the buffer PTR of length LEN as a constant of type TYPE. For
7384 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7385 we return a REAL_CST, etc. If the buffer cannot be interpreted,
7386 return NULL_TREE. */
7388 tree
7389 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7391 switch (TREE_CODE (type))
7393 case INTEGER_TYPE:
7394 case ENUMERAL_TYPE:
7395 case BOOLEAN_TYPE:
7396 case POINTER_TYPE:
7397 case REFERENCE_TYPE:
7398 return native_interpret_int (type, ptr, len);
7400 case REAL_TYPE:
7401 return native_interpret_real (type, ptr, len);
7403 case FIXED_POINT_TYPE:
7404 return native_interpret_fixed (type, ptr, len);
7406 case COMPLEX_TYPE:
7407 return native_interpret_complex (type, ptr, len);
7409 case VECTOR_TYPE:
7410 return native_interpret_vector (type, ptr, len);
7412 default:
7413 return NULL_TREE;
7417 /* Returns true if we can interpret the contents of a native encoding
7418 as TYPE. */
7420 static bool
7421 can_native_interpret_type_p (tree type)
7423 switch (TREE_CODE (type))
7425 case INTEGER_TYPE:
7426 case ENUMERAL_TYPE:
7427 case BOOLEAN_TYPE:
7428 case POINTER_TYPE:
7429 case REFERENCE_TYPE:
7430 case FIXED_POINT_TYPE:
7431 case REAL_TYPE:
7432 case COMPLEX_TYPE:
7433 case VECTOR_TYPE:
7434 return true;
7435 default:
7436 return false;
7440 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7441 TYPE at compile-time. If we're unable to perform the conversion
7442 return NULL_TREE. */
7444 static tree
7445 fold_view_convert_expr (tree type, tree expr)
7447 /* We support up to 512-bit values (for V8DFmode). */
7448 unsigned char buffer[64];
7449 int len;
7451 /* Check that the host and target are sane. */
7452 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7453 return NULL_TREE;
7455 len = native_encode_expr (expr, buffer, sizeof (buffer));
7456 if (len == 0)
7457 return NULL_TREE;
7459 return native_interpret_expr (type, buffer, len);
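/* As an illustration (assuming 32-bit int and IEEE single-precision
   float): fold_view_convert_expr (integer_type_node, <REAL_CST 1.0f>)
   encodes the float as 4 target-order bytes and reinterprets them as
   an INTEGER_CST, yielding 0x3f800000, the bit pattern of 1.0f.  */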
7462 /* Build an expression for the address of T. Folds away INDIRECT_REF
7463 to avoid confusing the gimplify process. */
7465 tree
7466 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7468 /* The size of the object is not relevant when talking about its address. */
7469 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7470 t = TREE_OPERAND (t, 0);
7472 if (TREE_CODE (t) == INDIRECT_REF)
7474 t = TREE_OPERAND (t, 0);
7476 if (TREE_TYPE (t) != ptrtype)
7477 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7479 else if (TREE_CODE (t) == MEM_REF
7480 && integer_zerop (TREE_OPERAND (t, 1)))
7481 return TREE_OPERAND (t, 0);
7482 else if (TREE_CODE (t) == MEM_REF
7483 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7484 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7485 TREE_OPERAND (t, 0),
7486 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7487 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7489 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7491 if (TREE_TYPE (t) != ptrtype)
7492 t = fold_convert_loc (loc, ptrtype, t);
7494 else
7495 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7497 return t;
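/* As an illustration: building the address of *p yields p itself
   (converted to PTRTYPE if the types differ), and the address of
   MEM_REF [p, 0] likewise folds back to p, so no ADDR_EXPR node is
   created for these forms.  */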
7500 /* Build an expression for the address of T. */
7502 tree
7503 build_fold_addr_expr_loc (location_t loc, tree t)
7505 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7507 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7510 static bool vec_cst_ctor_to_array (tree, tree *);
7512 /* Fold a unary expression of code CODE and type TYPE with operand
7513 OP0. Return the folded expression if folding is successful.
7514 Otherwise, return NULL_TREE. */
7516 tree
7517 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7519 tree tem;
7520 tree arg0;
7521 enum tree_code_class kind = TREE_CODE_CLASS (code);
7523 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7524 && TREE_CODE_LENGTH (code) == 1);
7526 tem = generic_simplify (loc, code, type, op0);
7527 if (tem)
7528 return tem;
7530 arg0 = op0;
7531 if (arg0)
7533 if (CONVERT_EXPR_CODE_P (code)
7534 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7536 /* Don't use STRIP_NOPS, because signedness of argument type
7537 matters. */
7538 STRIP_SIGN_NOPS (arg0);
7540 else
7542 /* Strip any conversions that don't change the mode. This
7543 is safe for every expression, except for a comparison
7544 expression because its signedness is derived from its
7545 operands.
7547 Note that this is done as an internal manipulation within
7548 the constant folder, in order to find the simplest
7549 representation of the arguments so that their form can be
7550 studied. In any case, the appropriate type conversions
7551 should be put back in the tree that will get out of the
7552 constant folder. */
7553 STRIP_NOPS (arg0);
7557 if (TREE_CODE_CLASS (code) == tcc_unary)
7559 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7560 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7561 fold_build1_loc (loc, code, type,
7562 fold_convert_loc (loc, TREE_TYPE (op0),
7563 TREE_OPERAND (arg0, 1))));
7564 else if (TREE_CODE (arg0) == COND_EXPR)
7566 tree arg01 = TREE_OPERAND (arg0, 1);
7567 tree arg02 = TREE_OPERAND (arg0, 2);
7568 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7569 arg01 = fold_build1_loc (loc, code, type,
7570 fold_convert_loc (loc,
7571 TREE_TYPE (op0), arg01));
7572 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7573 arg02 = fold_build1_loc (loc, code, type,
7574 fold_convert_loc (loc,
7575 TREE_TYPE (op0), arg02));
7576 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7577 arg01, arg02);
7579 /* If this was a conversion, and all we did was to move it
7580 inside the COND_EXPR, bring it back out. But leave it if
7581 it is a conversion from integer to integer and the
7582 result precision is no wider than a word since such a
7583 conversion is cheap and may be optimized away by combine,
7584 while it couldn't if it were outside the COND_EXPR. Then return
7585 so we don't get into an infinite recursion loop taking the
7586 conversion out and then back in. */
7588 if ((CONVERT_EXPR_CODE_P (code)
7589 || code == NON_LVALUE_EXPR)
7590 && TREE_CODE (tem) == COND_EXPR
7591 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7592 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7593 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7594 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7595 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7596 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7597 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7598 && (INTEGRAL_TYPE_P
7599 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7600 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7601 || flag_syntax_only))
7602 tem = build1_loc (loc, code, type,
7603 build3 (COND_EXPR,
7604 TREE_TYPE (TREE_OPERAND
7605 (TREE_OPERAND (tem, 1), 0)),
7606 TREE_OPERAND (tem, 0),
7607 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7608 TREE_OPERAND (TREE_OPERAND (tem, 2),
7609 0)));
7610 return tem;
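/* As an illustration: for a tcc_unary code such as NEGATE_EXPR,
   -(a ? b : c) is rewritten to a ? -b : -c above, giving each arm a
   chance to fold; the conversion-specific check then decides whether
   a cast distributed this way should be hoisted back out.  */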
7614 switch (code)
7616 case NON_LVALUE_EXPR:
7617 if (!maybe_lvalue_p (op0))
7618 return fold_convert_loc (loc, type, op0);
7619 return NULL_TREE;
7621 CASE_CONVERT:
7622 case FLOAT_EXPR:
7623 case FIX_TRUNC_EXPR:
7624 if (COMPARISON_CLASS_P (op0))
7626 /* If we have (type) (a CMP b) and type is an integral type, return
7627 new expression involving the new type. Canonicalize
7628 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7629 non-integral type.
7630 Do not fold the result as that would not simplify further;
7631 folding it again would only result in infinite recursion. */
7632 if (TREE_CODE (type) == BOOLEAN_TYPE)
7633 return build2_loc (loc, TREE_CODE (op0), type,
7634 TREE_OPERAND (op0, 0),
7635 TREE_OPERAND (op0, 1));
7636 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7637 && TREE_CODE (type) != VECTOR_TYPE)
7638 return build3_loc (loc, COND_EXPR, type, op0,
7639 constant_boolean_node (true, type),
7640 constant_boolean_node (false, type));
7643 /* Handle (T *)&A.B.C for A being of type T and B and C
7644 living at offset zero. This occurs frequently in
7645 C++ upcasting and then accessing the base. */
7646 if (TREE_CODE (op0) == ADDR_EXPR
7647 && POINTER_TYPE_P (type)
7648 && handled_component_p (TREE_OPERAND (op0, 0)))
7650 HOST_WIDE_INT bitsize, bitpos;
7651 tree offset;
7652 machine_mode mode;
7653 int unsignedp, volatilep;
7654 tree base = TREE_OPERAND (op0, 0);
7655 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7656 &mode, &unsignedp, &volatilep, false);
7657 /* If the reference was to a (constant) zero offset, we can use
7658 the address of the base if it has the same base type
7659 as the result type and the pointer type is unqualified. */
7660 if (! offset && bitpos == 0
7661 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7662 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7663 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7664 return fold_convert_loc (loc, type,
7665 build_fold_addr_expr_loc (loc, base));
7668 if (TREE_CODE (op0) == MODIFY_EXPR
7669 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7670 /* Detect assigning a bitfield. */
7671 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7672 && DECL_BIT_FIELD
7673 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7675 /* Don't leave an assignment inside a conversion
7676 unless assigning a bitfield. */
7677 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7678 /* First do the assignment, then return converted constant. */
7679 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7680 TREE_NO_WARNING (tem) = 1;
7681 TREE_USED (tem) = 1;
7682 return tem;
7685 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7686 constant (if x has signed type, the sign bit cannot be set
7687 in c). This folds extension into the BIT_AND_EXPR.
7688 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7689 very likely don't have maximal range for their precision and this
7690 transformation effectively doesn't preserve non-maximal ranges. */
7691 if (TREE_CODE (type) == INTEGER_TYPE
7692 && TREE_CODE (op0) == BIT_AND_EXPR
7693 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7695 tree and_expr = op0;
7696 tree and0 = TREE_OPERAND (and_expr, 0);
7697 tree and1 = TREE_OPERAND (and_expr, 1);
7698 int change = 0;
7700 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7701 || (TYPE_PRECISION (type)
7702 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7703 change = 1;
7704 else if (TYPE_PRECISION (TREE_TYPE (and1))
7705 <= HOST_BITS_PER_WIDE_INT
7706 && tree_fits_uhwi_p (and1))
7708 unsigned HOST_WIDE_INT cst;
7710 cst = tree_to_uhwi (and1);
7711 cst &= HOST_WIDE_INT_M1U
7712 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7713 change = (cst == 0);
7714 #ifdef LOAD_EXTEND_OP
7715 if (change
7716 && !flag_syntax_only
7717 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7718 == ZERO_EXTEND))
7720 tree uns = unsigned_type_for (TREE_TYPE (and0));
7721 and0 = fold_convert_loc (loc, uns, and0);
7722 and1 = fold_convert_loc (loc, uns, and1);
7724 #endif
7726 if (change)
7728 tem = force_fit_type (type, wi::to_widest (and1), 0,
7729 TREE_OVERFLOW (and1));
7730 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7731 fold_convert_loc (loc, type, and0), tem);
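/* As an illustration (assuming unsigned short X and 32-bit int):
   the widening conversion (int) (X & 0x7f) becomes (int) X & 0x7f
   here, since the BIT_AND_EXPR operates on an unsigned type and the
   mask is unaffected by the extension.  */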
7735 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7736 when one of the new casts will fold away. Conservatively we assume
7737 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7738 if (POINTER_TYPE_P (type)
7739 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7740 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7741 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7742 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7743 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7745 tree arg00 = TREE_OPERAND (arg0, 0);
7746 tree arg01 = TREE_OPERAND (arg0, 1);
7748 return fold_build_pointer_plus_loc
7749 (loc, fold_convert_loc (loc, type, arg00), arg01);
7752 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7753 of the same precision, and X is an integer type not narrower than
7754 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7755 if (INTEGRAL_TYPE_P (type)
7756 && TREE_CODE (op0) == BIT_NOT_EXPR
7757 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7758 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7759 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7761 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7762 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7763 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7764 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7765 fold_convert_loc (loc, type, tem));
7768 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7769 type of X and Y (integer types only). */
7770 if (INTEGRAL_TYPE_P (type)
7771 && TREE_CODE (op0) == MULT_EXPR
7772 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7773 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7775 /* Be careful not to introduce new overflows. */
7776 tree mult_type;
7777 if (TYPE_OVERFLOW_WRAPS (type))
7778 mult_type = type;
7779 else
7780 mult_type = unsigned_type_for (type);
7782 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7784 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7785 fold_convert_loc (loc, mult_type,
7786 TREE_OPERAND (op0, 0)),
7787 fold_convert_loc (loc, mult_type,
7788 TREE_OPERAND (op0, 1)));
7789 return fold_convert_loc (loc, type, tem);
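/* As an illustration (assuming 16-bit short and 32-bit int, without
   -fwrapv): (short) (i * j) with int operands becomes
   (short) ((unsigned short) i * (unsigned short) j); the unsigned
   intermediate type keeps the narrowed multiplication from
   introducing new signed overflow.  */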
7793 tem = fold_convert_const (code, type, arg0);
7794 return tem ? tem : NULL_TREE;
7796 case ADDR_SPACE_CONVERT_EXPR:
7797 if (integer_zerop (arg0))
7798 return fold_convert_const (code, type, arg0);
7799 return NULL_TREE;
7801 case FIXED_CONVERT_EXPR:
7802 tem = fold_convert_const (code, type, arg0);
7803 return tem ? tem : NULL_TREE;
7805 case VIEW_CONVERT_EXPR:
7806 if (TREE_CODE (op0) == MEM_REF)
7807 return fold_build2_loc (loc, MEM_REF, type,
7808 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7810 return fold_view_convert_expr (type, op0);
7812 case NEGATE_EXPR:
7813 tem = fold_negate_expr (loc, arg0);
7814 if (tem)
7815 return fold_convert_loc (loc, type, tem);
7816 return NULL_TREE;
7818 case ABS_EXPR:
7819 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7820 return fold_abs_const (arg0, type);
7821 /* Convert fabs((double)float) into (double)fabsf(float). */
7822 else if (TREE_CODE (arg0) == NOP_EXPR
7823 && TREE_CODE (type) == REAL_TYPE)
7825 tree targ0 = strip_float_extensions (arg0);
7826 if (targ0 != arg0)
7827 return fold_convert_loc (loc, type,
7828 fold_build1_loc (loc, ABS_EXPR,
7829 TREE_TYPE (targ0),
7830 targ0));
7832 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7833 else if (TREE_CODE (arg0) == ABS_EXPR)
7834 return arg0;
7836 /* Strip sign ops from argument. */
7837 if (TREE_CODE (type) == REAL_TYPE)
7839 tem = fold_strip_sign_ops (arg0);
7840 if (tem)
7841 return fold_build1_loc (loc, ABS_EXPR, type,
7842 fold_convert_loc (loc, type, tem));
7844 return NULL_TREE;
7846 case CONJ_EXPR:
7847 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7848 return fold_convert_loc (loc, type, arg0);
7849 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7851 tree itype = TREE_TYPE (type);
7852 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7853 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7854 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7855 negate_expr (ipart));
7857 if (TREE_CODE (arg0) == COMPLEX_CST)
7859 tree itype = TREE_TYPE (type);
7860 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
7861 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
7862 return build_complex (type, rpart, negate_expr (ipart));
7864 if (TREE_CODE (arg0) == CONJ_EXPR)
7865 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7866 return NULL_TREE;
7868 case BIT_NOT_EXPR:
7869 if (TREE_CODE (arg0) == INTEGER_CST)
7870 return fold_not_const (arg0, type);
7871 /* Convert ~ (-A) to A - 1. */
7872 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7873 return fold_build2_loc (loc, MINUS_EXPR, type,
7874 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
7875 build_int_cst (type, 1));
7876 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7877 else if (INTEGRAL_TYPE_P (type)
7878 && ((TREE_CODE (arg0) == MINUS_EXPR
7879 && integer_onep (TREE_OPERAND (arg0, 1)))
7880 || (TREE_CODE (arg0) == PLUS_EXPR
7881 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7882 return fold_build1_loc (loc, NEGATE_EXPR, type,
7883 fold_convert_loc (loc, type,
7884 TREE_OPERAND (arg0, 0)));
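/* As an illustration: both folds above follow from the two's
   complement identity ~y == -y - 1, e.g. ~(-x) becomes x - 1 and
   ~(x - 1) becomes -x.  */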
7885 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7886 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7887 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7888 fold_convert_loc (loc, type,
7889 TREE_OPERAND (arg0, 0)))))
7890 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7891 fold_convert_loc (loc, type,
7892 TREE_OPERAND (arg0, 1)));
7893 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7894 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7895 fold_convert_loc (loc, type,
7896 TREE_OPERAND (arg0, 1)))))
7897 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7898 fold_convert_loc (loc, type,
7899 TREE_OPERAND (arg0, 0)), tem);
7900 /* Perform BIT_NOT_EXPR on each element individually. */
7901 else if (TREE_CODE (arg0) == VECTOR_CST)
7903 tree *elements;
7904 tree elem;
7905 unsigned count = VECTOR_CST_NELTS (arg0), i;
7907 elements = XALLOCAVEC (tree, count);
7908 for (i = 0; i < count; i++)
7910 elem = VECTOR_CST_ELT (arg0, i);
7911 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
7912 if (elem == NULL_TREE)
7913 break;
7914 elements[i] = elem;
7916 if (i == count)
7917 return build_vector (type, elements);
7920 return NULL_TREE;
7922 case TRUTH_NOT_EXPR:
7923 /* Note that the operand of this must be an int
7924 and its values must be 0 or 1.
7925 ("true" is a fixed value perhaps depending on the language,
7926 but we don't handle values other than 1 correctly yet.) */
7927 tem = fold_truth_not_expr (loc, arg0);
7928 if (!tem)
7929 return NULL_TREE;
7930 return fold_convert_loc (loc, type, tem);
7932 case REALPART_EXPR:
7933 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7934 return fold_convert_loc (loc, type, arg0);
7935 if (TREE_CODE (arg0) == COMPLEX_CST)
7936 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
7937 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7939 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7940 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7941 fold_build1_loc (loc, REALPART_EXPR, itype,
7942 TREE_OPERAND (arg0, 0)),
7943 fold_build1_loc (loc, REALPART_EXPR, itype,
7944 TREE_OPERAND (arg0, 1)));
7945 return fold_convert_loc (loc, type, tem);
7947 if (TREE_CODE (arg0) == CONJ_EXPR)
7949 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7950 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
7951 TREE_OPERAND (arg0, 0));
7952 return fold_convert_loc (loc, type, tem);
7954 if (TREE_CODE (arg0) == CALL_EXPR)
7956 tree fn = get_callee_fndecl (arg0);
7957 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7958 switch (DECL_FUNCTION_CODE (fn))
7960 CASE_FLT_FN (BUILT_IN_CEXPI):
7961 fn = mathfn_built_in (type, BUILT_IN_COS);
7962 if (fn)
7963 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
7964 break;
7966 default:
7967 break;
7970 return NULL_TREE;
7972 case IMAGPART_EXPR:
7973 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7974 return build_zero_cst (type);
7975 if (TREE_CODE (arg0) == COMPLEX_CST)
7976 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
7977 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7979 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7980 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7981 fold_build1_loc (loc, IMAGPART_EXPR, itype,
7982 TREE_OPERAND (arg0, 0)),
7983 fold_build1_loc (loc, IMAGPART_EXPR, itype,
7984 TREE_OPERAND (arg0, 1)));
7985 return fold_convert_loc (loc, type, tem);
7987 if (TREE_CODE (arg0) == CONJ_EXPR)
7989 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7990 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7991 return fold_convert_loc (loc, type, negate_expr (tem));
7993 if (TREE_CODE (arg0) == CALL_EXPR)
7995 tree fn = get_callee_fndecl (arg0);
7996 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7997 switch (DECL_FUNCTION_CODE (fn))
7999 CASE_FLT_FN (BUILT_IN_CEXPI):
8000 fn = mathfn_built_in (type, BUILT_IN_SIN);
8001 if (fn)
8002 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8003 break;
8005 default:
8006 break;
8009 return NULL_TREE;
8011 case INDIRECT_REF:
8012 /* Fold *&X to X if X is an lvalue. */
8013 if (TREE_CODE (op0) == ADDR_EXPR)
8015 tree op00 = TREE_OPERAND (op0, 0);
8016 if ((TREE_CODE (op00) == VAR_DECL
8017 || TREE_CODE (op00) == PARM_DECL
8018 || TREE_CODE (op00) == RESULT_DECL)
8019 && !TREE_READONLY (op00))
8020 return op00;
8022 return NULL_TREE;
8024 case VEC_UNPACK_LO_EXPR:
8025 case VEC_UNPACK_HI_EXPR:
8026 case VEC_UNPACK_FLOAT_LO_EXPR:
8027 case VEC_UNPACK_FLOAT_HI_EXPR:
8029 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8030 tree *elts;
8031 enum tree_code subcode;
8033 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8034 if (TREE_CODE (arg0) != VECTOR_CST)
8035 return NULL_TREE;
8037 elts = XALLOCAVEC (tree, nelts * 2);
8038 if (!vec_cst_ctor_to_array (arg0, elts))
8039 return NULL_TREE;
8041 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8042 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8043 elts += nelts;
8045 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8046 subcode = NOP_EXPR;
8047 else
8048 subcode = FLOAT_EXPR;
8050 for (i = 0; i < nelts; i++)
8052 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8053 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8054 return NULL_TREE;
8057 return build_vector (type, elts);
8060 case REDUC_MIN_EXPR:
8061 case REDUC_MAX_EXPR:
8062 case REDUC_PLUS_EXPR:
8064 unsigned int nelts, i;
8065 tree *elts;
8066 enum tree_code subcode;
8068 if (TREE_CODE (op0) != VECTOR_CST)
8069 return NULL_TREE;
8070 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8072 elts = XALLOCAVEC (tree, nelts);
8073 if (!vec_cst_ctor_to_array (op0, elts))
8074 return NULL_TREE;
8076 switch (code)
8078 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8079 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8080 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8081 default: gcc_unreachable ();
8084 for (i = 1; i < nelts; i++)
8086 elts[0] = const_binop (subcode, elts[0], elts[i]);
8087 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8088 return NULL_TREE;
8091 return elts[0];
8094 default:
8095 return NULL_TREE;
8096 } /* switch (code) */
8100 /* If the operation was a conversion do _not_ mark a resulting constant
8101 with TREE_OVERFLOW if the original constant was not. These conversions
8102 have implementation defined behavior and retaining the TREE_OVERFLOW
8103 flag here would confuse later passes such as VRP. */
8104 tree
8105 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8106 tree type, tree op0)
8108 tree res = fold_unary_loc (loc, code, type, op0);
8109 if (res
8110 && TREE_CODE (res) == INTEGER_CST
8111 && TREE_CODE (op0) == INTEGER_CST
8112 && CONVERT_EXPR_CODE_P (code))
8113 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8115 return res;
8118 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8119 operands OP0 and OP1. LOC is the location of the resulting expression.
8120 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8121 Return the folded expression if folding is successful. Otherwise,
8122 return NULL_TREE. */
8123 static tree
8124 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8125 tree arg0, tree arg1, tree op0, tree op1)
8127 tree tem;
8129 /* We only do these simplifications if we are optimizing. */
8130 if (!optimize)
8131 return NULL_TREE;
8133 /* Check for things like (A || B) && (A || C). We can convert this
8134 to A || (B && C). Note that either operator can be any of the four
8135 truth and/or operations and the transformation will still be
8136 valid. Also note that we only care about order for the
8137 ANDIF and ORIF operators. If B contains side effects, this
8138 might change the truth-value of A. */
8139 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8140 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8141 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8142 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8143 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8144 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8146 tree a00 = TREE_OPERAND (arg0, 0);
8147 tree a01 = TREE_OPERAND (arg0, 1);
8148 tree a10 = TREE_OPERAND (arg1, 0);
8149 tree a11 = TREE_OPERAND (arg1, 1);
8150 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8151 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8152 && (code == TRUTH_AND_EXPR
8153 || code == TRUTH_OR_EXPR));
8155 if (operand_equal_p (a00, a10, 0))
8156 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8157 fold_build2_loc (loc, code, type, a01, a11));
8158 else if (commutative && operand_equal_p (a00, a11, 0))
8159 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8160 fold_build2_loc (loc, code, type, a01, a10));
8161 else if (commutative && operand_equal_p (a01, a10, 0))
8162 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8163 fold_build2_loc (loc, code, type, a00, a11));
8165 /* This case is tricky because we must either have commutative
8166 operators or else A10 must not have side-effects. */
8168 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8169 && operand_equal_p (a01, a11, 0))
8170 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8171 fold_build2_loc (loc, code, type, a00, a10),
8172 a01);
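/* As an illustration: (a || b) && (a || c) becomes a || (b && c)
   here, provided b has no side effects; for the short-circuit
   operators the shared operand must stay on the left so its
   evaluation order is preserved.  */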
8175 /* See if we can build a range comparison. */
8176 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8177 return tem;
8179 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8180 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8182 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8183 if (tem)
8184 return fold_build2_loc (loc, code, type, tem, arg1);
8187 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8188 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8190 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8191 if (tem)
8192 return fold_build2_loc (loc, code, type, arg0, tem);
8195 /* Check for the possibility of merging component references. If our
8196 lhs is another similar operation, try to merge its rhs with our
8197 rhs. Then try to merge our lhs and rhs. */
8198 if (TREE_CODE (arg0) == code
8199 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8200 TREE_OPERAND (arg0, 1), arg1)))
8201 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8203 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8204 return tem;
8206 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8207 && (code == TRUTH_AND_EXPR
8208 || code == TRUTH_ANDIF_EXPR
8209 || code == TRUTH_OR_EXPR
8210 || code == TRUTH_ORIF_EXPR))
8212 enum tree_code ncode, icode;
8214 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8215 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8216 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8218 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8219 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8220 We don't want to pack more than two leaves into a non-IF AND/OR
8221 expression.
8222 If the tree code of the left-hand operand isn't an AND/OR-IF code
8223 and isn't equal to IF-CODE, then we don't want to add the right-hand
8224 operand. If the inner right-hand side of the left-hand operand has
8225 side effects, or isn't simple, then we can't add to it,
8226 as otherwise we might destroy the if-sequence. */
8227 if (TREE_CODE (arg0) == icode
8228 && simple_operand_p_2 (arg1)
8229 /* Needed for sequence points to handle trapping and
8230 side-effects. */
8231 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8233 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8234 arg1);
8235 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8236 tem);
8238 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8239 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8240 else if (TREE_CODE (arg1) == icode
8241 && simple_operand_p_2 (arg0)
8242 /* Needed for sequence points to handle trapping and
8243 side-effects. */
8244 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8246 tem = fold_build2_loc (loc, ncode, type,
8247 arg0, TREE_OPERAND (arg1, 0));
8248 return fold_build2_loc (loc, icode, type, tem,
8249 TREE_OPERAND (arg1, 1));
8251 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8252 into (A OR B).
8253 For sequence point consistency, we need to check for trapping
8254 and side-effects. */
8255 else if (code == icode && simple_operand_p_2 (arg0)
8256 && simple_operand_p_2 (arg1))
8257 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8260 return NULL_TREE;
8263 /* Fold a binary expression of code CODE and type TYPE with operands
8264 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8265 Return the folded expression if folding is successful. Otherwise,
8266 return NULL_TREE. */
8268 static tree
8269 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8271 enum tree_code compl_code;
8273 if (code == MIN_EXPR)
8274 compl_code = MAX_EXPR;
8275 else if (code == MAX_EXPR)
8276 compl_code = MIN_EXPR;
8277 else
8278 gcc_unreachable ();
8280 /* MIN (MAX (a, b), b) == b. */
8281 if (TREE_CODE (op0) == compl_code
8282 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8283 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8285 /* MIN (MAX (b, a), b) == b. */
8286 if (TREE_CODE (op0) == compl_code
8287 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8288 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8289 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8291 /* MIN (a, MAX (a, b)) == a. */
8292 if (TREE_CODE (op1) == compl_code
8293 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8294 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8295 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8297 /* MIN (a, MAX (b, a)) == a. */
8298 if (TREE_CODE (op1) == compl_code
8299 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8300 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8301 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8303 return NULL_TREE;
8306 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8307 by changing CODE to reduce the magnitude of constants involved in
8308 ARG0 of the comparison.
8309 Returns a canonicalized comparison tree if a simplification was
8310 possible, otherwise returns NULL_TREE.
8311 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8312 valid if signed overflow is undefined. */
8314 static tree
8315 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8316 tree arg0, tree arg1,
8317 bool *strict_overflow_p)
8319 enum tree_code code0 = TREE_CODE (arg0);
8320 tree t, cst0 = NULL_TREE;
8321 int sgn0;
8322 bool swap = false;
8324 /* Match A +- CST code arg1 and CST code arg1. We can change the
8325 first form only if overflow is undefined. */
8326 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8327 /* In principle pointers also have undefined overflow behavior,
8328 but that causes problems elsewhere. */
8329 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8330 && (code0 == MINUS_EXPR
8331 || code0 == PLUS_EXPR)
8332 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8333 || code0 == INTEGER_CST))
8334 return NULL_TREE;
8336 /* Identify the constant in arg0 and its sign. */
8337 if (code0 == INTEGER_CST)
8338 cst0 = arg0;
8339 else
8340 cst0 = TREE_OPERAND (arg0, 1);
8341 sgn0 = tree_int_cst_sgn (cst0);
8343 /* Overflowed constants and zero will cause problems. */
8344 if (integer_zerop (cst0)
8345 || TREE_OVERFLOW (cst0))
8346 return NULL_TREE;
8348 /* See if we can reduce the magnitude of the constant in
8349 arg0 by changing the comparison code. */
8350 if (code0 == INTEGER_CST)
8352 /* CST <= arg1 -> CST-1 < arg1. */
8353 if (code == LE_EXPR && sgn0 == 1)
8354 code = LT_EXPR;
8355 /* -CST < arg1 -> -CST-1 <= arg1. */
8356 else if (code == LT_EXPR && sgn0 == -1)
8357 code = LE_EXPR;
8358 /* CST > arg1 -> CST-1 >= arg1. */
8359 else if (code == GT_EXPR && sgn0 == 1)
8360 code = GE_EXPR;
8361 /* -CST >= arg1 -> -CST-1 > arg1. */
8362 else if (code == GE_EXPR && sgn0 == -1)
8363 code = GT_EXPR;
8364 else
8365 return NULL_TREE;
8366 /* arg1 code' CST' might be more canonical. */
8367 swap = true;
8369 else
8371 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8372 if (code == LT_EXPR
8373 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8374 code = LE_EXPR;
8375 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8376 else if (code == GT_EXPR
8377 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8378 code = GE_EXPR;
8379 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8380 else if (code == LE_EXPR
8381 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8382 code = LT_EXPR;
8383 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8384 else if (code == GE_EXPR
8385 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8386 code = GT_EXPR;
8387 else
8388 return NULL_TREE;
8389 *strict_overflow_p = true;
8392 /* Now build the constant reduced in magnitude. But not if that
8393 would produce one outside of its type's range. */
8394 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8395 && ((sgn0 == 1
8396 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8397 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8398 || (sgn0 == -1
8399 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8400 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8401 /* We cannot swap the comparison here as that would cause us to
8402 endlessly recurse. */
8403 return NULL_TREE;
8405 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8406 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8407 if (code0 != INTEGER_CST)
8408 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8409 t = fold_convert (TREE_TYPE (arg1), t);
8411 /* If swapping might yield a more canonical form, do so. */
8412 if (swap)
8413 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8414 else
8415 return fold_build2_loc (loc, code, type, t, arg1);
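/* As an illustration: 3 <= x is first rewritten to 2 < x and then
   swapped into the more canonical x > 2; similarly x - 2 >= y
   becomes x - 1 > y, which is only valid when signed overflow is
   undefined, hence *STRICT_OVERFLOW_P is set for that form.  */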
8418 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8419 overflow further. Try to decrease the magnitude of constants involved
8420 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8421 and put sole constants at the second argument position.
8422 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8424 static tree
8425 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8426 tree arg0, tree arg1)
8428 tree t;
8429 bool strict_overflow_p;
8430 const char * const warnmsg = G_("assuming signed overflow does not occur "
8431 "when reducing constant in comparison");
8433 /* Try canonicalization by simplifying arg0. */
8434 strict_overflow_p = false;
8435 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8436 &strict_overflow_p);
8437 if (t)
8439 if (strict_overflow_p)
8440 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8441 return t;
8444 /* Try canonicalization by simplifying arg1 using the swapped
8445 comparison. */
8446 code = swap_tree_comparison (code);
8447 strict_overflow_p = false;
8448 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8449 &strict_overflow_p);
8450 if (t && strict_overflow_p)
8451 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8452 return t;
8455 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8456 space. This is used to avoid issuing overflow warnings for
8457 expressions like &p->x which cannot wrap. */
8459 static bool
8460 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8462 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8463 return true;
8465 if (bitpos < 0)
8466 return true;
8468 wide_int wi_offset;
8469 int precision = TYPE_PRECISION (TREE_TYPE (base));
8470 if (offset == NULL_TREE)
8471 wi_offset = wi::zero (precision);
8472 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8473 return true;
8474 else
8475 wi_offset = offset;
8477 bool overflow;
8478 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8479 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8480 if (overflow)
8481 return true;
8483 if (!wi::fits_uhwi_p (total))
8484 return true;
8486 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8487 if (size <= 0)
8488 return true;
8490 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8491 array. */
8492 if (TREE_CODE (base) == ADDR_EXPR)
8494 HOST_WIDE_INT base_size;
8496 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8497 if (base_size > 0 && size < base_size)
8498 size = base_size;
8501 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8504 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8505 kind INTEGER_CST. This makes sure to properly sign-extend the
8506 constant. */
8508 static HOST_WIDE_INT
8509 size_low_cst (const_tree t)
8511 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8512 int prec = TYPE_PRECISION (TREE_TYPE (t));
8513 if (prec < HOST_BITS_PER_WIDE_INT)
8514 return sext_hwi (w, prec);
8515 return w;
8518 /* Subroutine of fold_binary. This routine performs all of the
8519 transformations that are common to the equality/inequality
8520 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8521 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8522 fold_binary should call fold_binary. Fold a comparison with
8523 tree code CODE and type TYPE with operands OP0 and OP1. Return
8524 the folded comparison or NULL_TREE. */
8526 static tree
8527 fold_comparison (location_t loc, enum tree_code code, tree type,
8528 tree op0, tree op1)
8530 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8531 tree arg0, arg1, tem;
8533 arg0 = op0;
8534 arg1 = op1;
8536 STRIP_SIGN_NOPS (arg0);
8537 STRIP_SIGN_NOPS (arg1);
8539 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8540 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8541 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8542 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8543 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8544 && TREE_CODE (arg1) == INTEGER_CST
8545 && !TREE_OVERFLOW (arg1))
8547 const enum tree_code
8548 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8549 tree const1 = TREE_OPERAND (arg0, 1);
8550 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8551 tree variable = TREE_OPERAND (arg0, 0);
8552 tree new_const = int_const_binop (reverse_op, const2, const1);
8554 /* If the constant operation overflowed this can be
8555 simplified as a comparison against INT_MAX/INT_MIN. */
8556 if (TREE_OVERFLOW (new_const)
8557 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8559 int const1_sgn = tree_int_cst_sgn (const1);
8560 enum tree_code code2 = code;
8562 /* Get the sign of the constant on the lhs as if the
8563 operation were VARIABLE + CONST1. */
8564 if (TREE_CODE (arg0) == MINUS_EXPR)
8565 const1_sgn = -const1_sgn;
8567 /* The sign of the constant determines if we overflowed
8568 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8569 Canonicalize to the INT_MIN overflow by swapping the comparison
8570 if necessary. */
8571 if (const1_sgn == -1)
8572 code2 = swap_tree_comparison (code);
8574 /* We can now look at the canonicalized case
8575 VARIABLE + 1 CODE2 INT_MIN
8576 and decide on the result. */
8577 switch (code2)
8579 case EQ_EXPR:
8580 case LT_EXPR:
8581 case LE_EXPR:
8582 return
8583 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8585 case NE_EXPR:
8586 case GE_EXPR:
8587 case GT_EXPR:
8588 return
8589 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8591 default:
8592 gcc_unreachable ();
8595 else
8597 if (!equality_code)
8598 fold_overflow_warning ("assuming signed overflow does not occur "
8599 "when changing X +- C1 cmp C2 to "
8600 "X cmp C2 -+ C1",
8601 WARN_STRICT_OVERFLOW_COMPARISON);
8602 return fold_build2_loc (loc, code, type, variable, new_const);
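/* As an illustration: x + 10 < 20 becomes x < 10 (with a
   -Wstrict-overflow warning for ordered comparisons on signed
   types); and when the reverse operation overflows, as in
   x - 1 <= INT_MAX, the comparison folds to a constant, here
   always true.  */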
8606 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8607 if (TREE_CODE (arg0) == MINUS_EXPR
8608 && equality_code
8609 && integer_zerop (arg1))
8611 /* ??? The transformation is valid for the other operators if overflow
8612 is undefined for the type, but performing it here badly interacts
8613 with the transformation in fold_cond_expr_with_comparison which
8614 attempts to synthesize ABS_EXPR. */
8619 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8620 TREE_OPERAND (arg0, 1));
8623 /* For comparisons of pointers we can decompose it to a compile time
8624 comparison of the base objects and the offsets into the object.
8625 This requires at least one operand being an ADDR_EXPR or a
8626 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8627 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8628 && (TREE_CODE (arg0) == ADDR_EXPR
8629 || TREE_CODE (arg1) == ADDR_EXPR
8630 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8631 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8633 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8634 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8635 machine_mode mode;
8636 int volatilep, unsignedp;
8637 bool indirect_base0 = false, indirect_base1 = false;
8639 /* Get base and offset for the access. Strip ADDR_EXPR for
8640 get_inner_reference, but put it back by stripping INDIRECT_REF
8641 off the base object if possible. indirect_baseN will be true
8642 if baseN is not an address but refers to the object itself. */
8643 base0 = arg0;
8644 if (TREE_CODE (arg0) == ADDR_EXPR)
8646 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8647 &bitsize, &bitpos0, &offset0, &mode,
8648 &unsignedp, &volatilep, false);
8649 if (TREE_CODE (base0) == INDIRECT_REF)
8650 base0 = TREE_OPERAND (base0, 0);
8651 else
8652 indirect_base0 = true;
8654 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8656 base0 = TREE_OPERAND (arg0, 0);
8657 STRIP_SIGN_NOPS (base0);
8658 if (TREE_CODE (base0) == ADDR_EXPR)
8660 base0 = TREE_OPERAND (base0, 0);
8661 indirect_base0 = true;
8663 offset0 = TREE_OPERAND (arg0, 1);
8664 if (tree_fits_shwi_p (offset0))
8666 HOST_WIDE_INT off = size_low_cst (offset0);
8667 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8668 * BITS_PER_UNIT)
8669 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8671 bitpos0 = off * BITS_PER_UNIT;
8672 offset0 = NULL_TREE;
8677 base1 = arg1;
8678 if (TREE_CODE (arg1) == ADDR_EXPR)
8680 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8681 &bitsize, &bitpos1, &offset1, &mode,
8682 &unsignedp, &volatilep, false);
8683 if (TREE_CODE (base1) == INDIRECT_REF)
8684 base1 = TREE_OPERAND (base1, 0);
8685 else
8686 indirect_base1 = true;
8688 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8690 base1 = TREE_OPERAND (arg1, 0);
8691 STRIP_SIGN_NOPS (base1);
8692 if (TREE_CODE (base1) == ADDR_EXPR)
8694 base1 = TREE_OPERAND (base1, 0);
8695 indirect_base1 = true;
8697 offset1 = TREE_OPERAND (arg1, 1);
8698 if (tree_fits_shwi_p (offset1))
8700 HOST_WIDE_INT off = size_low_cst (offset1);
8701 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8702 * BITS_PER_UNIT)
8703 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8705 bitpos1 = off * BITS_PER_UNIT;
8706 offset1 = NULL_TREE;
8711 /* A local variable can never be pointed to by
8712 the default SSA name of an incoming parameter. */
8713 if ((TREE_CODE (arg0) == ADDR_EXPR
8714 && indirect_base0
8715 && TREE_CODE (base0) == VAR_DECL
8716 && auto_var_in_fn_p (base0, current_function_decl)
8717 && !indirect_base1
8718 && TREE_CODE (base1) == SSA_NAME
8719 && SSA_NAME_IS_DEFAULT_DEF (base1)
8720 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8721 || (TREE_CODE (arg1) == ADDR_EXPR
8722 && indirect_base1
8723 && TREE_CODE (base1) == VAR_DECL
8724 && auto_var_in_fn_p (base1, current_function_decl)
8725 && !indirect_base0
8726 && TREE_CODE (base0) == SSA_NAME
8727 && SSA_NAME_IS_DEFAULT_DEF (base0)
8728 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8730 if (code == NE_EXPR)
8731 return constant_boolean_node (1, type);
8732 else if (code == EQ_EXPR)
8733 return constant_boolean_node (0, type);
8735 /* If we have equivalent bases we might be able to simplify. */
8736 else if (indirect_base0 == indirect_base1
8737 && operand_equal_p (base0, base1, 0))
8739 /* We can fold this expression to a constant if the non-constant
8740 offset parts are equal. */
8741 if ((offset0 == offset1
8742 || (offset0 && offset1
8743 && operand_equal_p (offset0, offset1, 0)))
8744 && (code == EQ_EXPR
8745 || code == NE_EXPR
8746 || (indirect_base0 && DECL_P (base0))
8747 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8750 if (!equality_code
8751 && bitpos0 != bitpos1
8752 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8753 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8754 fold_overflow_warning (("assuming pointer wraparound does not "
8755 "occur when comparing P +- C1 with "
8756 "P +- C2"),
8757 WARN_STRICT_OVERFLOW_CONDITIONAL);
8759 switch (code)
8761 case EQ_EXPR:
8762 return constant_boolean_node (bitpos0 == bitpos1, type);
8763 case NE_EXPR:
8764 return constant_boolean_node (bitpos0 != bitpos1, type);
8765 case LT_EXPR:
8766 return constant_boolean_node (bitpos0 < bitpos1, type);
8767 case LE_EXPR:
8768 return constant_boolean_node (bitpos0 <= bitpos1, type);
8769 case GE_EXPR:
8770 return constant_boolean_node (bitpos0 >= bitpos1, type);
8771 case GT_EXPR:
8772 return constant_boolean_node (bitpos0 > bitpos1, type);
8773 default:;
8776 /* We can simplify the comparison to a comparison of the variable
8777 offset parts if the constant offset parts are equal.
8778 Be careful to use signed sizetype here because otherwise we
8779 mess with array offsets in the wrong way. This is possible
8780 because pointer arithmetic is restricted to remain within an
8781 object and overflow on pointer differences is undefined as of
8782 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8783 else if (bitpos0 == bitpos1
8784 && (equality_code
8785 || (indirect_base0 && DECL_P (base0))
8786 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8788 /* By converting to signed sizetype we cover middle-end pointer
8789 arithmetic which operates on unsigned pointer types of size
8790 type size and ARRAY_REF offsets which are properly sign or
8791 zero extended from their type in case it is narrower than
8792 sizetype. */
8793 if (offset0 == NULL_TREE)
8794 offset0 = build_int_cst (ssizetype, 0);
8795 else
8796 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8797 if (offset1 == NULL_TREE)
8798 offset1 = build_int_cst (ssizetype, 0);
8799 else
8800 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8802 if (!equality_code
8803 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8804 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8805 fold_overflow_warning (("assuming pointer wraparound does not "
8806 "occur when comparing P +- C1 with "
8807 "P +- C2"),
8808 WARN_STRICT_OVERFLOW_COMPARISON);
8810 return fold_build2_loc (loc, code, type, offset0, offset1);
8813 /* For non-equal bases we can simplify if they are addresses
8814 of local binding decls or constants. */
8815 else if (indirect_base0 && indirect_base1
8816 /* We know that !operand_equal_p (base0, base1, 0)
8817 because the if condition was false. But make
8818 sure two decls are not the same. */
8819 && base0 != base1
8820 && TREE_CODE (arg0) == ADDR_EXPR
8821 && TREE_CODE (arg1) == ADDR_EXPR
8822 && (((TREE_CODE (base0) == VAR_DECL
8823 || TREE_CODE (base0) == PARM_DECL)
8824 && (targetm.binds_local_p (base0)
8825 || CONSTANT_CLASS_P (base1)))
8826 || CONSTANT_CLASS_P (base0))
8827 && (((TREE_CODE (base1) == VAR_DECL
8828 || TREE_CODE (base1) == PARM_DECL)
8829 && (targetm.binds_local_p (base1)
8830 || CONSTANT_CLASS_P (base0)))
8831 || CONSTANT_CLASS_P (base1)))
8833 if (code == EQ_EXPR)
8834 return omit_two_operands_loc (loc, type, boolean_false_node,
8835 arg0, arg1);
8836 else if (code == NE_EXPR)
8837 return omit_two_operands_loc (loc, type, boolean_true_node,
8838 arg0, arg1);
8840 /* For equal offsets we can simplify to a comparison of the
8841 base addresses. */
8842 else if (bitpos0 == bitpos1
8843 && (indirect_base0
8844 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8845 && (indirect_base1
8846 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8847 && ((offset0 == offset1)
8848 || (offset0 && offset1
8849 && operand_equal_p (offset0, offset1, 0))))
8851 if (indirect_base0)
8852 base0 = build_fold_addr_expr_loc (loc, base0);
8853 if (indirect_base1)
8854 base1 = build_fold_addr_expr_loc (loc, base1);
8855 return fold_build2_loc (loc, code, type, base0, base1);
8859 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8860 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8861 the resulting offset is smaller in absolute value than the
8862 original one and has the same sign. */
8863 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8864 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8865 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8866 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8867 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8868 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8869 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8871 tree const1 = TREE_OPERAND (arg0, 1);
8872 tree const2 = TREE_OPERAND (arg1, 1);
8873 tree variable1 = TREE_OPERAND (arg0, 0);
8874 tree variable2 = TREE_OPERAND (arg1, 0);
8875 tree cst;
8876 const char * const warnmsg = G_("assuming signed overflow does not "
8877 "occur when combining constants around "
8878 "a comparison");
8880 /* Put the constant on the side where it doesn't overflow and is
8881 of lower absolute value and the same sign as before. */
8882 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8883 ? MINUS_EXPR : PLUS_EXPR,
8884 const2, const1);
8885 if (!TREE_OVERFLOW (cst)
8886 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8887 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8889 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8890 return fold_build2_loc (loc, code, type,
8891 variable1,
8892 fold_build2_loc (loc, TREE_CODE (arg1),
8893 TREE_TYPE (arg1),
8894 variable2, cst));
8897 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8898 ? MINUS_EXPR : PLUS_EXPR,
8899 const1, const2);
8900 if (!TREE_OVERFLOW (cst)
8901 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8902 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8904 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8905 return fold_build2_loc (loc, code, type,
8906 fold_build2_loc (loc, TREE_CODE (arg0),
8907 TREE_TYPE (arg0),
8908 variable1, cst),
8909 variable2);
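/* As an illustration: x + 2 < y + 5 becomes x < y + 3; the combined
   constant 3 is smaller in magnitude than 5 and has the same sign,
   so the rewrite cannot introduce an overflow that the original did
   not already have.  */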
8913 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8914 signed arithmetic case. That form is created by the compiler
8915 often enough for folding it to be of value. One example is in
8916 computing loop trip counts after Operator Strength Reduction. */
8917 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8918 && TREE_CODE (arg0) == MULT_EXPR
8919 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8920 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8921 && integer_zerop (arg1))
8923 tree const1 = TREE_OPERAND (arg0, 1);
8924 tree const2 = arg1; /* zero */
8925 tree variable1 = TREE_OPERAND (arg0, 0);
8926 enum tree_code cmp_code = code;
8928 /* Handle unfolded multiplication by zero. */
8929 if (integer_zerop (const1))
8930 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8932 fold_overflow_warning (("assuming signed overflow does not occur when "
8933 "eliminating multiplication in comparison "
8934 "with zero"),
8935 WARN_STRICT_OVERFLOW_COMPARISON);
8937 /* If const1 is negative we swap the sense of the comparison. */
8938 if (tree_int_cst_sgn (const1) < 0)
8939 cmp_code = swap_tree_comparison (cmp_code);
8941 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
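   /* For instance, x * 4 > 0 folds to x > 0, while x * -4 > 0 folds
      to x < 0 because the negative constant swaps the sense of the
      comparison.  */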
8944 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8945 if (tem)
8946 return tem;
8948 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8950 tree targ0 = strip_float_extensions (arg0);
8951 tree targ1 = strip_float_extensions (arg1);
8952 tree newtype = TREE_TYPE (targ0);
8954 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8955 newtype = TREE_TYPE (targ1);
8957 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8958 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8959 return fold_build2_loc (loc, code, type,
8960 fold_convert_loc (loc, newtype, targ0),
8961 fold_convert_loc (loc, newtype, targ1));
8963 /* (-a) CMP (-b) -> b CMP a */
8964 if (TREE_CODE (arg0) == NEGATE_EXPR
8965 && TREE_CODE (arg1) == NEGATE_EXPR)
8966 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8967 TREE_OPERAND (arg0, 0));
8969 if (TREE_CODE (arg1) == REAL_CST)
8971 REAL_VALUE_TYPE cst;
8972 cst = TREE_REAL_CST (arg1);
8974 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8975 if (TREE_CODE (arg0) == NEGATE_EXPR)
8976 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8977 TREE_OPERAND (arg0, 0),
8978 build_real (TREE_TYPE (arg1),
8979 real_value_negate (&cst)));
8981 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8982 /* a CMP (-0) -> a CMP 0 */
8983 if (REAL_VALUE_MINUS_ZERO (cst))
8984 return fold_build2_loc (loc, code, type, arg0,
8985 build_real (TREE_TYPE (arg1), dconst0));
8987 /* x != NaN is always true, other ops are always false. */
8988 if (REAL_VALUE_ISNAN (cst)
8989 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8991 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8992 return omit_one_operand_loc (loc, type, tem, arg0);
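	  /* For instance, with a quiet NaN constant (call it QNAN),
	     x != QNAN folds to true and x < QNAN or x == QNAN fold to
	     false, while x is still evaluated for its side effects.  */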
8995 /* Fold comparisons against infinity. */
8996 if (REAL_VALUE_ISINF (cst)
8997 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8999 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9000 if (tem != NULL_TREE)
9001 return tem;
9005 /* If this is a comparison of a real constant with a PLUS_EXPR
9006 or a MINUS_EXPR of a real constant, we can convert it into a
9007 comparison with a revised real constant as long as no overflow
9008 occurs when unsafe_math_optimizations are enabled. */
9009 if (flag_unsafe_math_optimizations
9010 && TREE_CODE (arg1) == REAL_CST
9011 && (TREE_CODE (arg0) == PLUS_EXPR
9012 || TREE_CODE (arg0) == MINUS_EXPR)
9013 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9014 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9015 ? MINUS_EXPR : PLUS_EXPR,
9016 arg1, TREE_OPERAND (arg0, 1)))
9017 && !TREE_OVERFLOW (tem))
9018 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9020 /* Likewise, we can simplify a comparison of a real constant with
9021 a MINUS_EXPR whose first operand is also a real constant, i.e.
9022 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9023 floating-point types only if -fassociative-math is set. */
9024 if (flag_associative_math
9025 && TREE_CODE (arg1) == REAL_CST
9026 && TREE_CODE (arg0) == MINUS_EXPR
9027 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9028 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9029 arg1))
9030 && !TREE_OVERFLOW (tem))
9031 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9032 TREE_OPERAND (arg0, 1), tem);
9034 /* Fold comparisons against built-in math functions. */
9035 if (TREE_CODE (arg1) == REAL_CST
9036 && flag_unsafe_math_optimizations
9037 && ! flag_errno_math)
9039 enum built_in_function fcode = builtin_mathfn_code (arg0);
9041 if (fcode != END_BUILTINS)
9043 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9044 if (tem != NULL_TREE)
9045 return tem;
9050 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9051 && CONVERT_EXPR_P (arg0))
9053 /* If we are widening one operand of an integer comparison,
9054 see if the other operand is similarly being widened. Perhaps we
9055 can do the comparison in the narrower type. */
9056 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9057 if (tem)
9058 return tem;
9060 /* Or if we are changing signedness. */
9061 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9062 if (tem)
9063 return tem;
9066 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9067 constant, we can simplify it. */
9068 if (TREE_CODE (arg1) == INTEGER_CST
9069 && (TREE_CODE (arg0) == MIN_EXPR
9070 || TREE_CODE (arg0) == MAX_EXPR)
9071 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9073 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9074 if (tem)
9075 return tem;
9078 /* Simplify comparison of something with itself. (For IEEE
9079 floating-point, we can only do some of these simplifications.) */
9080 if (operand_equal_p (arg0, arg1, 0))
9082 switch (code)
9084 case EQ_EXPR:
9085 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9086 || ! HONOR_NANS (element_mode (arg0)))
9087 return constant_boolean_node (1, type);
9088 break;
9090 case GE_EXPR:
9091 case LE_EXPR:
9092 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9093 || ! HONOR_NANS (element_mode (arg0)))
9094 return constant_boolean_node (1, type);
9095 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9097 case NE_EXPR:
9098 /* For NE, we can only do this simplification if the operands are
9099 integral or we don't honor IEEE floating-point NaNs. */
9100 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9101 && HONOR_NANS (element_mode (arg0)))
9102 break;
9103 /* ... fall through ... */
9104 case GT_EXPR:
9105 case LT_EXPR:
9106 return constant_boolean_node (0, type);
9107 default:
9108 gcc_unreachable ();
9112 /* If we are comparing an expression that just has comparisons
9113 of two integer values, arithmetic expressions of those comparisons,
9114 and constants, we can simplify it. There are only three cases
9115 to check: the two values can either be equal, the first can be
9116 greater, or the second can be greater. Fold the expression for
9117 those three values. Since each value must be 0 or 1, we have
9118 eight possibilities, each of which corresponds to the constant 0
9119 or 1 or one of the six possible comparisons.
9121 This handles common cases like (a > b) == 0 but also handles
9122 expressions like ((x > y) - (y > x)) > 0, which supposedly
9123 occur in macroized code. */
9125 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9127 tree cval1 = 0, cval2 = 0;
9128 int save_p = 0;
9130 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9131 /* Don't handle degenerate cases here; they should already
9132 have been handled anyway. */
9133 && cval1 != 0 && cval2 != 0
9134 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9135 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9136 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9137 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9138 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9139 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9140 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9142 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9143 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9145 /* We can't just pass T to eval_subst in case cval1 or cval2
9146 was the same as ARG1. */
9148 tree high_result
9149 = fold_build2_loc (loc, code, type,
9150 eval_subst (loc, arg0, cval1, maxval,
9151 cval2, minval),
9152 arg1);
9153 tree equal_result
9154 = fold_build2_loc (loc, code, type,
9155 eval_subst (loc, arg0, cval1, maxval,
9156 cval2, maxval),
9157 arg1);
9158 tree low_result
9159 = fold_build2_loc (loc, code, type,
9160 eval_subst (loc, arg0, cval1, minval,
9161 cval2, maxval),
9162 arg1);
9164 /* All three of these results should be 0 or 1. Confirm they are.
9165 Then use those values to select the proper code to use. */
9167 if (TREE_CODE (high_result) == INTEGER_CST
9168 && TREE_CODE (equal_result) == INTEGER_CST
9169 && TREE_CODE (low_result) == INTEGER_CST)
9171 /* Make a 3-bit mask with the high-order bit being the
9172 value for `>', the next for `=', and the low for `<'. */
9173 switch ((integer_onep (high_result) * 4)
9174 + (integer_onep (equal_result) * 2)
9175 + integer_onep (low_result))
9177 case 0:
9178 /* Always false. */
9179 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9180 case 1:
9181 code = LT_EXPR;
9182 break;
9183 case 2:
9184 code = EQ_EXPR;
9185 break;
9186 case 3:
9187 code = LE_EXPR;
9188 break;
9189 case 4:
9190 code = GT_EXPR;
9191 break;
9192 case 5:
9193 code = NE_EXPR;
9194 break;
9195 case 6:
9196 code = GE_EXPR;
9197 break;
9198 case 7:
9199 /* Always true. */
9200 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9203 if (save_p)
9205 tem = save_expr (build2 (code, type, cval1, cval2));
9206 SET_EXPR_LOCATION (tem, loc);
9207 return tem;
9209 return fold_build2_loc (loc, code, type, cval1, cval2);
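	  /* For instance, (a > b) == 0 evaluates to 0, 1 and 1 at the
	     three sample points, giving mask value 3 and therefore the
	     simplified comparison a <= b.  */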
9214 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9215 into a single range test. */
9216 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9217 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9218 && TREE_CODE (arg1) == INTEGER_CST
9219 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9220 && !integer_zerop (TREE_OPERAND (arg0, 1))
9221 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9222 && !TREE_OVERFLOW (arg1))
9224 tem = fold_div_compare (loc, code, type, arg0, arg1);
9225 if (tem != NULL_TREE)
9226 return tem;
9229 /* Fold ~X op ~Y as Y op X. */
9230 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9231 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9233 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9234 return fold_build2_loc (loc, code, type,
9235 fold_convert_loc (loc, cmp_type,
9236 TREE_OPERAND (arg1, 0)),
9237 TREE_OPERAND (arg0, 0));
9240 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9241 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9242 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9244 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9245 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9246 TREE_OPERAND (arg0, 0),
9247 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9248 fold_convert_loc (loc, cmp_type, arg1)));
9251 return NULL_TREE;
9255 /* Subroutine of fold_binary. Optimize complex multiplications of the
9256 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9257 argument EXPR represents the expression "z" of type TYPE. */
9259 static tree
9260 fold_mult_zconjz (location_t loc, tree type, tree expr)
9262 tree itype = TREE_TYPE (type);
9263 tree rpart, ipart, tem;
9265 if (TREE_CODE (expr) == COMPLEX_EXPR)
9267 rpart = TREE_OPERAND (expr, 0);
9268 ipart = TREE_OPERAND (expr, 1);
9270 else if (TREE_CODE (expr) == COMPLEX_CST)
9272 rpart = TREE_REALPART (expr);
9273 ipart = TREE_IMAGPART (expr);
9275 else
9277 expr = save_expr (expr);
9278 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9279 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9282 rpart = save_expr (rpart);
9283 ipart = save_expr (ipart);
9284 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9285 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9286 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9287 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9288 build_zero_cst (itype));
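/* For instance, for z = a + b*i this builds
   (a + b*i) * (a - b*i) = (a*a + b*b) + 0*i, i.e. a COMPLEX_EXPR whose
   real part is a*a + b*b and whose imaginary part is the zero constant
   of the element type.  */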
9292 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9293 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9294 guarantees that P and N have the same least significant log2(M) bits.
9295 N is not otherwise constrained. In particular, N is not normalized to
9296 0 <= N < M as is common. In general, the precise value of P is unknown.
9297 M is chosen as large as possible such that constant N can be determined.
9299 Returns M and sets *RESIDUE to N.
9301 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9302 account. This is not always possible due to PR 35705.
9305 static unsigned HOST_WIDE_INT
9306 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9307 bool allow_func_align)
9309 enum tree_code code;
9311 *residue = 0;
9313 code = TREE_CODE (expr);
9314 if (code == ADDR_EXPR)
9316 unsigned int bitalign;
9317 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9318 *residue /= BITS_PER_UNIT;
9319 return bitalign / BITS_PER_UNIT;
9321 else if (code == POINTER_PLUS_EXPR)
9323 tree op0, op1;
9324 unsigned HOST_WIDE_INT modulus;
9325 enum tree_code inner_code;
9327 op0 = TREE_OPERAND (expr, 0);
9328 STRIP_NOPS (op0);
9329 modulus = get_pointer_modulus_and_residue (op0, residue,
9330 allow_func_align);
9332 op1 = TREE_OPERAND (expr, 1);
9333 STRIP_NOPS (op1);
9334 inner_code = TREE_CODE (op1);
9335 if (inner_code == INTEGER_CST)
9337 *residue += TREE_INT_CST_LOW (op1);
9338 return modulus;
9340 else if (inner_code == MULT_EXPR)
9342 op1 = TREE_OPERAND (op1, 1);
9343 if (TREE_CODE (op1) == INTEGER_CST)
9345 unsigned HOST_WIDE_INT align;
9347 /* Compute the greatest power-of-2 divisor of op1. */
9348 align = TREE_INT_CST_LOW (op1);
9349 align &= -align;
9351 /* If align is non-zero and less than modulus, replace
9352 modulus with align.  If align is 0, then either op1 is 0
9353 or the greatest power-of-2 divisor of op1 doesn't fit in an
9354 unsigned HOST_WIDE_INT. In either case, no additional
9355 constraint is imposed. */
9356 if (align)
9357 modulus = MIN (modulus, align);
9359 return modulus;
9364 /* If we get here, we were unable to determine anything useful about the
9365 expression. */
9366 return 1;
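/* For instance, for EXPR = &buf p+ i * 4 with buf known to be, say,
   16-byte aligned, the ADDR_EXPR case yields modulus 16 and residue 0,
   and the MULT_EXPR constant 4 lowers the modulus to MIN (16, 4) = 4,
   so P == 0 (mod 4).  */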
9369 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9370 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9372 static bool
9373 vec_cst_ctor_to_array (tree arg, tree *elts)
9375 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9377 if (TREE_CODE (arg) == VECTOR_CST)
9379 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9380 elts[i] = VECTOR_CST_ELT (arg, i);
9382 else if (TREE_CODE (arg) == CONSTRUCTOR)
9384 constructor_elt *elt;
9386 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9387 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9388 return false;
9389 else
9390 elts[i] = elt->value;
9392 else
9393 return false;
9394 for (; i < nelts; i++)
9395 elts[i]
9396 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9397 return true;
9400 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9401 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9402 NULL_TREE otherwise. */
9404 static tree
9405 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9407 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9408 tree *elts;
9409 bool need_ctor = false;
9411 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9412 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9413 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9414 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9415 return NULL_TREE;
9417 elts = XALLOCAVEC (tree, nelts * 3);
9418 if (!vec_cst_ctor_to_array (arg0, elts)
9419 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9420 return NULL_TREE;
9422 for (i = 0; i < nelts; i++)
9424 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9425 need_ctor = true;
9426 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9429 if (need_ctor)
9431 vec<constructor_elt, va_gc> *v;
9432 vec_alloc (v, nelts);
9433 for (i = 0; i < nelts; i++)
9434 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9435 return build_constructor (type, v);
9437 else
9438 return build_vector (type, &elts[2 * nelts]);
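/* For instance, with nelts == 4, arg0 = {0, 1, 2, 3},
   arg1 = {4, 5, 6, 7} and sel = {0, 4, 1, 5}, the result is the
   constant vector {0, 4, 1, 5}; a CONSTRUCTOR is built instead only
   when some selected element is not a constant.  */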
9441 /* Try to fold a pointer difference of type TYPE between two address expressions of
9442 array references AREF0 and AREF1 using location LOC. Return a
9443 simplified expression for the difference or NULL_TREE. */
9445 static tree
9446 fold_addr_of_array_ref_difference (location_t loc, tree type,
9447 tree aref0, tree aref1)
9449 tree base0 = TREE_OPERAND (aref0, 0);
9450 tree base1 = TREE_OPERAND (aref1, 0);
9451 tree base_offset = build_int_cst (type, 0);
9453 /* If the bases are array references as well, recurse. If the bases
9454 are pointer indirections compute the difference of the pointers.
9455 If the bases are equal, we are set. */
9456 if ((TREE_CODE (base0) == ARRAY_REF
9457 && TREE_CODE (base1) == ARRAY_REF
9458 && (base_offset
9459 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9460 || (INDIRECT_REF_P (base0)
9461 && INDIRECT_REF_P (base1)
9462 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9463 TREE_OPERAND (base0, 0),
9464 TREE_OPERAND (base1, 0))))
9465 || operand_equal_p (base0, base1, 0))
9467 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9468 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9469 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9470 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9471 return fold_build2_loc (loc, PLUS_EXPR, type,
9472 base_offset,
9473 fold_build2_loc (loc, MULT_EXPR, type,
9474 diff, esz));
9476 return NULL_TREE;
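/* For instance, &a[i] - &a[j] with a common base recurses to the
   operand_equal_p case, so base_offset is 0 and the result is
   (i - j) * sizeof (a[0]) computed in TYPE.  */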
9479 /* If the real or vector real constant CST of type TYPE has an exact
9480 inverse, return it, else return NULL. */
9482 tree
9483 exact_inverse (tree type, tree cst)
9485 REAL_VALUE_TYPE r;
9486 tree unit_type, *elts;
9487 machine_mode mode;
9488 unsigned vec_nelts, i;
9490 switch (TREE_CODE (cst))
9492 case REAL_CST:
9493 r = TREE_REAL_CST (cst);
9495 if (exact_real_inverse (TYPE_MODE (type), &r))
9496 return build_real (type, r);
9498 return NULL_TREE;
9500 case VECTOR_CST:
9501 vec_nelts = VECTOR_CST_NELTS (cst);
9502 elts = XALLOCAVEC (tree, vec_nelts);
9503 unit_type = TREE_TYPE (type);
9504 mode = TYPE_MODE (unit_type);
9506 for (i = 0; i < vec_nelts; i++)
9508 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9509 if (!exact_real_inverse (mode, &r))
9510 return NULL_TREE;
9511 elts[i] = build_real (unit_type, r);
9514 return build_vector (type, elts);
9516 default:
9517 return NULL_TREE;
9521 /* Mask out the tz least significant bits of X of type TYPE where
9522 tz is the number of trailing zeroes in Y. */
9523 static wide_int
9524 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9526 int tz = wi::ctz (y);
9527 if (tz > 0)
9528 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9529 return x;
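/* For instance, y == 24 (binary 11000) has three trailing zeroes, so
   the result is x with its three least significant bits cleared,
   i.e. x & ~7.  */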
9532 /* Return true when T is an address and is known to be nonzero.
9533 For floating point we further ensure that T is not denormal.
9534 Similar logic is present in nonzero_address in rtlanal.c.
9536 If the return value is based on the assumption that signed overflow
9537 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9538 change *STRICT_OVERFLOW_P. */
9540 static bool
9541 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9543 tree type = TREE_TYPE (t);
9544 enum tree_code code;
9546 /* Doing something useful for floating point would need more work. */
9547 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9548 return false;
9550 code = TREE_CODE (t);
9551 switch (TREE_CODE_CLASS (code))
9553 case tcc_unary:
9554 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9555 strict_overflow_p);
9556 case tcc_binary:
9557 case tcc_comparison:
9558 return tree_binary_nonzero_warnv_p (code, type,
9559 TREE_OPERAND (t, 0),
9560 TREE_OPERAND (t, 1),
9561 strict_overflow_p);
9562 case tcc_constant:
9563 case tcc_declaration:
9564 case tcc_reference:
9565 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9567 default:
9568 break;
9571 switch (code)
9573 case TRUTH_NOT_EXPR:
9574 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9575 strict_overflow_p);
9577 case TRUTH_AND_EXPR:
9578 case TRUTH_OR_EXPR:
9579 case TRUTH_XOR_EXPR:
9580 return tree_binary_nonzero_warnv_p (code, type,
9581 TREE_OPERAND (t, 0),
9582 TREE_OPERAND (t, 1),
9583 strict_overflow_p);
9585 case COND_EXPR:
9586 case CONSTRUCTOR:
9587 case OBJ_TYPE_REF:
9588 case ASSERT_EXPR:
9589 case ADDR_EXPR:
9590 case WITH_SIZE_EXPR:
9591 case SSA_NAME:
9592 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9594 case COMPOUND_EXPR:
9595 case MODIFY_EXPR:
9596 case BIND_EXPR:
9597 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9598 strict_overflow_p);
9600 case SAVE_EXPR:
9601 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9602 strict_overflow_p);
9604 case CALL_EXPR:
9606 tree fndecl = get_callee_fndecl (t);
9607 if (!fndecl) return false;
9608 if (flag_delete_null_pointer_checks && !flag_check_new
9609 && DECL_IS_OPERATOR_NEW (fndecl)
9610 && !TREE_NOTHROW (fndecl))
9611 return true;
9612 if (flag_delete_null_pointer_checks
9613 && lookup_attribute ("returns_nonnull",
9614 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9615 return true;
9616 return alloca_call_p (t);
9619 default:
9620 break;
9622 return false;
9625 /* Return true when T is an address and is known to be nonzero.
9626 Handle warnings about undefined signed overflow. */
9628 static bool
9629 tree_expr_nonzero_p (tree t)
9631 bool ret, strict_overflow_p;
9633 strict_overflow_p = false;
9634 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9635 if (strict_overflow_p)
9636 fold_overflow_warning (("assuming signed overflow does not occur when "
9637 "determining that expression is always "
9638 "non-zero"),
9639 WARN_STRICT_OVERFLOW_MISC);
9640 return ret;
9643 /* Fold a binary expression of code CODE and type TYPE with operands
9644 OP0 and OP1. LOC is the location of the resulting expression.
9645 Return the folded expression if folding is successful. Otherwise,
9646 return NULL_TREE. */
9648 tree
9649 fold_binary_loc (location_t loc,
9650 enum tree_code code, tree type, tree op0, tree op1)
9652 enum tree_code_class kind = TREE_CODE_CLASS (code);
9653 tree arg0, arg1, tem;
9654 tree t1 = NULL_TREE;
9655 bool strict_overflow_p;
9656 unsigned int prec;
9658 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9659 && TREE_CODE_LENGTH (code) == 2
9660 && op0 != NULL_TREE
9661 && op1 != NULL_TREE);
9663 arg0 = op0;
9664 arg1 = op1;
9666 /* Strip any conversions that don't change the mode. This is
9667 safe for every expression, except for a comparison expression
9668 because its signedness is derived from its operands. So, in
9669 the latter case, only strip conversions that don't change the
9670 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9671 preserved.
9673 Note that this is done as an internal manipulation within the
9674 constant folder, in order to find the simplest representation
9675 of the arguments so that their form can be studied. In any
9676 cases, the appropriate type conversions should be put back in
9677 the tree that will get out of the constant folder. */
9679 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9681 STRIP_SIGN_NOPS (arg0);
9682 STRIP_SIGN_NOPS (arg1);
9684 else
9686 STRIP_NOPS (arg0);
9687 STRIP_NOPS (arg1);
9690 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9691 constant but we can't do arithmetic on them. */
9692 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9693 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9694 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9695 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9696 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9697 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9698 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9700 if (kind == tcc_binary)
9702 /* Make sure type and arg0 have the same saturating flag. */
9703 gcc_assert (TYPE_SATURATING (type)
9704 == TYPE_SATURATING (TREE_TYPE (arg0)));
9705 tem = const_binop (code, arg0, arg1);
9707 else if (kind == tcc_comparison)
9708 tem = fold_relational_const (code, type, arg0, arg1);
9709 else
9710 tem = NULL_TREE;
9712 if (tem != NULL_TREE)
9714 if (TREE_TYPE (tem) != type)
9715 tem = fold_convert_loc (loc, type, tem);
9716 return tem;
9720 /* If this is a commutative operation, and ARG0 is a constant, move it
9721 to ARG1 to reduce the number of tests below. */
9722 if (commutative_tree_code (code)
9723 && tree_swap_operands_p (arg0, arg1, true))
9724 return fold_build2_loc (loc, code, type, op1, op0);
9726 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9727 to ARG1 to reduce the number of tests below. */
9728 if (kind == tcc_comparison
9729 && tree_swap_operands_p (arg0, arg1, true))
9730 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9732 tem = generic_simplify (loc, code, type, op0, op1);
9733 if (tem)
9734 return tem;
9736 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9738 First check for cases where an arithmetic operation is applied to a
9739 compound, conditional, or comparison operation. Push the arithmetic
9740 operation inside the compound or conditional to see if any folding
9741 can then be done. Convert comparison to conditional for this purpose.
9742 This also optimizes non-constant cases that used to be done in
9743 expand_expr.
9745 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9746 one of the operands is a comparison and the other is a comparison, a
9747 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9748 code below would make the expression more complex. Change it to a
9749 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9750 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9752 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9753 || code == EQ_EXPR || code == NE_EXPR)
9754 && TREE_CODE (type) != VECTOR_TYPE
9755 && ((truth_value_p (TREE_CODE (arg0))
9756 && (truth_value_p (TREE_CODE (arg1))
9757 || (TREE_CODE (arg1) == BIT_AND_EXPR
9758 && integer_onep (TREE_OPERAND (arg1, 1)))))
9759 || (truth_value_p (TREE_CODE (arg1))
9760 && (truth_value_p (TREE_CODE (arg0))
9761 || (TREE_CODE (arg0) == BIT_AND_EXPR
9762 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9764 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9765 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9766 : TRUTH_XOR_EXPR,
9767 boolean_type_node,
9768 fold_convert_loc (loc, boolean_type_node, arg0),
9769 fold_convert_loc (loc, boolean_type_node, arg1));
9771 if (code == EQ_EXPR)
9772 tem = invert_truthvalue_loc (loc, tem);
9774 return fold_convert_loc (loc, type, tem);
9777 if (TREE_CODE_CLASS (code) == tcc_binary
9778 || TREE_CODE_CLASS (code) == tcc_comparison)
9780 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9782 tem = fold_build2_loc (loc, code, type,
9783 fold_convert_loc (loc, TREE_TYPE (op0),
9784 TREE_OPERAND (arg0, 1)), op1);
9785 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9786 tem);
9788 if (TREE_CODE (arg1) == COMPOUND_EXPR
9789 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9791 tem = fold_build2_loc (loc, code, type, op0,
9792 fold_convert_loc (loc, TREE_TYPE (op1),
9793 TREE_OPERAND (arg1, 1)));
9794 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9795 tem);
9798 if (TREE_CODE (arg0) == COND_EXPR
9799 || TREE_CODE (arg0) == VEC_COND_EXPR
9800 || COMPARISON_CLASS_P (arg0))
9802 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9803 arg0, arg1,
9804 /*cond_first_p=*/1);
9805 if (tem != NULL_TREE)
9806 return tem;
9809 if (TREE_CODE (arg1) == COND_EXPR
9810 || TREE_CODE (arg1) == VEC_COND_EXPR
9811 || COMPARISON_CLASS_P (arg1))
9813 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9814 arg1, arg0,
9815 /*cond_first_p=*/0);
9816 if (tem != NULL_TREE)
9817 return tem;
9821 switch (code)
9823 case MEM_REF:
9824 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9825 if (TREE_CODE (arg0) == ADDR_EXPR
9826 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9828 tree iref = TREE_OPERAND (arg0, 0);
9829 return fold_build2 (MEM_REF, type,
9830 TREE_OPERAND (iref, 0),
9831 int_const_binop (PLUS_EXPR, arg1,
9832 TREE_OPERAND (iref, 1)));
9835 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9836 if (TREE_CODE (arg0) == ADDR_EXPR
9837 && handled_component_p (TREE_OPERAND (arg0, 0)))
9839 tree base;
9840 HOST_WIDE_INT coffset;
9841 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9842 &coffset);
9843 if (!base)
9844 return NULL_TREE;
9845 return fold_build2 (MEM_REF, type,
9846 build_fold_addr_expr (base),
9847 int_const_binop (PLUS_EXPR, arg1,
9848 size_int (coffset)));
9851 return NULL_TREE;
9853 case POINTER_PLUS_EXPR:
9854 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9855 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9856 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9857 return fold_convert_loc (loc, type,
9858 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9859 fold_convert_loc (loc, sizetype,
9860 arg1),
9861 fold_convert_loc (loc, sizetype,
9862 arg0)));
9864 /* PTR_CST +p CST -> CST1 */
9865 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9866 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9867 fold_convert_loc (loc, type, arg1));
9869 return NULL_TREE;
9871 case PLUS_EXPR:
9872 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9874 /* X + (X / CST) * -CST is X % CST. */
9875 if (TREE_CODE (arg1) == MULT_EXPR
9876 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9877 && operand_equal_p (arg0,
9878 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9880 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9881 tree cst1 = TREE_OPERAND (arg1, 1);
9882 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9883 cst1, cst0);
9884 if (sum && integer_zerop (sum))
9885 return fold_convert_loc (loc, type,
9886 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9887 TREE_TYPE (arg0), arg0,
9888 cst0));
9892 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9893 one. Make sure the type is not saturating and has the signedness of
9894 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9895 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9896 if ((TREE_CODE (arg0) == MULT_EXPR
9897 || TREE_CODE (arg1) == MULT_EXPR)
9898 && !TYPE_SATURATING (type)
9899 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9900 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9901 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9903 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9904 if (tem)
9905 return tem;
9908 if (! FLOAT_TYPE_P (type))
9910 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9911 with a constant, and the two constants have no bits in common,
9912 we should treat this as a BIT_IOR_EXPR since this may produce more
9913 simplifications. */
9914 if (TREE_CODE (arg0) == BIT_AND_EXPR
9915 && TREE_CODE (arg1) == BIT_AND_EXPR
9916 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9917 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9918 && wi::bit_and (TREE_OPERAND (arg0, 1),
9919 TREE_OPERAND (arg1, 1)) == 0)
9921 code = BIT_IOR_EXPR;
9922 goto bit_ior;
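	  /* For instance, the constants in (x & 0xf0) + (y & 0x0f)
	     share no bits, so the addition can never carry and is
	     handled as (x & 0xf0) | (y & 0x0f).  */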
9925 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9926 (plus (plus (mult) (mult)) (foo)) so that we can
9927 take advantage of the factoring cases below. */
9928 if (TYPE_OVERFLOW_WRAPS (type)
9929 && (((TREE_CODE (arg0) == PLUS_EXPR
9930 || TREE_CODE (arg0) == MINUS_EXPR)
9931 && TREE_CODE (arg1) == MULT_EXPR)
9932 || ((TREE_CODE (arg1) == PLUS_EXPR
9933 || TREE_CODE (arg1) == MINUS_EXPR)
9934 && TREE_CODE (arg0) == MULT_EXPR)))
9936 tree parg0, parg1, parg, marg;
9937 enum tree_code pcode;
9939 if (TREE_CODE (arg1) == MULT_EXPR)
9940 parg = arg0, marg = arg1;
9941 else
9942 parg = arg1, marg = arg0;
9943 pcode = TREE_CODE (parg);
9944 parg0 = TREE_OPERAND (parg, 0);
9945 parg1 = TREE_OPERAND (parg, 1);
9946 STRIP_NOPS (parg0);
9947 STRIP_NOPS (parg1);
9949 if (TREE_CODE (parg0) == MULT_EXPR
9950 && TREE_CODE (parg1) != MULT_EXPR)
9951 return fold_build2_loc (loc, pcode, type,
9952 fold_build2_loc (loc, PLUS_EXPR, type,
9953 fold_convert_loc (loc, type,
9954 parg0),
9955 fold_convert_loc (loc, type,
9956 marg)),
9957 fold_convert_loc (loc, type, parg1));
9958 if (TREE_CODE (parg0) != MULT_EXPR
9959 && TREE_CODE (parg1) == MULT_EXPR)
9960 return
9961 fold_build2_loc (loc, PLUS_EXPR, type,
9962 fold_convert_loc (loc, type, parg0),
9963 fold_build2_loc (loc, pcode, type,
9964 fold_convert_loc (loc, type, marg),
9965 fold_convert_loc (loc, type,
9966 parg1)));
9969 else
9971 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9972 to __complex__ ( x, y ). This is not the same for SNaNs or
9973 if signed zeros are involved. */
9974 if (!HONOR_SNANS (element_mode (arg0))
9975 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9976 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9978 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9979 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9980 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9981 bool arg0rz = false, arg0iz = false;
9982 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9983 || (arg0i && (arg0iz = real_zerop (arg0i))))
9985 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9986 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9987 if (arg0rz && arg1i && real_zerop (arg1i))
9989 tree rp = arg1r ? arg1r
9990 : build1 (REALPART_EXPR, rtype, arg1);
9991 tree ip = arg0i ? arg0i
9992 : build1 (IMAGPART_EXPR, rtype, arg0);
9993 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9995 else if (arg0iz && arg1r && real_zerop (arg1r))
9997 tree rp = arg0r ? arg0r
9998 : build1 (REALPART_EXPR, rtype, arg0);
9999 tree ip = arg1i ? arg1i
10000 : build1 (IMAGPART_EXPR, rtype, arg1);
10001 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
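	    /* For instance, the sum of the partial complex values
	       (x, 0) and (0, y) is assembled directly as the complex
	       value (x, y), which would be wrong for SNaNs or if
	       signed zeros mattered.  */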
10006 if (flag_unsafe_math_optimizations
10007 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10008 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10009 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10010 return tem;
10012 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10013 We associate floats only if the user has specified
10014 -fassociative-math. */
10015 if (flag_associative_math
10016 && TREE_CODE (arg1) == PLUS_EXPR
10017 && TREE_CODE (arg0) != MULT_EXPR)
10019 tree tree10 = TREE_OPERAND (arg1, 0);
10020 tree tree11 = TREE_OPERAND (arg1, 1);
10021 if (TREE_CODE (tree11) == MULT_EXPR
10022 && TREE_CODE (tree10) == MULT_EXPR)
10024 tree tree0;
10025 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10026 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10029 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10030 We associate floats only if the user has specified
10031 -fassociative-math. */
10032 if (flag_associative_math
10033 && TREE_CODE (arg0) == PLUS_EXPR
10034 && TREE_CODE (arg1) != MULT_EXPR)
10036 tree tree00 = TREE_OPERAND (arg0, 0);
10037 tree tree01 = TREE_OPERAND (arg0, 1);
10038 if (TREE_CODE (tree01) == MULT_EXPR
10039 && TREE_CODE (tree00) == MULT_EXPR)
10041 tree tree0;
10042 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10043 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10048 bit_rotate:
10049 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10050 is a rotate of A by C1 bits. */
10051 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10052 is a rotate of A by B bits. */
10054 enum tree_code code0, code1;
10055 tree rtype;
10056 code0 = TREE_CODE (arg0);
10057 code1 = TREE_CODE (arg1);
10058 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10059 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10060 && operand_equal_p (TREE_OPERAND (arg0, 0),
10061 TREE_OPERAND (arg1, 0), 0)
10062 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10063 TYPE_UNSIGNED (rtype))
10064 /* Only create rotates in complete modes. Other cases are not
10065 expanded properly. */
10066 && (element_precision (rtype)
10067 == element_precision (TYPE_MODE (rtype))))
10069 tree tree01, tree11;
10070 enum tree_code code01, code11;
10072 tree01 = TREE_OPERAND (arg0, 1);
10073 tree11 = TREE_OPERAND (arg1, 1);
10074 STRIP_NOPS (tree01);
10075 STRIP_NOPS (tree11);
10076 code01 = TREE_CODE (tree01);
10077 code11 = TREE_CODE (tree11);
10078 if (code01 == INTEGER_CST
10079 && code11 == INTEGER_CST
10080 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10081 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10083 tem = build2_loc (loc, LROTATE_EXPR,
10084 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10085 TREE_OPERAND (arg0, 0),
10086 code0 == LSHIFT_EXPR ? tree01 : tree11);
10087 return fold_convert_loc (loc, type, tem);
10089 else if (code11 == MINUS_EXPR)
10091 tree tree110, tree111;
10092 tree110 = TREE_OPERAND (tree11, 0);
10093 tree111 = TREE_OPERAND (tree11, 1);
10094 STRIP_NOPS (tree110);
10095 STRIP_NOPS (tree111);
10096 if (TREE_CODE (tree110) == INTEGER_CST
10097 && 0 == compare_tree_int (tree110,
10098 element_precision
10099 (TREE_TYPE (TREE_OPERAND
10100 (arg0, 0))))
10101 && operand_equal_p (tree01, tree111, 0))
10102 return
10103 fold_convert_loc (loc, type,
10104 build2 ((code0 == LSHIFT_EXPR
10105 ? LROTATE_EXPR
10106 : RROTATE_EXPR),
10107 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10108 TREE_OPERAND (arg0, 0), tree01));
10110 else if (code01 == MINUS_EXPR)
10112 tree tree010, tree011;
10113 tree010 = TREE_OPERAND (tree01, 0);
10114 tree011 = TREE_OPERAND (tree01, 1);
10115 STRIP_NOPS (tree010);
10116 STRIP_NOPS (tree011);
10117 if (TREE_CODE (tree010) == INTEGER_CST
10118 && 0 == compare_tree_int (tree010,
10119 element_precision
10120 (TREE_TYPE (TREE_OPERAND
10121 (arg0, 0))))
10122 && operand_equal_p (tree11, tree011, 0))
10123 return fold_convert_loc
10124 (loc, type,
10125 build2 ((code0 != LSHIFT_EXPR
10126 ? LROTATE_EXPR
10127 : RROTATE_EXPR),
10128 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10129 TREE_OPERAND (arg0, 0), tree11));
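	/* For instance, for unsigned 32-bit x, (x << 3) + (x >> 29)
	   is recognized as a left rotate of x by 3 bits, and
	   (x << b) + (x >> (32 - b)) as a left rotate of x by b bits.  */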
10134 associate:
10135 /* In most languages, can't associate operations on floats through
10136 parentheses. Rather than remember where the parentheses were, we
10137 don't associate floats at all, unless the user has specified
10138 -fassociative-math.
10139 And, we need to make sure type is not saturating. */
10141 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10142 && !TYPE_SATURATING (type))
10144 tree var0, con0, lit0, minus_lit0;
10145 tree var1, con1, lit1, minus_lit1;
10146 tree atype = type;
10147 bool ok = true;
10149 /* Split both trees into variables, constants, and literals. Then
10150 associate each group together, the constants with literals,
10151 then the result with variables. This increases the chances of
10152 literals being recombined later and of generating relocatable
10153 expressions for the sum of a constant and literal. */
10154 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10155 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10156 code == MINUS_EXPR);
10158 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10159 if (code == MINUS_EXPR)
10160 code = PLUS_EXPR;
10162 /* With undefined overflow prefer doing association in a type
10163 which wraps on overflow, if that is one of the operand types. */
10164 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10165 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10167 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10168 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10169 atype = TREE_TYPE (arg0);
10170 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10171 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10172 atype = TREE_TYPE (arg1);
10173 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10176 /* With undefined overflow we can only associate constants with one
10177 variable, and constants whose association doesn't overflow. */
10178 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10179 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10181 if (var0 && var1)
10183 tree tmp0 = var0;
10184 tree tmp1 = var1;
10186 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10187 tmp0 = TREE_OPERAND (tmp0, 0);
10188 if (CONVERT_EXPR_P (tmp0)
10189 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10190 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10191 <= TYPE_PRECISION (atype)))
10192 tmp0 = TREE_OPERAND (tmp0, 0);
10193 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10194 tmp1 = TREE_OPERAND (tmp1, 0);
10195 if (CONVERT_EXPR_P (tmp1)
10196 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10197 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10198 <= TYPE_PRECISION (atype)))
10199 tmp1 = TREE_OPERAND (tmp1, 0);
10200 /* The only case we can still associate with two variables
10201 is if they are the same, modulo negation and bit-pattern
10202 preserving conversions. */
10203 if (!operand_equal_p (tmp0, tmp1, 0))
10204 ok = false;
10208 /* Only do something if we found more than two objects. Otherwise,
10209 nothing has changed and we risk infinite recursion. */
10210 if (ok
10211 && (2 < ((var0 != 0) + (var1 != 0)
10212 + (con0 != 0) + (con1 != 0)
10213 + (lit0 != 0) + (lit1 != 0)
10214 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10216 bool any_overflows = false;
10217 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10218 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10219 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10220 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10221 var0 = associate_trees (loc, var0, var1, code, atype);
10222 con0 = associate_trees (loc, con0, con1, code, atype);
10223 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10224 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10225 code, atype);
10227 /* Preserve the MINUS_EXPR if the negative part of the literal is
10228 greater than the positive part. Otherwise, the multiplicative
10229 folding code (i.e. extract_muldiv) may be fooled in case
10230 unsigned constants are subtracted, like in the following
10231 example: ((X*2 + 4) - 8U)/2. */
10232 if (minus_lit0 && lit0)
10234 if (TREE_CODE (lit0) == INTEGER_CST
10235 && TREE_CODE (minus_lit0) == INTEGER_CST
10236 && tree_int_cst_lt (lit0, minus_lit0))
10238 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10239 MINUS_EXPR, atype);
10240 lit0 = 0;
10242 else
10244 lit0 = associate_trees (loc, lit0, minus_lit0,
10245 MINUS_EXPR, atype);
10246 minus_lit0 = 0;
10250 /* Don't introduce overflows through reassociation. */
10251 if (!any_overflows
10252 && ((lit0 && TREE_OVERFLOW (lit0))
10253 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10254 return NULL_TREE;
10256 if (minus_lit0)
10258 if (con0 == 0)
10259 return
10260 fold_convert_loc (loc, type,
10261 associate_trees (loc, var0, minus_lit0,
10262 MINUS_EXPR, atype));
10263 else
10265 con0 = associate_trees (loc, con0, minus_lit0,
10266 MINUS_EXPR, atype);
10267 return
10268 fold_convert_loc (loc, type,
10269 associate_trees (loc, var0, con0,
10270 PLUS_EXPR, atype));
10274 con0 = associate_trees (loc, con0, lit0, code, atype);
10275 return
10276 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10277 code, atype));
10281 return NULL_TREE;
10283 case MINUS_EXPR:
10284 /* Pointer simplifications for subtraction, simple reassociations. */
10285 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10287 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10288 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10289 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10291 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10292 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10293 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10294 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10295 return fold_build2_loc (loc, PLUS_EXPR, type,
10296 fold_build2_loc (loc, MINUS_EXPR, type,
10297 arg00, arg10),
10298 fold_build2_loc (loc, MINUS_EXPR, type,
10299 arg01, arg11));
10301 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10302 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10304 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10305 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10306 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10307 fold_convert_loc (loc, type, arg1));
10308 if (tmp)
10309 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10311 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10312 simplifies. */
10313 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10315 tree arg10 = fold_convert_loc (loc, type,
10316 TREE_OPERAND (arg1, 0));
10317 tree arg11 = fold_convert_loc (loc, type,
10318 TREE_OPERAND (arg1, 1));
10319 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10320 fold_convert_loc (loc, type, arg0),
10321 arg10);
10322 if (tmp)
10323 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10326 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10327 if (TREE_CODE (arg0) == NEGATE_EXPR
10328 && negate_expr_p (arg1)
10329 && reorder_operands_p (arg0, arg1))
10330 return fold_build2_loc (loc, MINUS_EXPR, type,
10331 fold_convert_loc (loc, type,
10332 negate_expr (arg1)),
10333 fold_convert_loc (loc, type,
10334 TREE_OPERAND (arg0, 0)));
10335 /* Convert -A - 1 to ~A. */
10336 if (TREE_CODE (arg0) == NEGATE_EXPR
10337 && integer_each_onep (arg1)
10338 && !TYPE_OVERFLOW_TRAPS (type))
10339 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10340 fold_convert_loc (loc, type,
10341 TREE_OPERAND (arg0, 0)));
10343 /* Convert -1 - A to ~A. */
10344 if (TREE_CODE (type) != COMPLEX_TYPE
10345 && integer_all_onesp (arg0))
10346 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10349 /* X - (X / Y) * Y is X % Y. */
10350 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10351 && TREE_CODE (arg1) == MULT_EXPR
10352 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10353 && operand_equal_p (arg0,
10354 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10355 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10356 TREE_OPERAND (arg1, 1), 0))
10357 return
10358 fold_convert_loc (loc, type,
10359 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10360 arg0, TREE_OPERAND (arg1, 1)));
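      /* For instance, for integral x and y, x - (x / y) * y folds to
	 x % y, matching the truncating semantics of TRUNC_DIV_EXPR
	 and TRUNC_MOD_EXPR.  */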
10362 if (! FLOAT_TYPE_P (type))
10364 /* Fold A - (A & B) into ~B & A. */
10365 if (!TREE_SIDE_EFFECTS (arg0)
10366 && TREE_CODE (arg1) == BIT_AND_EXPR)
10368 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10370 tree arg10 = fold_convert_loc (loc, type,
10371 TREE_OPERAND (arg1, 0));
10372 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10373 fold_build1_loc (loc, BIT_NOT_EXPR,
10374 type, arg10),
10375 fold_convert_loc (loc, type, arg0));
10377 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10379 tree arg11 = fold_convert_loc (loc,
10380 type, TREE_OPERAND (arg1, 1));
10381 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10382 fold_build1_loc (loc, BIT_NOT_EXPR,
10383 type, arg11),
10384 fold_convert_loc (loc, type, arg0));
10388 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10389 any power of 2 minus 1. */
10390 if (TREE_CODE (arg0) == BIT_AND_EXPR
10391 && TREE_CODE (arg1) == BIT_AND_EXPR
10392 && operand_equal_p (TREE_OPERAND (arg0, 0),
10393 TREE_OPERAND (arg1, 0), 0))
10395 tree mask0 = TREE_OPERAND (arg0, 1);
10396 tree mask1 = TREE_OPERAND (arg1, 1);
10397 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10399 if (operand_equal_p (tem, mask1, 0))
10401 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10402 TREE_OPERAND (arg0, 0), mask1);
10403 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
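	  /* For instance, with B == 7, (a & ~7) - (a & 7) becomes
	     (a ^ 7) - 7; for a == 13 both sides evaluate to 3
	     (8 - 5 on the left, 10 - 7 on the right).  */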
10408 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10409 __complex__ ( x, -y ). This is not the same for SNaNs or if
10410 signed zeros are involved. */
10411 if (!HONOR_SNANS (element_mode (arg0))
10412 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10413 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10415 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10416 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10417 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10418 bool arg0rz = false, arg0iz = false;
10419 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10420 || (arg0i && (arg0iz = real_zerop (arg0i))))
10422 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10423 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10424 if (arg0rz && arg1i && real_zerop (arg1i))
10426 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10427 arg1r ? arg1r
10428 : build1 (REALPART_EXPR, rtype, arg1));
10429 tree ip = arg0i ? arg0i
10430 : build1 (IMAGPART_EXPR, rtype, arg0);
10431 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10433 else if (arg0iz && arg1r && real_zerop (arg1r))
10435 tree rp = arg0r ? arg0r
10436 : build1 (REALPART_EXPR, rtype, arg0);
10437 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10438 arg1i ? arg1i
10439 : build1 (IMAGPART_EXPR, rtype, arg1));
10440 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10445 /* A - B -> A + (-B) if B is easily negatable. */
10446 if (negate_expr_p (arg1)
10447 && !TYPE_OVERFLOW_SANITIZED (type)
10448 && ((FLOAT_TYPE_P (type)
10449 /* Avoid this transformation if B is a positive REAL_CST. */
10450 && (TREE_CODE (arg1) != REAL_CST
10451 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10452 || INTEGRAL_TYPE_P (type)))
10453 return fold_build2_loc (loc, PLUS_EXPR, type,
10454 fold_convert_loc (loc, type, arg0),
10455 fold_convert_loc (loc, type,
10456 negate_expr (arg1)));
10458 /* Try folding difference of addresses. */
10460 HOST_WIDE_INT diff;
10462 if ((TREE_CODE (arg0) == ADDR_EXPR
10463 || TREE_CODE (arg1) == ADDR_EXPR)
10464 && ptr_difference_const (arg0, arg1, &diff))
10465 return build_int_cst_type (type, diff);
10468 /* Fold &a[i] - &a[j] to i-j. */
10469 if (TREE_CODE (arg0) == ADDR_EXPR
10470 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10471 && TREE_CODE (arg1) == ADDR_EXPR
10472 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10474 tree tem = fold_addr_of_array_ref_difference (loc, type,
10475 TREE_OPERAND (arg0, 0),
10476 TREE_OPERAND (arg1, 0));
10477 if (tem)
10478 return tem;
10481 if (FLOAT_TYPE_P (type)
10482 && flag_unsafe_math_optimizations
10483 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10484 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10485 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10486 return tem;
10488 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10489 one. Make sure the type is not saturating and has the signedness of
10490 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10491 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10492 if ((TREE_CODE (arg0) == MULT_EXPR
10493 || TREE_CODE (arg1) == MULT_EXPR)
10494 && !TYPE_SATURATING (type)
10495 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10496 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10497 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10499 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10500 if (tem)
10501 return tem;
10504 goto associate;
10506 case MULT_EXPR:
10507 /* (-A) * (-B) -> A * B */
10508 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10509 return fold_build2_loc (loc, MULT_EXPR, type,
10510 fold_convert_loc (loc, type,
10511 TREE_OPERAND (arg0, 0)),
10512 fold_convert_loc (loc, type,
10513 negate_expr (arg1)));
10514 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10515 return fold_build2_loc (loc, MULT_EXPR, type,
10516 fold_convert_loc (loc, type,
10517 negate_expr (arg0)),
10518 fold_convert_loc (loc, type,
10519 TREE_OPERAND (arg1, 0)));
10521 if (! FLOAT_TYPE_P (type))
10523 /* Transform x * -C into -x * C if x is easily negatable. */
10524 if (TREE_CODE (arg1) == INTEGER_CST
10525 && tree_int_cst_sgn (arg1) == -1
10526 && negate_expr_p (arg0)
10527 && (tem = negate_expr (arg1)) != arg1
10528 && !TREE_OVERFLOW (tem))
10529 return fold_build2_loc (loc, MULT_EXPR, type,
10530 fold_convert_loc (loc, type,
10531 negate_expr (arg0)),
10532 tem);
10534 /* (a * (1 << b)) is (a << b) */
10535 if (TREE_CODE (arg1) == LSHIFT_EXPR
10536 && integer_onep (TREE_OPERAND (arg1, 0)))
10537 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10538 TREE_OPERAND (arg1, 1));
10539 if (TREE_CODE (arg0) == LSHIFT_EXPR
10540 && integer_onep (TREE_OPERAND (arg0, 0)))
10541 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10542 TREE_OPERAND (arg0, 1));
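	  /* For instance, for integral a and non-constant b,
	     a * (1 << b) becomes a << b, and likewise with the
	     operands of the multiplication swapped.  */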
10544 /* (A + A) * C -> A * 2 * C */
10545 if (TREE_CODE (arg0) == PLUS_EXPR
10546 && TREE_CODE (arg1) == INTEGER_CST
10547 && operand_equal_p (TREE_OPERAND (arg0, 0),
10548 TREE_OPERAND (arg0, 1), 0))
10549 return fold_build2_loc (loc, MULT_EXPR, type,
10550 omit_one_operand_loc (loc, type,
10551 TREE_OPERAND (arg0, 0),
10552 TREE_OPERAND (arg0, 1)),
10553 fold_build2_loc (loc, MULT_EXPR, type,
10554 build_int_cst (type, 2) , arg1));
10556 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10557 sign-changing only. */
10558 if (TREE_CODE (arg1) == INTEGER_CST
10559 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10560 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10561 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10563 strict_overflow_p = false;
10564 if (TREE_CODE (arg1) == INTEGER_CST
10565 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10566 &strict_overflow_p)))
10568 if (strict_overflow_p)
10569 fold_overflow_warning (("assuming signed overflow does not "
10570 "occur when simplifying "
10571 "multiplication"),
10572 WARN_STRICT_OVERFLOW_MISC);
10573 return fold_convert_loc (loc, type, tem);
10576 /* Optimize z * conj(z) for integer complex numbers. */
10577 if (TREE_CODE (arg0) == CONJ_EXPR
10578 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10579 return fold_mult_zconjz (loc, type, arg1);
10580 if (TREE_CODE (arg1) == CONJ_EXPR
10581 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10582 return fold_mult_zconjz (loc, type, arg0);
10584 else
10586 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10587 the result for floating point types due to rounding so it is applied
10588 only if -fassociative-math is specified. */
10589 if (flag_associative_math
10590 && TREE_CODE (arg0) == RDIV_EXPR
10591 && TREE_CODE (arg1) == REAL_CST
10592 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10594 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10595 arg1);
10596 if (tem)
10597 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10598 TREE_OPERAND (arg0, 1));
10601 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10602 if (operand_equal_p (arg0, arg1, 0))
10604 tree tem = fold_strip_sign_ops (arg0);
10605 if (tem != NULL_TREE)
10607 tem = fold_convert_loc (loc, type, tem);
10608 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10612 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10613 This is not the same for NaNs or if signed zeros are
10614 involved. */
10615 if (!HONOR_NANS (element_mode (arg0))
10616 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10617 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10618 && TREE_CODE (arg1) == COMPLEX_CST
10619 && real_zerop (TREE_REALPART (arg1)))
10621 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10622 if (real_onep (TREE_IMAGPART (arg1)))
10623 return
10624 fold_build2_loc (loc, COMPLEX_EXPR, type,
10625 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10626 rtype, arg0)),
10627 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10628 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10629 return
10630 fold_build2_loc (loc, COMPLEX_EXPR, type,
10631 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10632 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10633 rtype, arg0)));
10636 /* Optimize z * conj(z) for floating point complex numbers.
10637 Guarded by flag_unsafe_math_optimizations as non-finite
10638 imaginary components don't produce scalar results. */
10639 if (flag_unsafe_math_optimizations
10640 && TREE_CODE (arg0) == CONJ_EXPR
10641 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10642 return fold_mult_zconjz (loc, type, arg1);
10643 if (flag_unsafe_math_optimizations
10644 && TREE_CODE (arg1) == CONJ_EXPR
10645 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10646 return fold_mult_zconjz (loc, type, arg0);
10648 if (flag_unsafe_math_optimizations)
10650 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10651 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10653 /* Optimizations of root(...)*root(...). */
10654 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10656 tree rootfn, arg;
10657 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10658 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10660 /* Optimize sqrt(x)*sqrt(x) as x. */
10661 if (BUILTIN_SQRT_P (fcode0)
10662 && operand_equal_p (arg00, arg10, 0)
10663 && ! HONOR_SNANS (element_mode (type)))
10664 return arg00;
10666 /* Optimize root(x)*root(y) as root(x*y). */
10667 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10668 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10669 return build_call_expr_loc (loc, rootfn, 1, arg);
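/* E.g. sqrt(x) * sqrt(x) folds to x outright, and sqrt(x) * sqrt(y)
   to sqrt(x*y); both live under flag_unsafe_math_optimizations,
   since e.g. sqrt(x)*sqrt(y) differs from sqrt(x*y) when x and y
   are both negative.  */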
10672 /* Optimize expN(x)*expN(y) as expN(x+y). */
10673 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10675 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10676 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10677 CALL_EXPR_ARG (arg0, 0),
10678 CALL_EXPR_ARG (arg1, 0));
10679 return build_call_expr_loc (loc, expfn, 1, arg);
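/* E.g. exp(a) * exp(b) folds to exp(a+b), trading one
   transcendental call for an addition; the same applies to the
   other expN family members.  */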
10682 /* Optimizations of pow(...)*pow(...). */
10683 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10684 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10685 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10687 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10688 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10689 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10690 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10692 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10693 if (operand_equal_p (arg01, arg11, 0))
10695 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10696 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10697 arg00, arg10);
10698 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10701 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10702 if (operand_equal_p (arg00, arg10, 0))
10704 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10705 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10706 arg01, arg11);
10707 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
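/* E.g. pow(x,3.0) * pow(y,3.0) folds to pow(x*y,3.0), and
   pow(x,2.0) * pow(x,3.0) folds to pow(x,5.0).  */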
10711 /* Optimize tan(x)*cos(x) as sin(x). */
10712 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10713 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10714 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10715 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10716 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10717 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10718 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10719 CALL_EXPR_ARG (arg1, 0), 0))
10721 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10723 if (sinfn != NULL_TREE)
10724 return build_call_expr_loc (loc, sinfn, 1,
10725 CALL_EXPR_ARG (arg0, 0));
10728 /* Optimize x*pow(x,c) as pow(x,c+1). */
10729 if (fcode1 == BUILT_IN_POW
10730 || fcode1 == BUILT_IN_POWF
10731 || fcode1 == BUILT_IN_POWL)
10733 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10734 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10735 if (TREE_CODE (arg11) == REAL_CST
10736 && !TREE_OVERFLOW (arg11)
10737 && operand_equal_p (arg0, arg10, 0))
10739 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10740 REAL_VALUE_TYPE c;
10741 tree arg;
10743 c = TREE_REAL_CST (arg11);
10744 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10745 arg = build_real (type, c);
10746 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10750 /* Optimize pow(x,c)*x as pow(x,c+1). */
10751 if (fcode0 == BUILT_IN_POW
10752 || fcode0 == BUILT_IN_POWF
10753 || fcode0 == BUILT_IN_POWL)
10755 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10756 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10757 if (TREE_CODE (arg01) == REAL_CST
10758 && !TREE_OVERFLOW (arg01)
10759 && operand_equal_p (arg1, arg00, 0))
10761 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10762 REAL_VALUE_TYPE c;
10763 tree arg;
10765 c = TREE_REAL_CST (arg01);
10766 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10767 arg = build_real (type, c);
10768 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
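/* E.g. x * pow(x,2.0) and pow(x,2.0) * x both fold to
   pow(x,3.0).  */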
10772 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10773 if (!in_gimple_form
10774 && optimize
10775 && operand_equal_p (arg0, arg1, 0))
10777 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10779 if (powfn)
10781 tree arg = build_real (type, dconst2);
10782 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10787 goto associate;
10789 case BIT_IOR_EXPR:
10790 bit_ior:
10791 /* ~X | X is -1. */
10792 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10793 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10795 t1 = build_zero_cst (type);
10796 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10797 return omit_one_operand_loc (loc, type, t1, arg1);
10800 /* X | ~X is -1. */
10801 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10802 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10804 t1 = build_zero_cst (type);
10805 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10806 return omit_one_operand_loc (loc, type, t1, arg0);
10809 /* Canonicalize (X & C1) | C2. */
10810 if (TREE_CODE (arg0) == BIT_AND_EXPR
10811 && TREE_CODE (arg1) == INTEGER_CST
10812 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10814 int width = TYPE_PRECISION (type), w;
10815 wide_int c1 = TREE_OPERAND (arg0, 1);
10816 wide_int c2 = arg1;
10818 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10819 if ((c1 & c2) == c1)
10820 return omit_one_operand_loc (loc, type, arg1,
10821 TREE_OPERAND (arg0, 0));
10823 wide_int msk = wi::mask (width, false,
10824 TYPE_PRECISION (TREE_TYPE (arg1)));
10826 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10827 if (msk.and_not (c1 | c2) == 0)
10828 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10829 TREE_OPERAND (arg0, 0), arg1);
10831 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10832 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10833 mode which allows further optimizations. */
10834 c1 &= msk;
10835 c2 &= msk;
10836 wide_int c3 = c1.and_not (c2);
10837 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10839 wide_int mask = wi::mask (w, false,
10840 TYPE_PRECISION (type));
10841 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10843 c3 = mask;
10844 break;
10848 if (c3 != c1)
10849 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10850 fold_build2_loc (loc, BIT_AND_EXPR, type,
10851 TREE_OPERAND (arg0, 0),
10852 wide_int_to_tree (type,
10853 c3)),
10854 arg1);
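/* Worked instances, assuming a 32-bit type: (X & 0x0F) | 0xFF folds
   to 0xFF because C2 covers C1; (X & ~0x0FU) | 0x0F folds to
   X | 0x0F because C1|C2 sets every bit; and (X & 0xF3) | 0x03
   drops the redundant bits of C1 to give (X & 0xF0) | 0x03 -- but
   (X & 0xFF) | 0x0F is left alone, since 0xFF is already a
   mode-sized mask that later optimizations can exploit.  */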
10857 /* (X & ~Y) | (~X & Y) is X ^ Y */
10858 if (TREE_CODE (arg0) == BIT_AND_EXPR
10859 && TREE_CODE (arg1) == BIT_AND_EXPR)
10861 tree a0, a1, l0, l1, n0, n1;
10863 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10864 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10866 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10867 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10869 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10870 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10872 if ((operand_equal_p (n0, a0, 0)
10873 && operand_equal_p (n1, a1, 0))
10874 || (operand_equal_p (n0, a1, 0)
10875 && operand_equal_p (n1, a0, 0)))
10876 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10879 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10880 if (t1 != NULL_TREE)
10881 return t1;
10883 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10885 This results in more efficient code for machines without a NAND
10886 instruction. Combine will canonicalize to the first form
10887 which will allow use of NAND instructions provided by the
10888 backend if they exist. */
10889 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10890 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10892 return
10893 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10894 build2 (BIT_AND_EXPR, type,
10895 fold_convert_loc (loc, type,
10896 TREE_OPERAND (arg0, 0)),
10897 fold_convert_loc (loc, type,
10898 TREE_OPERAND (arg1, 0))));
10901 /* See if this can be simplified into a rotate first. If that
10902 is unsuccessful continue in the association code. */
10903 goto bit_rotate;
10905 case BIT_XOR_EXPR:
10906 /* ~X ^ X is -1. */
10907 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10908 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10910 t1 = build_zero_cst (type);
10911 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10912 return omit_one_operand_loc (loc, type, t1, arg1);
10915 /* X ^ ~X is -1. */
10916 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10917 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10919 t1 = build_zero_cst (type);
10920 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10921 return omit_one_operand_loc (loc, type, t1, arg0);
10924 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10925 with a constant, and the two constants have no bits in common,
10926 we should treat this as a BIT_IOR_EXPR since this may produce more
10927 simplifications. */
10928 if (TREE_CODE (arg0) == BIT_AND_EXPR
10929 && TREE_CODE (arg1) == BIT_AND_EXPR
10930 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10931 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10932 && wi::bit_and (TREE_OPERAND (arg0, 1),
10933 TREE_OPERAND (arg1, 1)) == 0)
10935 code = BIT_IOR_EXPR;
10936 goto bit_ior;
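/* E.g. (X & 0xF0) ^ (Y & 0x0F) operates on disjoint bits, so it is
   handled as (X & 0xF0) | (Y & 0x0F) by the BIT_IOR_EXPR code
   above.  */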
10939 /* (X | Y) ^ X -> Y & ~X. */
10940 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10941 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10943 tree t2 = TREE_OPERAND (arg0, 1);
10944 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10945 arg1);
10946 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10947 fold_convert_loc (loc, type, t2),
10948 fold_convert_loc (loc, type, t1));
10949 return t1;
10952 /* (Y | X) ^ X -> Y & ~X. */
10953 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10954 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10956 tree t2 = TREE_OPERAND (arg0, 0);
10957 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10958 arg1);
10959 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10960 fold_convert_loc (loc, type, t2),
10961 fold_convert_loc (loc, type, t1));
10962 return t1;
10965 /* X ^ (X | Y) -> Y & ~X. */
10966 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10967 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10969 tree t2 = TREE_OPERAND (arg1, 1);
10970 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10971 arg0);
10972 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10973 fold_convert_loc (loc, type, t2),
10974 fold_convert_loc (loc, type, t1));
10975 return t1;
10978 /* X ^ (Y | X) -> Y & ~X. */
10979 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10980 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10982 tree t2 = TREE_OPERAND (arg1, 0);
10983 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10984 arg0);
10985 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10986 fold_convert_loc (loc, type, t2),
10987 fold_convert_loc (loc, type, t1));
10988 return t1;
10991 /* Convert ~X ^ ~Y to X ^ Y. */
10992 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10993 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10994 return fold_build2_loc (loc, code, type,
10995 fold_convert_loc (loc, type,
10996 TREE_OPERAND (arg0, 0)),
10997 fold_convert_loc (loc, type,
10998 TREE_OPERAND (arg1, 0)));
11000 /* Convert ~X ^ C to X ^ ~C. */
11001 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11002 && TREE_CODE (arg1) == INTEGER_CST)
11003 return fold_build2_loc (loc, code, type,
11004 fold_convert_loc (loc, type,
11005 TREE_OPERAND (arg0, 0)),
11006 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11008 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11009 if (TREE_CODE (arg0) == BIT_AND_EXPR
11010 && INTEGRAL_TYPE_P (type)
11011 && integer_onep (TREE_OPERAND (arg0, 1))
11012 && integer_onep (arg1))
11013 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11014 build_zero_cst (TREE_TYPE (arg0)));
11016 /* Fold (X & Y) ^ Y as ~X & Y. */
11017 if (TREE_CODE (arg0) == BIT_AND_EXPR
11018 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11020 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11021 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11022 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11023 fold_convert_loc (loc, type, arg1));
11025 /* Fold (X & Y) ^ X as ~Y & X. */
11026 if (TREE_CODE (arg0) == BIT_AND_EXPR
11027 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11028 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11030 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11031 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11032 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11033 fold_convert_loc (loc, type, arg1));
11035 /* Fold X ^ (X & Y) as X & ~Y. */
11036 if (TREE_CODE (arg1) == BIT_AND_EXPR
11037 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11039 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11040 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11041 fold_convert_loc (loc, type, arg0),
11042 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11044 /* Fold X ^ (Y & X) as ~Y & X. */
11045 if (TREE_CODE (arg1) == BIT_AND_EXPR
11046 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11047 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11049 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11050 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11051 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11052 fold_convert_loc (loc, type, arg0));
11055 /* See if this can be simplified into a rotate first. If that
11056 is unsuccessful continue in the association code. */
11057 goto bit_rotate;
11059 case BIT_AND_EXPR:
11060 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11061 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11062 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11063 || (TREE_CODE (arg0) == EQ_EXPR
11064 && integer_zerop (TREE_OPERAND (arg0, 1))))
11065 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11066 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11068 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11069 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11070 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11071 || (TREE_CODE (arg1) == EQ_EXPR
11072 && integer_zerop (TREE_OPERAND (arg1, 1))))
11073 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11074 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11076 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11077 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11078 && INTEGRAL_TYPE_P (type)
11079 && integer_onep (TREE_OPERAND (arg0, 1))
11080 && integer_onep (arg1))
11082 tree tem2;
11083 tem = TREE_OPERAND (arg0, 0);
11084 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11085 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11086 tem, tem2);
11087 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11088 build_zero_cst (TREE_TYPE (tem)));
11090 /* Fold ~X & 1 as (X & 1) == 0. */
11091 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11092 && INTEGRAL_TYPE_P (type)
11093 && integer_onep (arg1))
11095 tree tem2;
11096 tem = TREE_OPERAND (arg0, 0);
11097 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11098 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11099 tem, tem2);
11100 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11101 build_zero_cst (TREE_TYPE (tem)));
11103 /* Fold !X & 1 as X == 0. */
11104 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11105 && integer_onep (arg1))
11107 tem = TREE_OPERAND (arg0, 0);
11108 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11109 build_zero_cst (TREE_TYPE (tem)));
11112 /* Fold (X ^ Y) & Y as ~X & Y. */
11113 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11114 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11116 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11117 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11118 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11119 fold_convert_loc (loc, type, arg1));
11121 /* Fold (X ^ Y) & X as ~Y & X. */
11122 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11123 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11124 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11126 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11127 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11128 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11129 fold_convert_loc (loc, type, arg1));
11131 /* Fold X & (X ^ Y) as X & ~Y. */
11132 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11133 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11135 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11136 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11137 fold_convert_loc (loc, type, arg0),
11138 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11140 /* Fold X & (Y ^ X) as ~Y & X. */
11141 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11142 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11143 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11145 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11146 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11147 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11148 fold_convert_loc (loc, type, arg0));
11151 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11152 multiple of 1 << CST. */
11153 if (TREE_CODE (arg1) == INTEGER_CST)
11155 wide_int cst1 = arg1;
11156 wide_int ncst1 = -cst1;
11157 if ((cst1 & ncst1) == ncst1
11158 && multiple_of_p (type, arg0,
11159 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11160 return fold_convert_loc (loc, type, arg0);
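/* E.g. (X * 8) & -8 folds to X * 8: the product is already a
   multiple of 8, so the low three bits are zero and the mask is a
   no-op.  */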
11163 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11164 bits from CST2. */
11165 if (TREE_CODE (arg1) == INTEGER_CST
11166 && TREE_CODE (arg0) == MULT_EXPR
11167 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11169 wide_int warg1 = arg1;
11170 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11172 if (masked == 0)
11173 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11174 arg0, arg1);
11175 else if (masked != warg1)
11177 /* Avoid the transform if arg1 is a mask of some
11178 mode which allows further optimizations. */
11179 int pop = wi::popcount (warg1);
11180 if (!(pop >= BITS_PER_UNIT
11181 && exact_log2 (pop) != -1
11182 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11183 return fold_build2_loc (loc, code, type, op0,
11184 wide_int_to_tree (type, masked));
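/* E.g. (X * 4) & 3 is always 0, and (X * 4) & 7 drops the two
   known-zero low bits to become (X * 4) & 4; but (X * 4) & 0xFF is
   left alone because 0xFF is already a mode-sized mask.  */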
11188 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11189 ((A & N) + B) & M -> (A + B) & M
11190 Similarly if (N & M) == 0,
11191 ((A | N) + B) & M -> (A + B) & M
11192 and for - instead of + (or unary - instead of +)
11193 and/or ^ instead of |.
11194 If B is constant and (B & M) == 0, fold into A & M. */
11195 if (TREE_CODE (arg1) == INTEGER_CST)
11197 wide_int cst1 = arg1;
11198 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11199 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11200 && (TREE_CODE (arg0) == PLUS_EXPR
11201 || TREE_CODE (arg0) == MINUS_EXPR
11202 || TREE_CODE (arg0) == NEGATE_EXPR)
11203 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11204 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11206 tree pmop[2];
11207 int which = 0;
11208 wide_int cst0;
11210 /* Now we know that arg0 is (C + D) or (C - D) or
11211 -C and arg1 (M) is == (1LL << cst) - 1.
11212 Store C into PMOP[0] and D into PMOP[1]. */
11213 pmop[0] = TREE_OPERAND (arg0, 0);
11214 pmop[1] = NULL;
11215 if (TREE_CODE (arg0) != NEGATE_EXPR)
11217 pmop[1] = TREE_OPERAND (arg0, 1);
11218 which = 1;
11221 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11222 which = -1;
11224 for (; which >= 0; which--)
11225 switch (TREE_CODE (pmop[which]))
11227 case BIT_AND_EXPR:
11228 case BIT_IOR_EXPR:
11229 case BIT_XOR_EXPR:
11230 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11231 != INTEGER_CST)
11232 break;
11233 cst0 = TREE_OPERAND (pmop[which], 1);
11234 cst0 &= cst1;
11235 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11237 if (cst0 != cst1)
11238 break;
11240 else if (cst0 != 0)
11241 break;
11242 /* If C or D is of the form (A & N) where
11243 (N & M) == M, or of the form (A | N) or
11244 (A ^ N) where (N & M) == 0, replace it with A. */
11245 pmop[which] = TREE_OPERAND (pmop[which], 0);
11246 break;
11247 case INTEGER_CST:
11248 /* If C or D is a N where (N & M) == 0, it can be
11249 omitted (assumed 0). */
11250 if ((TREE_CODE (arg0) == PLUS_EXPR
11251 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11252 && (cst1 & pmop[which]) == 0)
11253 pmop[which] = NULL;
11254 break;
11255 default:
11256 break;
11259 /* Only build anything new if we optimized one or both arguments
11260 above. */
11261 if (pmop[0] != TREE_OPERAND (arg0, 0)
11262 || (TREE_CODE (arg0) != NEGATE_EXPR
11263 && pmop[1] != TREE_OPERAND (arg0, 1)))
11265 tree utype = TREE_TYPE (arg0);
11266 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11268 /* Perform the operations in a type that has defined
11269 overflow behavior. */
11270 utype = unsigned_type_for (TREE_TYPE (arg0));
11271 if (pmop[0] != NULL)
11272 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11273 if (pmop[1] != NULL)
11274 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11277 if (TREE_CODE (arg0) == NEGATE_EXPR)
11278 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11279 else if (TREE_CODE (arg0) == PLUS_EXPR)
11281 if (pmop[0] != NULL && pmop[1] != NULL)
11282 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11283 pmop[0], pmop[1]);
11284 else if (pmop[0] != NULL)
11285 tem = pmop[0];
11286 else if (pmop[1] != NULL)
11287 tem = pmop[1];
11288 else
11289 return build_int_cst (type, 0);
11291 else if (pmop[0] == NULL)
11292 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11293 else
11294 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11295 pmop[0], pmop[1]);
11296 /* TEM is now the new binary +, - or unary - replacement. */
11297 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11298 fold_convert_loc (loc, utype, arg1));
11299 return fold_convert_loc (loc, type, tem);
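/* E.g. with M == 0xFF: ((A & 0x1FF) + B) & 0xFF folds to
   (A + B) & 0xFF, since the bits of A above the mask cannot affect
   the low eight bits of the sum, and ((A | 0x100) + B) & 0xFF folds
   the same way because 0x100 & 0xFF == 0.  */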
11304 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11305 if (t1 != NULL_TREE)
11306 return t1;
11307 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11308 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11309 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11311 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11313 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11314 if (mask == -1)
11315 return
11316 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11319 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11321 This results in more efficient code for machines without a NOR
11322 instruction. Combine will canonicalize to the first form
11323 which will allow use of NOR instructions provided by the
11324 backend if they exist. */
11325 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11326 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11328 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11329 build2 (BIT_IOR_EXPR, type,
11330 fold_convert_loc (loc, type,
11331 TREE_OPERAND (arg0, 0)),
11332 fold_convert_loc (loc, type,
11333 TREE_OPERAND (arg1, 0))));
11336 /* If arg0 is derived from the address of an object or function, we may
11337 be able to fold this expression using the object or function's
11338 alignment. */
11339 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11341 unsigned HOST_WIDE_INT modulus, residue;
11342 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11344 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11345 integer_onep (arg1));
11347 /* This works because modulus is a power of 2. If this weren't the
11348 case, we'd have to replace it by its greatest power-of-2
11349 divisor: modulus & -modulus. */
11350 if (low < modulus)
11351 return build_int_cst (type, residue & low);
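/* E.g. if arg0 is the address of a variable with 8-byte alignment,
   arg0 & 7 has modulus 8 and residue 0, so it folds to the
   constant 0.  */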
11354 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11355 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11356 if the new mask might be further optimized. */
11357 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11358 || TREE_CODE (arg0) == RSHIFT_EXPR)
11359 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11360 && TREE_CODE (arg1) == INTEGER_CST
11361 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11362 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11363 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11364 < TYPE_PRECISION (TREE_TYPE (arg0))))
11366 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11367 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11368 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11369 tree shift_type = TREE_TYPE (arg0);
11371 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11372 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11373 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11374 && TYPE_PRECISION (TREE_TYPE (arg0))
11375 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11377 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11378 tree arg00 = TREE_OPERAND (arg0, 0);
11379 /* See if more bits can be proven as zero because of
11380 zero extension. */
11381 if (TREE_CODE (arg00) == NOP_EXPR
11382 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11384 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11385 if (TYPE_PRECISION (inner_type)
11386 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11387 && TYPE_PRECISION (inner_type) < prec)
11389 prec = TYPE_PRECISION (inner_type);
11390 /* See if we can shorten the right shift. */
11391 if (shiftc < prec)
11392 shift_type = inner_type;
11393 /* Otherwise X >> C1 is all zeros, so we'll optimize
11394 it into (X, 0) later on by making sure zerobits
11395 is all ones. */
11398 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11399 if (shiftc < prec)
11401 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11402 zerobits <<= prec - shiftc;
11404 /* For an arithmetic shift, if the sign bit could be set, zerobits
11405 can actually contain sign bits, so no transformation is
11406 possible, unless MASK masks them all away. In that
11407 case the shift needs to be converted into a logical shift. */
11408 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11409 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11411 if ((mask & zerobits) == 0)
11412 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11413 else
11414 zerobits = 0;
11418 /* ((X << 16) & 0xff00) is (X, 0). */
11419 if ((mask & zerobits) == mask)
11420 return omit_one_operand_loc (loc, type,
11421 build_int_cst (type, 0), arg0);
11423 newmask = mask | zerobits;
11424 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11426 /* Only do the transformation if NEWMASK is some integer
11427 mode's mask. */
11428 for (prec = BITS_PER_UNIT;
11429 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11430 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11431 break;
11432 if (prec < HOST_BITS_PER_WIDE_INT
11433 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11435 tree newmaskt;
11437 if (shift_type != TREE_TYPE (arg0))
11439 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11440 fold_convert_loc (loc, shift_type,
11441 TREE_OPERAND (arg0, 0)),
11442 TREE_OPERAND (arg0, 1));
11443 tem = fold_convert_loc (loc, type, tem);
11445 else
11446 tem = op0;
11447 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11448 if (!tree_int_cst_equal (newmaskt, arg1))
11449 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11454 goto associate;
11456 case RDIV_EXPR:
11457 /* Don't touch a floating-point divide by zero unless the mode
11458 of the constant can represent infinity. */
11459 if (TREE_CODE (arg1) == REAL_CST
11460 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11461 && real_zerop (arg1))
11462 return NULL_TREE;
11464 /* (-A) / (-B) -> A / B */
11465 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11466 return fold_build2_loc (loc, RDIV_EXPR, type,
11467 TREE_OPERAND (arg0, 0),
11468 negate_expr (arg1));
11469 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11470 return fold_build2_loc (loc, RDIV_EXPR, type,
11471 negate_expr (arg0),
11472 TREE_OPERAND (arg1, 0));
11474 /* Convert A/B/C to A/(B*C). */
11475 if (flag_reciprocal_math
11476 && TREE_CODE (arg0) == RDIV_EXPR)
11477 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11478 fold_build2_loc (loc, MULT_EXPR, type,
11479 TREE_OPERAND (arg0, 1), arg1));
11481 /* Convert A/(B/C) to (A/B)*C. */
11482 if (flag_reciprocal_math
11483 && TREE_CODE (arg1) == RDIV_EXPR)
11484 return fold_build2_loc (loc, MULT_EXPR, type,
11485 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11486 TREE_OPERAND (arg1, 0)),
11487 TREE_OPERAND (arg1, 1));
11489 /* Convert C1/(X*C2) into (C1/C2)/X. */
11490 if (flag_reciprocal_math
11491 && TREE_CODE (arg1) == MULT_EXPR
11492 && TREE_CODE (arg0) == REAL_CST
11493 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11495 tree tem = const_binop (RDIV_EXPR, arg0,
11496 TREE_OPERAND (arg1, 1));
11497 if (tem)
11498 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11499 TREE_OPERAND (arg1, 0));
11502 if (flag_unsafe_math_optimizations)
11504 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11505 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11507 /* Optimize sin(x)/cos(x) as tan(x). */
11508 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11509 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11510 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11511 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11512 CALL_EXPR_ARG (arg1, 0), 0))
11514 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11516 if (tanfn != NULL_TREE)
11517 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11520 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11521 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11522 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11523 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11524 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11525 CALL_EXPR_ARG (arg1, 0), 0))
11527 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11529 if (tanfn != NULL_TREE)
11531 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11532 CALL_EXPR_ARG (arg0, 0));
11533 return fold_build2_loc (loc, RDIV_EXPR, type,
11534 build_real (type, dconst1), tmp);
11538 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11539 NaNs or Infinities. */
11540 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11541 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11542 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11544 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11545 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11547 if (! HONOR_NANS (element_mode (arg00))
11548 && ! HONOR_INFINITIES (element_mode (arg00))
11549 && operand_equal_p (arg00, arg01, 0))
11551 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11553 if (cosfn != NULL_TREE)
11554 return build_call_expr_loc (loc, cosfn, 1, arg00);
11558 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11559 NaNs or Infinities. */
11560 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11561 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11562 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11564 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11565 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11567 if (! HONOR_NANS (element_mode (arg00))
11568 && ! HONOR_INFINITIES (element_mode (arg00))
11569 && operand_equal_p (arg00, arg01, 0))
11571 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11573 if (cosfn != NULL_TREE)
11575 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11576 return fold_build2_loc (loc, RDIV_EXPR, type,
11577 build_real (type, dconst1),
11578 tmp);
11583 /* Optimize pow(x,c)/x as pow(x,c-1). */
11584 if (fcode0 == BUILT_IN_POW
11585 || fcode0 == BUILT_IN_POWF
11586 || fcode0 == BUILT_IN_POWL)
11588 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11589 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11590 if (TREE_CODE (arg01) == REAL_CST
11591 && !TREE_OVERFLOW (arg01)
11592 && operand_equal_p (arg1, arg00, 0))
11594 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11595 REAL_VALUE_TYPE c;
11596 tree arg;
11598 c = TREE_REAL_CST (arg01);
11599 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11600 arg = build_real (type, c);
11601 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11605 /* Optimize a/root(b/c) into a*root(c/b). */
11606 if (BUILTIN_ROOT_P (fcode1))
11608 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11610 if (TREE_CODE (rootarg) == RDIV_EXPR)
11612 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11613 tree b = TREE_OPERAND (rootarg, 0);
11614 tree c = TREE_OPERAND (rootarg, 1);
11616 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11618 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11619 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11623 /* Optimize x/expN(y) into x*expN(-y). */
11624 if (BUILTIN_EXPONENT_P (fcode1))
11626 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11627 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11628 arg1 = build_call_expr_loc (loc,
11629 expfn, 1,
11630 fold_convert_loc (loc, type, arg));
11631 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11634 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11635 if (fcode1 == BUILT_IN_POW
11636 || fcode1 == BUILT_IN_POWF
11637 || fcode1 == BUILT_IN_POWL)
11639 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11640 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11641 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11642 tree neg11 = fold_convert_loc (loc, type,
11643 negate_expr (arg11));
11644 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11645 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11648 return NULL_TREE;
11650 case TRUNC_DIV_EXPR:
11651 /* Optimize (X & (-A)) / A where A is a power of 2,
11652 to X >> log2(A). */
11653 if (TREE_CODE (arg0) == BIT_AND_EXPR
11654 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11655 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11657 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11658 arg1, TREE_OPERAND (arg0, 1));
11659 if (sum && integer_zerop (sum)) {
11660 tree pow2 = build_int_cst (integer_type_node,
11661 wi::exact_log2 (arg1));
11662 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11663 TREE_OPERAND (arg0, 0), pow2);
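/* E.g. for signed X, (X & -8) / 8 folds to X >> 3: clearing the low
   three bits first makes the truncating division exact, so it
   matches the arithmetic shift even for negative X.  */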
11667 /* Fall through */
11669 case FLOOR_DIV_EXPR:
11670 /* Simplify A / (B << N) where A and B are positive and B is
11671 a power of 2, to A >> (N + log2(B)). */
11672 strict_overflow_p = false;
11673 if (TREE_CODE (arg1) == LSHIFT_EXPR
11674 && (TYPE_UNSIGNED (type)
11675 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11677 tree sval = TREE_OPERAND (arg1, 0);
11678 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11680 tree sh_cnt = TREE_OPERAND (arg1, 1);
11681 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11682 wi::exact_log2 (sval));
11684 if (strict_overflow_p)
11685 fold_overflow_warning (("assuming signed overflow does not "
11686 "occur when simplifying A / (B << N)"),
11687 WARN_STRICT_OVERFLOW_MISC);
11689 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11690 sh_cnt, pow2);
11691 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11692 fold_convert_loc (loc, type, arg0), sh_cnt);
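/* E.g. for unsigned A, A / (4 << N) folds to A >> (N + 2); for
   signed A the fold is still done when A can be shown nonnegative,
   with a warning if that proof assumed undefined signed
   overflow.  */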
11696 /* Fall through */
11698 case ROUND_DIV_EXPR:
11699 case CEIL_DIV_EXPR:
11700 case EXACT_DIV_EXPR:
11701 if (integer_zerop (arg1))
11702 return NULL_TREE;
11704 /* Convert -A / -B to A / B when the type is signed and overflow is
11705 undefined. */
11706 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11707 && TREE_CODE (arg0) == NEGATE_EXPR
11708 && negate_expr_p (arg1))
11710 if (INTEGRAL_TYPE_P (type))
11711 fold_overflow_warning (("assuming signed overflow does not occur "
11712 "when distributing negation across "
11713 "division"),
11714 WARN_STRICT_OVERFLOW_MISC);
11715 return fold_build2_loc (loc, code, type,
11716 fold_convert_loc (loc, type,
11717 TREE_OPERAND (arg0, 0)),
11718 fold_convert_loc (loc, type,
11719 negate_expr (arg1)));
11721 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11722 && TREE_CODE (arg1) == NEGATE_EXPR
11723 && negate_expr_p (arg0))
11725 if (INTEGRAL_TYPE_P (type))
11726 fold_overflow_warning (("assuming signed overflow does not occur "
11727 "when distributing negation across "
11728 "division"),
11729 WARN_STRICT_OVERFLOW_MISC);
11730 return fold_build2_loc (loc, code, type,
11731 fold_convert_loc (loc, type,
11732 negate_expr (arg0)),
11733 fold_convert_loc (loc, type,
11734 TREE_OPERAND (arg1, 0)));
11737 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11738 operation, EXACT_DIV_EXPR.
11740 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11741 At one time others generated faster code; it's not clear whether they do
11742 after the last round of changes to the DIV code in expmed.c. */
11743 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11744 && multiple_of_p (type, arg0, arg1))
11745 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11747 strict_overflow_p = false;
11748 if (TREE_CODE (arg1) == INTEGER_CST
11749 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11750 &strict_overflow_p)))
11752 if (strict_overflow_p)
11753 fold_overflow_warning (("assuming signed overflow does not occur "
11754 "when simplifying division"),
11755 WARN_STRICT_OVERFLOW_MISC);
11756 return fold_convert_loc (loc, type, tem);
11759 return NULL_TREE;
11761 case CEIL_MOD_EXPR:
11762 case FLOOR_MOD_EXPR:
11763 case ROUND_MOD_EXPR:
11764 case TRUNC_MOD_EXPR:
11765 /* X % -Y is the same as X % Y. */
11766 if (code == TRUNC_MOD_EXPR
11767 && !TYPE_UNSIGNED (type)
11768 && TREE_CODE (arg1) == NEGATE_EXPR
11769 && !TYPE_OVERFLOW_TRAPS (type))
11770 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11771 fold_convert_loc (loc, type,
11772 TREE_OPERAND (arg1, 0)));
11774 strict_overflow_p = false;
11775 if (TREE_CODE (arg1) == INTEGER_CST
11776 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11777 &strict_overflow_p)))
11779 if (strict_overflow_p)
11780 fold_overflow_warning (("assuming signed overflow does not occur "
11781 "when simplifying modulus"),
11782 WARN_STRICT_OVERFLOW_MISC);
11783 return fold_convert_loc (loc, type, tem);
11786 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11787 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11788 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11789 && (TYPE_UNSIGNED (type)
11790 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11792 tree c = arg1;
11793 /* Also optimize A % (C << N) where C is a power of 2,
11794 to A & ((C << N) - 1). */
11795 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11796 c = TREE_OPERAND (arg1, 0);
11798 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11800 tree mask
11801 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11802 build_int_cst (TREE_TYPE (arg1), 1));
11803 if (strict_overflow_p)
11804 fold_overflow_warning (("assuming signed overflow does not "
11805 "occur when simplifying "
11806 "X % (power of two)"),
11807 WARN_STRICT_OVERFLOW_MISC);
11808 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11809 fold_convert_loc (loc, type, arg0),
11810 fold_convert_loc (loc, type, mask));
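/* E.g. for unsigned X, X % 8 folds to X & 7, and X % (2 << N)
   folds to X & ((2 << N) - 1).  */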
11814 return NULL_TREE;
11816 case LROTATE_EXPR:
11817 case RROTATE_EXPR:
11818 case RSHIFT_EXPR:
11819 case LSHIFT_EXPR:
11820 /* Since a negative shift count is not well-defined,
11821 don't try to compute it in the compiler. */
11822 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11823 return NULL_TREE;
11825 prec = element_precision (type);
11827 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11828 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11829 && tree_to_uhwi (arg1) < prec
11830 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11831 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11833 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11834 + tree_to_uhwi (arg1));
11836 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11837 being well defined. */
11838 if (low >= prec)
11840 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11841 low = low % prec;
11842 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11843 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11844 TREE_OPERAND (arg0, 0));
11845 else
11846 low = prec - 1;
11849 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11850 build_int_cst (TREE_TYPE (arg1), low));
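/* E.g. (X >> 3) >> 5 folds to X >> 8 when the precision exceeds 8;
   for an 8-bit type the combined count saturates as described
   above, giving X >> 7 for signed X and the constant 0 for
   unsigned X.  */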
11853 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11854 into x & ((unsigned)-1 >> c) for unsigned types. */
11855 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11856 || (TYPE_UNSIGNED (type)
11857 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11858 && tree_fits_uhwi_p (arg1)
11859 && tree_to_uhwi (arg1) < prec
11860 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11861 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11863 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11864 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
11865 tree lshift;
11866 tree arg00;
11868 if (low0 == low1)
11870 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11872 lshift = build_minus_one_cst (type);
11873 lshift = const_binop (code, lshift, arg1);
11875 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
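/* E.g. (X >> 4) << 4 folds to X & -16, and for unsigned X,
   (X << 4) >> 4 folds to X & (~0U >> 4).  */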
11879 /* If we have a rotate of a bit operation with the rotate count and
11880 the second operand of the bit operation both constant,
11881 permute the two operations. */
11882 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11883 && (TREE_CODE (arg0) == BIT_AND_EXPR
11884 || TREE_CODE (arg0) == BIT_IOR_EXPR
11885 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11886 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11887 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11888 fold_build2_loc (loc, code, type,
11889 TREE_OPERAND (arg0, 0), arg1),
11890 fold_build2_loc (loc, code, type,
11891 TREE_OPERAND (arg0, 1), arg1));
11893 /* Two consecutive rotates adding up to some integer
11894 multiple of the precision of the type can be ignored. */
11895 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11896 && TREE_CODE (arg0) == RROTATE_EXPR
11897 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11898 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
11899 prec) == 0)
11900 return TREE_OPERAND (arg0, 0);
11902 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11903 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11904 if the latter can be further optimized. */
11905 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11906 && TREE_CODE (arg0) == BIT_AND_EXPR
11907 && TREE_CODE (arg1) == INTEGER_CST
11908 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11910 tree mask = fold_build2_loc (loc, code, type,
11911 fold_convert_loc (loc, type,
11912 TREE_OPERAND (arg0, 1)),
11913 arg1);
11914 tree shift = fold_build2_loc (loc, code, type,
11915 fold_convert_loc (loc, type,
11916 TREE_OPERAND (arg0, 0)),
11917 arg1);
11918 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11919 if (tem)
11920 return tem;
11923 return NULL_TREE;
11925 case MIN_EXPR:
11926 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11927 if (tem)
11928 return tem;
11929 goto associate;
11931 case MAX_EXPR:
11932 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11933 if (tem)
11934 return tem;
11935 goto associate;
11937 case TRUTH_ANDIF_EXPR:
11938 /* Note that the operands of this must be ints
11939 and their values must be 0 or 1.
11940 ("true" is a fixed value perhaps depending on the language.) */
11941 /* If first arg is constant zero, return it. */
11942 if (integer_zerop (arg0))
11943 return fold_convert_loc (loc, type, arg0);
11944 case TRUTH_AND_EXPR:
11945 /* If either arg is constant true, drop it. */
11946 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11947 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11948 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11949 /* Preserve sequence points. */
11950 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11951 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11952 /* If second arg is constant zero, result is zero, but first arg
11953 must be evaluated. */
11954 if (integer_zerop (arg1))
11955 return omit_one_operand_loc (loc, type, arg1, arg0);
11956 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11957 case will be handled here. */
11958 if (integer_zerop (arg0))
11959 return omit_one_operand_loc (loc, type, arg0, arg1);
11961 /* !X && X is always false. */
11962 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11963 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11964 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11965 /* X && !X is always false. */
11966 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11967 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11968 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11970 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11971 means A >= Y && A != MAX, but in this case we know that
11972 A < X <= MAX. */
11974 if (!TREE_SIDE_EFFECTS (arg0)
11975 && !TREE_SIDE_EFFECTS (arg1))
11977 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11978 if (tem && !operand_equal_p (tem, arg0, 0))
11979 return fold_build2_loc (loc, code, type, tem, arg1);
11981 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11982 if (tem && !operand_equal_p (tem, arg1, 0))
11983 return fold_build2_loc (loc, code, type, arg0, tem);
11986 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11987 != NULL_TREE)
11988 return tem;
11990 return NULL_TREE;
11992 case TRUTH_ORIF_EXPR:
11993 /* Note that the operands of this must be ints
11994 and their values must be 0 or true.
11995 ("true" is a fixed value perhaps depending on the language.) */
11996 /* If first arg is constant true, return it. */
11997 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11998 return fold_convert_loc (loc, type, arg0);
11999 case TRUTH_OR_EXPR:
12000 /* If either arg is constant zero, drop it. */
12001 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12002 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12003 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12004 /* Preserve sequence points. */
12005 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12006 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12007 /* If second arg is constant true, result is true, but we must
12008 evaluate first arg. */
12009 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12010 return omit_one_operand_loc (loc, type, arg1, arg0);
12011 /* Likewise for first arg, but note this only occurs here for
12012 TRUTH_OR_EXPR. */
12013 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12014 return omit_one_operand_loc (loc, type, arg0, arg1);
12016 /* !X || X is always true. */
12017 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12018 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12019 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12020 /* X || !X is always true. */
12021 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12022 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12023 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12025 /* (X && !Y) || (!X && Y) is X ^ Y */
12026 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12027 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12029 tree a0, a1, l0, l1, n0, n1;
12031 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12032 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12034 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12035 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12037 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12038 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12040 if ((operand_equal_p (n0, a0, 0)
12041 && operand_equal_p (n1, a1, 0))
12042 || (operand_equal_p (n0, a1, 0)
12043 && operand_equal_p (n1, a0, 0)))
12044 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12047 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12048 != NULL_TREE)
12049 return tem;
12051 return NULL_TREE;
12053 case TRUTH_XOR_EXPR:
12054 /* If the second arg is constant zero, drop it. */
12055 if (integer_zerop (arg1))
12056 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12057 /* If the second arg is constant true, this is a logical inversion. */
12058 if (integer_onep (arg1))
12060 tem = invert_truthvalue_loc (loc, arg0);
12061 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12063 /* Identical arguments cancel to zero. */
12064 if (operand_equal_p (arg0, arg1, 0))
12065 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12067 /* !X ^ X is always true. */
12068 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12069 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12070 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12072 /* X ^ !X is always true. */
12073 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12074 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12075 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12077 return NULL_TREE;
12079 case EQ_EXPR:
12080 case NE_EXPR:
12081 STRIP_NOPS (arg0);
12082 STRIP_NOPS (arg1);
12084 tem = fold_comparison (loc, code, type, op0, op1);
12085 if (tem != NULL_TREE)
12086 return tem;
12088 /* bool_var != 0 becomes bool_var. */
12089 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12090 && code == NE_EXPR)
12091 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12093 /* bool_var == 1 becomes bool_var. */
12094 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12095 && code == EQ_EXPR)
12096 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12098 /* bool_var != 1 becomes !bool_var. */
12099 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12100 && code == NE_EXPR)
12101 return fold_convert_loc (loc, type,
12102 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12103 TREE_TYPE (arg0), arg0));
12105 /* bool_var == 0 becomes !bool_var. */
12106 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12107 && code == EQ_EXPR)
12108 return fold_convert_loc (loc, type,
12109 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12110 TREE_TYPE (arg0), arg0));
12112 /* !exp != 0 becomes !exp */
12113 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12114 && code == NE_EXPR)
12115 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12117 /* If this is an equality comparison of the address of two non-weak,
12118 unaliased symbols neither of which is extern (since we do not
12119 have access to attributes for externs), then we know the result. */
12120 if (TREE_CODE (arg0) == ADDR_EXPR
12121 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12122 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12123 && ! lookup_attribute ("alias",
12124 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12125 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12126 && TREE_CODE (arg1) == ADDR_EXPR
12127 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12128 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12129 && ! lookup_attribute ("alias",
12130 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12131 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12133 /* We know that we're looking at the address of two
12134 non-weak, unaliased, static _DECL nodes.
12136 It is both wasteful and incorrect to call operand_equal_p
12137 to compare the two ADDR_EXPR nodes. It is wasteful in that
12138 all we need to do is test pointer equality for the arguments
12139 to the two ADDR_EXPR nodes. It is incorrect to use
12140 operand_equal_p as that function is NOT equivalent to a
12141 C equality test. It can in fact return false for two
12142 objects which would test as equal using the C equality
12143 operator. */
12144 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12145 return constant_boolean_node (equal
12146 ? code == EQ_EXPR : code != EQ_EXPR,
12147 type);
12150 /* Similarly for a NEGATE_EXPR. */
12151 if (TREE_CODE (arg0) == NEGATE_EXPR
12152 && TREE_CODE (arg1) == INTEGER_CST
12153 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12154 arg1)))
12155 && TREE_CODE (tem) == INTEGER_CST
12156 && !TREE_OVERFLOW (tem))
12157 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12159 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12160 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12161 && TREE_CODE (arg1) == INTEGER_CST
12162 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12163 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12164 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12165 fold_convert_loc (loc,
12166 TREE_TYPE (arg0),
12167 arg1),
12168 TREE_OPERAND (arg0, 1)));
12170 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12171 if ((TREE_CODE (arg0) == PLUS_EXPR
12172 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12173 || TREE_CODE (arg0) == MINUS_EXPR)
12174 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12175 0)),
12176 arg1, 0)
12177 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12178 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12180 tree val = TREE_OPERAND (arg0, 1);
12181 return omit_two_operands_loc (loc, type,
12182 fold_build2_loc (loc, code, type,
12183 val,
12184 build_int_cst (TREE_TYPE (val),
12185 0)),
12186 TREE_OPERAND (arg0, 0), arg1);
12189 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12190 if (TREE_CODE (arg0) == MINUS_EXPR
12191 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12192 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12193 1)),
12194 arg1, 0)
12195 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12197 return omit_two_operands_loc (loc, type,
12198 code == NE_EXPR
12199 ? boolean_true_node : boolean_false_node,
12200 TREE_OPERAND (arg0, 1), arg1);
12203 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12204 if (TREE_CODE (arg0) == ABS_EXPR
12205 && (integer_zerop (arg1) || real_zerop (arg1)))
12206 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12208 /* If this is an EQ or NE comparison with zero and ARG0 is
12209 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12210 two operations, but the latter can be done in one less insn
12211 on machines that have only two-operand insns or on which a
12212 constant cannot be the first operand. */
12213 if (TREE_CODE (arg0) == BIT_AND_EXPR
12214 && integer_zerop (arg1))
12216 tree arg00 = TREE_OPERAND (arg0, 0);
12217 tree arg01 = TREE_OPERAND (arg0, 1);
12218 if (TREE_CODE (arg00) == LSHIFT_EXPR
12219 && integer_onep (TREE_OPERAND (arg00, 0)))
12221 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12222 arg01, TREE_OPERAND (arg00, 1));
12223 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12224 build_int_cst (TREE_TYPE (arg0), 1));
12225 return fold_build2_loc (loc, code, type,
12226 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12227 arg1);
12229 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12230 && integer_onep (TREE_OPERAND (arg01, 0)))
12232 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12233 arg00, TREE_OPERAND (arg01, 1));
12234 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12235 build_int_cst (TREE_TYPE (arg0), 1));
12236 return fold_build2_loc (loc, code, type,
12237 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12238 arg1);
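/* E.g. ((1 << N) & X) == 0 becomes ((X >> N) & 1) == 0, avoiding a
   separate constant load on two-operand machines.  */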
12242 /* If this is an NE or EQ comparison of zero against the result of a
12243 signed MOD operation whose second operand is a power of 2, make
12244 the MOD operation unsigned since it is simpler and equivalent. */
12245 if (integer_zerop (arg1)
12246 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12247 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12248 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12249 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12250 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12251 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12253 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12254 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12255 fold_convert_loc (loc, newtype,
12256 TREE_OPERAND (arg0, 0)),
12257 fold_convert_loc (loc, newtype,
12258 TREE_OPERAND (arg0, 1)));
12260 return fold_build2_loc (loc, code, type, newmod,
12261 fold_convert_loc (loc, newtype, arg1));
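/* E.g. for signed int X, X % 4 == 0 becomes (unsigned) X % 4 == 0:
   with a power-of-two divisor both tests only examine the low bits,
   and the unsigned modulus is simpler to expand.  */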
12264 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12265 C1 is a valid shift constant, and C2 is a power of two, i.e.
12266 a single bit. */
12267 if (TREE_CODE (arg0) == BIT_AND_EXPR
12268 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12269 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12270 == INTEGER_CST
12271 && integer_pow2p (TREE_OPERAND (arg0, 1))
12272 && integer_zerop (arg1))
12274 tree itype = TREE_TYPE (arg0);
12275 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12276 prec = TYPE_PRECISION (itype);
12278 /* Check for a valid shift count. */
12279 if (wi::ltu_p (arg001, prec))
12281 tree arg01 = TREE_OPERAND (arg0, 1);
12282 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12283 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12284 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12285 can be rewritten as (X & (C2 << C1)) != 0. */
12286 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12288 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12289 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12290 return fold_build2_loc (loc, code, type, tem,
12291 fold_convert_loc (loc, itype, arg1));
12293 /* Otherwise, for signed (arithmetic) shifts,
12294 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12295 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12296 else if (!TYPE_UNSIGNED (itype))
12297 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12298 arg000, build_int_cst (itype, 0));
12299 /* Otherwise, for unsigned (logical) shifts,
12300 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12301 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12302 else
12303 return omit_one_operand_loc (loc, type,
12304 code == EQ_EXPR ? integer_one_node
12305 : integer_zero_node,
12306 arg000);
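/* Worked example: "((x >> 2) & 4) != 0" tests bit 4 of x; since
   4 << 2 == 16 still fits in the precision, it becomes
   "(x & 16) != 0".  When C2 << C1 would overflow, only the sign
   bit can be selected, giving the x < 0 / x >= 0 forms above.  */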
12310 /* If we have (A & C) == C where C is a power of 2, convert this into
12311 (A & C) != 0. Similarly for NE_EXPR. */
12312 if (TREE_CODE (arg0) == BIT_AND_EXPR
12313 && integer_pow2p (TREE_OPERAND (arg0, 1))
12314 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12315 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12316 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12317 integer_zero_node));
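/* E.g. "(x & 8) == 8" becomes "(x & 8) != 0": with a single-bit
   mask the AND result is either 0 or the mask itself.  */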
12319 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12320 bit, then fold the expression into A < 0 or A >= 0. */
12321 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12322 if (tem)
12323 return tem;
12325 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12326 Similarly for NE_EXPR. */
12327 if (TREE_CODE (arg0) == BIT_AND_EXPR
12328 && TREE_CODE (arg1) == INTEGER_CST
12329 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12331 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12332 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12333 TREE_OPERAND (arg0, 1));
12334 tree dandnotc
12335 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12336 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12337 notc);
12338 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12339 if (integer_nonzerop (dandnotc))
12340 return omit_one_operand_loc (loc, type, rslt, arg0);
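/* E.g. "(x & 6) == 1" is always false: bit 0 of the AND result is
   always clear, but bit 0 of the constant is set (1 & ~6 != 0).  */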
12343 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12344 Similarly for NE_EXPR. */
12345 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12346 && TREE_CODE (arg1) == INTEGER_CST
12347 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12349 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12350 tree candnotd
12351 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12352 TREE_OPERAND (arg0, 1),
12353 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12354 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12355 if (integer_nonzerop (candnotd))
12356 return omit_one_operand_loc (loc, type, rslt, arg0);
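/* E.g. "(x | 4) == 3" is always false: bit 2 is always set on the
   left-hand side but clear in the constant (4 & ~3 != 0).  */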
12359 /* If this is a comparison of a field, we may be able to simplify it. */
12360 if ((TREE_CODE (arg0) == COMPONENT_REF
12361 || TREE_CODE (arg0) == BIT_FIELD_REF)
12362 /* Handle the constant case even without -O
12363 to make sure the warnings are given. */
12364 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12366 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12367 if (t1)
12368 return t1;
12371 /* Optimize comparisons of strlen vs zero to a compare of the
12372 first character of the string vs zero. To wit,
12373 strlen(ptr) == 0 => *ptr == 0
12374 strlen(ptr) != 0 => *ptr != 0
12375 Other cases should reduce to one of these two (or a constant)
12376 due to the return value of strlen being unsigned. */
12377 if (TREE_CODE (arg0) == CALL_EXPR
12378 && integer_zerop (arg1))
12380 tree fndecl = get_callee_fndecl (arg0);
12382 if (fndecl
12383 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12384 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12385 && call_expr_nargs (arg0) == 1
12386 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12388 tree iref = build_fold_indirect_ref_loc (loc,
12389 CALL_EXPR_ARG (arg0, 0));
12390 return fold_build2_loc (loc, code, type, iref,
12391 build_int_cst (TREE_TYPE (iref), 0));
12395 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12396 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12397 if (TREE_CODE (arg0) == RSHIFT_EXPR
12398 && integer_zerop (arg1)
12399 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12401 tree arg00 = TREE_OPERAND (arg0, 0);
12402 tree arg01 = TREE_OPERAND (arg0, 1);
12403 tree itype = TREE_TYPE (arg00);
12404 if (wi::eq_p (arg01, element_precision (itype) - 1))
12406 if (TYPE_UNSIGNED (itype))
12408 itype = signed_type_for (itype);
12409 arg00 = fold_convert_loc (loc, itype, arg00);
12411 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12412 type, arg00, build_zero_cst (itype));
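/* For 32-bit int, "(x >> 31) != 0" keeps only the sign bit and so
   becomes "x < 0"; an unsigned operand is first converted to the
   corresponding signed type.  */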
12416 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12417 if (integer_zerop (arg1)
12418 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12419 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12420 TREE_OPERAND (arg0, 1));
12422 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12423 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12424 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12425 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12426 build_zero_cst (TREE_TYPE (arg0)));
12427 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12428 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12429 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12430 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12431 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12432 build_zero_cst (TREE_TYPE (arg0)));
12434 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12435 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12436 && TREE_CODE (arg1) == INTEGER_CST
12437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12438 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12439 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12440 TREE_OPERAND (arg0, 1), arg1));
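/* E.g. "(x ^ 3) == 5" becomes "x == 6": XOR-ing both sides with 3
   leaves x on the left and 3 ^ 5 == 6 on the right.  */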
12442 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12443 (X & C) == 0 when C is a single bit. */
12444 if (TREE_CODE (arg0) == BIT_AND_EXPR
12445 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12446 && integer_zerop (arg1)
12447 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12449 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12450 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12451 TREE_OPERAND (arg0, 1));
12452 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12453 type, tem,
12454 fold_convert_loc (loc, TREE_TYPE (arg0),
12455 arg1));
12458 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12459 constant C is a power of two, i.e. a single bit. */
12460 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12461 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12462 && integer_zerop (arg1)
12463 && integer_pow2p (TREE_OPERAND (arg0, 1))
12464 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12465 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12467 tree arg00 = TREE_OPERAND (arg0, 0);
12468 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12469 arg00, build_int_cst (TREE_TYPE (arg00), 0));
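/* E.g. "((x & 4) ^ 4) == 0" holds exactly when bit 2 of x is set,
   so it becomes "(x & 4) != 0".  */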
12472 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12473 when C is a power of two, i.e. a single bit. */
12474 if (TREE_CODE (arg0) == BIT_AND_EXPR
12475 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12476 && integer_zerop (arg1)
12477 && integer_pow2p (TREE_OPERAND (arg0, 1))
12478 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12479 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12481 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12482 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12483 arg000, TREE_OPERAND (arg0, 1));
12484 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12485 tem, build_int_cst (TREE_TYPE (tem), 0));
12488 if (integer_zerop (arg1)
12489 && tree_expr_nonzero_p (arg0))
12491 tree res = constant_boolean_node (code == NE_EXPR, type);
12492 return omit_one_operand_loc (loc, type, res, arg0);
12495 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12496 if (TREE_CODE (arg0) == NEGATE_EXPR
12497 && TREE_CODE (arg1) == NEGATE_EXPR)
12498 return fold_build2_loc (loc, code, type,
12499 TREE_OPERAND (arg0, 0),
12500 fold_convert_loc (loc, TREE_TYPE (arg0),
12501 TREE_OPERAND (arg1, 0)));
12503 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12504 if (TREE_CODE (arg0) == BIT_AND_EXPR
12505 && TREE_CODE (arg1) == BIT_AND_EXPR)
12507 tree arg00 = TREE_OPERAND (arg0, 0);
12508 tree arg01 = TREE_OPERAND (arg0, 1);
12509 tree arg10 = TREE_OPERAND (arg1, 0);
12510 tree arg11 = TREE_OPERAND (arg1, 1);
12511 tree itype = TREE_TYPE (arg0);
12513 if (operand_equal_p (arg01, arg11, 0))
12514 return fold_build2_loc (loc, code, type,
12515 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12516 fold_build2_loc (loc,
12517 BIT_XOR_EXPR, itype,
12518 arg00, arg10),
12519 arg01),
12520 build_zero_cst (itype));
12522 if (operand_equal_p (arg01, arg10, 0))
12523 return fold_build2_loc (loc, code, type,
12524 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12525 fold_build2_loc (loc,
12526 BIT_XOR_EXPR, itype,
12527 arg00, arg11),
12528 arg01),
12529 build_zero_cst (itype));
12531 if (operand_equal_p (arg00, arg11, 0))
12532 return fold_build2_loc (loc, code, type,
12533 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12534 fold_build2_loc (loc,
12535 BIT_XOR_EXPR, itype,
12536 arg01, arg10),
12537 arg00),
12538 build_zero_cst (itype));
12540 if (operand_equal_p (arg00, arg10, 0))
12541 return fold_build2_loc (loc, code, type,
12542 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12543 fold_build2_loc (loc,
12544 BIT_XOR_EXPR, itype,
12545 arg01, arg11),
12546 arg00),
12547 build_zero_cst (itype));
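/* All four symmetries reduce "(x & m) == (y & m)" to
   "((x ^ y) & m) == 0": the masked values agree exactly when no
   bit under the mask differs.  */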
12550 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12551 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12553 tree arg00 = TREE_OPERAND (arg0, 0);
12554 tree arg01 = TREE_OPERAND (arg0, 1);
12555 tree arg10 = TREE_OPERAND (arg1, 0);
12556 tree arg11 = TREE_OPERAND (arg1, 1);
12557 tree itype = TREE_TYPE (arg0);
12559 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12560 operand_equal_p guarantees no side-effects so we don't need
12561 to use omit_one_operand on Z. */
12562 if (operand_equal_p (arg01, arg11, 0))
12563 return fold_build2_loc (loc, code, type, arg00,
12564 fold_convert_loc (loc, TREE_TYPE (arg00),
12565 arg10));
12566 if (operand_equal_p (arg01, arg10, 0))
12567 return fold_build2_loc (loc, code, type, arg00,
12568 fold_convert_loc (loc, TREE_TYPE (arg00),
12569 arg11));
12570 if (operand_equal_p (arg00, arg11, 0))
12571 return fold_build2_loc (loc, code, type, arg01,
12572 fold_convert_loc (loc, TREE_TYPE (arg01),
12573 arg10));
12574 if (operand_equal_p (arg00, arg10, 0))
12575 return fold_build2_loc (loc, code, type, arg01,
12576 fold_convert_loc (loc, TREE_TYPE (arg01),
12577 arg11));
12579 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12580 if (TREE_CODE (arg01) == INTEGER_CST
12581 && TREE_CODE (arg11) == INTEGER_CST)
12583 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12584 fold_convert_loc (loc, itype, arg11));
12585 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12586 return fold_build2_loc (loc, code, type, tem,
12587 fold_convert_loc (loc, itype, arg10));
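/* E.g. "(x ^ 1) == (y ^ 2)" becomes "(x ^ 3) == y" by folding the
   two constants into one, since 1 ^ 2 == 3.  */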
12591 /* Attempt to simplify equality/inequality comparisons of complex
12592 values. Only lower the comparison if the result is known or
12593 can be simplified to a single scalar comparison. */
12594 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12595 || TREE_CODE (arg0) == COMPLEX_CST)
12596 && (TREE_CODE (arg1) == COMPLEX_EXPR
12597 || TREE_CODE (arg1) == COMPLEX_CST))
12599 tree real0, imag0, real1, imag1;
12600 tree rcond, icond;
12602 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12604 real0 = TREE_OPERAND (arg0, 0);
12605 imag0 = TREE_OPERAND (arg0, 1);
12607 else
12609 real0 = TREE_REALPART (arg0);
12610 imag0 = TREE_IMAGPART (arg0);
12613 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12615 real1 = TREE_OPERAND (arg1, 0);
12616 imag1 = TREE_OPERAND (arg1, 1);
12618 else
12620 real1 = TREE_REALPART (arg1);
12621 imag1 = TREE_IMAGPART (arg1);
12624 rcond = fold_binary_loc (loc, code, type, real0, real1);
12625 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12627 if (integer_zerop (rcond))
12629 if (code == EQ_EXPR)
12630 return omit_two_operands_loc (loc, type, boolean_false_node,
12631 imag0, imag1);
12632 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12634 else
12636 if (code == NE_EXPR)
12637 return omit_two_operands_loc (loc, type, boolean_true_node,
12638 imag0, imag1);
12639 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12643 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12644 if (icond && TREE_CODE (icond) == INTEGER_CST)
12646 if (integer_zerop (icond))
12648 if (code == EQ_EXPR)
12649 return omit_two_operands_loc (loc, type, boolean_false_node,
12650 real0, real1);
12651 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12653 else
12655 if (code == NE_EXPR)
12656 return omit_two_operands_loc (loc, type, boolean_true_node,
12657 real0, real1);
12658 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12663 return NULL_TREE;
12665 case LT_EXPR:
12666 case GT_EXPR:
12667 case LE_EXPR:
12668 case GE_EXPR:
12669 tem = fold_comparison (loc, code, type, op0, op1);
12670 if (tem != NULL_TREE)
12671 return tem;
12673 /* Transform comparisons of the form X +- C CMP X. */
12674 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12675 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12676 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12677 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12678 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12679 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12681 tree arg01 = TREE_OPERAND (arg0, 1);
12682 enum tree_code code0 = TREE_CODE (arg0);
12683 int is_positive;
12685 if (TREE_CODE (arg01) == REAL_CST)
12686 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12687 else
12688 is_positive = tree_int_cst_sgn (arg01);
12690 /* (X - c) > X becomes false. */
12691 if (code == GT_EXPR
12692 && ((code0 == MINUS_EXPR && is_positive >= 0)
12693 || (code0 == PLUS_EXPR && is_positive <= 0)))
12695 if (TREE_CODE (arg01) == INTEGER_CST
12696 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12697 fold_overflow_warning (("assuming signed overflow does not "
12698 "occur when assuming that (X - c) > X "
12699 "is always false"),
12700 WARN_STRICT_OVERFLOW_ALL);
12701 return constant_boolean_node (0, type);
12704 /* Likewise (X + c) < X becomes false. */
12705 if (code == LT_EXPR
12706 && ((code0 == PLUS_EXPR && is_positive >= 0)
12707 || (code0 == MINUS_EXPR && is_positive <= 0)))
12709 if (TREE_CODE (arg01) == INTEGER_CST
12710 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12711 fold_overflow_warning (("assuming signed overflow does not "
12712 "occur when assuming that "
12713 "(X + c) < X is always false"),
12714 WARN_STRICT_OVERFLOW_ALL);
12715 return constant_boolean_node (0, type);
12718 /* Convert (X - c) <= X to true. */
12719 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12720 && code == LE_EXPR
12721 && ((code0 == MINUS_EXPR && is_positive >= 0)
12722 || (code0 == PLUS_EXPR && is_positive <= 0)))
12724 if (TREE_CODE (arg01) == INTEGER_CST
12725 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12726 fold_overflow_warning (("assuming signed overflow does not "
12727 "occur when assuming that "
12728 "(X - c) <= X is always true"),
12729 WARN_STRICT_OVERFLOW_ALL);
12730 return constant_boolean_node (1, type);
12733 /* Convert (X + c) >= X to true. */
12734 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12735 && code == GE_EXPR
12736 && ((code0 == PLUS_EXPR && is_positive >= 0)
12737 || (code0 == MINUS_EXPR && is_positive <= 0)))
12739 if (TREE_CODE (arg01) == INTEGER_CST
12740 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12741 fold_overflow_warning (("assuming signed overflow does not "
12742 "occur when assuming that "
12743 "(X + c) >= X is always true"),
12744 WARN_STRICT_OVERFLOW_ALL);
12745 return constant_boolean_node (1, type);
12748 if (TREE_CODE (arg01) == INTEGER_CST)
12750 /* Convert X + c > X and X - c < X to true for integers. */
12751 if (code == GT_EXPR
12752 && ((code0 == PLUS_EXPR && is_positive > 0)
12753 || (code0 == MINUS_EXPR && is_positive < 0)))
12755 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12756 fold_overflow_warning (("assuming signed overflow does "
12757 "not occur when assuming that "
12758 "(X + c) > X is always true"),
12759 WARN_STRICT_OVERFLOW_ALL);
12760 return constant_boolean_node (1, type);
12763 if (code == LT_EXPR
12764 && ((code0 == MINUS_EXPR && is_positive > 0)
12765 || (code0 == PLUS_EXPR && is_positive < 0)))
12767 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12768 fold_overflow_warning (("assuming signed overflow does "
12769 "not occur when assuming that "
12770 "(X - c) < X is always true"),
12771 WARN_STRICT_OVERFLOW_ALL);
12772 return constant_boolean_node (1, type);
12775 /* Convert X + c <= X and X - c >= X to false for integers. */
12776 if (code == LE_EXPR
12777 && ((code0 == PLUS_EXPR && is_positive > 0)
12778 || (code0 == MINUS_EXPR && is_positive < 0)))
12780 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12781 fold_overflow_warning (("assuming signed overflow does "
12782 "not occur when assuming that "
12783 "(X + c) <= X is always false"),
12784 WARN_STRICT_OVERFLOW_ALL);
12785 return constant_boolean_node (0, type);
12788 if (code == GE_EXPR
12789 && ((code0 == MINUS_EXPR && is_positive > 0)
12790 || (code0 == PLUS_EXPR && is_positive < 0)))
12792 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12793 fold_overflow_warning (("assuming signed overflow does "
12794 "not occur when assuming that "
12795 "(X - c) >= X is always false"),
12796 WARN_STRICT_OVERFLOW_ALL);
12797 return constant_boolean_node (0, type);
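/* All of the rewrites above lean on undefined signed overflow:
   with c > 0 and strict overflow semantics, "x + c > x" folds to
   true, whereas under -fwrapv TYPE_OVERFLOW_UNDEFINED is false and
   no folding happens, since x + c may wrap around below x.  */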
12802 /* Comparisons with the highest or lowest possible integer of
12803 the specified precision will have known values. */
12805 tree arg1_type = TREE_TYPE (arg1);
12806 unsigned int prec = TYPE_PRECISION (arg1_type);
12808 if (TREE_CODE (arg1) == INTEGER_CST
12809 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12811 wide_int max = wi::max_value (arg1_type);
12812 wide_int signed_max = wi::max_value (prec, SIGNED);
12813 wide_int min = wi::min_value (arg1_type);
12815 if (wi::eq_p (arg1, max))
12816 switch (code)
12818 case GT_EXPR:
12819 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12821 case GE_EXPR:
12822 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12824 case LE_EXPR:
12825 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12827 case LT_EXPR:
12828 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12830 /* The GE_EXPR and LT_EXPR cases above are not normally
12831 reached because of previous transformations. */
12833 default:
12834 break;
12836 else if (wi::eq_p (arg1, max - 1))
12837 switch (code)
12839 case GT_EXPR:
12840 arg1 = const_binop (PLUS_EXPR, arg1,
12841 build_int_cst (TREE_TYPE (arg1), 1));
12842 return fold_build2_loc (loc, EQ_EXPR, type,
12843 fold_convert_loc (loc,
12844 TREE_TYPE (arg1), arg0),
12845 arg1);
12846 case LE_EXPR:
12847 arg1 = const_binop (PLUS_EXPR, arg1,
12848 build_int_cst (TREE_TYPE (arg1), 1));
12849 return fold_build2_loc (loc, NE_EXPR, type,
12850 fold_convert_loc (loc, TREE_TYPE (arg1),
12851 arg0),
12852 arg1);
12853 default:
12854 break;
12856 else if (wi::eq_p (arg1, min))
12857 switch (code)
12859 case LT_EXPR:
12860 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12862 case LE_EXPR:
12863 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12865 case GE_EXPR:
12866 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12868 case GT_EXPR:
12869 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12871 default:
12872 break;
12874 else if (wi::eq_p (arg1, min + 1))
12875 switch (code)
12877 case GE_EXPR:
12878 arg1 = const_binop (MINUS_EXPR, arg1,
12879 build_int_cst (TREE_TYPE (arg1), 1));
12880 return fold_build2_loc (loc, NE_EXPR, type,
12881 fold_convert_loc (loc,
12882 TREE_TYPE (arg1), arg0),
12883 arg1);
12884 case LT_EXPR:
12885 arg1 = const_binop (MINUS_EXPR, arg1,
12886 build_int_cst (TREE_TYPE (arg1), 1));
12887 return fold_build2_loc (loc, EQ_EXPR, type,
12888 fold_convert_loc (loc, TREE_TYPE (arg1),
12889 arg0),
12890 arg1);
12891 default:
12892 break;
12895 else if (wi::eq_p (arg1, signed_max)
12896 && TYPE_UNSIGNED (arg1_type)
12897 /* We will flip the signedness of the comparison operator
12898 associated with the mode of arg1, so the sign bit is
12899 specified by this mode. Check that arg1 is the signed
12900 max associated with this sign bit. */
12901 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
12902 /* signed_type does not work on pointer types. */
12903 && INTEGRAL_TYPE_P (arg1_type))
12905 /* The following case also applies to X < signed_max+1
12906 and X >= signed_max+1 because of previous transformations. */
12907 if (code == LE_EXPR || code == GT_EXPR)
12909 tree st = signed_type_for (arg1_type);
12910 return fold_build2_loc (loc,
12911 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12912 type, fold_convert_loc (loc, st, arg0),
12913 build_int_cst (st, 0));
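/* Example for 32-bit unsigned x: "x > 0x7fffffff" becomes
   "(int) x < 0", turning the range test into a sign-bit test.  */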
12919 /* If we are comparing an ABS_EXPR with a constant, we can
12920 convert all the cases into explicit comparisons, but they may
12921 well not be faster than doing the ABS and one comparison.
12922 But ABS (X) <= C is a range comparison, which becomes a subtraction
12923 and a comparison, and is probably faster. */
12924 if (code == LE_EXPR
12925 && TREE_CODE (arg1) == INTEGER_CST
12926 && TREE_CODE (arg0) == ABS_EXPR
12927 && ! TREE_SIDE_EFFECTS (arg0)
12928 && (0 != (tem = negate_expr (arg1)))
12929 && TREE_CODE (tem) == INTEGER_CST
12930 && !TREE_OVERFLOW (tem))
12931 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12932 build2 (GE_EXPR, type,
12933 TREE_OPERAND (arg0, 0), tem),
12934 build2 (LE_EXPR, type,
12935 TREE_OPERAND (arg0, 0), arg1));
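/* E.g. "abs (x) <= 7" becomes "x >= -7 && x <= 7", one range
   check instead of an ABS plus a comparison.  */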
12937 /* Convert ABS_EXPR<x> >= 0 to true. */
12938 strict_overflow_p = false;
12939 if (code == GE_EXPR
12940 && (integer_zerop (arg1)
12941 || (! HONOR_NANS (element_mode (arg0))
12942 && real_zerop (arg1)))
12943 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12945 if (strict_overflow_p)
12946 fold_overflow_warning (("assuming signed overflow does not occur "
12947 "when simplifying comparison of "
12948 "absolute value and zero"),
12949 WARN_STRICT_OVERFLOW_CONDITIONAL);
12950 return omit_one_operand_loc (loc, type,
12951 constant_boolean_node (true, type),
12952 arg0);
12955 /* Convert ABS_EXPR<x> < 0 to false. */
12956 strict_overflow_p = false;
12957 if (code == LT_EXPR
12958 && (integer_zerop (arg1) || real_zerop (arg1))
12959 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12961 if (strict_overflow_p)
12962 fold_overflow_warning (("assuming signed overflow does not occur "
12963 "when simplifying comparison of "
12964 "absolute value and zero"),
12965 WARN_STRICT_OVERFLOW_CONDITIONAL);
12966 return omit_one_operand_loc (loc, type,
12967 constant_boolean_node (false, type),
12968 arg0);
12971 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12972 and similarly for >= into !=. */
12973 if ((code == LT_EXPR || code == GE_EXPR)
12974 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12975 && TREE_CODE (arg1) == LSHIFT_EXPR
12976 && integer_onep (TREE_OPERAND (arg1, 0)))
12977 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12978 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12979 TREE_OPERAND (arg1, 1)),
12980 build_zero_cst (TREE_TYPE (arg0)));
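/* For unsigned x, "x < (1 << y)" holds exactly when every bit of
   x at position y or above is clear, i.e. "(x >> y) == 0".  */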
12982 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12983 otherwise Y might be >= # of bits in X's type and thus e.g.
12984 (unsigned char) (1 << Y) for Y 15 might be 0.
12985 If the cast is widening, then 1 << Y should have unsigned type,
12986 otherwise if Y is number of bits in the signed shift type minus 1,
12987 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12988 31 might be 0xffffffff80000000. */
12989 if ((code == LT_EXPR || code == GE_EXPR)
12990 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12991 && CONVERT_EXPR_P (arg1)
12992 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12993 && (element_precision (TREE_TYPE (arg1))
12994 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12995 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12996 || (element_precision (TREE_TYPE (arg1))
12997 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12998 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13000 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13001 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13002 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13003 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13004 build_zero_cst (TREE_TYPE (arg0)));
13007 return NULL_TREE;
13009 case UNORDERED_EXPR:
13010 case ORDERED_EXPR:
13011 case UNLT_EXPR:
13012 case UNLE_EXPR:
13013 case UNGT_EXPR:
13014 case UNGE_EXPR:
13015 case UNEQ_EXPR:
13016 case LTGT_EXPR:
13017 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13019 t1 = fold_relational_const (code, type, arg0, arg1);
13020 if (t1 != NULL_TREE)
13021 return t1;
13024 /* If the first operand is NaN, the result is constant. */
13025 if (TREE_CODE (arg0) == REAL_CST
13026 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13027 && (code != LTGT_EXPR || ! flag_trapping_math))
13029 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13030 ? integer_zero_node
13031 : integer_one_node;
13032 return omit_one_operand_loc (loc, type, t1, arg1);
13035 /* If the second operand is NaN, the result is constant. */
13036 if (TREE_CODE (arg1) == REAL_CST
13037 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13038 && (code != LTGT_EXPR || ! flag_trapping_math))
13040 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13041 ? integer_zero_node
13042 : integer_one_node;
13043 return omit_one_operand_loc (loc, type, t1, arg0);
13046 /* Simplify unordered comparison of something with itself. */
13047 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13048 && operand_equal_p (arg0, arg1, 0))
13049 return constant_boolean_node (1, type);
13051 if (code == LTGT_EXPR
13052 && !flag_trapping_math
13053 && operand_equal_p (arg0, arg1, 0))
13054 return constant_boolean_node (0, type);
13056 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13058 tree targ0 = strip_float_extensions (arg0);
13059 tree targ1 = strip_float_extensions (arg1);
13060 tree newtype = TREE_TYPE (targ0);
13062 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13063 newtype = TREE_TYPE (targ1);
13065 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13066 return fold_build2_loc (loc, code, type,
13067 fold_convert_loc (loc, newtype, targ0),
13068 fold_convert_loc (loc, newtype, targ1));
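/* E.g. "(double) f1 < (double) f2" with float f1 and f2 compares
   directly in float: the widening conversion is exact and
   order-preserving, so the result is unchanged.  */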
13071 return NULL_TREE;
13073 case COMPOUND_EXPR:
13074 /* When pedantic, a compound expression can be neither an lvalue
13075 nor an integer constant expression. */
13076 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13077 return NULL_TREE;
13078 /* Don't let (0, 0) be a null pointer constant. */
13079 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13080 : fold_convert_loc (loc, type, arg1);
13081 return pedantic_non_lvalue_loc (loc, tem);
13083 case COMPLEX_EXPR:
13084 if ((TREE_CODE (arg0) == REAL_CST
13085 && TREE_CODE (arg1) == REAL_CST)
13086 || (TREE_CODE (arg0) == INTEGER_CST
13087 && TREE_CODE (arg1) == INTEGER_CST))
13088 return build_complex (type, arg0, arg1);
13089 return NULL_TREE;
13091 case ASSERT_EXPR:
13092 /* An ASSERT_EXPR should never be passed to fold_binary. */
13093 gcc_unreachable ();
13095 case VEC_PACK_TRUNC_EXPR:
13096 case VEC_PACK_FIX_TRUNC_EXPR:
13098 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13099 tree *elts;
13101 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13102 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13103 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13104 return NULL_TREE;
13106 elts = XALLOCAVEC (tree, nelts);
13107 if (!vec_cst_ctor_to_array (arg0, elts)
13108 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13109 return NULL_TREE;
13111 for (i = 0; i < nelts; i++)
13113 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13114 ? NOP_EXPR : FIX_TRUNC_EXPR,
13115 TREE_TYPE (type), elts[i]);
13116 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13117 return NULL_TREE;
13120 return build_vector (type, elts);
13123 case VEC_WIDEN_MULT_LO_EXPR:
13124 case VEC_WIDEN_MULT_HI_EXPR:
13125 case VEC_WIDEN_MULT_EVEN_EXPR:
13126 case VEC_WIDEN_MULT_ODD_EXPR:
13128 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13129 unsigned int out, ofs, scale;
13130 tree *elts;
13132 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13133 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13134 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13135 return NULL_TREE;
13137 elts = XALLOCAVEC (tree, nelts * 4);
13138 if (!vec_cst_ctor_to_array (arg0, elts)
13139 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13140 return NULL_TREE;
13142 if (code == VEC_WIDEN_MULT_LO_EXPR)
13143 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13144 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13145 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13146 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13147 scale = 1, ofs = 0;
13148 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13149 scale = 1, ofs = 1;
13151 for (out = 0; out < nelts; out++)
13153 unsigned int in1 = (out << scale) + ofs;
13154 unsigned int in2 = in1 + nelts * 2;
13155 tree t1, t2;
13157 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13158 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13160 if (t1 == NULL_TREE || t2 == NULL_TREE)
13161 return NULL_TREE;
13162 elts[out] = const_binop (MULT_EXPR, t1, t2);
13163 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13164 return NULL_TREE;
13167 return build_vector (type, elts);
13170 default:
13171 return NULL_TREE;
13172 } /* switch (code) */
13175 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13176 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13177 of GOTO_EXPR. */
13179 static tree
13180 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13182 switch (TREE_CODE (*tp))
13184 case LABEL_EXPR:
13185 return *tp;
13187 case GOTO_EXPR:
13188 *walk_subtrees = 0;
13190 /* ... fall through ... */
13192 default:
13193 return NULL_TREE;
13197 /* Return whether the sub-tree ST contains a label which is accessible from
13198 outside the sub-tree. */
13200 static bool
13201 contains_label_p (tree st)
13203 return
13204 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13207 /* Fold a ternary expression of code CODE and type TYPE with operands
13208 OP0, OP1, and OP2. Return the folded expression if folding is
13209 successful. Otherwise, return NULL_TREE. */
13211 tree
13212 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13213 tree op0, tree op1, tree op2)
13215 tree tem;
13216 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13217 enum tree_code_class kind = TREE_CODE_CLASS (code);
13219 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13220 && TREE_CODE_LENGTH (code) == 3);
13222 /* If this is a commutative operation, and OP0 is a constant, move it
13223 to OP1 to reduce the number of tests below. */
13224 if (commutative_ternary_tree_code (code)
13225 && tree_swap_operands_p (op0, op1, true))
13226 return fold_build3_loc (loc, code, type, op1, op0, op2);
13228 tem = generic_simplify (loc, code, type, op0, op1, op2);
13229 if (tem)
13230 return tem;
13232 /* Strip any conversions that don't change the mode. This is safe
13233 for every expression, except for a comparison expression because
13234 its signedness is derived from its operands. So, in the latter
13235 case, only strip conversions that don't change the signedness.
13237 Note that this is done as an internal manipulation within the
13238 constant folder, in order to find the simplest representation of
13239 the arguments so that their form can be studied. In any case,
13240 the appropriate type conversions should be put back in the tree
13241 that will get out of the constant folder. */
13242 if (op0)
13244 arg0 = op0;
13245 STRIP_NOPS (arg0);
13248 if (op1)
13250 arg1 = op1;
13251 STRIP_NOPS (arg1);
13254 if (op2)
13256 arg2 = op2;
13257 STRIP_NOPS (arg2);
13260 switch (code)
13262 case COMPONENT_REF:
13263 if (TREE_CODE (arg0) == CONSTRUCTOR
13264 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13266 unsigned HOST_WIDE_INT idx;
13267 tree field, value;
13268 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13269 if (field == arg1)
13270 return value;
13272 return NULL_TREE;
13274 case COND_EXPR:
13275 case VEC_COND_EXPR:
13276 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13277 so all simple results must be passed through pedantic_non_lvalue. */
13278 if (TREE_CODE (arg0) == INTEGER_CST)
13280 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13281 tem = integer_zerop (arg0) ? op2 : op1;
13282 /* Only optimize constant conditions when the selected branch
13283 has the same type as the COND_EXPR. This avoids optimizing
13284 away "c ? x : throw", where the throw has a void type.
13285 Avoid throwing away an operand that contains a label. */
13286 if ((!TREE_SIDE_EFFECTS (unused_op)
13287 || !contains_label_p (unused_op))
13288 && (! VOID_TYPE_P (TREE_TYPE (tem))
13289 || VOID_TYPE_P (type)))
13290 return pedantic_non_lvalue_loc (loc, tem);
13291 return NULL_TREE;
13293 else if (TREE_CODE (arg0) == VECTOR_CST)
13295 if ((TREE_CODE (arg1) == VECTOR_CST
13296 || TREE_CODE (arg1) == CONSTRUCTOR)
13297 && (TREE_CODE (arg2) == VECTOR_CST
13298 || TREE_CODE (arg2) == CONSTRUCTOR))
13300 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13301 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13302 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13303 for (i = 0; i < nelts; i++)
13305 tree val = VECTOR_CST_ELT (arg0, i);
13306 if (integer_all_onesp (val))
13307 sel[i] = i;
13308 else if (integer_zerop (val))
13309 sel[i] = nelts + i;
13310 else /* Currently unreachable. */
13311 return NULL_TREE;
13313 tree t = fold_vec_perm (type, arg1, arg2, sel);
13314 if (t != NULL_TREE)
13315 return t;
13319 /* If we have A op B ? A : C, we may be able to convert this to a
13320 simpler expression, depending on the operation and the values
13321 of B and C. Signed zeros prevent all of these transformations,
13322 for reasons given above each one.
13324 Also try swapping the arguments and inverting the conditional. */
13325 if (COMPARISON_CLASS_P (arg0)
13326 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13327 arg1, TREE_OPERAND (arg0, 1))
13328 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13330 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13331 if (tem)
13332 return tem;
13335 if (COMPARISON_CLASS_P (arg0)
13336 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13337 op2,
13338 TREE_OPERAND (arg0, 1))
13339 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13341 location_t loc0 = expr_location_or (arg0, loc);
13342 tem = fold_invert_truthvalue (loc0, arg0);
13343 if (tem && COMPARISON_CLASS_P (tem))
13345 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13346 if (tem)
13347 return tem;
13351 /* If the second operand is simpler than the third, swap them
13352 since that produces better jump optimization results. */
13353 if (truth_value_p (TREE_CODE (arg0))
13354 && tree_swap_operands_p (op1, op2, false))
13356 location_t loc0 = expr_location_or (arg0, loc);
13357 /* See if this can be inverted. If it can't, possibly because
13358 it was a floating-point inequality comparison, don't do
13359 anything. */
13360 tem = fold_invert_truthvalue (loc0, arg0);
13361 if (tem)
13362 return fold_build3_loc (loc, code, type, tem, op2, op1);
13365 /* Convert A ? 1 : 0 to simply A. */
13366 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13367 : (integer_onep (op1)
13368 && !VECTOR_TYPE_P (type)))
13369 && integer_zerop (op2)
13370 /* If we try to convert OP0 to our type, the
13371 call to fold will try to move the conversion inside
13372 a COND, which will recurse. In that case, the COND_EXPR
13373 is probably the best choice, so leave it alone. */
13374 && type == TREE_TYPE (arg0))
13375 return pedantic_non_lvalue_loc (loc, arg0);
13377 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13378 over COND_EXPR in cases such as floating point comparisons. */
13379 if (integer_zerop (op1)
13380 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13381 : (integer_onep (op2)
13382 && !VECTOR_TYPE_P (type)))
13383 && truth_value_p (TREE_CODE (arg0)))
13384 return pedantic_non_lvalue_loc (loc,
13385 fold_convert_loc (loc, type,
13386 invert_truthvalue_loc (loc,
13387 arg0)));
13389 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13390 if (TREE_CODE (arg0) == LT_EXPR
13391 && integer_zerop (TREE_OPERAND (arg0, 1))
13392 && integer_zerop (op2)
13393 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13395 /* sign_bit_p looks through both zero and sign extensions,
13396 but for this optimization only sign extensions are
13397 usable. */
13398 tree tem2 = TREE_OPERAND (arg0, 0);
13399 while (tem != tem2)
13401 if (TREE_CODE (tem2) != NOP_EXPR
13402 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13404 tem = NULL_TREE;
13405 break;
13407 tem2 = TREE_OPERAND (tem2, 0);
13409 /* sign_bit_p only checks ARG1 bits within A's precision.
13410 If <sign bit of A> has wider type than A, bits outside
13411 of A's precision in <sign bit of A> need to be checked.
13412 If they are all 0, this optimization needs to be done
13413 in unsigned A's type; if they are all 1, in signed A's type;
13414 otherwise this can't be done. */
13415 if (tem
13416 && TYPE_PRECISION (TREE_TYPE (tem))
13417 < TYPE_PRECISION (TREE_TYPE (arg1))
13418 && TYPE_PRECISION (TREE_TYPE (tem))
13419 < TYPE_PRECISION (type))
13421 int inner_width, outer_width;
13422 tree tem_type;
13424 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13425 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13426 if (outer_width > TYPE_PRECISION (type))
13427 outer_width = TYPE_PRECISION (type);
13429 wide_int mask = wi::shifted_mask
13430 (inner_width, outer_width - inner_width, false,
13431 TYPE_PRECISION (TREE_TYPE (arg1)));
13433 wide_int common = mask & arg1;
13434 if (common == mask)
13436 tem_type = signed_type_for (TREE_TYPE (tem));
13437 tem = fold_convert_loc (loc, tem_type, tem);
13439 else if (common == 0)
13441 tem_type = unsigned_type_for (TREE_TYPE (tem));
13442 tem = fold_convert_loc (loc, tem_type, tem);
13444 else
13445 tem = NULL;
13448 if (tem)
13449 return
13450 fold_convert_loc (loc, type,
13451 fold_build2_loc (loc, BIT_AND_EXPR,
13452 TREE_TYPE (tem), tem,
13453 fold_convert_loc (loc,
13454 TREE_TYPE (tem),
13455 arg1)));
13458 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13459 already handled above. */
13460 if (TREE_CODE (arg0) == BIT_AND_EXPR
13461 && integer_onep (TREE_OPERAND (arg0, 1))
13462 && integer_zerop (op2)
13463 && integer_pow2p (arg1))
13465 tree tem = TREE_OPERAND (arg0, 0);
13466 STRIP_NOPS (tem);
13467 if (TREE_CODE (tem) == RSHIFT_EXPR
13468 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13469 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13470 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13471 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13472 TREE_OPERAND (tem, 0), arg1);
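/* E.g. "((x >> 3) & 1) ? 8 : 0" becomes "x & 8", valid because the
   tested bit position equals log2 of the selected constant.  */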
13475 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13476 is probably obsolete because the first operand should be a
13477 truth value (that's why we have the two cases above), but let's
13478 leave it in until we can confirm this for all front-ends. */
13479 if (integer_zerop (op2)
13480 && TREE_CODE (arg0) == NE_EXPR
13481 && integer_zerop (TREE_OPERAND (arg0, 1))
13482 && integer_pow2p (arg1)
13483 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13484 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13485 arg1, OEP_ONLY_CONST))
13486 return pedantic_non_lvalue_loc (loc,
13487 fold_convert_loc (loc, type,
13488 TREE_OPERAND (arg0, 0)));
13490 /* Disable the transformations below for vectors, since
13491 fold_binary_op_with_conditional_arg may undo them immediately,
13492 yielding an infinite loop. */
13493 if (code == VEC_COND_EXPR)
13494 return NULL_TREE;
13496 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13497 if (integer_zerop (op2)
13498 && truth_value_p (TREE_CODE (arg0))
13499 && truth_value_p (TREE_CODE (arg1))
13500 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13501 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13502 : TRUTH_ANDIF_EXPR,
13503 type, fold_convert_loc (loc, type, arg0), arg1);
13505 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13506 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13507 && truth_value_p (TREE_CODE (arg0))
13508 && truth_value_p (TREE_CODE (arg1))
13509 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13511 location_t loc0 = expr_location_or (arg0, loc);
13512 /* Only perform transformation if ARG0 is easily inverted. */
13513 tem = fold_invert_truthvalue (loc0, arg0);
13514 if (tem)
13515 return fold_build2_loc (loc, code == VEC_COND_EXPR
13516 ? BIT_IOR_EXPR
13517 : TRUTH_ORIF_EXPR,
13518 type, fold_convert_loc (loc, type, tem),
13519 arg1);
13522 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13523 if (integer_zerop (arg1)
13524 && truth_value_p (TREE_CODE (arg0))
13525 && truth_value_p (TREE_CODE (op2))
13526 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13528 location_t loc0 = expr_location_or (arg0, loc);
13529 /* Only perform transformation if ARG0 is easily inverted. */
13530 tem = fold_invert_truthvalue (loc0, arg0);
13531 if (tem)
13532 return fold_build2_loc (loc, code == VEC_COND_EXPR
13533 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13534 type, fold_convert_loc (loc, type, tem),
13535 op2);
13538 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13539 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13540 && truth_value_p (TREE_CODE (arg0))
13541 && truth_value_p (TREE_CODE (op2))
13542 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13543 return fold_build2_loc (loc, code == VEC_COND_EXPR
13544 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13545 type, fold_convert_loc (loc, type, arg0), op2);
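/* Taken together, these rewrites turn conditionals over truth
   values into plain logic: "a ? b : 0" becomes "a && b",
   "a ? b : 1" becomes "!a || b", "a ? 0 : b" becomes "!a && b" and
   "a ? 1 : b" becomes "a || b" (BIT_AND/BIT_IOR for vectors).  */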
13547 return NULL_TREE;
13549 case CALL_EXPR:
13550 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13551 of fold_ternary on them. */
13552 gcc_unreachable ();
13554 case BIT_FIELD_REF:
13555 if ((TREE_CODE (arg0) == VECTOR_CST
13556 || (TREE_CODE (arg0) == CONSTRUCTOR
13557 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13558 && (type == TREE_TYPE (TREE_TYPE (arg0))
13559 || (TREE_CODE (type) == VECTOR_TYPE
13560 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13562 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13563 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13564 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13565 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13567 if (n != 0
13568 && (idx % width) == 0
13569 && (n % width) == 0
13570 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13572 idx = idx / width;
13573 n = n / width;
13575 if (TREE_CODE (arg0) == VECTOR_CST)
13577 if (n == 1)
13578 return VECTOR_CST_ELT (arg0, idx);
13580 tree *vals = XALLOCAVEC (tree, n);
13581 for (unsigned i = 0; i < n; ++i)
13582 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13583 return build_vector (type, vals);
13586 /* Constructor elements can be subvectors. */
13587 unsigned HOST_WIDE_INT k = 1;
13588 if (CONSTRUCTOR_NELTS (arg0) != 0)
13590 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13591 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13592 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13595 /* We keep an exact subset of the constructor elements. */
13596 if ((idx % k) == 0 && (n % k) == 0)
13598 if (CONSTRUCTOR_NELTS (arg0) == 0)
13599 return build_constructor (type, NULL);
13600 idx /= k;
13601 n /= k;
13602 if (n == 1)
13604 if (idx < CONSTRUCTOR_NELTS (arg0))
13605 return CONSTRUCTOR_ELT (arg0, idx)->value;
13606 return build_zero_cst (type);
13609 vec<constructor_elt, va_gc> *vals;
13610 vec_alloc (vals, n);
13611 for (unsigned i = 0;
13612 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13613 ++i)
13614 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13615 CONSTRUCTOR_ELT
13616 (arg0, idx + i)->value);
13617 return build_constructor (type, vals);
13619 /* The bitfield references a single constructor element. */
13620 else if (idx + n <= (idx / k + 1) * k)
13622 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13623 return build_zero_cst (type);
13624 else if (n == k)
13625 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13626 else
13627 return fold_build3_loc (loc, code, type,
13628 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13629 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13634 /* A bit-field-ref that referenced the full argument can be stripped. */
13635 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13636 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13637 && integer_zerop (op2))
13638 return fold_convert_loc (loc, type, arg0);
13640 /* On constants we can use native encode/interpret to constant
13641 fold (nearly) all BIT_FIELD_REFs. */
13642 if (CONSTANT_CLASS_P (arg0)
13643 && can_native_interpret_type_p (type)
13644 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13645 /* This limitation should not be necessary; we just need to
13646 round this up to mode size. */
13647 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13648 /* Need bit-shifting of the buffer to relax the following. */
13649 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13651 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13652 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13653 unsigned HOST_WIDE_INT clen;
13654 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13655 /* ??? We cannot tell native_encode_expr to start at
13656 some random byte only. So limit ourselves to a reasonable
13657 amount of work. */
13658 if (clen <= 4096)
13660 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13661 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13662 if (len > 0
13663 && len * BITS_PER_UNIT >= bitpos + bitsize)
13665 tree v = native_interpret_expr (type,
13666 b + bitpos / BITS_PER_UNIT,
13667 bitsize / BITS_PER_UNIT);
13668 if (v)
13669 return v;
13674 return NULL_TREE;
13676 case FMA_EXPR:
13677 /* For integers we can decompose the FMA if possible. */
13678 if (TREE_CODE (arg0) == INTEGER_CST
13679 && TREE_CODE (arg1) == INTEGER_CST)
13680 return fold_build2_loc (loc, PLUS_EXPR, type,
13681 const_binop (MULT_EXPR, arg0, arg1), arg2);
13682 if (integer_zerop (arg2))
13683 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
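/* E.g. an integer FMA of the constants 2 and 3 with addend a folds
   to a + 6; with a zero addend only the multiplication remains.  */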
13685 return fold_fma (loc, type, arg0, arg1, arg2);
13687 case VEC_PERM_EXPR:
13688 if (TREE_CODE (arg2) == VECTOR_CST)
13690 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13691 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13692 unsigned char *sel2 = sel + nelts;
13693 bool need_mask_canon = false;
13694 bool need_mask_canon2 = false;
13695 bool all_in_vec0 = true;
13696 bool all_in_vec1 = true;
13697 bool maybe_identity = true;
13698 bool single_arg = (op0 == op1);
13699 bool changed = false;
13701 mask2 = 2 * nelts - 1;
13702 mask = single_arg ? (nelts - 1) : mask2;
13703 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13704 for (i = 0; i < nelts; i++)
13706 tree val = VECTOR_CST_ELT (arg2, i);
13707 if (TREE_CODE (val) != INTEGER_CST)
13708 return NULL_TREE;
13710 /* Make sure that the perm value is in an acceptable
13711 range. */
13712 wide_int t = val;
13713 need_mask_canon |= wi::gtu_p (t, mask);
13714 need_mask_canon2 |= wi::gtu_p (t, mask2);
13715 sel[i] = t.to_uhwi () & mask;
13716 sel2[i] = t.to_uhwi () & mask2;
13718 if (sel[i] < nelts)
13719 all_in_vec1 = false;
13720 else
13721 all_in_vec0 = false;
13723 if ((sel[i] & (nelts-1)) != i)
13724 maybe_identity = false;
13727 if (maybe_identity)
13729 if (all_in_vec0)
13730 return op0;
13731 if (all_in_vec1)
13732 return op1;
13735 if (all_in_vec0)
13736 op1 = op0;
13737 else if (all_in_vec1)
13739 op0 = op1;
13740 for (i = 0; i < nelts; i++)
13741 sel[i] -= nelts;
13742 need_mask_canon = true;
13745 if ((TREE_CODE (op0) == VECTOR_CST
13746 || TREE_CODE (op0) == CONSTRUCTOR)
13747 && (TREE_CODE (op1) == VECTOR_CST
13748 || TREE_CODE (op1) == CONSTRUCTOR))
13750 tree t = fold_vec_perm (type, op0, op1, sel);
13751 if (t != NULL_TREE)
13752 return t;
13755 if (op0 == op1 && !single_arg)
13756 changed = true;
13758 /* Some targets are deficient and fail to expand a single
13759 argument permutation while still allowing an equivalent
13760 2-argument version. */
13761 if (need_mask_canon && arg2 == op2
13762 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13763 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13765 need_mask_canon = need_mask_canon2;
13766 sel = sel2;
13769 if (need_mask_canon && arg2 == op2)
13771 tree *tsel = XALLOCAVEC (tree, nelts);
13772 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13773 for (i = 0; i < nelts; i++)
13774 tsel[i] = build_int_cst (eltype, sel[i]);
13775 op2 = build_vector (TREE_TYPE (arg2), tsel);
13776 changed = true;
13779 if (changed)
13780 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13782 return NULL_TREE;
13784 default:
13785 return NULL_TREE;
13786 } /* switch (code) */
13789 /* Perform constant folding and related simplification of EXPR.
13790 The related simplifications include x*1 => x, x*0 => 0, etc.,
13791 and application of the associative law.
13792 NOP_EXPR conversions may be removed freely (as long as we
13793 are careful not to change the type of the overall expression).
13794 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13795 but we can constant-fold them if they have constant operands. */
13797 #ifdef ENABLE_FOLD_CHECKING
13798 # define fold(x) fold_1 (x)
13799 static tree fold_1 (tree);
13800 static
13801 #endif
13802 tree
13803 fold (tree expr)
13805 const tree t = expr;
13806 enum tree_code code = TREE_CODE (t);
13807 enum tree_code_class kind = TREE_CODE_CLASS (code);
13808 tree tem;
13809 location_t loc = EXPR_LOCATION (expr);
13811 /* Return right away if a constant. */
13812 if (kind == tcc_constant)
13813 return t;
13815 /* CALL_EXPR-like objects with variable numbers of operands are
13816 treated specially. */
13817 if (kind == tcc_vl_exp)
13819 if (code == CALL_EXPR)
13821 tem = fold_call_expr (loc, expr, false);
13822 return tem ? tem : expr;
13824 return expr;
13827 if (IS_EXPR_CODE_CLASS (kind))
13829 tree type = TREE_TYPE (t);
13830 tree op0, op1, op2;
13832 switch (TREE_CODE_LENGTH (code))
13834 case 1:
13835 op0 = TREE_OPERAND (t, 0);
13836 tem = fold_unary_loc (loc, code, type, op0);
13837 return tem ? tem : expr;
13838 case 2:
13839 op0 = TREE_OPERAND (t, 0);
13840 op1 = TREE_OPERAND (t, 1);
13841 tem = fold_binary_loc (loc, code, type, op0, op1);
13842 return tem ? tem : expr;
13843 case 3:
13844 op0 = TREE_OPERAND (t, 0);
13845 op1 = TREE_OPERAND (t, 1);
13846 op2 = TREE_OPERAND (t, 2);
13847 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13848 return tem ? tem : expr;
13849 default:
13850 break;
13854 switch (code)
13856 case ARRAY_REF:
13858 tree op0 = TREE_OPERAND (t, 0);
13859 tree op1 = TREE_OPERAND (t, 1);
13861 if (TREE_CODE (op1) == INTEGER_CST
13862 && TREE_CODE (op0) == CONSTRUCTOR
13863 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13865 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13866 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13867 unsigned HOST_WIDE_INT begin = 0;
13869 /* Find a matching index by means of a binary search. */
13870 while (begin != end)
13872 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13873 tree index = (*elts)[middle].index;
13875 if (TREE_CODE (index) == INTEGER_CST
13876 && tree_int_cst_lt (index, op1))
13877 begin = middle + 1;
13878 else if (TREE_CODE (index) == INTEGER_CST
13879 && tree_int_cst_lt (op1, index))
13880 end = middle;
13881 else if (TREE_CODE (index) == RANGE_EXPR
13882 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13883 begin = middle + 1;
13884 else if (TREE_CODE (index) == RANGE_EXPR
13885 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13886 end = middle;
13887 else
13888 return (*elts)[middle].value;
13892 return t;
13895 /* Return a VECTOR_CST if possible. */
13896 case CONSTRUCTOR:
13898 tree type = TREE_TYPE (t);
13899 if (TREE_CODE (type) != VECTOR_TYPE)
13900 return t;
13902 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13903 unsigned HOST_WIDE_INT idx, pos = 0;
13904 tree value;
13906 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13908 if (!CONSTANT_CLASS_P (value))
13909 return t;
13910 if (TREE_CODE (value) == VECTOR_CST)
13912 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13913 vec[pos++] = VECTOR_CST_ELT (value, i);
13915 else
13916 vec[pos++] = value;
13918 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13919 vec[pos] = build_zero_cst (TREE_TYPE (type));
13921 return build_vector (type, vec);
13924 case CONST_DECL:
13925 return fold (DECL_INITIAL (t));
13927 default:
13928 return t;
13929 } /* switch (code) */
13932 #ifdef ENABLE_FOLD_CHECKING
13933 #undef fold
13935 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13936 hash_table<pointer_hash<const tree_node> > *);
13937 static void fold_check_failed (const_tree, const_tree);
13938 void print_fold_checksum (const_tree);
13940 /* When --enable-checking=fold, compute a digest of expr before
13941 and after the actual fold call to verify that fold did not
13942 accidentally change the original expr. */
13944 tree
13945 fold (tree expr)
13947 tree ret;
13948 struct md5_ctx ctx;
13949 unsigned char checksum_before[16], checksum_after[16];
13950 hash_table<pointer_hash<const tree_node> > ht (32);
13952 md5_init_ctx (&ctx);
13953 fold_checksum_tree (expr, &ctx, &ht);
13954 md5_finish_ctx (&ctx, checksum_before);
13955 ht.empty ();
13957 ret = fold_1 (expr);
13959 md5_init_ctx (&ctx);
13960 fold_checksum_tree (expr, &ctx, &ht);
13961 md5_finish_ctx (&ctx, checksum_after);
13963 if (memcmp (checksum_before, checksum_after, 16))
13964 fold_check_failed (expr, ret);
13966 return ret;
13969 void
13970 print_fold_checksum (const_tree expr)
13972 struct md5_ctx ctx;
13973 unsigned char checksum[16], cnt;
13974 hash_table<pointer_hash<const tree_node> > ht (32);
13976 md5_init_ctx (&ctx);
13977 fold_checksum_tree (expr, &ctx, &ht);
13978 md5_finish_ctx (&ctx, checksum);
13979 for (cnt = 0; cnt < 16; ++cnt)
13980 fprintf (stderr, "%02x", checksum[cnt]);
13981 putc ('\n', stderr);
13984 static void
13985 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13987 internal_error ("fold check: original tree changed by fold");
13990 static void
13991 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13992 hash_table<pointer_hash <const tree_node> > *ht)
13994 const tree_node **slot;
13995 enum tree_code code;
13996 union tree_node buf;
13997 int i, len;
13999 recursive_label:
14000 if (expr == NULL)
14001 return;
14002 slot = ht->find_slot (expr, INSERT);
14003 if (*slot != NULL)
14004 return;
14005 *slot = expr;
14006 code = TREE_CODE (expr);
14007 if (TREE_CODE_CLASS (code) == tcc_declaration
14008 && DECL_ASSEMBLER_NAME_SET_P (expr))
14010 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14011 memcpy ((char *) &buf, expr, tree_size (expr));
14012 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14013 expr = (tree) &buf;
14015 else if (TREE_CODE_CLASS (code) == tcc_type
14016 && (TYPE_POINTER_TO (expr)
14017 || TYPE_REFERENCE_TO (expr)
14018 || TYPE_CACHED_VALUES_P (expr)
14019 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14020 || TYPE_NEXT_VARIANT (expr)))
14022 /* Allow these fields to be modified. */
14023 tree tmp;
14024 memcpy ((char *) &buf, expr, tree_size (expr));
14025 expr = tmp = (tree) &buf;
14026 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14027 TYPE_POINTER_TO (tmp) = NULL;
14028 TYPE_REFERENCE_TO (tmp) = NULL;
14029 TYPE_NEXT_VARIANT (tmp) = NULL;
14030 if (TYPE_CACHED_VALUES_P (tmp))
14032 TYPE_CACHED_VALUES_P (tmp) = 0;
14033 TYPE_CACHED_VALUES (tmp) = NULL;
14036 md5_process_bytes (expr, tree_size (expr), ctx);
14037 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14038 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14039 if (TREE_CODE_CLASS (code) != tcc_type
14040 && TREE_CODE_CLASS (code) != tcc_declaration
14041 && code != TREE_LIST
14042 && code != SSA_NAME
14043 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14044 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14045 switch (TREE_CODE_CLASS (code))
14047 case tcc_constant:
14048 switch (code)
14050 case STRING_CST:
14051 md5_process_bytes (TREE_STRING_POINTER (expr),
14052 TREE_STRING_LENGTH (expr), ctx);
14053 break;
14054 case COMPLEX_CST:
14055 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14056 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14057 break;
14058 case VECTOR_CST:
14059 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14060 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14061 break;
14062 default:
14063 break;
14065 break;
14066 case tcc_exceptional:
14067 switch (code)
14069 case TREE_LIST:
14070 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14071 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14072 expr = TREE_CHAIN (expr);
14073 goto recursive_label;
14074 break;
14075 case TREE_VEC:
14076 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14077 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14078 break;
14079 default:
14080 break;
14082 break;
14083 case tcc_expression:
14084 case tcc_reference:
14085 case tcc_comparison:
14086 case tcc_unary:
14087 case tcc_binary:
14088 case tcc_statement:
14089 case tcc_vl_exp:
14090 len = TREE_OPERAND_LENGTH (expr);
14091 for (i = 0; i < len; ++i)
14092 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14093 break;
14094 case tcc_declaration:
14095 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14096 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14097 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14099 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14100 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14101 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14102 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14103 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14106 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14108 if (TREE_CODE (expr) == FUNCTION_DECL)
14110 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14111 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14113 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14115 break;
14116 case tcc_type:
14117 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14118 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14119 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14120 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14121 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14122 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14123 if (INTEGRAL_TYPE_P (expr)
14124 || SCALAR_FLOAT_TYPE_P (expr))
14126 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14127 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14129 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14130 if (TREE_CODE (expr) == RECORD_TYPE
14131 || TREE_CODE (expr) == UNION_TYPE
14132 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14133 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14134 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14135 break;
14136 default:
14137 break;
14141 /* Helper function for outputting the checksum of a tree T. When
14142 debugging with gdb, you can "define mynext" to be "next" followed
14143 by "call debug_fold_checksum (op0)", then just trace down till the
14144 outputs differ. */
14146 DEBUG_FUNCTION void
14147 debug_fold_checksum (const_tree t)
14149 int i;
14150 unsigned char checksum[16];
14151 struct md5_ctx ctx;
14152 hash_table<pointer_hash<const tree_node> > ht (32);
14154 md5_init_ctx (&ctx);
14155 fold_checksum_tree (t, &ctx, &ht);
14156 md5_finish_ctx (&ctx, checksum);
14157 ht.empty ();
14159 for (i = 0; i < 16; i++)
14160 fprintf (stderr, "%d ", checksum[i]);
14162 fprintf (stderr, "\n");
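
/* For instance, an illustrative gdb fragment based on the recipe in the
   comment above (the name "op0" stands for whatever local you want to
   watch; any tree variable works):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext

   Repeating "mynext" prints the checksum after each statement, so the
   first step whose output changes is the one that modified OP0.  */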
14165 #endif
14167 /* Fold a unary tree expression with code CODE of type TYPE with an
14168 operand OP0. LOC is the location of the resulting expression.
14169 Return a folded expression if successful. Otherwise, return a tree
14170 expression with code CODE of type TYPE with an operand OP0. */
14172 tree
14173 fold_build1_stat_loc (location_t loc,
14174 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14176 tree tem;
14177 #ifdef ENABLE_FOLD_CHECKING
14178 unsigned char checksum_before[16], checksum_after[16];
14179 struct md5_ctx ctx;
14180 hash_table<pointer_hash<const tree_node> > ht (32);
14182 md5_init_ctx (&ctx);
14183 fold_checksum_tree (op0, &ctx, &ht);
14184 md5_finish_ctx (&ctx, checksum_before);
14185 ht.empty ();
14186 #endif
14188 tem = fold_unary_loc (loc, code, type, op0);
14189 if (!tem)
14190 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14192 #ifdef ENABLE_FOLD_CHECKING
14193 md5_init_ctx (&ctx);
14194 fold_checksum_tree (op0, &ctx, &ht);
14195 md5_finish_ctx (&ctx, checksum_after);
14197 if (memcmp (checksum_before, checksum_after, 16))
14198 fold_check_failed (op0, tem);
14199 #endif
14200 return tem;
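
/* An illustrative sketch (not part of the original source): folding a
   unary expression over a constant operand collapses to a constant, so
   no NEGATE_EXPR node is built here.  */

static tree ATTRIBUTE_UNUSED
fold_build1_example (void)
{
  /* Folds to the INTEGER_CST -7 rather than NEGATE_EXPR <7>.  */
  return fold_build1 (NEGATE_EXPR, integer_type_node,
                      build_int_cst (integer_type_node, 7));
}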
14203 /* Fold a binary tree expression with code CODE of type TYPE with
14204 operands OP0 and OP1. LOC is the location of the resulting
14205 expression. Return a folded expression if successful. Otherwise,
14206 return a tree expression with code CODE of type TYPE with operands
14207 OP0 and OP1. */
14209 tree
14210 fold_build2_stat_loc (location_t loc,
14211 enum tree_code code, tree type, tree op0, tree op1
14212 MEM_STAT_DECL)
14214 tree tem;
14215 #ifdef ENABLE_FOLD_CHECKING
14216 unsigned char checksum_before_op0[16],
14217 checksum_before_op1[16],
14218 checksum_after_op0[16],
14219 checksum_after_op1[16];
14220 struct md5_ctx ctx;
14221 hash_table<pointer_hash<const tree_node> > ht (32);
14223 md5_init_ctx (&ctx);
14224 fold_checksum_tree (op0, &ctx, &ht);
14225 md5_finish_ctx (&ctx, checksum_before_op0);
14226 ht.empty ();
14228 md5_init_ctx (&ctx);
14229 fold_checksum_tree (op1, &ctx, &ht);
14230 md5_finish_ctx (&ctx, checksum_before_op1);
14231 ht.empty ();
14232 #endif
14234 tem = fold_binary_loc (loc, code, type, op0, op1);
14235 if (!tem)
14236 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14238 #ifdef ENABLE_FOLD_CHECKING
14239 md5_init_ctx (&ctx);
14240 fold_checksum_tree (op0, &ctx, &ht);
14241 md5_finish_ctx (&ctx, checksum_after_op0);
14242 ht.empty ();
14244 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14245 fold_check_failed (op0, tem);
14247 md5_init_ctx (&ctx);
14248 fold_checksum_tree (op1, &ctx, &ht);
14249 md5_finish_ctx (&ctx, checksum_after_op1);
14251 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14252 fold_check_failed (op1, tem);
14253 #endif
14254 return tem;
14257 /* Fold a ternary tree expression with code CODE of type TYPE with
14258 operands OP0, OP1, and OP2. Return a folded expression if
14259 successful. Otherwise, return a tree expression with code CODE of
14260 type TYPE with operands OP0, OP1, and OP2. */
14262 tree
14263 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14264 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14266 tree tem;
14267 #ifdef ENABLE_FOLD_CHECKING
14268 unsigned char checksum_before_op0[16],
14269 checksum_before_op1[16],
14270 checksum_before_op2[16],
14271 checksum_after_op0[16],
14272 checksum_after_op1[16],
14273 checksum_after_op2[16];
14274 struct md5_ctx ctx;
14275 hash_table<pointer_hash<const tree_node> > ht (32);
14277 md5_init_ctx (&ctx);
14278 fold_checksum_tree (op0, &ctx, &ht);
14279 md5_finish_ctx (&ctx, checksum_before_op0);
14280 ht.empty ();
14282 md5_init_ctx (&ctx);
14283 fold_checksum_tree (op1, &ctx, &ht);
14284 md5_finish_ctx (&ctx, checksum_before_op1);
14285 ht.empty ();
14287 md5_init_ctx (&ctx);
14288 fold_checksum_tree (op2, &ctx, &ht);
14289 md5_finish_ctx (&ctx, checksum_before_op2);
14290 ht.empty ();
14291 #endif
14293 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14294 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14295 if (!tem)
14296 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14298 #ifdef ENABLE_FOLD_CHECKING
14299 md5_init_ctx (&ctx);
14300 fold_checksum_tree (op0, &ctx, &ht);
14301 md5_finish_ctx (&ctx, checksum_after_op0);
14302 ht.empty ();
14304 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14305 fold_check_failed (op0, tem);
14307 md5_init_ctx (&ctx);
14308 fold_checksum_tree (op1, &ctx, &ht);
14309 md5_finish_ctx (&ctx, checksum_after_op1);
14310 ht.empty ();
14312 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14313 fold_check_failed (op1, tem);
14315 md5_init_ctx (&ctx);
14316 fold_checksum_tree (op2, &ctx, &ht);
14317 md5_finish_ctx (&ctx, checksum_after_op2);
14319 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14320 fold_check_failed (op2, tem);
14321 #endif
14322 return tem;
14325 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
14326 arguments in ARGARRAY, and a null static chain.
14327 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14328 of type TYPE from the given operands as constructed by build_call_array. */
14330 tree
14331 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14332 int nargs, tree *argarray)
14334 tree tem;
14335 #ifdef ENABLE_FOLD_CHECKING
14336 unsigned char checksum_before_fn[16],
14337 checksum_before_arglist[16],
14338 checksum_after_fn[16],
14339 checksum_after_arglist[16];
14340 struct md5_ctx ctx;
14341 hash_table<pointer_hash<const tree_node> > ht (32);
14342 int i;
14344 md5_init_ctx (&ctx);
14345 fold_checksum_tree (fn, &ctx, &ht);
14346 md5_finish_ctx (&ctx, checksum_before_fn);
14347 ht.empty ();
14349 md5_init_ctx (&ctx);
14350 for (i = 0; i < nargs; i++)
14351 fold_checksum_tree (argarray[i], &ctx, &ht);
14352 md5_finish_ctx (&ctx, checksum_before_arglist);
14353 ht.empty ();
14354 #endif
14356 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14358 #ifdef ENABLE_FOLD_CHECKING
14359 md5_init_ctx (&ctx);
14360 fold_checksum_tree (fn, &ctx, &ht);
14361 md5_finish_ctx (&ctx, checksum_after_fn);
14362 ht.empty ();
14364 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14365 fold_check_failed (fn, tem);
14367 md5_init_ctx (&ctx);
14368 for (i = 0; i < nargs; i++)
14369 fold_checksum_tree (argarray[i], &ctx, &ht);
14370 md5_finish_ctx (&ctx, checksum_after_arglist);
14372 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14373 fold_check_failed (NULL_TREE, tem);
14374 #endif
14375 return tem;
14378 /* Perform constant folding and related simplification of initializer
14379 expression EXPR. These behave identically to "fold_buildN" but ignore
14380 potential run-time traps and exceptions that fold must preserve. */
14382 #define START_FOLD_INIT \
14383 int saved_signaling_nans = flag_signaling_nans;\
14384 int saved_trapping_math = flag_trapping_math;\
14385 int saved_rounding_math = flag_rounding_math;\
14386 int saved_trapv = flag_trapv;\
14387 int saved_folding_initializer = folding_initializer;\
14388 flag_signaling_nans = 0;\
14389 flag_trapping_math = 0;\
14390 flag_rounding_math = 0;\
14391 flag_trapv = 0;\
14392 folding_initializer = 1;
14394 #define END_FOLD_INIT \
14395 flag_signaling_nans = saved_signaling_nans;\
14396 flag_trapping_math = saved_trapping_math;\
14397 flag_rounding_math = saved_rounding_math;\
14398 flag_trapv = saved_trapv;\
14399 folding_initializer = saved_folding_initializer;
14401 tree
14402 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14403 tree type, tree op)
14405 tree result;
14406 START_FOLD_INIT;
14408 result = fold_build1_loc (loc, code, type, op);
14410 END_FOLD_INIT;
14411 return result;
14414 tree
14415 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14416 tree type, tree op0, tree op1)
14418 tree result;
14419 START_FOLD_INIT;
14421 result = fold_build2_loc (loc, code, type, op0, op1);
14423 END_FOLD_INIT;
14424 return result;
14427 tree
14428 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14429 int nargs, tree *argarray)
14431 tree result;
14432 START_FOLD_INIT;
14434 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14436 END_FOLD_INIT;
14437 return result;
14440 #undef START_FOLD_INIT
14441 #undef END_FOLD_INIT
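
/* A minimal sketch of the difference this makes (illustrative only):
   with -frounding-math in effect, plain fold_build2 must keep 1.0/3.0
   as a run-time division, while the initializer variant folds it to a
   REAL_CST because initializers are evaluated at compile time anyway.  */

static tree ATTRIBUTE_UNUSED
fold_initializer_example (void)
{
  REAL_VALUE_TYPE r3;
  real_from_integer (&r3, VOIDmode, 3, SIGNED);
  tree one = build_real (double_type_node, dconst1);
  tree three = build_real (double_type_node, r3);
  return fold_build2_initializer_loc (UNKNOWN_LOCATION, RDIV_EXPR,
                                      double_type_node, one, three);
}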
14443 /* Determine whether the first argument is a multiple of the second.
14444 Return 0 if it is not, or if we cannot easily determine it to be.
14446 An example of the sort of thing we care about (at this point; this routine
14447 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14448 fold cases do now) is discovering that
14450 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14452 is a multiple of
14454 SAVE_EXPR (J * 8)
14456 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14458 This code also handles discovering that
14460 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14462 is a multiple of 8 so we don't have to worry about dealing with a
14463 possible remainder.
14465 Note that we *look* inside a SAVE_EXPR only to determine how it was
14466 calculated; it is not safe for fold to do much of anything else with the
14467 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14468 at run time. For example, the latter example above *cannot* be implemented
14469 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14470 evaluation time of the original SAVE_EXPR is not necessarily the same at
14471 the time the new expression is evaluated. The only optimization of this
14472 sort that would be valid is changing
14474 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14476 divided by 8 to
14478 SAVE_EXPR (I) * SAVE_EXPR (J)
14480 (where the same SAVE_EXPR (J) is used in the original and the
14481 transformed version). */
14483 int
14484 multiple_of_p (tree type, const_tree top, const_tree bottom)
14486 if (operand_equal_p (top, bottom, 0))
14487 return 1;
14489 if (TREE_CODE (type) != INTEGER_TYPE)
14490 return 0;
14492 switch (TREE_CODE (top))
14494 case BIT_AND_EXPR:
14495 /* Bitwise and provides a power of two multiple. If the mask is
14496 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14497 if (!integer_pow2p (bottom))
14498 return 0;
14499 /* FALLTHRU */
14501 case MULT_EXPR:
14502 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14503 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14505 case PLUS_EXPR:
14506 case MINUS_EXPR:
14507 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14508 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14510 case LSHIFT_EXPR:
14511 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14513 tree op1, t1;
14515 op1 = TREE_OPERAND (top, 1);
14516 /* const_binop may not detect overflow correctly,
14517 so check for it explicitly here. */
14518 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14519 && 0 != (t1 = fold_convert (type,
14520 const_binop (LSHIFT_EXPR,
14521 size_one_node,
14522 op1)))
14523 && !TREE_OVERFLOW (t1))
14524 return multiple_of_p (type, t1, bottom);
14526 return 0;
14528 case NOP_EXPR:
14529 /* Can't handle conversions from non-integral or wider integral type. */
14530 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14531 || (TYPE_PRECISION (type)
14532 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14533 return 0;
14535 /* .. fall through ... */
14537 case SAVE_EXPR:
14538 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14540 case COND_EXPR:
14541 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14542 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14544 case INTEGER_CST:
14545 if (TREE_CODE (bottom) != INTEGER_CST
14546 || integer_zerop (bottom)
14547 || (TYPE_UNSIGNED (type)
14548 && (tree_int_cst_sgn (top) < 0
14549 || tree_int_cst_sgn (bottom) < 0)))
14550 return 0;
14551 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14552 SIGNED);
14554 default:
14555 return 0;
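
/* An illustrative usage sketch (X is any sizetype-valued expression):
   24 is trivially a multiple of 8, and (X & -8) is recognized as one
   too, because the mask -8 is itself a multiple of the power-of-two
   bottom.  */

static int ATTRIBUTE_UNUSED
multiple_of_p_example (tree x)
{
  tree eight = size_int (8);
  tree masked = fold_build2 (BIT_AND_EXPR, sizetype, x,
                             build_int_cst (sizetype, -8));
  return multiple_of_p (sizetype, size_int (24), eight)   /* 1 */
         && multiple_of_p (sizetype, masked, eight);      /* 1 */
}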
14559 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
14561 static bool
14562 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14564 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14565 && truth_value_p (code))
14566 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14567 have a signed:1 type (where the values are -1 and 0).  */
14568 return true;
14569 return false;
14572 /* Return true if (CODE OP0) is known to be non-negative. If the return
14573 value is based on the assumption that signed overflow is undefined,
14574 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14575 *STRICT_OVERFLOW_P. */
14577 bool
14578 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14579 bool *strict_overflow_p)
14581 if (TYPE_UNSIGNED (type))
14582 return true;
14584 switch (code)
14586 case ABS_EXPR:
14587 /* We can't return 1 if flag_wrapv is set because
14588 ABS_EXPR<INT_MIN> = INT_MIN. */
14589 if (!INTEGRAL_TYPE_P (type))
14590 return true;
14591 if (TYPE_OVERFLOW_UNDEFINED (type))
14593 *strict_overflow_p = true;
14594 return true;
14596 break;
14598 case NON_LVALUE_EXPR:
14599 case FLOAT_EXPR:
14600 case FIX_TRUNC_EXPR:
14601 return tree_expr_nonnegative_warnv_p (op0,
14602 strict_overflow_p);
14604 CASE_CONVERT:
14606 tree inner_type = TREE_TYPE (op0);
14607 tree outer_type = type;
14609 if (TREE_CODE (outer_type) == REAL_TYPE)
14611 if (TREE_CODE (inner_type) == REAL_TYPE)
14612 return tree_expr_nonnegative_warnv_p (op0,
14613 strict_overflow_p);
14614 if (INTEGRAL_TYPE_P (inner_type))
14616 if (TYPE_UNSIGNED (inner_type))
14617 return true;
14618 return tree_expr_nonnegative_warnv_p (op0,
14619 strict_overflow_p);
14622 else if (INTEGRAL_TYPE_P (outer_type))
14624 if (TREE_CODE (inner_type) == REAL_TYPE)
14625 return tree_expr_nonnegative_warnv_p (op0,
14626 strict_overflow_p);
14627 if (INTEGRAL_TYPE_P (inner_type))
14628 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14629 && TYPE_UNSIGNED (inner_type);
14632 break;
14634 default:
14635 return tree_simple_nonnegative_warnv_p (code, type);
14638 /* We don't know sign of `t', so be conservative and return false. */
14639 return false;
14642 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14643 value is based on the assumption that signed overflow is undefined,
14644 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14645 *STRICT_OVERFLOW_P. */
14647 bool
14648 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14649 tree op1, bool *strict_overflow_p)
14651 if (TYPE_UNSIGNED (type))
14652 return true;
14654 switch (code)
14656 case POINTER_PLUS_EXPR:
14657 case PLUS_EXPR:
14658 if (FLOAT_TYPE_P (type))
14659 return (tree_expr_nonnegative_warnv_p (op0,
14660 strict_overflow_p)
14661 && tree_expr_nonnegative_warnv_p (op1,
14662 strict_overflow_p));
14664 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14665 both unsigned and at least 2 bits shorter than the result. */
14666 if (TREE_CODE (type) == INTEGER_TYPE
14667 && TREE_CODE (op0) == NOP_EXPR
14668 && TREE_CODE (op1) == NOP_EXPR)
14670 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14671 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14672 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14673 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14675 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14676 TYPE_PRECISION (inner2)) + 1;
14677 return prec < TYPE_PRECISION (type);
14680 break;
14682 case MULT_EXPR:
14683 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14685 /* x * x is always non-negative for floating point x
14686 or without overflow. */
14687 if (operand_equal_p (op0, op1, 0)
14688 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14689 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14691 if (TYPE_OVERFLOW_UNDEFINED (type))
14692 *strict_overflow_p = true;
14693 return true;
14697 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
14698 unsigned and the sum of their precisions is less than that of the result.  */
14699 if (TREE_CODE (type) == INTEGER_TYPE
14700 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14701 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14703 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14704 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14705 : TREE_TYPE (op0);
14706 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14707 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14708 : TREE_TYPE (op1);
14710 bool unsigned0 = TYPE_UNSIGNED (inner0);
14711 bool unsigned1 = TYPE_UNSIGNED (inner1);
14713 if (TREE_CODE (op0) == INTEGER_CST)
14714 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14716 if (TREE_CODE (op1) == INTEGER_CST)
14717 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14719 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14720 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14722 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14723 ? tree_int_cst_min_precision (op0, UNSIGNED)
14724 : TYPE_PRECISION (inner0);
14726 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14727 ? tree_int_cst_min_precision (op1, UNSIGNED)
14728 : TYPE_PRECISION (inner1);
14730 return precision0 + precision1 < TYPE_PRECISION (type);
14733 return false;
14735 case BIT_AND_EXPR:
14736 case MAX_EXPR:
14737 return (tree_expr_nonnegative_warnv_p (op0,
14738 strict_overflow_p)
14739 || tree_expr_nonnegative_warnv_p (op1,
14740 strict_overflow_p));
14742 case BIT_IOR_EXPR:
14743 case BIT_XOR_EXPR:
14744 case MIN_EXPR:
14745 case RDIV_EXPR:
14746 case TRUNC_DIV_EXPR:
14747 case CEIL_DIV_EXPR:
14748 case FLOOR_DIV_EXPR:
14749 case ROUND_DIV_EXPR:
14750 return (tree_expr_nonnegative_warnv_p (op0,
14751 strict_overflow_p)
14752 && tree_expr_nonnegative_warnv_p (op1,
14753 strict_overflow_p));
14755 case TRUNC_MOD_EXPR:
14756 case CEIL_MOD_EXPR:
14757 case FLOOR_MOD_EXPR:
14758 case ROUND_MOD_EXPR:
14759 return tree_expr_nonnegative_warnv_p (op0,
14760 strict_overflow_p);
14761 default:
14762 return tree_simple_nonnegative_warnv_p (code, type);
14765 /* We don't know sign of `t', so be conservative and return false. */
14766 return false;
14769 /* Return true if T is known to be non-negative. If the return
14770 value is based on the assumption that signed overflow is undefined,
14771 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14772 *STRICT_OVERFLOW_P. */
14774 bool
14775 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14777 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14778 return true;
14780 switch (TREE_CODE (t))
14782 case INTEGER_CST:
14783 return tree_int_cst_sgn (t) >= 0;
14785 case REAL_CST:
14786 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14788 case FIXED_CST:
14789 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14791 case COND_EXPR:
14792 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14793 strict_overflow_p)
14794 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14795 strict_overflow_p));
14796 default:
14797 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14798 TREE_TYPE (t));
14800 /* We don't know sign of `t', so be conservative and return false. */
14801 return false;
14804 /* Return true if a call to FNDECL with arguments ARG0 and ARG1 is known to be non-negative.  If the return
14805 value is based on the assumption that signed overflow is undefined,
14806 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14807 *STRICT_OVERFLOW_P. */
14809 bool
14810 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14811 tree arg0, tree arg1, bool *strict_overflow_p)
14813 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14814 switch (DECL_FUNCTION_CODE (fndecl))
14816 CASE_FLT_FN (BUILT_IN_ACOS):
14817 CASE_FLT_FN (BUILT_IN_ACOSH):
14818 CASE_FLT_FN (BUILT_IN_CABS):
14819 CASE_FLT_FN (BUILT_IN_COSH):
14820 CASE_FLT_FN (BUILT_IN_ERFC):
14821 CASE_FLT_FN (BUILT_IN_EXP):
14822 CASE_FLT_FN (BUILT_IN_EXP10):
14823 CASE_FLT_FN (BUILT_IN_EXP2):
14824 CASE_FLT_FN (BUILT_IN_FABS):
14825 CASE_FLT_FN (BUILT_IN_FDIM):
14826 CASE_FLT_FN (BUILT_IN_HYPOT):
14827 CASE_FLT_FN (BUILT_IN_POW10):
14828 CASE_INT_FN (BUILT_IN_FFS):
14829 CASE_INT_FN (BUILT_IN_PARITY):
14830 CASE_INT_FN (BUILT_IN_POPCOUNT):
14831 CASE_INT_FN (BUILT_IN_CLZ):
14832 CASE_INT_FN (BUILT_IN_CLRSB):
14833 case BUILT_IN_BSWAP32:
14834 case BUILT_IN_BSWAP64:
14835 /* Always true. */
14836 return true;
14838 CASE_FLT_FN (BUILT_IN_SQRT):
14839 /* sqrt(-0.0) is -0.0. */
14840 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14841 return true;
14842 return tree_expr_nonnegative_warnv_p (arg0,
14843 strict_overflow_p);
14845 CASE_FLT_FN (BUILT_IN_ASINH):
14846 CASE_FLT_FN (BUILT_IN_ATAN):
14847 CASE_FLT_FN (BUILT_IN_ATANH):
14848 CASE_FLT_FN (BUILT_IN_CBRT):
14849 CASE_FLT_FN (BUILT_IN_CEIL):
14850 CASE_FLT_FN (BUILT_IN_ERF):
14851 CASE_FLT_FN (BUILT_IN_EXPM1):
14852 CASE_FLT_FN (BUILT_IN_FLOOR):
14853 CASE_FLT_FN (BUILT_IN_FMOD):
14854 CASE_FLT_FN (BUILT_IN_FREXP):
14855 CASE_FLT_FN (BUILT_IN_ICEIL):
14856 CASE_FLT_FN (BUILT_IN_IFLOOR):
14857 CASE_FLT_FN (BUILT_IN_IRINT):
14858 CASE_FLT_FN (BUILT_IN_IROUND):
14859 CASE_FLT_FN (BUILT_IN_LCEIL):
14860 CASE_FLT_FN (BUILT_IN_LDEXP):
14861 CASE_FLT_FN (BUILT_IN_LFLOOR):
14862 CASE_FLT_FN (BUILT_IN_LLCEIL):
14863 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14864 CASE_FLT_FN (BUILT_IN_LLRINT):
14865 CASE_FLT_FN (BUILT_IN_LLROUND):
14866 CASE_FLT_FN (BUILT_IN_LRINT):
14867 CASE_FLT_FN (BUILT_IN_LROUND):
14868 CASE_FLT_FN (BUILT_IN_MODF):
14869 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14870 CASE_FLT_FN (BUILT_IN_RINT):
14871 CASE_FLT_FN (BUILT_IN_ROUND):
14872 CASE_FLT_FN (BUILT_IN_SCALB):
14873 CASE_FLT_FN (BUILT_IN_SCALBLN):
14874 CASE_FLT_FN (BUILT_IN_SCALBN):
14875 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14876 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14877 CASE_FLT_FN (BUILT_IN_SINH):
14878 CASE_FLT_FN (BUILT_IN_TANH):
14879 CASE_FLT_FN (BUILT_IN_TRUNC):
14880 /* True if the 1st argument is nonnegative. */
14881 return tree_expr_nonnegative_warnv_p (arg0,
14882 strict_overflow_p);
14884 CASE_FLT_FN (BUILT_IN_FMAX):
14885 /* True if the 1st OR 2nd arguments are nonnegative. */
14886 return (tree_expr_nonnegative_warnv_p (arg0,
14887 strict_overflow_p)
14888 || (tree_expr_nonnegative_warnv_p (arg1,
14889 strict_overflow_p)));
14891 CASE_FLT_FN (BUILT_IN_FMIN):
14892 /* True if the 1st AND 2nd arguments are nonnegative. */
14893 return (tree_expr_nonnegative_warnv_p (arg0,
14894 strict_overflow_p)
14895 && (tree_expr_nonnegative_warnv_p (arg1,
14896 strict_overflow_p)));
14898 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14899 /* True if the 2nd argument is nonnegative. */
14900 return tree_expr_nonnegative_warnv_p (arg1,
14901 strict_overflow_p);
14903 CASE_FLT_FN (BUILT_IN_POWI):
14904 /* True if the 1st argument is nonnegative or the second
14905 argument is an even integer. */
14906 if (TREE_CODE (arg1) == INTEGER_CST
14907 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14908 return true;
14909 return tree_expr_nonnegative_warnv_p (arg0,
14910 strict_overflow_p);
14912 CASE_FLT_FN (BUILT_IN_POW):
14913 /* True if the 1st argument is nonnegative or the second
14914 argument is an even integer valued real. */
14915 if (TREE_CODE (arg1) == REAL_CST)
14917 REAL_VALUE_TYPE c;
14918 HOST_WIDE_INT n;
14920 c = TREE_REAL_CST (arg1);
14921 n = real_to_integer (&c);
14922 if ((n & 1) == 0)
14924 REAL_VALUE_TYPE cint;
14925 real_from_integer (&cint, VOIDmode, n, SIGNED);
14926 if (real_identical (&c, &cint))
14927 return true;
14930 return tree_expr_nonnegative_warnv_p (arg0,
14931 strict_overflow_p);
14933 default:
14934 break;
14936 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14937 type);
14940 /* Return true if T is known to be non-negative. If the return
14941 value is based on the assumption that signed overflow is undefined,
14942 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14943 *STRICT_OVERFLOW_P. */
14945 static bool
14946 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14948 enum tree_code code = TREE_CODE (t);
14949 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14950 return true;
14952 switch (code)
14954 case TARGET_EXPR:
14956 tree temp = TARGET_EXPR_SLOT (t);
14957 t = TARGET_EXPR_INITIAL (t);
14959 /* If the initializer is non-void, then it's a normal expression
14960 that will be assigned to the slot. */
14961 if (!VOID_TYPE_P (t))
14962 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14964 /* Otherwise, the initializer sets the slot in some way. One common
14965 way is an assignment statement at the end of the initializer. */
14966 while (1)
14968 if (TREE_CODE (t) == BIND_EXPR)
14969 t = expr_last (BIND_EXPR_BODY (t));
14970 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14971 || TREE_CODE (t) == TRY_CATCH_EXPR)
14972 t = expr_last (TREE_OPERAND (t, 0));
14973 else if (TREE_CODE (t) == STATEMENT_LIST)
14974 t = expr_last (t);
14975 else
14976 break;
14978 if (TREE_CODE (t) == MODIFY_EXPR
14979 && TREE_OPERAND (t, 0) == temp)
14980 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14981 strict_overflow_p);
14983 return false;
14986 case CALL_EXPR:
14988 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14989 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14991 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14992 get_callee_fndecl (t),
14993 arg0,
14994 arg1,
14995 strict_overflow_p);
14997 case COMPOUND_EXPR:
14998 case MODIFY_EXPR:
14999 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15000 strict_overflow_p);
15001 case BIND_EXPR:
15002 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15003 strict_overflow_p);
15004 case SAVE_EXPR:
15005 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15006 strict_overflow_p);
15008 default:
15009 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15010 TREE_TYPE (t));
15013 /* We don't know sign of `t', so be conservative and return false. */
15014 return false;
15017 /* Return true if T is known to be non-negative. If the return
15018 value is based on the assumption that signed overflow is undefined,
15019 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15020 *STRICT_OVERFLOW_P. */
15022 bool
15023 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15025 enum tree_code code;
15026 if (t == error_mark_node)
15027 return false;
15029 code = TREE_CODE (t);
15030 switch (TREE_CODE_CLASS (code))
15032 case tcc_binary:
15033 case tcc_comparison:
15034 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15035 TREE_TYPE (t),
15036 TREE_OPERAND (t, 0),
15037 TREE_OPERAND (t, 1),
15038 strict_overflow_p);
15040 case tcc_unary:
15041 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15042 TREE_TYPE (t),
15043 TREE_OPERAND (t, 0),
15044 strict_overflow_p);
15046 case tcc_constant:
15047 case tcc_declaration:
15048 case tcc_reference:
15049 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15051 default:
15052 break;
15055 switch (code)
15057 case TRUTH_AND_EXPR:
15058 case TRUTH_OR_EXPR:
15059 case TRUTH_XOR_EXPR:
15060 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15061 TREE_TYPE (t),
15062 TREE_OPERAND (t, 0),
15063 TREE_OPERAND (t, 1),
15064 strict_overflow_p);
15065 case TRUTH_NOT_EXPR:
15066 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15067 TREE_TYPE (t),
15068 TREE_OPERAND (t, 0),
15069 strict_overflow_p);
15071 case COND_EXPR:
15072 case CONSTRUCTOR:
15073 case OBJ_TYPE_REF:
15074 case ASSERT_EXPR:
15075 case ADDR_EXPR:
15076 case WITH_SIZE_EXPR:
15077 case SSA_NAME:
15078 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15080 default:
15081 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15085 /* Return true if `t' is known to be non-negative. Handle warnings
15086 about undefined signed overflow. */
15088 bool
15089 tree_expr_nonnegative_p (tree t)
15091 bool ret, strict_overflow_p;
15093 strict_overflow_p = false;
15094 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15095 if (strict_overflow_p)
15096 fold_overflow_warning (("assuming signed overflow does not occur when "
15097 "determining that expression is always "
15098 "non-negative"),
15099 WARN_STRICT_OVERFLOW_MISC);
15100 return ret;
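
/* Illustrative usage (not from the original source): ABS_EXPR of a
   signed integer operand X is recognized as non-negative, but only by
   assuming signed overflow is undefined, since ABS_EXPR<INT_MIN> is
   INT_MIN under -fwrapv.  */

static bool ATTRIBUTE_UNUSED
nonnegative_example (tree x)
{
  tree e = fold_build1 (ABS_EXPR, TREE_TYPE (x), x);
  /* True when TYPE_OVERFLOW_UNDEFINED holds for the type of X.  */
  return tree_expr_nonnegative_p (e);
}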
15104 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15105 For floating point we further ensure that T is not denormal.
15106 Similar logic is present in nonzero_address_p in rtlanal.c.
15108 If the return value is based on the assumption that signed overflow
15109 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15110 change *STRICT_OVERFLOW_P. */
15112 bool
15113 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15114 bool *strict_overflow_p)
15116 switch (code)
15118 case ABS_EXPR:
15119 return tree_expr_nonzero_warnv_p (op0,
15120 strict_overflow_p);
15122 case NOP_EXPR:
15124 tree inner_type = TREE_TYPE (op0);
15125 tree outer_type = type;
15127 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15128 && tree_expr_nonzero_warnv_p (op0,
15129 strict_overflow_p));
15131 break;
15133 case NON_LVALUE_EXPR:
15134 return tree_expr_nonzero_warnv_p (op0,
15135 strict_overflow_p);
15137 default:
15138 break;
15141 return false;
15144 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15145 For floating point we further ensure that T is not denormal.
15146 Similar logic is present in nonzero_address_p in rtlanal.c.
15148 If the return value is based on the assumption that signed overflow
15149 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15150 change *STRICT_OVERFLOW_P. */
15152 bool
15153 tree_binary_nonzero_warnv_p (enum tree_code code,
15154 tree type,
15155 tree op0,
15156 tree op1, bool *strict_overflow_p)
15158 bool sub_strict_overflow_p;
15159 switch (code)
15161 case POINTER_PLUS_EXPR:
15162 case PLUS_EXPR:
15163 if (TYPE_OVERFLOW_UNDEFINED (type))
15165 /* In the presence of negative values it is hard
15166 to say anything.  */
15167 sub_strict_overflow_p = false;
15168 if (!tree_expr_nonnegative_warnv_p (op0,
15169 &sub_strict_overflow_p)
15170 || !tree_expr_nonnegative_warnv_p (op1,
15171 &sub_strict_overflow_p))
15172 return false;
15173 /* One of the operands must be positive and the other non-negative. */
15174 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15175 overflows, on a twos-complement machine the sum of two
15176 nonnegative numbers can never be zero. */
15177 return (tree_expr_nonzero_warnv_p (op0,
15178 strict_overflow_p)
15179 || tree_expr_nonzero_warnv_p (op1,
15180 strict_overflow_p));
15182 break;
15184 case MULT_EXPR:
15185 if (TYPE_OVERFLOW_UNDEFINED (type))
15187 if (tree_expr_nonzero_warnv_p (op0,
15188 strict_overflow_p)
15189 && tree_expr_nonzero_warnv_p (op1,
15190 strict_overflow_p))
15192 *strict_overflow_p = true;
15193 return true;
15196 break;
15198 case MIN_EXPR:
15199 sub_strict_overflow_p = false;
15200 if (tree_expr_nonzero_warnv_p (op0,
15201 &sub_strict_overflow_p)
15202 && tree_expr_nonzero_warnv_p (op1,
15203 &sub_strict_overflow_p))
15205 if (sub_strict_overflow_p)
15206 *strict_overflow_p = true;
15208 break;
15210 case MAX_EXPR:
15211 sub_strict_overflow_p = false;
15212 if (tree_expr_nonzero_warnv_p (op0,
15213 &sub_strict_overflow_p))
15215 if (sub_strict_overflow_p)
15216 *strict_overflow_p = true;
15218 /* When both operands are nonzero, MAX must be too. */
15219 if (tree_expr_nonzero_warnv_p (op1,
15220 strict_overflow_p))
15221 return true;
15223 /* MAX where operand 0 is positive is positive. */
15224 return tree_expr_nonnegative_warnv_p (op0,
15225 strict_overflow_p);
15227 /* MAX where operand 1 is positive is positive. */
15228 else if (tree_expr_nonzero_warnv_p (op1,
15229 &sub_strict_overflow_p)
15230 && tree_expr_nonnegative_warnv_p (op1,
15231 &sub_strict_overflow_p))
15233 if (sub_strict_overflow_p)
15234 *strict_overflow_p = true;
15235 return true;
15237 break;
15239 case BIT_IOR_EXPR:
15240 return (tree_expr_nonzero_warnv_p (op1,
15241 strict_overflow_p)
15242 || tree_expr_nonzero_warnv_p (op0,
15243 strict_overflow_p));
15245 default:
15246 break;
15249 return false;
15252 /* Return true when T is an address and is known to be nonzero.
15253 For floating point we further ensure that T is not denormal.
15254 Similar logic is present in nonzero_address_p in rtlanal.c.
15256 If the return value is based on the assumption that signed overflow
15257 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15258 change *STRICT_OVERFLOW_P. */
15260 bool
15261 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15263 bool sub_strict_overflow_p;
15264 switch (TREE_CODE (t))
15266 case INTEGER_CST:
15267 return !integer_zerop (t);
15269 case ADDR_EXPR:
15271 tree base = TREE_OPERAND (t, 0);
15273 if (!DECL_P (base))
15274 base = get_base_address (base);
15276 if (!base)
15277 return false;
15279 /* For objects in symbol table check if we know they are non-zero.
15280 Don't do anything for variables and functions before symtab is built;
15281 it is quite possible that they will be declared weak later. */
15282 if (DECL_P (base) && decl_in_symtab_p (base))
15284 struct symtab_node *symbol;
15286 symbol = symtab_node::get_create (base);
15287 if (symbol)
15288 return symbol->nonzero_address ();
15289 else
15290 return false;
15293 /* Function local objects are never NULL. */
15294 if (DECL_P (base)
15295 && (DECL_CONTEXT (base)
15296 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15297 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15298 return true;
15300 /* Constants are never weak. */
15301 if (CONSTANT_CLASS_P (base))
15302 return true;
15304 return false;
15307 case COND_EXPR:
15308 sub_strict_overflow_p = false;
15309 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15310 &sub_strict_overflow_p)
15311 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15312 &sub_strict_overflow_p))
15314 if (sub_strict_overflow_p)
15315 *strict_overflow_p = true;
15316 return true;
15318 break;
15320 default:
15321 break;
15323 return false;
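
/* A small sketch: nonzero integer constants are the simplest case
   handled above; addresses of function-local variables and of
   constants are the other easy ones.  */

static bool ATTRIBUTE_UNUSED
nonzero_example (void)
{
  bool sub_p = false;
  tree seven = build_int_cst (integer_type_node, 7);
  return tree_single_nonzero_warnv_p (seven, &sub_p);   /* true */
}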
15326 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15327 attempt to fold the expression to a constant without modifying TYPE,
15328 OP0 or OP1.
15330 If the expression could be simplified to a constant, then return
15331 the constant. If the expression would not be simplified to a
15332 constant, then return NULL_TREE. */
15334 tree
15335 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15337 tree tem = fold_binary (code, type, op0, op1);
15338 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
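
/* Illustrative call (using only the usual global type nodes): a
   constant-operand addition simplifies to a constant, whereas the same
   operation on non-constants would yield NULL_TREE.  */

static tree ATTRIBUTE_UNUSED
binary_to_constant_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Returns the INTEGER_CST 5.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}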
15341 /* Given the components of a unary expression CODE, TYPE and OP0,
15342 attempt to fold the expression to a constant without modifying
15343 TYPE or OP0.
15345 If the expression could be simplified to a constant, then return
15346 the constant. If the expression would not be simplified to a
15347 constant, then return NULL_TREE. */
15349 tree
15350 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15352 tree tem = fold_unary (code, type, op0);
15353 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15356 /* If EXP represents referencing an element in a constant string
15357 (either via pointer arithmetic or array indexing), return the
15358 tree representing the value accessed, otherwise return NULL. */
15360 tree
15361 fold_read_from_constant_string (tree exp)
15363 if ((TREE_CODE (exp) == INDIRECT_REF
15364 || TREE_CODE (exp) == ARRAY_REF)
15365 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15367 tree exp1 = TREE_OPERAND (exp, 0);
15368 tree index;
15369 tree string;
15370 location_t loc = EXPR_LOCATION (exp);
15372 if (TREE_CODE (exp) == INDIRECT_REF)
15373 string = string_constant (exp1, &index);
15374 else
15376 tree low_bound = array_ref_low_bound (exp);
15377 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15379 /* Optimize the special case of a zero lower bound.
15381 We convert the low_bound to sizetype to avoid some problems
15382 with constant folding.  (E.g. suppose the lower bound is 1,
15383 and its mode is QI.  Without the conversion, (ARRAY
15384 + (INDEX - (unsigned char) 1)) becomes ((ARRAY + (- (unsigned char) 1))
15385 + INDEX), which becomes (ARRAY + 255 + INDEX).  Oops!)  */
15386 if (! integer_zerop (low_bound))
15387 index = size_diffop_loc (loc, index,
15388 fold_convert_loc (loc, sizetype, low_bound));
15390 string = exp1;
15393 if (string
15394 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15395 && TREE_CODE (string) == STRING_CST
15396 && TREE_CODE (index) == INTEGER_CST
15397 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15398 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15399 == MODE_INT)
15400 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15401 return build_int_cst_type (TREE_TYPE (exp),
15402 (TREE_STRING_POINTER (string)
15403 [TREE_INT_CST_LOW (index)]));
15405 return NULL;
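
/* A sketch of the array-indexing case (illustrative only): build the
   STRING_CST "abc" with type char[4], index it with the constant 1,
   and read back the character 'b'.  */

static tree ATTRIBUTE_UNUSED
read_from_string_example (void)
{
  tree domain = build_index_type (size_int (3));
  tree str = build_string (4, "abc");
  TREE_TYPE (str) = build_array_type (char_type_node, domain);
  tree ref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
                     NULL_TREE, NULL_TREE);
  return fold_read_from_constant_string (ref);   /* (char) 'b' */
}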
15408 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15409 an integer constant, real, or fixed-point constant.
15411 TYPE is the type of the result. */
15413 static tree
15414 fold_negate_const (tree arg0, tree type)
15416 tree t = NULL_TREE;
15418 switch (TREE_CODE (arg0))
15420 case INTEGER_CST:
15422 bool overflow;
15423 wide_int val = wi::neg (arg0, &overflow);
15424 t = force_fit_type (type, val, 1,
15425 (overflow | TREE_OVERFLOW (arg0))
15426 && !TYPE_UNSIGNED (type));
15427 break;
15430 case REAL_CST:
15431 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15432 break;
15434 case FIXED_CST:
15436 FIXED_VALUE_TYPE f;
15437 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15438 &(TREE_FIXED_CST (arg0)), NULL,
15439 TYPE_SATURATING (type));
15440 t = build_fixed (type, f);
15441 /* Propagate overflow flags. */
15442 if (overflow_p | TREE_OVERFLOW (arg0))
15443 TREE_OVERFLOW (t) = 1;
15444 break;
15447 default:
15448 gcc_unreachable ();
15451 return t;
15454 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15455 an integer constant or real constant.
15457 TYPE is the type of the result. */
15459 tree
15460 fold_abs_const (tree arg0, tree type)
15462 tree t = NULL_TREE;
15464 switch (TREE_CODE (arg0))
15466 case INTEGER_CST:
15468 /* If the value is unsigned or non-negative, then the absolute value
15469 is the same as the ordinary value. */
15470 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15471 t = arg0;
15473 /* If the value is negative, then the absolute value is
15474 its negation. */
15475 else
15477 bool overflow;
15478 wide_int val = wi::neg (arg0, &overflow);
15479 t = force_fit_type (type, val, -1,
15480 overflow | TREE_OVERFLOW (arg0));
15483 break;
15485 case REAL_CST:
15486 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15487 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15488 else
15489 t = arg0;
15490 break;
15492 default:
15493 gcc_unreachable ();
15496 return t;
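
/* An illustrative pair of cases: abs of -5 negates to 5, while for the
   most negative value of the type the negation does not fit and the
   result is flagged with TREE_OVERFLOW by force_fit_type above.  */

static tree ATTRIBUTE_UNUSED
abs_const_example (void)
{
  tree m5 = build_int_cst (integer_type_node, -5);
  return fold_abs_const (m5, integer_type_node);   /* The INTEGER_CST 5.  */
}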
15499 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15500 constant. TYPE is the type of the result. */
15502 static tree
15503 fold_not_const (const_tree arg0, tree type)
15505 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15507 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15510 /* Given CODE, a relational operator, the target type, TYPE and two
15511 constant operands OP0 and OP1, return the result of the
15512 relational operation. If the result is not a compile time
15513 constant, then return NULL_TREE. */
15515 static tree
15516 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15518 int result, invert;
15520 /* From here on, the only cases we handle are when the result is
15521 known to be a constant. */
15523 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15525 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15526 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15528 /* Handle the cases where either operand is a NaN. */
15529 if (real_isnan (c0) || real_isnan (c1))
15531 switch (code)
15533 case EQ_EXPR:
15534 case ORDERED_EXPR:
15535 result = 0;
15536 break;
15538 case NE_EXPR:
15539 case UNORDERED_EXPR:
15540 case UNLT_EXPR:
15541 case UNLE_EXPR:
15542 case UNGT_EXPR:
15543 case UNGE_EXPR:
15544 case UNEQ_EXPR:
15545 result = 1;
15546 break;
15548 case LT_EXPR:
15549 case LE_EXPR:
15550 case GT_EXPR:
15551 case GE_EXPR:
15552 case LTGT_EXPR:
15553 if (flag_trapping_math)
15554 return NULL_TREE;
15555 result = 0;
15556 break;
15558 default:
15559 gcc_unreachable ();
15562 return constant_boolean_node (result, type);
15565 return constant_boolean_node (real_compare (code, c0, c1), type);
15568 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15570 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15571 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15572 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15575 /* Handle equality/inequality of complex constants. */
15576 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15578 tree rcond = fold_relational_const (code, type,
15579 TREE_REALPART (op0),
15580 TREE_REALPART (op1));
15581 tree icond = fold_relational_const (code, type,
15582 TREE_IMAGPART (op0),
15583 TREE_IMAGPART (op1));
15584 if (code == EQ_EXPR)
15585 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15586 else if (code == NE_EXPR)
15587 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15588 else
15589 return NULL_TREE;
15592 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15594 unsigned count = VECTOR_CST_NELTS (op0);
15595 tree *elts = XALLOCAVEC (tree, count);
15596 gcc_assert (VECTOR_CST_NELTS (op1) == count
15597 && TYPE_VECTOR_SUBPARTS (type) == count);
15599 for (unsigned i = 0; i < count; i++)
15601 tree elem_type = TREE_TYPE (type);
15602 tree elem0 = VECTOR_CST_ELT (op0, i);
15603 tree elem1 = VECTOR_CST_ELT (op1, i);
15605 tree tem = fold_relational_const (code, elem_type,
15606 elem0, elem1);
15608 if (tem == NULL_TREE)
15609 return NULL_TREE;
15611 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15614 return build_vector (type, elts);
15617 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15619 To compute GT, swap the arguments and do LT.
15620 To compute GE, do LT and invert the result.
15621 To compute LE, swap the arguments, do LT and invert the result.
15622 To compute NE, do EQ and invert the result.
15624 Therefore, the code below must handle only EQ and LT. */
15626 if (code == LE_EXPR || code == GT_EXPR)
15628 tree tem = op0;
15629 op0 = op1;
15630 op1 = tem;
15631 code = swap_tree_comparison (code);
15634 /* Note that it is safe to invert for real values here because we
15635 have already handled the one case where it matters.  */
15637 invert = 0;
15638 if (code == NE_EXPR || code == GE_EXPR)
15640 invert = 1;
15641 code = invert_tree_comparison (code, false);
15644 /* Compute a result for LT or EQ if the arguments permit;
15645 otherwise return NULL_TREE.  */
15646 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15648 if (code == EQ_EXPR)
15649 result = tree_int_cst_equal (op0, op1);
15650 else
15651 result = tree_int_cst_lt (op0, op1);
15653 else
15654 return NULL_TREE;
15656 if (invert)
15657 result ^= 1;
15658 return constant_boolean_node (result, type);
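
/* Illustrative call: comparing the integer constants 2 and 3 with
   LT_EXPR yields boolean_true_node via constant_boolean_node.  */

static tree ATTRIBUTE_UNUSED
relational_const_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_relational_const (LT_EXPR, boolean_type_node, two, three);
}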
15661 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15662 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15663 itself. */
15665 tree
15666 fold_build_cleanup_point_expr (tree type, tree expr)
15668 /* If the expression does not have side effects then we don't have to wrap
15669 it with a cleanup point expression. */
15670 if (!TREE_SIDE_EFFECTS (expr))
15671 return expr;
15673 /* If the expression is a return, check whether the expression inside the
15674 return, or the right-hand side of the MODIFY_EXPR inside the return, has
15675 side effects.  If either has none, we don't need to wrap the expression
15676 in a cleanup point expression.  Note we don't check the left-hand side
15677 of the MODIFY_EXPR because it should always be the return decl.  */
15678 if (TREE_CODE (expr) == RETURN_EXPR)
15680 tree op = TREE_OPERAND (expr, 0);
15681 if (!op || !TREE_SIDE_EFFECTS (op))
15682 return expr;
15683 op = TREE_OPERAND (op, 1);
15684 if (!TREE_SIDE_EFFECTS (op))
15685 return expr;
15688 return build1 (CLEANUP_POINT_EXPR, type, expr);
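
/* Sketch: a constant needs no cleanup point, while an expression with
   side effects gets wrapped (CALL is a hypothetical CALL_EXPR supplied
   by the caller).  */

static tree ATTRIBUTE_UNUSED
cleanup_point_example (tree call)
{
  tree c = build_int_cst (integer_type_node, 42);
  gcc_checking_assert (fold_build_cleanup_point_expr (integer_type_node, c)
                       == c);
  return fold_build_cleanup_point_expr (TREE_TYPE (call), call);
}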
15691 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15692 of an indirection through OP0, or NULL_TREE if no simplification is
15693 possible. */
15695 tree
15696 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15698 tree sub = op0;
15699 tree subtype;
15701 STRIP_NOPS (sub);
15702 subtype = TREE_TYPE (sub);
15703 if (!POINTER_TYPE_P (subtype))
15704 return NULL_TREE;
15706 if (TREE_CODE (sub) == ADDR_EXPR)
15708 tree op = TREE_OPERAND (sub, 0);
15709 tree optype = TREE_TYPE (op);
15710 /* *&CONST_DECL -> to the value of the const decl. */
15711 if (TREE_CODE (op) == CONST_DECL)
15712 return DECL_INITIAL (op);
15713 /* *&p => p; make sure to handle *&"str"[cst] here. */
15714 if (type == optype)
15716 tree fop = fold_read_from_constant_string (op);
15717 if (fop)
15718 return fop;
15719 else
15720 return op;
15722 /* *(foo *)&fooarray => fooarray[0] */
15723 else if (TREE_CODE (optype) == ARRAY_TYPE
15724 && type == TREE_TYPE (optype)
15725 && (!in_gimple_form
15726 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15728 tree type_domain = TYPE_DOMAIN (optype);
15729 tree min_val = size_zero_node;
15730 if (type_domain && TYPE_MIN_VALUE (type_domain))
15731 min_val = TYPE_MIN_VALUE (type_domain);
15732 if (in_gimple_form
15733 && TREE_CODE (min_val) != INTEGER_CST)
15734 return NULL_TREE;
15735 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15736 NULL_TREE, NULL_TREE);
15738 /* *(foo *)&complexfoo => __real__ complexfoo */
15739 else if (TREE_CODE (optype) == COMPLEX_TYPE
15740 && type == TREE_TYPE (optype))
15741 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15742 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15743 else if (TREE_CODE (optype) == VECTOR_TYPE
15744 && type == TREE_TYPE (optype))
15746 tree part_width = TYPE_SIZE (type);
15747 tree index = bitsize_int (0);
15748 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15752 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15753 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15755 tree op00 = TREE_OPERAND (sub, 0);
15756 tree op01 = TREE_OPERAND (sub, 1);
15758 STRIP_NOPS (op00);
15759 if (TREE_CODE (op00) == ADDR_EXPR)
15761 tree op00type;
15762 op00 = TREE_OPERAND (op00, 0);
15763 op00type = TREE_TYPE (op00);
15765 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15766 if (TREE_CODE (op00type) == VECTOR_TYPE
15767 && type == TREE_TYPE (op00type))
15769 HOST_WIDE_INT offset = tree_to_shwi (op01);
15770 tree part_width = TYPE_SIZE (type);
15771 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15772 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15773 tree index = bitsize_int (indexi);
15775 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15776 return fold_build3_loc (loc,
15777 BIT_FIELD_REF, type, op00,
15778 part_width, index);
15781 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15782 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15783 && type == TREE_TYPE (op00type))
15785 tree size = TYPE_SIZE_UNIT (type);
15786 if (tree_int_cst_equal (size, op01))
15787 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15789 /* ((foo *)&fooarray)[1] => fooarray[1] */
15790 else if (TREE_CODE (op00type) == ARRAY_TYPE
15791 && type == TREE_TYPE (op00type))
15793 tree type_domain = TYPE_DOMAIN (op00type);
15794 tree min_val = size_zero_node;
15795 if (type_domain && TYPE_MIN_VALUE (type_domain))
15796 min_val = TYPE_MIN_VALUE (type_domain);
15797 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15798 TYPE_SIZE_UNIT (type));
15799 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15800 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15801 NULL_TREE, NULL_TREE);
15806 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15807 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15808 && type == TREE_TYPE (TREE_TYPE (subtype))
15809 && (!in_gimple_form
15810 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15812 tree type_domain;
15813 tree min_val = size_zero_node;
15814 sub = build_fold_indirect_ref_loc (loc, sub);
15815 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15816 if (type_domain && TYPE_MIN_VALUE (type_domain))
15817 min_val = TYPE_MIN_VALUE (type_domain);
15818 if (in_gimple_form
15819 && TREE_CODE (min_val) != INTEGER_CST)
15820 return NULL_TREE;
15821 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15822 NULL_TREE);
15825 return NULL_TREE;
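
/* An illustrative case of the *(foo *)&fooarray rule above, with ARR a
   hypothetical VAR_DECL of type int[10]: dereferencing (int *)&ARR
   simplifies to the ARRAY_REF ARR[0].  */

static tree ATTRIBUTE_UNUSED
indirect_ref_example (tree arr)
{
  tree addr = build_fold_addr_expr (arr);   /* &ARR, type int (*)[10].  */
  return fold_indirect_ref_1 (UNKNOWN_LOCATION, integer_type_node, addr);
}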
15828 /* Builds an expression for an indirection through T, simplifying some
15829 cases. */
15831 tree
15832 build_fold_indirect_ref_loc (location_t loc, tree t)
15834 tree type = TREE_TYPE (TREE_TYPE (t));
15835 tree sub = fold_indirect_ref_1 (loc, type, t);
15837 if (sub)
15838 return sub;
15840 return build1_loc (loc, INDIRECT_REF, type, t);
15843 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15845 tree
15846 fold_indirect_ref_loc (location_t loc, tree t)
15848 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15850 if (sub)
15851 return sub;
15852 else
15853 return t;
15856 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15857 whose result is ignored. The type of the returned tree need not be
15858 the same as the original expression. */
15860 tree
15861 fold_ignored_result (tree t)
15863 if (!TREE_SIDE_EFFECTS (t))
15864 return integer_zero_node;
15866 for (;;)
15867 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15869 case tcc_unary:
15870 t = TREE_OPERAND (t, 0);
15871 break;
15873 case tcc_binary:
15874 case tcc_comparison:
15875 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15876 t = TREE_OPERAND (t, 0);
15877 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15878 t = TREE_OPERAND (t, 1);
15879 else
15880 return t;
15881 break;
15883 case tcc_expression:
15884 switch (TREE_CODE (t))
15886 case COMPOUND_EXPR:
15887 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15888 return t;
15889 t = TREE_OPERAND (t, 0);
15890 break;
15892 case COND_EXPR:
15893 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15894 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15895 return t;
15896 t = TREE_OPERAND (t, 0);
15897 break;
15899 default:
15900 return t;
15902 break;
15904 default:
15905 return t;
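
/* Sketch: in (CALL, 1) the side-effect-free second operand is dropped
   and only the side-effecting call remains (CALL: a hypothetical
   CALL_EXPR).  */

static tree ATTRIBUTE_UNUSED
ignored_result_example (tree call)
{
  tree pair = build2 (COMPOUND_EXPR, integer_type_node, call,
                      integer_one_node);
  return fold_ignored_result (pair);   /* Reduces to CALL.  */
}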
15909 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15911 tree
15912 round_up_loc (location_t loc, tree value, unsigned int divisor)
15914 tree div = NULL_TREE;
15916 if (divisor == 1)
15917 return value;
15919 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15920 have to do anything.  Only do this when VALUE is not itself a
15921 constant, because for a constant the check is more expensive than
15922 simply doing the rounding.  */
15923 if (TREE_CODE (value) != INTEGER_CST)
15925 div = build_int_cst (TREE_TYPE (value), divisor);
15927 if (multiple_of_p (TREE_TYPE (value), value, div))
15928 return value;
15931 /* If divisor is a power of two, simplify this to bit manipulation. */
15932 if (divisor == (divisor & -divisor))
15934 if (TREE_CODE (value) == INTEGER_CST)
15936 wide_int val = value;
15937 bool overflow_p;
15939 if ((val & (divisor - 1)) == 0)
15940 return value;
15942 overflow_p = TREE_OVERFLOW (value);
15943 val &= ~(divisor - 1);
15944 val += divisor;
15945 if (val == 0)
15946 overflow_p = true;
15948 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15950 else
15952 tree t;
15954 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15955 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15956 t = build_int_cst (TREE_TYPE (value), -divisor);
15957 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15960 else
15962 if (!div)
15963 div = build_int_cst (TREE_TYPE (value), divisor);
15964 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15965 value = size_binop_loc (loc, MULT_EXPR, value, div);
15968 return value;
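/* Standalone sketch of the power-of-two fast path above, on host
   integers rather than trees (assumption: unsigned wrap-around
   semantics; the helper name is hypothetical).  Rounding V up to a
   multiple of a power of two D is (V + D - 1) & -D, which is exactly
   what the PLUS_EXPR/BIT_AND_EXPR pair builds for non-constant
   values.  */

static inline unsigned HOST_WIDE_INT
sketch_round_up_hwi (unsigned HOST_WIDE_INT v, unsigned HOST_WIDE_INT d)
{
  /* D must satisfy d == (d & -d), i.e. be a power of two.  */
  return (v + d - 1) & -d;
}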
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
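/* Companion sketch for rounding down (hypothetical helper, host
   integers): with D a power of two, V & -D simply clears the low
   bits, which is the single BIT_AND_EXPR built above.  */

static inline unsigned HOST_WIDE_INT
sketch_round_down_hwi (unsigned HOST_WIDE_INT v, unsigned HOST_WIDE_INT d)
{
  return v & -d;
}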
/* Returns a pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
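/* Illustrative sketch (hypothetical helper): decomposing &a.f yields
   the core &a, the constant bit offset of F in *PBITPOS, and any
   variable offset part in *POFFSET; a non-ADDR_EXPR pointer is its
   own core at offset zero.  */

static bool
sketch_has_constant_offset_p (tree addr, HOST_WIDE_INT *bytepos)
{
  HOST_WIDE_INT bitpos;
  tree offset;

  split_address_to_core_and_offset (addr, &bitpos, &offset);
  /* A variable offset or a non-byte-aligned bit position means the
     access is not at a known constant byte offset.  */
  if (offset != NULL_TREE || bitpos % BITS_PER_UNIT != 0)
    return false;
  *bytepos = bitpos / BITS_PER_UNIT;
  return true;
}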
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
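/* Illustrative sketch (hypothetical helper): checking whether two
   addresses are a known number of bytes apart, e.g. whether one array
   element immediately follows another.  The call only succeeds when
   both addresses share the same core and the difference of their
   offsets folds to a constant.  */

static bool
sketch_addrs_distance_p (tree e1, tree e2, HOST_WIDE_INT expected)
{
  HOST_WIDE_INT diff;

  if (!ptr_difference_const (e1, e2, &diff))
    return false;
  return diff == expected;
}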
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip the copysign call and return its first argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
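/* Illustrative usage sketch (hypothetical helper): when only the
   magnitude of ARG matters -- say, under a fabs call -- sign-flipping
   wrappers can be discarded, so fabs (-x * y) can be treated as
   fabs (x * y).  */

static tree
sketch_simplify_magnitude (tree arg)
{
  tree stripped = fold_strip_sign_ops (arg);
  /* NULL_TREE means no sign operation could be removed.  */
  return stripped ? stripped : arg;
}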