/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
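
/* Usage sketch (added for exposition, not part of the original
   sources): on a target where sizeof (int) == 4, a caller could fold
   the byte offset 2 * sizeof (int) at compile time like so:

     tree elt_size = size_int (4);
     tree off = size_binop (MULT_EXPR, size_int (2), elt_size);

   OFF is then the sizetype INTEGER_CST 8; no MULT_EXPR node survives
   because both operands were constants.  */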
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
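
/* Illustration (added for exposition, not in the original sources):
   the low three bits encode LT, EQ and GT, and bit 3 encodes UNORD,
   so combining codes bitwise mirrors combining predicates logically:

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE    (1 | 2 == 3)
     COMPCODE_LT | COMPCODE_GT == COMPCODE_LTGT  (1 | 4 == 5)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ    (3 & 6 == 2)

   This is why "a < b || a == b" can be folded to "a <= b" by OR-ing
   the corresponding comparison codes.  */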
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
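
/* Example (added for exposition): with int constants,
   div_if_zero_remainder (12, 4) yields the INTEGER_CST 3, while
   div_if_zero_remainder (13, 4) yields NULL_TREE because the
   remainder is nonzero.  */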
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
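
/* Usage sketch (added for exposition; names marked hypothetical are
   not part of GCC).  A pass that folds speculatively brackets the
   work so warnings fire only if the folded result is kept:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool kept = result_is_used_p (folded);   // hypothetical helper
     fold_undefer_overflow_warnings (kept, stmt, 0);

   Only when KEPT is true is the deferred -Wstrict-overflow warning,
   if any, emitted at STMT's location.  */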
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
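
/* Example (added for exposition): sin is odd, so fold may rewrite
   -sin(x) as sin(-x).  The rint/lrint family is kept out of the
   "odd" set when -frounding-math is in effect because directed
   rounding breaks the symmetry: rounding toward +infinity,
   rint (-0.5) is -0.0 while -rint (0.5) is -1.0.  */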
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
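
/* Example (added for exposition): in 32-bit int, only INT_MIN
   (0x80000000, the lone value with just the sign bit set) has no
   representable negation, since -INT_MIN == INT_MAX + 1; every other
   signed constant may be negated safely, and every unsigned constant
   is rejected.  */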
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
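
/* Worked example (added for exposition): splitting IN = x - 5 with
   CODE == PLUS_EXPR and NEGATE_P false returns VAR = x with
   *MINUS_LITP = 5 (the subtracted literal) and *LITP and *CONP null;
   the caller can then recombine the pieces with associate_trees
   below.  */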
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
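
/* Example (added for exposition): int_const_binop (PLUS_EXPR, a, b)
   on the int constants 7 and 5 yields the INTEGER_CST 12; on
   INT_MAX and 1 it still yields a constant, but one with
   TREE_OVERFLOW set, because force_fit_type is told the signed
   addition overflowed.  A division by the constant 0 instead
   returns NULL_TREE, as the switch above shows.  */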
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
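
/* Example (added for exposition): with ARG0 and ARG1 both sizetype
   INTEGER_CSTs, size_binop (PLUS_EXPR, size_zero_node, n) returns N
   through the fast path above, while adding two nonzero constants
   falls through to int_const_binop_1 with OVERFLOWABLE == -1, so any
   wraparound in a size computation sets TREE_OVERFLOW even though
   sizetype is unsigned.  */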
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
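
/* Example (added for exposition): converting the REAL_CST 1e30 to
   32-bit int saturates to INT_MAX (2147483647) and marks the result
   with TREE_OVERFLOW; converting a NaN yields 0, likewise flagged.
   Both choices follow the Java rules cited above, which C and C++
   permit because they leave overflowing conversions unspecified.  */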
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are nonzero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
1979 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1980 to another floating point type. */
1982 static tree
1983 fold_convert_const_real_from_real (tree type, const_tree arg1)
1985 REAL_VALUE_TYPE value;
1986 tree t;
1988 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1989 t = build_real (type, value);
1991 /* If converting an infinity or NAN to a representation that doesn't
1992 have one, set the overflow bit so that we can produce some kind of
1993 error message at the appropriate point if necessary. It's not the
1994 most user-friendly message, but it's better than nothing. */
1995 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1996 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1997 TREE_OVERFLOW (t) = 1;
1998 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1999 && !MODE_HAS_NANS (TYPE_MODE (type)))
2000 TREE_OVERFLOW (t) = 1;
2001 /* Regular overflow, conversion produced an infinity in a mode that
2002 can't represent them. */
2003 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2004 && REAL_VALUE_ISINF (value)
2005 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2006 TREE_OVERFLOW (t) = 1;
2007 else
2008 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2009 return t;
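/* Illustrative example, not part of GCC: the "regular overflow" case
   above is a narrowing conversion manufacturing an infinity that the
   source value did not have.  Assumes IEEE formats; in plain C:  */
#if 0
#include <math.h>
double d = 1e300;              /* Finite as a double.  */
float f = (float) 1e300;       /* No finite float is that large, so
                                  isinf (f) holds after conversion.  */
#endif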
2012 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2013 to a floating point type. */
2015 static tree
2016 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2018 REAL_VALUE_TYPE value;
2019 tree t;
2021 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2022 t = build_real (type, value);
2024 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2025 return t;
2028 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2029 to another fixed-point type. */
2031 static tree
2032 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2034 FIXED_VALUE_TYPE value;
2035 tree t;
2036 bool overflow_p;
2038 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2039 TYPE_SATURATING (type));
2040 t = build_fixed (type, value);
2042 /* Propagate overflow flags. */
2043 if (overflow_p | TREE_OVERFLOW (arg1))
2044 TREE_OVERFLOW (t) = 1;
2045 return t;
2048 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2049 to a fixed-point type. */
2051 static tree
2052 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2054 FIXED_VALUE_TYPE value;
2055 tree t;
2056 bool overflow_p;
2057 double_int di;
2059 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2061 di.low = TREE_INT_CST_ELT (arg1, 0);
2062 if (TREE_INT_CST_NUNITS (arg1) == 1)
2063 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2064 else
2065 di.high = TREE_INT_CST_ELT (arg1, 1);
2067 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2068 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2069 TYPE_SATURATING (type));
2070 t = build_fixed (type, value);
2072 /* Propagate overflow flags. */
2073 if (overflow_p | TREE_OVERFLOW (arg1))
2074 TREE_OVERFLOW (t) = 1;
2075 return t;
2078 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2079 to a fixed-point type. */
2081 static tree
2082 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2084 FIXED_VALUE_TYPE value;
2085 tree t;
2086 bool overflow_p;
2088 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2089 &TREE_REAL_CST (arg1),
2090 TYPE_SATURATING (type));
2091 t = build_fixed (type, value);
2093 /* Propagate overflow flags. */
2094 if (overflow_p | TREE_OVERFLOW (arg1))
2095 TREE_OVERFLOW (t) = 1;
2096 return t;
2099 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2100 type TYPE. If no simplification can be done return NULL_TREE. */
2102 static tree
2103 fold_convert_const (enum tree_code code, tree type, tree arg1)
2105 if (TREE_TYPE (arg1) == type)
2106 return arg1;
2108 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2109 || TREE_CODE (type) == OFFSET_TYPE)
2111 if (TREE_CODE (arg1) == INTEGER_CST)
2112 return fold_convert_const_int_from_int (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_int_from_real (code, type, arg1);
2115 else if (TREE_CODE (arg1) == FIXED_CST)
2116 return fold_convert_const_int_from_fixed (type, arg1);
2118 else if (TREE_CODE (type) == REAL_TYPE)
2120 if (TREE_CODE (arg1) == INTEGER_CST)
2121 return build_real_from_int_cst (type, arg1);
2122 else if (TREE_CODE (arg1) == REAL_CST)
2123 return fold_convert_const_real_from_real (type, arg1);
2124 else if (TREE_CODE (arg1) == FIXED_CST)
2125 return fold_convert_const_real_from_fixed (type, arg1);
2127 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2129 if (TREE_CODE (arg1) == FIXED_CST)
2130 return fold_convert_const_fixed_from_fixed (type, arg1);
2131 else if (TREE_CODE (arg1) == INTEGER_CST)
2132 return fold_convert_const_fixed_from_int (type, arg1);
2133 else if (TREE_CODE (arg1) == REAL_CST)
2134 return fold_convert_const_fixed_from_real (type, arg1);
2136 return NULL_TREE;
2139 /* Construct a vector of zero elements of vector type TYPE. */
2141 static tree
2142 build_zero_vector (tree type)
2144 tree t;
2146 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2147 return build_vector_from_val (type, t);
2150 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2152 bool
2153 fold_convertible_p (const_tree type, const_tree arg)
2155 tree orig = TREE_TYPE (arg);
2157 if (type == orig)
2158 return true;
2160 if (TREE_CODE (arg) == ERROR_MARK
2161 || TREE_CODE (type) == ERROR_MARK
2162 || TREE_CODE (orig) == ERROR_MARK)
2163 return false;
2165 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2166 return true;
2168 switch (TREE_CODE (type))
2170 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2171 case POINTER_TYPE: case REFERENCE_TYPE:
2172 case OFFSET_TYPE:
2173 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2174 || TREE_CODE (orig) == OFFSET_TYPE)
2175 return true;
2176 return (TREE_CODE (orig) == VECTOR_TYPE
2177 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2179 case REAL_TYPE:
2180 case FIXED_POINT_TYPE:
2181 case COMPLEX_TYPE:
2182 case VECTOR_TYPE:
2183 case VOID_TYPE:
2184 return TREE_CODE (type) == TREE_CODE (orig);
2186 default:
2187 return false;
2191 /* Convert expression ARG to type TYPE. Used by the middle-end for
2192 simple conversions in preference to calling the front-end's convert. */
2194 tree
2195 fold_convert_loc (location_t loc, tree type, tree arg)
2197 tree orig = TREE_TYPE (arg);
2198 tree tem;
2200 if (type == orig)
2201 return arg;
2203 if (TREE_CODE (arg) == ERROR_MARK
2204 || TREE_CODE (type) == ERROR_MARK
2205 || TREE_CODE (orig) == ERROR_MARK)
2206 return error_mark_node;
2208 switch (TREE_CODE (type))
2210 case POINTER_TYPE:
2211 case REFERENCE_TYPE:
2212 /* Handle conversions between pointers to different address spaces. */
2213 if (POINTER_TYPE_P (orig)
2214 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2215 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2216 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2217 /* fall through */
2219 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2220 case OFFSET_TYPE:
2221 if (TREE_CODE (arg) == INTEGER_CST)
2223 tem = fold_convert_const (NOP_EXPR, type, arg);
2224 if (tem != NULL_TREE)
2225 return tem;
2227 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2228 || TREE_CODE (orig) == OFFSET_TYPE)
2229 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2230 if (TREE_CODE (orig) == COMPLEX_TYPE)
2231 return fold_convert_loc (loc, type,
2232 fold_build1_loc (loc, REALPART_EXPR,
2233 TREE_TYPE (orig), arg));
2234 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2235 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2236 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2238 case REAL_TYPE:
2239 if (TREE_CODE (arg) == INTEGER_CST)
2241 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2242 if (tem != NULL_TREE)
2243 return tem;
2245 else if (TREE_CODE (arg) == REAL_CST)
2247 tem = fold_convert_const (NOP_EXPR, type, arg);
2248 if (tem != NULL_TREE)
2249 return tem;
2251 else if (TREE_CODE (arg) == FIXED_CST)
2253 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2258 switch (TREE_CODE (orig))
2260 case INTEGER_TYPE:
2261 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2262 case POINTER_TYPE: case REFERENCE_TYPE:
2263 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2265 case REAL_TYPE:
2266 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2268 case FIXED_POINT_TYPE:
2269 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2271 case COMPLEX_TYPE:
2272 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2273 return fold_convert_loc (loc, type, tem);
2275 default:
2276 gcc_unreachable ();
2279 case FIXED_POINT_TYPE:
2280 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2281 || TREE_CODE (arg) == REAL_CST)
2283 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2284 if (tem != NULL_TREE)
2285 goto fold_convert_exit;
2288 switch (TREE_CODE (orig))
2290 case FIXED_POINT_TYPE:
2291 case INTEGER_TYPE:
2292 case ENUMERAL_TYPE:
2293 case BOOLEAN_TYPE:
2294 case REAL_TYPE:
2295 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2297 case COMPLEX_TYPE:
2298 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2299 return fold_convert_loc (loc, type, tem);
2301 default:
2302 gcc_unreachable ();
2305 case COMPLEX_TYPE:
2306 switch (TREE_CODE (orig))
2308 case INTEGER_TYPE:
2309 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2310 case POINTER_TYPE: case REFERENCE_TYPE:
2311 case REAL_TYPE:
2312 case FIXED_POINT_TYPE:
2313 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2314 fold_convert_loc (loc, TREE_TYPE (type), arg),
2315 fold_convert_loc (loc, TREE_TYPE (type),
2316 integer_zero_node));
2317 case COMPLEX_TYPE:
2319 tree rpart, ipart;
2321 if (TREE_CODE (arg) == COMPLEX_EXPR)
2323 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2324 TREE_OPERAND (arg, 0));
2325 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2326 TREE_OPERAND (arg, 1));
2327 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2330 arg = save_expr (arg);
2331 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2332 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2333 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2334 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2335 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2338 default:
2339 gcc_unreachable ();
2342 case VECTOR_TYPE:
2343 if (integer_zerop (arg))
2344 return build_zero_vector (type);
2345 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2346 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2347 || TREE_CODE (orig) == VECTOR_TYPE);
2348 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2350 case VOID_TYPE:
2351 tem = fold_ignored_result (arg);
2352 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2354 default:
2355 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2356 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2357 gcc_unreachable ();
2359 fold_convert_exit:
2360 protected_set_expr_location_unshare (tem, loc);
2361 return tem;
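/* Illustrative note on the COMPLEX_TYPE case above: ARG is wrapped in
   save_expr before the REALPART_EXPR/IMAGPART_EXPR are taken so that
   an expression with side effects is evaluated only once.  Roughly,
   converting g () to another complex type becomes

     tmp = g (); COMPLEX_EXPR (convert (REALPART tmp),
                               convert (IMAGPART tmp))

   where tmp stands for the single SAVE_EXPR evaluation; without it
   the call would be expanded once per part.  */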
2364 /* Return false if expr can be assumed not to be an lvalue, true
2365 otherwise. */
2367 static bool
2368 maybe_lvalue_p (const_tree x)
2370 /* We only need to wrap lvalue tree codes. */
2371 switch (TREE_CODE (x))
2373 case VAR_DECL:
2374 case PARM_DECL:
2375 case RESULT_DECL:
2376 case LABEL_DECL:
2377 case FUNCTION_DECL:
2378 case SSA_NAME:
2380 case COMPONENT_REF:
2381 case MEM_REF:
2382 case INDIRECT_REF:
2383 case ARRAY_REF:
2384 case ARRAY_RANGE_REF:
2385 case BIT_FIELD_REF:
2386 case OBJ_TYPE_REF:
2388 case REALPART_EXPR:
2389 case IMAGPART_EXPR:
2390 case PREINCREMENT_EXPR:
2391 case PREDECREMENT_EXPR:
2392 case SAVE_EXPR:
2393 case TRY_CATCH_EXPR:
2394 case WITH_CLEANUP_EXPR:
2395 case COMPOUND_EXPR:
2396 case MODIFY_EXPR:
2397 case TARGET_EXPR:
2398 case COND_EXPR:
2399 case BIND_EXPR:
2400 break;
2402 default:
2403 /* Assume the worst for front-end tree codes. */
2404 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2405 break;
2406 return false;
2409 return true;
2412 /* Return an expr equal to X but certainly not valid as an lvalue. */
2414 tree
2415 non_lvalue_loc (location_t loc, tree x)
2417 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2418 us. */
2419 if (in_gimple_form)
2420 return x;
2422 if (! maybe_lvalue_p (x))
2423 return x;
2424 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2427 /* When pedantic, return an expr equal to X but certainly not valid as a
2428 pedantic lvalue. Otherwise, return X. */
2430 static tree
2431 pedantic_non_lvalue_loc (location_t loc, tree x)
2433 return protected_set_expr_location_unshare (x, loc);
2436 /* Given a tree comparison code, return the code that is the logical inverse.
2437 It is generally not safe to do this for floating-point comparisons, except
2438 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2439 ERROR_MARK in this case. */
2441 enum tree_code
2442 invert_tree_comparison (enum tree_code code, bool honor_nans)
2444 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2445 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2446 return ERROR_MARK;
2448 switch (code)
2450 case EQ_EXPR:
2451 return NE_EXPR;
2452 case NE_EXPR:
2453 return EQ_EXPR;
2454 case GT_EXPR:
2455 return honor_nans ? UNLE_EXPR : LE_EXPR;
2456 case GE_EXPR:
2457 return honor_nans ? UNLT_EXPR : LT_EXPR;
2458 case LT_EXPR:
2459 return honor_nans ? UNGE_EXPR : GE_EXPR;
2460 case LE_EXPR:
2461 return honor_nans ? UNGT_EXPR : GT_EXPR;
2462 case LTGT_EXPR:
2463 return UNEQ_EXPR;
2464 case UNEQ_EXPR:
2465 return LTGT_EXPR;
2466 case UNGT_EXPR:
2467 return LE_EXPR;
2468 case UNGE_EXPR:
2469 return LT_EXPR;
2470 case UNLT_EXPR:
2471 return GE_EXPR;
2472 case UNLE_EXPR:
2473 return GT_EXPR;
2474 case ORDERED_EXPR:
2475 return UNORDERED_EXPR;
2476 case UNORDERED_EXPR:
2477 return ORDERED_EXPR;
2478 default:
2479 gcc_unreachable ();
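/* Illustrative sketch, not part of GCC: why honoring NaNs forces the
   UN* codes.  For x = NaN both (x < y) and (x >= y) are false, so the
   logical inverse of LT is UNGE (unordered-or-GE), not GE.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = NAN, y = 1.0;
  printf ("%d %d %d\n",
          x < y,        /* 0 */
          x >= y,       /* 0: GE is not the inverse of LT here.  */
          !(x < y));    /* 1: matches UNGE.  */
  return 0;
}
#endif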
2483 /* Similar, but return the comparison that results if the operands are
2484 swapped. This is safe for floating-point. */
2486 enum tree_code
2487 swap_tree_comparison (enum tree_code code)
2489 switch (code)
2491 case EQ_EXPR:
2492 case NE_EXPR:
2493 case ORDERED_EXPR:
2494 case UNORDERED_EXPR:
2495 case LTGT_EXPR:
2496 case UNEQ_EXPR:
2497 return code;
2498 case GT_EXPR:
2499 return LT_EXPR;
2500 case GE_EXPR:
2501 return LE_EXPR;
2502 case LT_EXPR:
2503 return GT_EXPR;
2504 case LE_EXPR:
2505 return GE_EXPR;
2506 case UNGT_EXPR:
2507 return UNLT_EXPR;
2508 case UNGE_EXPR:
2509 return UNLE_EXPR;
2510 case UNLT_EXPR:
2511 return UNGT_EXPR;
2512 case UNLE_EXPR:
2513 return UNGE_EXPR;
2514 default:
2515 gcc_unreachable ();
2520 /* Convert a comparison tree code from an enum tree_code representation
2521 into a compcode bit-based encoding. This function is the inverse of
2522 compcode_to_comparison. */
2524 static enum comparison_code
2525 comparison_to_compcode (enum tree_code code)
2527 switch (code)
2529 case LT_EXPR:
2530 return COMPCODE_LT;
2531 case EQ_EXPR:
2532 return COMPCODE_EQ;
2533 case LE_EXPR:
2534 return COMPCODE_LE;
2535 case GT_EXPR:
2536 return COMPCODE_GT;
2537 case NE_EXPR:
2538 return COMPCODE_NE;
2539 case GE_EXPR:
2540 return COMPCODE_GE;
2541 case ORDERED_EXPR:
2542 return COMPCODE_ORD;
2543 case UNORDERED_EXPR:
2544 return COMPCODE_UNORD;
2545 case UNLT_EXPR:
2546 return COMPCODE_UNLT;
2547 case UNEQ_EXPR:
2548 return COMPCODE_UNEQ;
2549 case UNLE_EXPR:
2550 return COMPCODE_UNLE;
2551 case UNGT_EXPR:
2552 return COMPCODE_UNGT;
2553 case LTGT_EXPR:
2554 return COMPCODE_LTGT;
2555 case UNGE_EXPR:
2556 return COMPCODE_UNGE;
2557 default:
2558 gcc_unreachable ();
2562 /* Convert a compcode bit-based encoding of a comparison operator back
2563 to GCC's enum tree_code representation. This function is the
2564 inverse of comparison_to_compcode. */
2566 static enum tree_code
2567 compcode_to_comparison (enum comparison_code code)
2569 switch (code)
2571 case COMPCODE_LT:
2572 return LT_EXPR;
2573 case COMPCODE_EQ:
2574 return EQ_EXPR;
2575 case COMPCODE_LE:
2576 return LE_EXPR;
2577 case COMPCODE_GT:
2578 return GT_EXPR;
2579 case COMPCODE_NE:
2580 return NE_EXPR;
2581 case COMPCODE_GE:
2582 return GE_EXPR;
2583 case COMPCODE_ORD:
2584 return ORDERED_EXPR;
2585 case COMPCODE_UNORD:
2586 return UNORDERED_EXPR;
2587 case COMPCODE_UNLT:
2588 return UNLT_EXPR;
2589 case COMPCODE_UNEQ:
2590 return UNEQ_EXPR;
2591 case COMPCODE_UNLE:
2592 return UNLE_EXPR;
2593 case COMPCODE_UNGT:
2594 return UNGT_EXPR;
2595 case COMPCODE_LTGT:
2596 return LTGT_EXPR;
2597 case COMPCODE_UNGE:
2598 return UNGE_EXPR;
2599 default:
2600 gcc_unreachable ();
2604 /* Return a tree for the comparison which is the combination of
2605 doing the AND or OR (depending on CODE) of the two operations LCODE
2606 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2607 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2608 if this makes the transformation invalid. */
2610 tree
2611 combine_comparisons (location_t loc,
2612 enum tree_code code, enum tree_code lcode,
2613 enum tree_code rcode, tree truth_type,
2614 tree ll_arg, tree lr_arg)
2616 bool honor_nans = HONOR_NANS (ll_arg);
2617 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2618 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2619 int compcode;
2621 switch (code)
2623 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2624 compcode = lcompcode & rcompcode;
2625 break;
2627 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2628 compcode = lcompcode | rcompcode;
2629 break;
2631 default:
2632 return NULL_TREE;
2635 if (!honor_nans)
2637 /* Eliminate unordered comparisons, as well as LTGT and ORD
2638 which are not used unless the mode has NaNs. */
2639 compcode &= ~COMPCODE_UNORD;
2640 if (compcode == COMPCODE_LTGT)
2641 compcode = COMPCODE_NE;
2642 else if (compcode == COMPCODE_ORD)
2643 compcode = COMPCODE_TRUE;
2645 else if (flag_trapping_math)
2647 /* Check that the original operation and the optimized ones will trap
2648 under the same condition. */
2649 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2650 && (lcompcode != COMPCODE_EQ)
2651 && (lcompcode != COMPCODE_ORD);
2652 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2653 && (rcompcode != COMPCODE_EQ)
2654 && (rcompcode != COMPCODE_ORD);
2655 bool trap = (compcode & COMPCODE_UNORD) == 0
2656 && (compcode != COMPCODE_EQ)
2657 && (compcode != COMPCODE_ORD);
2659 /* In a short-circuited boolean expression the LHS might be
2660 such that the RHS, if evaluated, will never trap. For
2661 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2662 if neither x nor y is NaN. (This is a mixed blessing: for
2663 example, the expression above will never trap, hence
2664 optimizing it to x < y would be invalid). */
2665 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2666 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2667 rtrap = false;
2669 /* If the comparison was short-circuited, and only the RHS
2670 trapped, we may now generate a spurious trap. */
2671 if (rtrap && !ltrap
2672 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2673 return NULL_TREE;
2675 /* If we changed the conditions that cause a trap, we lose. */
2676 if ((ltrap || rtrap) != trap)
2677 return NULL_TREE;
2680 if (compcode == COMPCODE_TRUE)
2681 return constant_boolean_node (true, truth_type);
2682 else if (compcode == COMPCODE_FALSE)
2683 return constant_boolean_node (false, truth_type);
2684 else
2686 enum tree_code tcode;
2688 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2689 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
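/* Illustrative example using the bit-based comparison_code encoding
   defined earlier in this file (LT, EQ and GT each occupy one bit):

     (x < y) || (x == y)  ->  COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE,
                              i.e. the single comparison x <= y;
     (x < y) && (x > y)   ->  COMPCODE_LT & COMPCODE_GT == COMPCODE_FALSE,

   and the latter constant-folds to false via constant_boolean_node.  */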
2693 /* Return nonzero if two operands (typically of the same tree node)
2694 are necessarily equal. If either argument has side-effects this
2695 function returns zero. FLAGS modifies behavior as follows:
2697 If OEP_ONLY_CONST is set, only return nonzero for constants.
2698 This function tests whether the operands are indistinguishable;
2699 it does not test whether they are equal using C's == operation.
2700 The distinction is important for IEEE floating point, because
2701 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2702 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2704 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2705 even though it may hold multiple values during a function.
2706 This is because a GCC tree node guarantees that nothing else is
2707 executed between the evaluation of its "operands" (which may often
2708 be evaluated in arbitrary order). Hence if the operands themselves
2709 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2710 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2711 unset means assuming isochronic (or instantaneous) tree equivalence.
2712 Unless comparing arbitrary expression trees, such as from different
2713 statements, this flag can usually be left unset.
2715 If OEP_PURE_SAME is set, then pure functions with identical arguments
2716 are considered the same. It is used when the caller has other ways
2717 to ensure that global memory is unchanged in between. */
2719 int
2720 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2722 /* If either is ERROR_MARK, they aren't equal. */
2723 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2724 || TREE_TYPE (arg0) == error_mark_node
2725 || TREE_TYPE (arg1) == error_mark_node)
2726 return 0;
2728 /* Similar, if either does not have a type (like a released SSA name),
2729 they aren't equal. */
2730 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2731 return 0;
2733 /* Check equality of integer constants before bailing out due to
2734 precision differences. */
2735 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2736 return tree_int_cst_equal (arg0, arg1);
2738 /* If both types don't have the same signedness, then we can't consider
2739 them equal. We must check this before the STRIP_NOPS calls
2740 because they may change the signedness of the arguments. As pointers
2741 strictly don't have a signedness, require either two pointers or
2742 two non-pointers as well. */
2743 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2744 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2745 return 0;
2747 /* We cannot consider pointers to different address space equal. */
2748 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2749 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2750 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2751 return 0;
2753 /* If both types don't have the same precision, then it is not safe
2754 to strip NOPs. */
2755 if (element_precision (TREE_TYPE (arg0))
2756 != element_precision (TREE_TYPE (arg1)))
2757 return 0;
2759 STRIP_NOPS (arg0);
2760 STRIP_NOPS (arg1);
2762 /* In case both args are comparisons but with different comparison
2763 code, try to swap the comparison operands of one arg to produce
2764 a match and compare that variant. */
2765 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2766 && COMPARISON_CLASS_P (arg0)
2767 && COMPARISON_CLASS_P (arg1))
2769 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2771 if (TREE_CODE (arg0) == swap_code)
2772 return operand_equal_p (TREE_OPERAND (arg0, 0),
2773 TREE_OPERAND (arg1, 1), flags)
2774 && operand_equal_p (TREE_OPERAND (arg0, 1),
2775 TREE_OPERAND (arg1, 0), flags);
2778 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2779 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2780 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2781 return 0;
2783 /* This is needed for conversions and for COMPONENT_REF.
2784 Might as well play it safe and always test this. */
2785 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2786 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2787 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2788 return 0;
2790 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2791 We don't care about side effects in that case because the SAVE_EXPR
2792 takes care of that for us. In all other cases, two expressions are
2793 equal if they have no side effects. If we have two identical
2794 expressions with side effects that should be treated the same due
2795 to the only side effects being identical SAVE_EXPR's, that will
2796 be detected in the recursive calls below.
2797 If we are taking an invariant address of two identical objects
2798 they are necessarily equal as well. */
2799 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2800 && (TREE_CODE (arg0) == SAVE_EXPR
2801 || (flags & OEP_CONSTANT_ADDRESS_OF)
2802 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2803 return 1;
2805 /* Next handle constant cases, those for which we can return 1 even
2806 if ONLY_CONST is set. */
2807 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2808 switch (TREE_CODE (arg0))
2810 case INTEGER_CST:
2811 return tree_int_cst_equal (arg0, arg1);
2813 case FIXED_CST:
2814 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2815 TREE_FIXED_CST (arg1));
2817 case REAL_CST:
2818 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2819 TREE_REAL_CST (arg1)))
2820 return 1;
2823 if (!HONOR_SIGNED_ZEROS (arg0))
2825 /* If we do not distinguish between signed and unsigned zero,
2826 consider them equal. */
2827 if (real_zerop (arg0) && real_zerop (arg1))
2828 return 1;
2830 return 0;
2832 case VECTOR_CST:
2834 unsigned i;
2836 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2837 return 0;
2839 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2841 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2842 VECTOR_CST_ELT (arg1, i), flags))
2843 return 0;
2845 return 1;
2848 case COMPLEX_CST:
2849 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2850 flags)
2851 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2852 flags));
2854 case STRING_CST:
2855 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2856 && ! memcmp (TREE_STRING_POINTER (arg0),
2857 TREE_STRING_POINTER (arg1),
2858 TREE_STRING_LENGTH (arg0)));
2860 case ADDR_EXPR:
2861 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2862 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2863 ? OEP_CONSTANT_ADDRESS_OF : 0);
2864 default:
2865 break;
2868 if (flags & OEP_ONLY_CONST)
2869 return 0;
2871 /* Define macros to test an operand from arg0 and arg1 for equality and a
2872 variant that allows null and views null as being different from any
2873 non-null value. In the latter case, if either is null, then both
2874 must be; otherwise, do the normal comparison. */
2875 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2876 TREE_OPERAND (arg1, N), flags)
2878 #define OP_SAME_WITH_NULL(N) \
2879 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2880 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2882 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2884 case tcc_unary:
2885 /* Two conversions are equal only if signedness and modes match. */
2886 switch (TREE_CODE (arg0))
2888 CASE_CONVERT:
2889 case FIX_TRUNC_EXPR:
2890 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2891 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2892 return 0;
2893 break;
2894 default:
2895 break;
2898 return OP_SAME (0);
2901 case tcc_comparison:
2902 case tcc_binary:
2903 if (OP_SAME (0) && OP_SAME (1))
2904 return 1;
2906 /* For commutative ops, allow the other order. */
2907 return (commutative_tree_code (TREE_CODE (arg0))
2908 && operand_equal_p (TREE_OPERAND (arg0, 0),
2909 TREE_OPERAND (arg1, 1), flags)
2910 && operand_equal_p (TREE_OPERAND (arg0, 1),
2911 TREE_OPERAND (arg1, 0), flags));
2913 case tcc_reference:
2914 /* If either of the pointer (or reference) expressions we are
2915 dereferencing contain a side effect, these cannot be equal,
2916 but their addresses can be. */
2917 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2918 && (TREE_SIDE_EFFECTS (arg0)
2919 || TREE_SIDE_EFFECTS (arg1)))
2920 return 0;
2922 switch (TREE_CODE (arg0))
2924 case INDIRECT_REF:
2925 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2926 return OP_SAME (0);
2928 case REALPART_EXPR:
2929 case IMAGPART_EXPR:
2930 return OP_SAME (0);
2932 case TARGET_MEM_REF:
2933 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2934 /* Require equal extra operands and then fall through to MEM_REF
2935 handling of the two common operands. */
2936 if (!OP_SAME_WITH_NULL (2)
2937 || !OP_SAME_WITH_NULL (3)
2938 || !OP_SAME_WITH_NULL (4))
2939 return 0;
2940 /* Fallthru. */
2941 case MEM_REF:
2942 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2943 /* Require equal access sizes, and similar pointer types.
2944 We can have incomplete types for array references of
2945 variable-sized arrays from the Fortran frontend
2946 though. Also verify the types are compatible. */
2947 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2948 || (TYPE_SIZE (TREE_TYPE (arg0))
2949 && TYPE_SIZE (TREE_TYPE (arg1))
2950 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2951 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2952 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2953 && alias_ptr_types_compatible_p
2954 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2955 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2956 && OP_SAME (0) && OP_SAME (1));
2958 case ARRAY_REF:
2959 case ARRAY_RANGE_REF:
2960 /* Operands 2 and 3 may be null.
2961 Compare the array index by value if it is constant first as we
2962 may have different types but same value here. */
2963 if (!OP_SAME (0))
2964 return 0;
2965 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2966 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2967 TREE_OPERAND (arg1, 1))
2968 || OP_SAME (1))
2969 && OP_SAME_WITH_NULL (2)
2970 && OP_SAME_WITH_NULL (3));
2972 case COMPONENT_REF:
2973 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2974 may be NULL when we're called to compare MEM_EXPRs. */
2975 if (!OP_SAME_WITH_NULL (0)
2976 || !OP_SAME (1))
2977 return 0;
2978 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2979 return OP_SAME_WITH_NULL (2);
2981 case BIT_FIELD_REF:
2982 if (!OP_SAME (0))
2983 return 0;
2984 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2985 return OP_SAME (1) && OP_SAME (2);
2987 default:
2988 return 0;
2991 case tcc_expression:
2992 switch (TREE_CODE (arg0))
2994 case ADDR_EXPR:
2995 case TRUTH_NOT_EXPR:
2996 return OP_SAME (0);
2998 case TRUTH_ANDIF_EXPR:
2999 case TRUTH_ORIF_EXPR:
3000 return OP_SAME (0) && OP_SAME (1);
3002 case FMA_EXPR:
3003 case WIDEN_MULT_PLUS_EXPR:
3004 case WIDEN_MULT_MINUS_EXPR:
3005 if (!OP_SAME (2))
3006 return 0;
3007 /* The multiplication operands are commutative. */
3008 /* FALLTHRU */
3010 case TRUTH_AND_EXPR:
3011 case TRUTH_OR_EXPR:
3012 case TRUTH_XOR_EXPR:
3013 if (OP_SAME (0) && OP_SAME (1))
3014 return 1;
3016 /* Otherwise take into account this is a commutative operation. */
3017 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3018 TREE_OPERAND (arg1, 1), flags)
3019 && operand_equal_p (TREE_OPERAND (arg0, 1),
3020 TREE_OPERAND (arg1, 0), flags));
3022 case COND_EXPR:
3023 case VEC_COND_EXPR:
3024 case DOT_PROD_EXPR:
3025 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3027 default:
3028 return 0;
3031 case tcc_vl_exp:
3032 switch (TREE_CODE (arg0))
3034 case CALL_EXPR:
3035 /* If the CALL_EXPRs call different functions, then they
3036 clearly cannot be equal. */
3037 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3038 flags))
3039 return 0;
3042 unsigned int cef = call_expr_flags (arg0);
3043 if (flags & OEP_PURE_SAME)
3044 cef &= ECF_CONST | ECF_PURE;
3045 else
3046 cef &= ECF_CONST;
3047 if (!cef)
3048 return 0;
3051 /* Now see if all the arguments are the same. */
3053 const_call_expr_arg_iterator iter0, iter1;
3054 const_tree a0, a1;
3055 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3056 a1 = first_const_call_expr_arg (arg1, &iter1);
3057 a0 && a1;
3058 a0 = next_const_call_expr_arg (&iter0),
3059 a1 = next_const_call_expr_arg (&iter1))
3060 if (! operand_equal_p (a0, a1, flags))
3061 return 0;
3063 /* If we get here and both argument lists are exhausted
3064 then the CALL_EXPRs are equal. */
3065 return ! (a0 || a1);
3067 default:
3068 return 0;
3071 case tcc_declaration:
3072 /* Consider __builtin_sqrt equal to sqrt. */
3073 return (TREE_CODE (arg0) == FUNCTION_DECL
3074 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3075 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3076 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3078 default:
3079 return 0;
3082 #undef OP_SAME
3083 #undef OP_SAME_WITH_NULL
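/* Illustrative sketch, not part of GCC: both IEEE subtleties cited in
   the comment before operand_equal_p are observable in plain C.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0, n = NAN;
  printf ("%d %d\n", pz == nz, signbit (nz) != signbit (pz));
  /* Prints "1 1": -0.0 == 0.0, yet the sign bits differ, so the two
     constants are distinguishable.  */
  printf ("%d\n", n == n);
  /* Prints "0": a NaN compares unequal even to an identical NaN.  */
  return 0;
}
#endif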
3086 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3087 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3089 When in doubt, return 0. */
3091 static int
3092 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3094 int unsignedp1, unsignedpo;
3095 tree primarg0, primarg1, primother;
3096 unsigned int correct_width;
3098 if (operand_equal_p (arg0, arg1, 0))
3099 return 1;
3101 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3102 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3103 return 0;
3105 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3106 and see if the inner values are the same. This removes any
3107 signedness comparison, which doesn't matter here. */
3108 primarg0 = arg0, primarg1 = arg1;
3109 STRIP_NOPS (primarg0);
3110 STRIP_NOPS (primarg1);
3111 if (operand_equal_p (primarg0, primarg1, 0))
3112 return 1;
3114 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3115 actual comparison operand, ARG0.
3117 First throw away any conversions to wider types
3118 already present in the operands. */
3120 primarg1 = get_narrower (arg1, &unsignedp1);
3121 primother = get_narrower (other, &unsignedpo);
3123 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3124 if (unsignedp1 == unsignedpo
3125 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3126 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3128 tree type = TREE_TYPE (arg0);
3130 /* Make sure shorter operand is extended the right way
3131 to match the longer operand. */
3132 primarg1 = fold_convert (signed_or_unsigned_type_for
3133 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3135 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3136 return 1;
3139 return 0;
3142 /* See if ARG is an expression that is either a comparison or is performing
3143 arithmetic on comparisons. The comparisons must only be comparing
3144 two different values, which will be stored in *CVAL1 and *CVAL2; if
3145 they are nonzero it means that some operands have already been found.
3146 No variables may be used anywhere else in the expression except in the
3147 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3148 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3150 If this is true, return 1. Otherwise, return zero. */
3152 static int
3153 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3155 enum tree_code code = TREE_CODE (arg);
3156 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3158 /* We can handle some of the tcc_expression cases here. */
3159 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3160 tclass = tcc_unary;
3161 else if (tclass == tcc_expression
3162 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3163 || code == COMPOUND_EXPR))
3164 tclass = tcc_binary;
3166 else if (tclass == tcc_expression && code == SAVE_EXPR
3167 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3169 /* If we've already found a CVAL1 or CVAL2, this expression is
3170 too complex to handle. */
3171 if (*cval1 || *cval2)
3172 return 0;
3174 tclass = tcc_unary;
3175 *save_p = 1;
3178 switch (tclass)
3180 case tcc_unary:
3181 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3183 case tcc_binary:
3184 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3185 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3186 cval1, cval2, save_p));
3188 case tcc_constant:
3189 return 1;
3191 case tcc_expression:
3192 if (code == COND_EXPR)
3193 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3194 cval1, cval2, save_p)
3195 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3196 cval1, cval2, save_p)
3197 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3198 cval1, cval2, save_p));
3199 return 0;
3201 case tcc_comparison:
3202 /* First see if we can handle the first operand, then the second. For
3203 the second operand, we know *CVAL1 can't be zero. It must be that
3204 one side of the comparison is each of the values; test for the
3205 case where this isn't true by failing if the two operands
3206 are the same. */
3208 if (operand_equal_p (TREE_OPERAND (arg, 0),
3209 TREE_OPERAND (arg, 1), 0))
3210 return 0;
3212 if (*cval1 == 0)
3213 *cval1 = TREE_OPERAND (arg, 0);
3214 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3215 ;
3216 else if (*cval2 == 0)
3217 *cval2 = TREE_OPERAND (arg, 0);
3218 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3219 ;
3220 else
3221 return 0;
3223 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3224 ;
3225 else if (*cval2 == 0)
3226 *cval2 = TREE_OPERAND (arg, 1);
3227 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3228 ;
3229 else
3230 return 0;
3232 return 1;
3234 default:
3235 return 0;
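/* Illustrative example: for ARG = (a < b) && (a == b) the recursion
   above records *CVAL1 = a and *CVAL2 = b, and returns 1.  For
   ARG = (a < b) && (a == c) it returns 0, because the comparisons
   mention three distinct values.  */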
3239 /* ARG is a tree that is known to contain just arithmetic operations and
3240 comparisons. Evaluate the operations in the tree substituting NEW0 for
3241 any occurrence of OLD0 as an operand of a comparison and likewise for
3242 NEW1 and OLD1. */
3244 static tree
3245 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3246 tree old1, tree new1)
3248 tree type = TREE_TYPE (arg);
3249 enum tree_code code = TREE_CODE (arg);
3250 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3252 /* We can handle some of the tcc_expression cases here. */
3253 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3254 tclass = tcc_unary;
3255 else if (tclass == tcc_expression
3256 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3257 tclass = tcc_binary;
3259 switch (tclass)
3261 case tcc_unary:
3262 return fold_build1_loc (loc, code, type,
3263 eval_subst (loc, TREE_OPERAND (arg, 0),
3264 old0, new0, old1, new1));
3266 case tcc_binary:
3267 return fold_build2_loc (loc, code, type,
3268 eval_subst (loc, TREE_OPERAND (arg, 0),
3269 old0, new0, old1, new1),
3270 eval_subst (loc, TREE_OPERAND (arg, 1),
3271 old0, new0, old1, new1));
3273 case tcc_expression:
3274 switch (code)
3276 case SAVE_EXPR:
3277 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3278 old1, new1);
3280 case COMPOUND_EXPR:
3281 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3282 old1, new1);
3284 case COND_EXPR:
3285 return fold_build3_loc (loc, code, type,
3286 eval_subst (loc, TREE_OPERAND (arg, 0),
3287 old0, new0, old1, new1),
3288 eval_subst (loc, TREE_OPERAND (arg, 1),
3289 old0, new0, old1, new1),
3290 eval_subst (loc, TREE_OPERAND (arg, 2),
3291 old0, new0, old1, new1));
3292 default:
3293 break;
3295 /* Fall through - ??? */
3297 case tcc_comparison:
3299 tree arg0 = TREE_OPERAND (arg, 0);
3300 tree arg1 = TREE_OPERAND (arg, 1);
3302 /* We need to check both for exact equality and tree equality. The
3303 former will be true if the operand has a side-effect. In that
3304 case, we know the operand occurred exactly once. */
3306 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3307 arg0 = new0;
3308 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3309 arg0 = new1;
3311 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3312 arg1 = new0;
3313 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3314 arg1 = new1;
3316 return fold_build2_loc (loc, code, type, arg0, arg1);
3319 default:
3320 return arg;
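/* Illustrative example: with OLD0 = a, NEW0 = 0, OLD1 = b, NEW1 = 1,
   eval_subst rewrites (a < b) && (b < a) into (0 < 1) && (1 < 0),
   which later folding reduces to false.  Callers use this to evaluate
   a two-valued expression at each possible ordering of its values.  */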
3324 /* Return a tree for the case when the result of an expression is RESULT
3325 converted to TYPE and OMITTED was previously an operand of the expression
3326 but is now not needed (e.g., we folded OMITTED * 0).
3328 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3329 the conversion of RESULT to TYPE. */
3331 tree
3332 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3334 tree t = fold_convert_loc (loc, type, result);
3336 /* If the resulting operand is an empty statement, just return the omitted
3337 statement casted to void. */
3338 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3339 return build1_loc (loc, NOP_EXPR, void_type_node,
3340 fold_ignored_result (omitted));
3342 if (TREE_SIDE_EFFECTS (omitted))
3343 return build2_loc (loc, COMPOUND_EXPR, type,
3344 fold_ignored_result (omitted), t);
3346 return non_lvalue_loc (loc, t);
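/* Illustrative example, not part of GCC: folding f () * 0 must keep
   the call's side effects, so RESULT = 0 and OMITTED = f () combine
   into a COMPOUND_EXPR.  In C terms:  */
#if 0
extern int f (void);
int use (void) { return (f (), 0); }   /* shape of the folded tree */
#endif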
3349 /* Return a tree for the case when the result of an expression is RESULT
3350 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3351 of the expression but are now not needed.
3353 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3354 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3355 evaluated before OMITTED2. Otherwise, if neither has side effects,
3356 just do the conversion of RESULT to TYPE. */
3358 tree
3359 omit_two_operands_loc (location_t loc, tree type, tree result,
3360 tree omitted1, tree omitted2)
3362 tree t = fold_convert_loc (loc, type, result);
3364 if (TREE_SIDE_EFFECTS (omitted2))
3365 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3366 if (TREE_SIDE_EFFECTS (omitted1))
3367 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3369 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3373 /* Return a simplified tree node for the truth-negation of ARG. This
3374 never alters ARG itself. We assume that ARG is an operation that
3375 returns a truth value (0 or 1).
3377 FIXME: one would think we would fold the result, but it causes
3378 problems with the dominator optimizer. */
3380 static tree
3381 fold_truth_not_expr (location_t loc, tree arg)
3383 tree type = TREE_TYPE (arg);
3384 enum tree_code code = TREE_CODE (arg);
3385 location_t loc1, loc2;
3387 /* If this is a comparison, we can simply invert it, except for
3388 floating-point non-equality comparisons, in which case we just
3389 enclose a TRUTH_NOT_EXPR around what we have. */
3391 if (TREE_CODE_CLASS (code) == tcc_comparison)
3393 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3394 if (FLOAT_TYPE_P (op_type)
3395 && flag_trapping_math
3396 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3397 && code != NE_EXPR && code != EQ_EXPR)
3398 return NULL_TREE;
3400 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3401 if (code == ERROR_MARK)
3402 return NULL_TREE;
3404 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3405 TREE_OPERAND (arg, 1));
3408 switch (code)
3410 case INTEGER_CST:
3411 return constant_boolean_node (integer_zerop (arg), type);
3413 case TRUTH_AND_EXPR:
3414 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3415 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3416 return build2_loc (loc, TRUTH_OR_EXPR, type,
3417 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3418 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3420 case TRUTH_OR_EXPR:
3421 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3422 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3423 return build2_loc (loc, TRUTH_AND_EXPR, type,
3424 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3425 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3427 case TRUTH_XOR_EXPR:
3428 /* Here we can invert either operand. We invert the first operand
3429 unless the second operand is a TRUTH_NOT_EXPR in which case our
3430 result is the XOR of the first operand with the inside of the
3431 negation of the second operand. */
3433 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3434 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3435 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3436 else
3437 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3438 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3439 TREE_OPERAND (arg, 1));
3441 case TRUTH_ANDIF_EXPR:
3442 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3443 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3444 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3445 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3446 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3448 case TRUTH_ORIF_EXPR:
3449 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3450 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3451 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3452 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3453 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3455 case TRUTH_NOT_EXPR:
3456 return TREE_OPERAND (arg, 0);
3458 case COND_EXPR:
3460 tree arg1 = TREE_OPERAND (arg, 1);
3461 tree arg2 = TREE_OPERAND (arg, 2);
3463 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3464 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3466 /* A COND_EXPR may have a throw as one operand, which
3467 then has void type. Just leave void operands
3468 as they are. */
3469 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3470 VOID_TYPE_P (TREE_TYPE (arg1))
3471 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3472 VOID_TYPE_P (TREE_TYPE (arg2))
3473 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3476 case COMPOUND_EXPR:
3477 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3478 return build2_loc (loc, COMPOUND_EXPR, type,
3479 TREE_OPERAND (arg, 0),
3480 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3482 case NON_LVALUE_EXPR:
3483 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3484 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3486 CASE_CONVERT:
3487 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3488 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3490 /* ... fall through ... */
3492 case FLOAT_EXPR:
3493 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3494 return build1_loc (loc, TREE_CODE (arg), type,
3495 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3497 case BIT_AND_EXPR:
3498 if (!integer_onep (TREE_OPERAND (arg, 1)))
3499 return NULL_TREE;
3500 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3502 case SAVE_EXPR:
3503 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3505 case CLEANUP_POINT_EXPR:
3506 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3507 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3508 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3510 default:
3511 return NULL_TREE;
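/* Illustrative summary of the rewrites above:

     !(a && b)    ->  !a || !b        TRUTH_AND_EXPR (De Morgan)
     !(a || b)    ->  !a && !b        TRUTH_OR_EXPR
     !(a ^ b)     ->  !a ^ b          TRUTH_XOR_EXPR: invert one operand
     !(c ? a : b) ->  c ? !a : !b     COND_EXPR
     !(x & 1)     ->  (x & 1) == 0    BIT_AND_EXPR with mask 1

   with each sub-negation performed through invert_truthvalue_loc.  */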
3515 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3516 assume that ARG is an operation that returns a truth value (0 or 1
3517 for scalars, 0 or -1 for vectors). Return the folded expression if
3518 folding is successful. Otherwise, return NULL_TREE. */
3520 static tree
3521 fold_invert_truthvalue (location_t loc, tree arg)
3523 tree type = TREE_TYPE (arg);
3524 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3525 ? BIT_NOT_EXPR
3526 : TRUTH_NOT_EXPR,
3527 type, arg);
3530 /* Return a simplified tree node for the truth-negation of ARG. This
3531 never alters ARG itself. We assume that ARG is an operation that
3532 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3534 tree
3535 invert_truthvalue_loc (location_t loc, tree arg)
3537 if (TREE_CODE (arg) == ERROR_MARK)
3538 return arg;
3540 tree type = TREE_TYPE (arg);
3541 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3542 ? BIT_NOT_EXPR
3543 : TRUTH_NOT_EXPR,
3544 type, arg);
3547 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3548 operands are another bit-wise operation with a common input. If so,
3549 distribute the bit operations to save an operation and possibly two if
3550 constants are involved. For example, convert
3551 (A | B) & (A | C) into A | (B & C)
3552 Further simplification will occur if B and C are constants.
3554 If this optimization cannot be done, 0 will be returned. */
3556 static tree
3557 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3558 tree arg0, tree arg1)
3560 tree common;
3561 tree left, right;
3563 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3564 || TREE_CODE (arg0) == code
3565 || (TREE_CODE (arg0) != BIT_AND_EXPR
3566 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3567 return 0;
3569 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3571 common = TREE_OPERAND (arg0, 0);
3572 left = TREE_OPERAND (arg0, 1);
3573 right = TREE_OPERAND (arg1, 1);
3575 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3577 common = TREE_OPERAND (arg0, 0);
3578 left = TREE_OPERAND (arg0, 1);
3579 right = TREE_OPERAND (arg1, 0);
3581 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3583 common = TREE_OPERAND (arg0, 1);
3584 left = TREE_OPERAND (arg0, 0);
3585 right = TREE_OPERAND (arg1, 1);
3587 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3589 common = TREE_OPERAND (arg0, 1);
3590 left = TREE_OPERAND (arg0, 0);
3591 right = TREE_OPERAND (arg1, 0);
3593 else
3594 return 0;
3596 common = fold_convert_loc (loc, type, common);
3597 left = fold_convert_loc (loc, type, left);
3598 right = fold_convert_loc (loc, type, right);
3599 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3600 fold_build2_loc (loc, code, type, left, right));
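/* Illustrative sketch, not part of GCC: the distributive laws applied
   above hold bitwise, which a brute-force check over any operand
   values makes evident.  check_distrib is a hypothetical test helper.  */
#if 0
#include <assert.h>

static void
check_distrib (unsigned a, unsigned b, unsigned c)
{
  assert (((a | b) & (a | c)) == (a | (b & c)));   /* AND of IORs.  */
  assert (((a & b) | (a & c)) == (a & (b | c)));   /* IOR of ANDs.  */
}
#endif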
3603 /* Knowing that ARG0 and ARG1 are each either a RDIV_EXPR or a MULT_EXPR,
3604 simplify a binary operation with code CODE. This optimization is unsafe. */
3605 static tree
3606 distribute_real_division (location_t loc, enum tree_code code, tree type,
3607 tree arg0, tree arg1)
3609 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3610 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3612 /* (A / C) +- (B / C) -> (A +- B) / C. */
3613 if (mul0 == mul1
3614 && operand_equal_p (TREE_OPERAND (arg0, 1),
3615 TREE_OPERAND (arg1, 1), 0))
3616 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3617 fold_build2_loc (loc, code, type,
3618 TREE_OPERAND (arg0, 0),
3619 TREE_OPERAND (arg1, 0)),
3620 TREE_OPERAND (arg0, 1));
3622 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3623 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3624 TREE_OPERAND (arg1, 0), 0)
3625 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3626 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3628 REAL_VALUE_TYPE r0, r1;
3629 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3630 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3631 if (!mul0)
3632 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3633 if (!mul1)
3634 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3635 real_arithmetic (&r0, code, &r0, &r1);
3636 return fold_build2_loc (loc, MULT_EXPR, type,
3637 TREE_OPERAND (arg0, 0),
3638 build_real (type, r0));
3641 return NULL_TREE;
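/* Illustrative note: the transformation is unsafe because floating-point
   division rounds.  Folding A / 3.0 + B / 3.0 into (A + B) / 3.0 moves
   where rounding happens, and A / C1 + A / C2 -> A * (1/C1 + 1/C2)
   replaces exact divisions by an inexactly computed reciprocal sum, so
   results may differ in the last ulp.  Callers are expected to gate
   this behind unsafe-math style flags.  */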
3644 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3645 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3647 static tree
3648 make_bit_field_ref (location_t loc, tree inner, tree type,
3649 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3651 tree result, bftype;
3653 if (bitpos == 0)
3655 tree size = TYPE_SIZE (TREE_TYPE (inner));
3656 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3657 || POINTER_TYPE_P (TREE_TYPE (inner)))
3658 && tree_fits_shwi_p (size)
3659 && tree_to_shwi (size) == bitsize)
3660 return fold_convert_loc (loc, type, inner);
3663 bftype = type;
3664 if (TYPE_PRECISION (bftype) != bitsize
3665 || TYPE_UNSIGNED (bftype) == !unsignedp)
3666 bftype = build_nonstandard_integer_type (bitsize, 0);
3668 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3669 size_int (bitsize), bitsize_int (bitpos));
3671 if (bftype != type)
3672 result = fold_convert_loc (loc, type, result);
3674 return result;
3677 /* Optimize a bit-field compare.
3679 There are two cases: First is a compare against a constant and the
3680 second is a comparison of two items where the fields are at the same
3681 bit position relative to the start of a chunk (byte, halfword, word)
3682 large enough to contain it. In these cases we can avoid the shift
3683 implicit in bitfield extractions.
3685 For constants, we emit a compare of the shifted constant with the
3686 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3687 compared. For two fields at the same position, we do the ANDs with the
3688 similar mask and compare the result of the ANDs.
3690 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3691 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3692 are the left and right operands of the comparison, respectively.
3694 If the optimization described above can be done, we return the resulting
3695 tree. Otherwise we return zero. */
3697 static tree
3698 optimize_bit_field_compare (location_t loc, enum tree_code code,
3699 tree compare_type, tree lhs, tree rhs)
3701 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3702 tree type = TREE_TYPE (lhs);
3703 tree unsigned_type;
3704 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3705 machine_mode lmode, rmode, nmode;
3706 int lunsignedp, runsignedp;
3707 int lvolatilep = 0, rvolatilep = 0;
3708 tree linner, rinner = NULL_TREE;
3709 tree mask;
3710 tree offset;
3712 /* Get all the information about the extractions being done. If the bit size
3713 is the same as the size of the underlying object, we aren't doing an
3714 extraction at all and so can do nothing. We also don't want to
3715 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3716 then will no longer be able to replace it. */
3717 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3718 &lunsignedp, &lvolatilep, false);
3719 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3720 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3721 return 0;
3723 if (!const_p)
3725 /* If this is not a constant, we can only do something if bit positions,
3726 sizes, and signedness are the same. */
3727 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3728 &runsignedp, &rvolatilep, false);
3730 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3731 || lunsignedp != runsignedp || offset != 0
3732 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3733 return 0;
3736 /* See if we can find a mode to refer to this field. We should be able to,
3737 but fail if we can't. */
3738 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3739 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3740 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3741 TYPE_ALIGN (TREE_TYPE (rinner))),
3742 word_mode, false);
3743 if (nmode == VOIDmode)
3744 return 0;
3746 /* Set signed and unsigned types of the precision of this mode for the
3747 shifts below. */
3748 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3750 /* Compute the bit position and size for the new reference and our offset
3751 within it. If the new reference is the same size as the original, we
3752 won't optimize anything, so return zero. */
3753 nbitsize = GET_MODE_BITSIZE (nmode);
3754 nbitpos = lbitpos & ~ (nbitsize - 1);
3755 lbitpos -= nbitpos;
3756 if (nbitsize == lbitsize)
3757 return 0;
3759 if (BYTES_BIG_ENDIAN)
3760 lbitpos = nbitsize - lbitsize - lbitpos;
3762 /* Make the mask to be used against the extracted field. */
3763 mask = build_int_cst_type (unsigned_type, -1);
3764 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3765 mask = const_binop (RSHIFT_EXPR, mask,
3766 size_int (nbitsize - lbitsize - lbitpos));
3768 if (! const_p)
3769 /* If not comparing with constant, just rework the comparison
3770 and return. */
3771 return fold_build2_loc (loc, code, compare_type,
3772 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3773 make_bit_field_ref (loc, linner,
3774 unsigned_type,
3775 nbitsize, nbitpos,
3776 1),
3777 mask),
3778 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3779 make_bit_field_ref (loc, rinner,
3780 unsigned_type,
3781 nbitsize, nbitpos,
3782 1),
3783 mask));
3785 /* Otherwise, we are handling the constant case. See if the constant is too
3786 big for the field. Warn and return a tree for 0 (false) if so. We do
3787 this not only for its own sake, but to avoid having to test for this
3788 error case below. If we didn't, we might generate wrong code.
3790 For unsigned fields, the constant shifted right by the field length should
3791 be all zero. For signed fields, the high-order bits should agree with
3792 the sign bit. */
3794 if (lunsignedp)
3796 if (wi::lrshift (rhs, lbitsize) != 0)
3798 warning (0, "comparison is always %d due to width of bit-field",
3799 code == NE_EXPR);
3800 return constant_boolean_node (code == NE_EXPR, compare_type);
3803 else
3805 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3806 if (tem != 0 && tem != -1)
3808 warning (0, "comparison is always %d due to width of bit-field",
3809 code == NE_EXPR);
3810 return constant_boolean_node (code == NE_EXPR, compare_type);
3814 /* Single-bit compares should always be against zero. */
3815 if (lbitsize == 1 && ! integer_zerop (rhs))
3817 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3818 rhs = build_int_cst (type, 0);
3821 /* Make a new bitfield reference, shift the constant over the
3822 appropriate number of bits and mask it with the computed mask
3823 (in case this was a signed field). If we changed it, make a new one. */
3824 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3826 rhs = const_binop (BIT_AND_EXPR,
3827 const_binop (LSHIFT_EXPR,
3828 fold_convert_loc (loc, unsigned_type, rhs),
3829 size_int (lbitpos)),
3830 mask);
3832 lhs = build2_loc (loc, code, compare_type,
3833 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3834 return lhs;
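/* Illustrative sketch (not part of GCC): the effect of the transformation
   above, written out by hand for one hypothetical layout.  Assuming the
   3-bit field A occupies the low-order bits of a 32-bit unit, comparing it
   against a constant becomes a load of the containing word, a mask, and a
   compare against the (shifted) constant -- which is what the trees built
   above express.  */
#if 0
#include <assert.h>
#include <string.h>

struct s { unsigned a : 3; unsigned rest : 29; };

static int a_equals_5 (const struct s *p)
{
  unsigned word;
  memcpy (&word, p, sizeof word);	/* the make_bit_field_ref load */
  return (word & 0x7u) == 5u;		/* BIT_AND_EXPR with MASK, then compare */
}

int main (void)
{
  struct s x = { 5, 0 };
  assert (a_equals_5 (&x));
  x.a = 4;
  assert (!a_equals_5 (&x));
  return 0;
}
#endif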
3837 /* Subroutine for fold_truth_andor_1: decode a field reference.
3839 If EXP is a comparison reference, we return the innermost reference.
3841 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3842 set to the starting bit number.
3844 If the innermost field can be completely contained in a mode-sized
3845 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3847 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3848 otherwise it is not changed.
3850 *PUNSIGNEDP is set to the signedness of the field.
3852 *PMASK is set to the mask used. This is either contained in a
3853 BIT_AND_EXPR or derived from the width of the field.
3855 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3857 Return 0 if this is not a component reference or is one that we can't
3858 do anything with. */
3860 static tree
3861 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3862 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3863 int *punsignedp, int *pvolatilep,
3864 tree *pmask, tree *pand_mask)
3866 tree outer_type = 0;
3867 tree and_mask = 0;
3868 tree mask, inner, offset;
3869 tree unsigned_type;
3870 unsigned int precision;
3872 /* All the optimizations using this function assume integer fields.
3873 There are problems with FP fields since the type_for_size call
3874 below can fail for, e.g., XFmode. */
3875 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3876 return 0;
3878 /* We are interested in the bare arrangement of bits, so strip everything
3879 that doesn't affect the machine mode. However, record the type of the
3880 outermost expression if it may matter below. */
3881 if (CONVERT_EXPR_P (exp)
3882 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3883 outer_type = TREE_TYPE (exp);
3884 STRIP_NOPS (exp);
3886 if (TREE_CODE (exp) == BIT_AND_EXPR)
3888 and_mask = TREE_OPERAND (exp, 1);
3889 exp = TREE_OPERAND (exp, 0);
3890 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3891 if (TREE_CODE (and_mask) != INTEGER_CST)
3892 return 0;
3895 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3896 punsignedp, pvolatilep, false);
3897 if ((inner == exp && and_mask == 0)
3898 || *pbitsize < 0 || offset != 0
3899 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3900 return 0;
3902 /* If the number of bits in the reference is the same as the bitsize of
3903 the outer type, then the outer type gives the signedness. Otherwise
3904 (in case of a small bitfield) the signedness is unchanged. */
3905 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3906 *punsignedp = TYPE_UNSIGNED (outer_type);
3908 /* Compute the mask to access the bitfield. */
3909 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3910 precision = TYPE_PRECISION (unsigned_type);
3912 mask = build_int_cst_type (unsigned_type, -1);
3914 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3915 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3917 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3918 if (and_mask != 0)
3919 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3920 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3922 *pmask = mask;
3923 *pand_mask = and_mask;
3924 return inner;
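/* Illustrative sketch (not part of GCC): the left/right shift pair above
   produces a mask of *PBITSIZE low-order ones; here with a hypothetical
   PRECISION of 32 and BITSIZE of 5 on a plain unsigned int.  */
#if 0
#include <assert.h>

int main (void)
{
  unsigned precision = 32, bitsize = 5;
  unsigned mask = ~0u;			/* build_int_cst_type (unsigned_type, -1) */
  mask <<= precision - bitsize;		/* the LSHIFT_EXPR */
  mask >>= precision - bitsize;		/* the RSHIFT_EXPR */
  assert (mask == 0x1fu);		/* five low-order ones */
  return 0;
}
#endif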
3927 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3928 bit positions and MASK is SIGNED. */
3930 static int
3931 all_ones_mask_p (const_tree mask, unsigned int size)
3933 tree type = TREE_TYPE (mask);
3934 unsigned int precision = TYPE_PRECISION (type);
3936 /* If this function returns true when the type of the mask is
3937 UNSIGNED, then there will be errors. In particular see
3938 gcc.c-torture/execute/990326-1.c. There does not appear to be
3939 any documentation paper trail as to why this is so. But the
3940 pre-wide-int code worked with that restriction and it has been
3941 preserved here. */
3942 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3943 return false;
3945 return wi::mask (size, false, precision) == mask;
3948 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3949 represents the sign bit of EXP's type. If EXP represents a sign
3950 or zero extension, also test VAL against the unextended type.
3951 The return value is the (sub)expression whose sign bit is VAL,
3952 or NULL_TREE otherwise. */
3954 tree
3955 sign_bit_p (tree exp, const_tree val)
3957 int width;
3958 tree t;
3960 /* Tree EXP must have an integral type. */
3961 t = TREE_TYPE (exp);
3962 if (! INTEGRAL_TYPE_P (t))
3963 return NULL_TREE;
3965 /* Tree VAL must be an integer constant. */
3966 if (TREE_CODE (val) != INTEGER_CST
3967 || TREE_OVERFLOW (val))
3968 return NULL_TREE;
3970 width = TYPE_PRECISION (t);
3971 if (wi::only_sign_bit_p (val, width))
3972 return exp;
3974 /* Handle extension from a narrower type. */
3975 if (TREE_CODE (exp) == NOP_EXPR
3976 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3977 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3979 return NULL_TREE;
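/* Illustrative sketch (not part of GCC): for a 32-bit precision,
   wi::only_sign_bit_p amounts to checking that exactly the sign bit is
   set, as below.  */
#if 0
#include <assert.h>
#include <stdint.h>

static int only_sign_bit_32 (uint32_t val)
{
  return val == UINT32_C (1) << 31;	/* only bit 31 set */
}

int main (void)
{
  assert (only_sign_bit_32 (0x80000000u));
  assert (!only_sign_bit_32 (0x80000001u));
  assert (!only_sign_bit_32 (0));
  return 0;
}
#endif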
3982 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3983 to be evaluated unconditionally. */
3985 static int
3986 simple_operand_p (const_tree exp)
3988 /* Strip any conversions that don't change the machine mode. */
3989 STRIP_NOPS (exp);
3991 return (CONSTANT_CLASS_P (exp)
3992 || TREE_CODE (exp) == SSA_NAME
3993 || (DECL_P (exp)
3994 && ! TREE_ADDRESSABLE (exp)
3995 && ! TREE_THIS_VOLATILE (exp)
3996 && ! DECL_NONLOCAL (exp)
3997 /* Don't regard global variables as simple. They may be
3998 allocated in ways unknown to the compiler (shared memory,
3999 #pragma weak, etc). */
4000 && ! TREE_PUBLIC (exp)
4001 && ! DECL_EXTERNAL (exp)
4002 /* Weakrefs are not safe to be read, since they can be NULL.
4003 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4004 have DECL_WEAK flag set. */
4005 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4006 /* Loading a static variable is unduly expensive, but global
4007 registers aren't expensive. */
4008 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4011 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4012 to be evaluated unconditionally.
4013 In addition to simple_operand_p, we assume that comparisons, conversions,
4014 and logic-not operations are simple, if their operands are simple, too. */
4016 static bool
4017 simple_operand_p_2 (tree exp)
4019 enum tree_code code;
4021 if (TREE_SIDE_EFFECTS (exp)
4022 || tree_could_trap_p (exp))
4023 return false;
4025 while (CONVERT_EXPR_P (exp))
4026 exp = TREE_OPERAND (exp, 0);
4028 code = TREE_CODE (exp);
4030 if (TREE_CODE_CLASS (code) == tcc_comparison)
4031 return (simple_operand_p (TREE_OPERAND (exp, 0))
4032 && simple_operand_p (TREE_OPERAND (exp, 1)));
4034 if (code == TRUTH_NOT_EXPR)
4035 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4037 return simple_operand_p (exp);
4041 /* The following functions are subroutines to fold_range_test and allow it to
4042 try to change a logical combination of comparisons into a range test.
4044 For example, both
4045 X == 2 || X == 3 || X == 4 || X == 5
4046 and
4047 X >= 2 && X <= 5
4048 are converted to
4049 (unsigned) (X - 2) <= 3
4051 We describe each set of comparisons as being either inside or outside
4052 a range, using a variable named like IN_P, and then describe the
4053 range with a lower and upper bound. If one of the bounds is omitted,
4054 it represents either the highest or lowest value of the type.
4056 In the comments below, we represent a range by two numbers in brackets
4057 preceded by a "+" to designate being inside that range, or a "-" to
4058 designate being outside that range, so the condition can be inverted by
4059 flipping the prefix. An omitted bound is represented by a "-". For
4060 example, "- [-, 10]" means being outside the range starting at the lowest
4061 possible value and ending at 10, in other words, being greater than 10.
4062 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4063 always false.
4065 We set things up so that the missing bounds are handled in a consistent
4066 manner so neither a missing bound nor "true" and "false" need to be
4067 handled using a special case. */
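/* Illustrative sketch (not part of GCC): the example from the comment
   above, checked by brute force over a small hypothetical domain.  The
   four equality tests collapse to the single range test + [2, 5], done
   as one unsigned comparison.  */
#if 0
#include <assert.h>

int main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      int separate = (x == 2 || x == 3 || x == 4 || x == 5);
      int range = (unsigned) (x - 2) <= 3;	/* + [2, 5] */
      assert (separate == range);
    }
  return 0;
}
#endif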
4069 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4070 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4071 and UPPER1_P are nonzero if the respective argument is an upper bound
4072 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4073 must be specified for a comparison. ARG1 will be converted to ARG0's
4074 type if both are specified. */
4076 static tree
4077 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4078 tree arg1, int upper1_p)
4080 tree tem;
4081 int result;
4082 int sgn0, sgn1;
4084 /* If neither arg represents infinity, do the normal operation.
4085 Else, if not a comparison, return infinity. Else handle the special
4086 comparison rules. Note that most of the cases below won't occur, but
4087 are handled for consistency. */
4089 if (arg0 != 0 && arg1 != 0)
4091 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4092 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4093 STRIP_NOPS (tem);
4094 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4097 if (TREE_CODE_CLASS (code) != tcc_comparison)
4098 return 0;
4100 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4101 for neither. In real mathematics, we cannot assume open-ended ranges are
4102 the same. But this is computer arithmetic, where numbers are finite.
4103 We can therefore replace any unbounded bound with a value Z, Z being
4104 greater than any representable number. This permits us to treat
4105 unbounded ranges as equal.
4106 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4107 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4108 switch (code)
4110 case EQ_EXPR:
4111 result = sgn0 == sgn1;
4112 break;
4113 case NE_EXPR:
4114 result = sgn0 != sgn1;
4115 break;
4116 case LT_EXPR:
4117 result = sgn0 < sgn1;
4118 break;
4119 case LE_EXPR:
4120 result = sgn0 <= sgn1;
4121 break;
4122 case GT_EXPR:
4123 result = sgn0 > sgn1;
4124 break;
4125 case GE_EXPR:
4126 result = sgn0 >= sgn1;
4127 break;
4128 default:
4129 gcc_unreachable ();
4132 return constant_boolean_node (result, type);
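/* Illustrative sketch (not part of GCC): the SGN0/SGN1 encoding above on
   plain ints.  A missing lower bound compares like -infinity (-1), a
   missing upper bound like +infinity (+1), and any finite bound is 0, so
   two unbounded ends of the same kind compare equal.  */
#if 0
#include <assert.h>

int main (void)
{
  int minus_inf = -1, finite = 0, plus_inf_a = 1, plus_inf_b = 1;
  assert (minus_inf < finite);		/* a missing low bound is below any value */
  assert (finite < plus_inf_a);		/* a missing high bound is above any value */
  assert (plus_inf_a == plus_inf_b);	/* two unbounded upper ends are equal */
  return 0;
}
#endif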
4135 /* Helper routine for make_range. Perform one step for it, return
4136 new expression if the loop should continue or NULL_TREE if it should
4137 stop. */
4139 tree
4140 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4141 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4142 bool *strict_overflow_p)
4144 tree arg0_type = TREE_TYPE (arg0);
4145 tree n_low, n_high, low = *p_low, high = *p_high;
4146 int in_p = *p_in_p, n_in_p;
4148 switch (code)
4150 case TRUTH_NOT_EXPR:
4151 /* We can only do something if the range is testing for zero. */
4152 if (low == NULL_TREE || high == NULL_TREE
4153 || ! integer_zerop (low) || ! integer_zerop (high))
4154 return NULL_TREE;
4155 *p_in_p = ! in_p;
4156 return arg0;
4158 case EQ_EXPR: case NE_EXPR:
4159 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4160 /* We can only do something if the range is testing for zero
4161 and if the second operand is an integer constant. Note that
4162 saying something is "in" the range we make is done by
4163 complementing IN_P since IN_P is set in the initial case of
4164 being not equal to zero; "out" is leaving it alone. */
4165 if (low == NULL_TREE || high == NULL_TREE
4166 || ! integer_zerop (low) || ! integer_zerop (high)
4167 || TREE_CODE (arg1) != INTEGER_CST)
4168 return NULL_TREE;
4170 switch (code)
4172 case NE_EXPR: /* - [c, c] */
4173 low = high = arg1;
4174 break;
4175 case EQ_EXPR: /* + [c, c] */
4176 in_p = ! in_p, low = high = arg1;
4177 break;
4178 case GT_EXPR: /* - [-, c] */
4179 low = 0, high = arg1;
4180 break;
4181 case GE_EXPR: /* + [c, -] */
4182 in_p = ! in_p, low = arg1, high = 0;
4183 break;
4184 case LT_EXPR: /* - [c, -] */
4185 low = arg1, high = 0;
4186 break;
4187 case LE_EXPR: /* + [-, c] */
4188 in_p = ! in_p, low = 0, high = arg1;
4189 break;
4190 default:
4191 gcc_unreachable ();
4194 /* If this is an unsigned comparison, we also know that EXP is
4195 greater than or equal to zero. We base the range tests we make
4196 on that fact, so we record it here so we can parse existing
4197 range tests. We test arg0_type since often the return type
4198 of, e.g. EQ_EXPR, is boolean. */
4199 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4201 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4202 in_p, low, high, 1,
4203 build_int_cst (arg0_type, 0),
4204 NULL_TREE))
4205 return NULL_TREE;
4207 in_p = n_in_p, low = n_low, high = n_high;
4209 /* If the high bound is missing, but we have a nonzero low
4210 bound, reverse the range so it goes from zero to the low bound
4211 minus 1. */
4212 if (high == 0 && low && ! integer_zerop (low))
4214 in_p = ! in_p;
4215 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4216 build_int_cst (TREE_TYPE (low), 1), 0);
4217 low = build_int_cst (arg0_type, 0);
4221 *p_low = low;
4222 *p_high = high;
4223 *p_in_p = in_p;
4224 return arg0;
4226 case NEGATE_EXPR:
4227 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4228 low and high are non-NULL, then normalize will DTRT. */
4229 if (!TYPE_UNSIGNED (arg0_type)
4230 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4232 if (low == NULL_TREE)
4233 low = TYPE_MIN_VALUE (arg0_type);
4234 if (high == NULL_TREE)
4235 high = TYPE_MAX_VALUE (arg0_type);
4238 /* (-x) IN [a,b] -> x in [-b, -a] */
4239 n_low = range_binop (MINUS_EXPR, exp_type,
4240 build_int_cst (exp_type, 0),
4241 0, high, 1);
4242 n_high = range_binop (MINUS_EXPR, exp_type,
4243 build_int_cst (exp_type, 0),
4244 0, low, 0);
4245 if (n_high != 0 && TREE_OVERFLOW (n_high))
4246 return NULL_TREE;
4247 goto normalize;
4249 case BIT_NOT_EXPR:
4250 /* ~ X -> -X - 1 */
4251 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4252 build_int_cst (exp_type, 1));
4254 case PLUS_EXPR:
4255 case MINUS_EXPR:
4256 if (TREE_CODE (arg1) != INTEGER_CST)
4257 return NULL_TREE;
4259 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4260 move a constant to the other side. */
4261 if (!TYPE_UNSIGNED (arg0_type)
4262 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4263 return NULL_TREE;
4265 /* If EXP is signed, any overflow in the computation is undefined,
4266 so we don't worry about it so long as our computations on
4267 the bounds don't overflow. For unsigned, overflow is defined
4268 and this is exactly the right thing. */
4269 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4270 arg0_type, low, 0, arg1, 0);
4271 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4272 arg0_type, high, 1, arg1, 0);
4273 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4274 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4275 return NULL_TREE;
4277 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4278 *strict_overflow_p = true;
4280 normalize:
4281 /* Check for an unsigned range which has wrapped around the maximum
4282 value thus making n_high < n_low, and normalize it. */
4283 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4285 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4286 build_int_cst (TREE_TYPE (n_high), 1), 0);
4287 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4288 build_int_cst (TREE_TYPE (n_low), 1), 0);
4290 /* If the range is of the form +/- [ x+1, x ], we won't
4291 be able to normalize it. But then, it represents the
4292 whole range or the empty set, so make it
4293 +/- [ -, - ]. */
4294 if (tree_int_cst_equal (n_low, low)
4295 && tree_int_cst_equal (n_high, high))
4296 low = high = 0;
4297 else
4298 in_p = ! in_p;
4300 else
4301 low = n_low, high = n_high;
4303 *p_low = low;
4304 *p_high = high;
4305 *p_in_p = in_p;
4306 return arg0;
4308 CASE_CONVERT:
4309 case NON_LVALUE_EXPR:
4310 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4311 return NULL_TREE;
4313 if (! INTEGRAL_TYPE_P (arg0_type)
4314 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4315 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4316 return NULL_TREE;
4318 n_low = low, n_high = high;
4320 if (n_low != 0)
4321 n_low = fold_convert_loc (loc, arg0_type, n_low);
4323 if (n_high != 0)
4324 n_high = fold_convert_loc (loc, arg0_type, n_high);
4326 /* If we're converting arg0 from an unsigned type to exp, a
4327 signed type, we will be doing the comparison as unsigned.
4328 The tests above have already verified that LOW and HIGH
4329 are both positive.
4331 So we have to ensure that we will handle large unsigned
4332 values the same way that the current signed bounds treat
4333 negative values. */
4335 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4337 tree high_positive;
4338 tree equiv_type;
4339 /* For fixed-point modes, we need to pass the saturating flag
4340 as the 2nd parameter. */
4341 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4342 equiv_type
4343 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4344 TYPE_SATURATING (arg0_type));
4345 else
4346 equiv_type
4347 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4349 /* A range without an upper bound is, naturally, unbounded.
4350 Since convert would have cropped a very large value, use
4351 the max value for the destination type. */
4352 high_positive
4353 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4354 : TYPE_MAX_VALUE (arg0_type);
4356 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4357 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4358 fold_convert_loc (loc, arg0_type,
4359 high_positive),
4360 build_int_cst (arg0_type, 1));
4362 /* If the low bound is specified, "and" the range with the
4363 range for which the original unsigned value will be
4364 positive. */
4365 if (low != 0)
4367 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4368 1, fold_convert_loc (loc, arg0_type,
4369 integer_zero_node),
4370 high_positive))
4371 return NULL_TREE;
4373 in_p = (n_in_p == in_p);
4375 else
4377 /* Otherwise, "or" the range with the range of the input
4378 that will be interpreted as negative. */
4379 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4380 1, fold_convert_loc (loc, arg0_type,
4381 integer_zero_node),
4382 high_positive))
4383 return NULL_TREE;
4385 in_p = (in_p != n_in_p);
4389 *p_low = n_low;
4390 *p_high = n_high;
4391 *p_in_p = in_p;
4392 return arg0;
4394 default:
4395 return NULL_TREE;
4399 /* Given EXP, a logical expression, set the range it is testing into
4400 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4401 actually being tested. *PLOW and *PHIGH will be made of the same
4402 type as the returned expression. If EXP is not a comparison, we
4403 will most likely not be returning a useful value and range. Set
4404 *STRICT_OVERFLOW_P to true if the return value is only valid
4405 because signed overflow is undefined; otherwise, do not change
4406 *STRICT_OVERFLOW_P. */
4408 tree
4409 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4410 bool *strict_overflow_p)
4412 enum tree_code code;
4413 tree arg0, arg1 = NULL_TREE;
4414 tree exp_type, nexp;
4415 int in_p;
4416 tree low, high;
4417 location_t loc = EXPR_LOCATION (exp);
4419 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4420 and see if we can refine the range. Some of the cases below may not
4421 happen, but it doesn't seem worth worrying about this. We "continue"
4422 the outer loop when we've changed something; otherwise we "break"
4423 the switch, which will "break" the while. */
4425 in_p = 0;
4426 low = high = build_int_cst (TREE_TYPE (exp), 0);
4428 while (1)
4430 code = TREE_CODE (exp);
4431 exp_type = TREE_TYPE (exp);
4432 arg0 = NULL_TREE;
4434 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4436 if (TREE_OPERAND_LENGTH (exp) > 0)
4437 arg0 = TREE_OPERAND (exp, 0);
4438 if (TREE_CODE_CLASS (code) == tcc_binary
4439 || TREE_CODE_CLASS (code) == tcc_comparison
4440 || (TREE_CODE_CLASS (code) == tcc_expression
4441 && TREE_OPERAND_LENGTH (exp) > 1))
4442 arg1 = TREE_OPERAND (exp, 1);
4444 if (arg0 == NULL_TREE)
4445 break;
4447 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4448 &high, &in_p, strict_overflow_p);
4449 if (nexp == NULL_TREE)
4450 break;
4451 exp = nexp;
4454 /* If EXP is a constant, we can evaluate whether this is true or false. */
4455 if (TREE_CODE (exp) == INTEGER_CST)
4457 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4458 exp, 0, low, 0))
4459 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4460 exp, 1, high, 1)));
4461 low = high = 0;
4462 exp = 0;
4465 *pin_p = in_p, *plow = low, *phigh = high;
4466 return exp;
4469 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4470 type, TYPE, return an expression to test if EXP is in (or out of, depending
4471 on IN_P) the range. Return 0 if the test couldn't be created. */
4473 tree
4474 build_range_check (location_t loc, tree type, tree exp, int in_p,
4475 tree low, tree high)
4477 tree etype = TREE_TYPE (exp), value;
4479 #ifdef HAVE_canonicalize_funcptr_for_compare
4480 /* Disable this optimization for function pointer expressions
4481 on targets that require function pointer canonicalization. */
4482 if (HAVE_canonicalize_funcptr_for_compare
4483 && TREE_CODE (etype) == POINTER_TYPE
4484 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4485 return NULL_TREE;
4486 #endif
4488 if (! in_p)
4490 value = build_range_check (loc, type, exp, 1, low, high);
4491 if (value != 0)
4492 return invert_truthvalue_loc (loc, value);
4494 return 0;
4497 if (low == 0 && high == 0)
4498 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4500 if (low == 0)
4501 return fold_build2_loc (loc, LE_EXPR, type, exp,
4502 fold_convert_loc (loc, etype, high));
4504 if (high == 0)
4505 return fold_build2_loc (loc, GE_EXPR, type, exp,
4506 fold_convert_loc (loc, etype, low));
4508 if (operand_equal_p (low, high, 0))
4509 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4510 fold_convert_loc (loc, etype, low));
4512 if (integer_zerop (low))
4514 if (! TYPE_UNSIGNED (etype))
4516 etype = unsigned_type_for (etype);
4517 high = fold_convert_loc (loc, etype, high);
4518 exp = fold_convert_loc (loc, etype, exp);
4520 return build_range_check (loc, type, exp, 1, 0, high);
4523 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4524 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4526 int prec = TYPE_PRECISION (etype);
4528 if (wi::mask (prec - 1, false, prec) == high)
4530 if (TYPE_UNSIGNED (etype))
4532 tree signed_etype = signed_type_for (etype);
4533 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4534 etype
4535 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4536 else
4537 etype = signed_etype;
4538 exp = fold_convert_loc (loc, etype, exp);
4540 return fold_build2_loc (loc, GT_EXPR, type, exp,
4541 build_int_cst (etype, 0));
4545 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4546 This requires wrap-around arithmetic for the type of the expression.
4547 First make sure that arithmetic in this type is valid, then make sure
4548 that it wraps around. */
4549 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4550 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4551 TYPE_UNSIGNED (etype));
4553 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4555 tree utype, minv, maxv;
4557 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4558 for the type in question, as we rely on this here. */
4559 utype = unsigned_type_for (etype);
4560 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4561 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4562 build_int_cst (TREE_TYPE (maxv), 1), 1);
4563 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4565 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4566 minv, 1, maxv, 1)))
4567 etype = utype;
4568 else
4569 return 0;
4572 high = fold_convert_loc (loc, etype, high);
4573 low = fold_convert_loc (loc, etype, low);
4574 exp = fold_convert_loc (loc, etype, exp);
4576 value = const_binop (MINUS_EXPR, high, low);
4579 if (POINTER_TYPE_P (etype))
4581 if (value != 0 && !TREE_OVERFLOW (value))
4583 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4584 return build_range_check (loc, type,
4585 fold_build_pointer_plus_loc (loc, exp, low),
4586 1, build_int_cst (etype, 0), value);
4588 return 0;
4591 if (value != 0 && !TREE_OVERFLOW (value))
4592 return build_range_check (loc, type,
4593 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4594 1, build_int_cst (etype, 0), value);
4596 return 0;
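/* Illustrative sketch (not part of GCC): the closing subtraction trick
   above, (c >= LOW && c <= HIGH) done as a single unsigned comparison,
   brute-force checked for a hypothetical LOW/HIGH pair.  */
#if 0
#include <assert.h>

int main (void)
{
  const int low = 'a', high = 'z';
  int c;
  for (c = 0; c < 256; c++)
    assert ((c >= low && c <= high)
	    == ((unsigned) (c - low) <= (unsigned) (high - low)));
  return 0;
}
#endif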
4599 /* Return the predecessor of VAL in its type, handling the infinite case. */
4601 static tree
4602 range_predecessor (tree val)
4604 tree type = TREE_TYPE (val);
4606 if (INTEGRAL_TYPE_P (type)
4607 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4608 return 0;
4609 else
4610 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4611 build_int_cst (TREE_TYPE (val), 1), 0);
4614 /* Return the successor of VAL in its type, handling the infinite case. */
4616 static tree
4617 range_successor (tree val)
4619 tree type = TREE_TYPE (val);
4621 if (INTEGRAL_TYPE_P (type)
4622 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4623 return 0;
4624 else
4625 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4626 build_int_cst (TREE_TYPE (val), 1), 0);
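/* Illustrative sketch (not part of GCC): the successor helper above on a
   plain int, with a flag standing in for the 0 (no successor) result at
   the top of the type.  */
#if 0
#include <assert.h>
#include <limits.h>

static int int_successor (int val, int *ok)
{
  *ok = (val != INT_MAX);	/* INT_MAX has no successor: the infinite case */
  return *ok ? val + 1 : 0;
}

int main (void)
{
  int ok;
  assert (int_successor (41, &ok) == 42 && ok);
  int_successor (INT_MAX, &ok);
  assert (!ok);
  return 0;
}
#endif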
4629 /* Given two ranges, see if we can merge them into one. Return 1 if we
4630 can, 0 if we can't. Set the output range into the specified parameters. */
4632 bool
4633 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4634 tree high0, int in1_p, tree low1, tree high1)
4636 int no_overlap;
4637 int subset;
4638 int temp;
4639 tree tem;
4640 int in_p;
4641 tree low, high;
4642 int lowequal = ((low0 == 0 && low1 == 0)
4643 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4644 low0, 0, low1, 0)));
4645 int highequal = ((high0 == 0 && high1 == 0)
4646 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4647 high0, 1, high1, 1)));
4649 /* Make range 0 be the range that starts first, or ends last if they
4650 start at the same value. Swap them if that is not the case.
4651 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4652 low0, 0, low1, 0))
4653 || (lowequal
4654 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4655 high1, 1, high0, 1))))
4657 temp = in0_p, in0_p = in1_p, in1_p = temp;
4658 tem = low0, low0 = low1, low1 = tem;
4659 tem = high0, high0 = high1, high1 = tem;
4662 /* Now flag two cases, whether the ranges are disjoint or whether the
4663 second range is totally subsumed in the first. Note that the tests
4664 below are simplified by the ones above. */
4665 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4666 high0, 1, low1, 0));
4667 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4668 high1, 1, high0, 1));
4670 /* We now have four cases, depending on whether we are including or
4671 excluding the two ranges. */
4672 if (in0_p && in1_p)
4674 /* If they don't overlap, the result is false. If the second range
4675 is a subset it is the result. Otherwise, the range is from the start
4676 of the second to the end of the first. */
4677 if (no_overlap)
4678 in_p = 0, low = high = 0;
4679 else if (subset)
4680 in_p = 1, low = low1, high = high1;
4681 else
4682 in_p = 1, low = low1, high = high0;
4685 else if (in0_p && ! in1_p)
4687 /* If they don't overlap, the result is the first range. If they are
4688 equal, the result is false. If the second range is a subset of the
4689 first, and the ranges begin at the same place, we go from just after
4690 the end of the second range to the end of the first. If the second
4691 range is not a subset of the first, or if it is a subset and both
4692 ranges end at the same place, the range starts at the start of the
4693 first range and ends just before the second range.
4694 Otherwise, we can't describe this as a single range. */
4695 if (no_overlap)
4696 in_p = 1, low = low0, high = high0;
4697 else if (lowequal && highequal)
4698 in_p = 0, low = high = 0;
4699 else if (subset && lowequal)
4701 low = range_successor (high1);
4702 high = high0;
4703 in_p = 1;
4704 if (low == 0)
4706 /* We are in the weird situation where high0 > high1 but
4707 high1 has no successor. Punt. */
4708 return 0;
4711 else if (! subset || highequal)
4713 low = low0;
4714 high = range_predecessor (low1);
4715 in_p = 1;
4716 if (high == 0)
4718 /* low0 < low1 but low1 has no predecessor. Punt. */
4719 return 0;
4722 else
4723 return 0;
4726 else if (! in0_p && in1_p)
4728 /* If they don't overlap, the result is the second range. If the second
4729 is a subset of the first, the result is false. Otherwise,
4730 the range starts just after the first range and ends at the
4731 end of the second. */
4732 if (no_overlap)
4733 in_p = 1, low = low1, high = high1;
4734 else if (subset || highequal)
4735 in_p = 0, low = high = 0;
4736 else
4738 low = range_successor (high0);
4739 high = high1;
4740 in_p = 1;
4741 if (low == 0)
4743 /* high1 > high0 but high0 has no successor. Punt. */
4744 return 0;
4749 else
4751 /* The case where we are excluding both ranges. Here the complex case
4752 is if they don't overlap. In that case, the only time we have a
4753 range is if they are adjacent. If the second is a subset of the
4754 first, the result is the first. Otherwise, the range to exclude
4755 starts at the beginning of the first range and ends at the end of the
4756 second. */
4757 if (no_overlap)
4759 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4760 range_successor (high0),
4761 1, low1, 0)))
4762 in_p = 0, low = low0, high = high1;
4763 else
4765 /* Canonicalize - [min, x] into - [-, x]. */
4766 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4767 switch (TREE_CODE (TREE_TYPE (low0)))
4769 case ENUMERAL_TYPE:
4770 if (TYPE_PRECISION (TREE_TYPE (low0))
4771 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4772 break;
4773 /* FALLTHROUGH */
4774 case INTEGER_TYPE:
4775 if (tree_int_cst_equal (low0,
4776 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4777 low0 = 0;
4778 break;
4779 case POINTER_TYPE:
4780 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4781 && integer_zerop (low0))
4782 low0 = 0;
4783 break;
4784 default:
4785 break;
4788 /* Canonicalize - [x, max] into - [x, -]. */
4789 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4790 switch (TREE_CODE (TREE_TYPE (high1)))
4792 case ENUMERAL_TYPE:
4793 if (TYPE_PRECISION (TREE_TYPE (high1))
4794 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4795 break;
4796 /* FALLTHROUGH */
4797 case INTEGER_TYPE:
4798 if (tree_int_cst_equal (high1,
4799 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4800 high1 = 0;
4801 break;
4802 case POINTER_TYPE:
4803 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4804 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4805 high1, 1,
4806 build_int_cst (TREE_TYPE (high1), 1),
4807 1)))
4808 high1 = 0;
4809 break;
4810 default:
4811 break;
4814 /* The ranges might also be adjacent between the maximum and
4815 minimum values of the given type. For
4816 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4817 return + [x + 1, y - 1]. */
4818 if (low0 == 0 && high1 == 0)
4820 low = range_successor (high0);
4821 high = range_predecessor (low1);
4822 if (low == 0 || high == 0)
4823 return 0;
4825 in_p = 1;
4827 else
4828 return 0;
4831 else if (subset)
4832 in_p = 0, low = low0, high = high0;
4833 else
4834 in_p = 0, low = low0, high = high1;
4837 *pin_p = in_p, *plow = low, *phigh = high;
4838 return 1;
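/* Illustrative sketch (not part of GCC): one merge_ranges case checked by
   brute force.  Intersecting + [2, 5] with + [4, 9] (both IN_P) yields the
   overlap + [4, 5], matching the "in0_p && in1_p" branch above.  */
#if 0
#include <assert.h>

int main (void)
{
  int x;
  for (x = -20; x <= 20; x++)
    assert (((x >= 2 && x <= 5) && (x >= 4 && x <= 9))
	    == (x >= 4 && x <= 5));
  return 0;
}
#endif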
4842 /* Subroutine of fold, looking inside expressions of the form
4843 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4844 of the COND_EXPR. This function is being used also to optimize
4845 A op B ? C : A, by reversing the comparison first.
4847 Return a folded expression whose code is not a COND_EXPR
4848 anymore, or NULL_TREE if no folding opportunity is found. */
4850 static tree
4851 fold_cond_expr_with_comparison (location_t loc, tree type,
4852 tree arg0, tree arg1, tree arg2)
4854 enum tree_code comp_code = TREE_CODE (arg0);
4855 tree arg00 = TREE_OPERAND (arg0, 0);
4856 tree arg01 = TREE_OPERAND (arg0, 1);
4857 tree arg1_type = TREE_TYPE (arg1);
4858 tree tem;
4860 STRIP_NOPS (arg1);
4861 STRIP_NOPS (arg2);
4863 /* If we have A op 0 ? A : -A, consider applying the following
4864 transformations:
4866 A == 0? A : -A same as -A
4867 A != 0? A : -A same as A
4868 A >= 0? A : -A same as abs (A)
4869 A > 0? A : -A same as abs (A)
4870 A <= 0? A : -A same as -abs (A)
4871 A < 0? A : -A same as -abs (A)
4873 None of these transformations work for modes with signed
4874 zeros. If A is +/-0, the first two transformations will
4875 change the sign of the result (from +0 to -0, or vice
4876 versa). The last four will fix the sign of the result,
4877 even though the original expressions could be positive or
4878 negative, depending on the sign of A.
4880 Note that all these transformations are correct if A is
4881 NaN, since the two alternatives (A and -A) are also NaNs. */
4882 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4883 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4884 ? real_zerop (arg01)
4885 : integer_zerop (arg01))
4886 && ((TREE_CODE (arg2) == NEGATE_EXPR
4887 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4888 /* In the case that A is of the form X-Y, '-A' (arg2) may
4889 have already been folded to Y-X, check for that. */
4890 || (TREE_CODE (arg1) == MINUS_EXPR
4891 && TREE_CODE (arg2) == MINUS_EXPR
4892 && operand_equal_p (TREE_OPERAND (arg1, 0),
4893 TREE_OPERAND (arg2, 1), 0)
4894 && operand_equal_p (TREE_OPERAND (arg1, 1),
4895 TREE_OPERAND (arg2, 0), 0))))
4896 switch (comp_code)
4898 case EQ_EXPR:
4899 case UNEQ_EXPR:
4900 tem = fold_convert_loc (loc, arg1_type, arg1);
4901 return pedantic_non_lvalue_loc (loc,
4902 fold_convert_loc (loc, type,
4903 negate_expr (tem)));
4904 case NE_EXPR:
4905 case LTGT_EXPR:
4906 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4907 case UNGE_EXPR:
4908 case UNGT_EXPR:
4909 if (flag_trapping_math)
4910 break;
4911 /* Fall through. */
4912 case GE_EXPR:
4913 case GT_EXPR:
4914 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4915 arg1 = fold_convert_loc (loc, signed_type_for
4916 (TREE_TYPE (arg1)), arg1);
4917 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4918 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4919 case UNLE_EXPR:
4920 case UNLT_EXPR:
4921 if (flag_trapping_math)
4922 break;
4923 case LE_EXPR:
4924 case LT_EXPR:
4925 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4926 arg1 = fold_convert_loc (loc, signed_type_for
4927 (TREE_TYPE (arg1)), arg1);
4928 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4929 return negate_expr (fold_convert_loc (loc, type, tem));
4930 default:
4931 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4932 break;
4935 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4936 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4937 both transformations are correct when A is NaN: A != 0
4938 is then true, and A == 0 is false. */
4940 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4941 && integer_zerop (arg01) && integer_zerop (arg2))
4943 if (comp_code == NE_EXPR)
4944 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4945 else if (comp_code == EQ_EXPR)
4946 return build_zero_cst (type);
4949 /* Try some transformations of A op B ? A : B.
4951 A == B? A : B same as B
4952 A != B? A : B same as A
4953 A >= B? A : B same as max (A, B)
4954 A > B? A : B same as max (B, A)
4955 A <= B? A : B same as min (A, B)
4956 A < B? A : B same as min (B, A)
4958 As above, these transformations don't work in the presence
4959 of signed zeros. For example, if A and B are zeros of
4960 opposite sign, the first two transformations will change
4961 the sign of the result. In the last four, the original
4962 expressions give different results for (A=+0, B=-0) and
4963 (A=-0, B=+0), but the transformed expressions do not.
4965 The first two transformations are correct if either A or B
4966 is a NaN. In the first transformation, the condition will
4967 be false, and B will indeed be chosen. In the case of the
4968 second transformation, the condition A != B will be true,
4969 and A will be chosen.
4971 The conversions to max() and min() are not correct if B is
4972 a number and A is not. The conditions in the original
4973 expressions will be false, so all four give B. The min()
4974 and max() versions would give a NaN instead. */
4975 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4976 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4977 /* Avoid these transformations if the COND_EXPR may be used
4978 as an lvalue in the C++ front-end. PR c++/19199. */
4979 && (in_gimple_form
4980 || VECTOR_TYPE_P (type)
4981 || (! lang_GNU_CXX ()
4982 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4983 || ! maybe_lvalue_p (arg1)
4984 || ! maybe_lvalue_p (arg2)))
4986 tree comp_op0 = arg00;
4987 tree comp_op1 = arg01;
4988 tree comp_type = TREE_TYPE (comp_op0);
4990 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4991 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4993 comp_type = type;
4994 comp_op0 = arg1;
4995 comp_op1 = arg2;
4998 switch (comp_code)
5000 case EQ_EXPR:
5001 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5002 case NE_EXPR:
5003 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5004 case LE_EXPR:
5005 case LT_EXPR:
5006 case UNLE_EXPR:
5007 case UNLT_EXPR:
5008 /* In C++ a ?: expression can be an lvalue, so put the
5009 operand which will be used if they are equal first
5010 so that we can convert this back to the
5011 corresponding COND_EXPR. */
5012 if (!HONOR_NANS (arg1))
5014 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5015 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5016 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5017 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5018 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5019 comp_op1, comp_op0);
5020 return pedantic_non_lvalue_loc (loc,
5021 fold_convert_loc (loc, type, tem));
5023 break;
5024 case GE_EXPR:
5025 case GT_EXPR:
5026 case UNGE_EXPR:
5027 case UNGT_EXPR:
5028 if (!HONOR_NANS (arg1))
5030 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5031 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5032 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5033 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5034 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5035 comp_op1, comp_op0);
5036 return pedantic_non_lvalue_loc (loc,
5037 fold_convert_loc (loc, type, tem));
5039 break;
5040 case UNEQ_EXPR:
5041 if (!HONOR_NANS (arg1))
5042 return pedantic_non_lvalue_loc (loc,
5043 fold_convert_loc (loc, type, arg2));
5044 break;
5045 case LTGT_EXPR:
5046 if (!HONOR_NANS (arg1))
5047 return pedantic_non_lvalue_loc (loc,
5048 fold_convert_loc (loc, type, arg1));
5049 break;
5050 default:
5051 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5052 break;
5056 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5057 we might still be able to simplify this. For example,
5058 if C1 is one less or one more than C2, this might have started
5059 out as a MIN or MAX and been transformed by this function.
5060 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5062 if (INTEGRAL_TYPE_P (type)
5063 && TREE_CODE (arg01) == INTEGER_CST
5064 && TREE_CODE (arg2) == INTEGER_CST)
5065 switch (comp_code)
5067 case EQ_EXPR:
5068 if (TREE_CODE (arg1) == INTEGER_CST)
5069 break;
5070 /* We can replace A with C1 in this case. */
5071 arg1 = fold_convert_loc (loc, type, arg01);
5072 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5074 case LT_EXPR:
5075 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5076 MIN_EXPR, to preserve the signedness of the comparison. */
5077 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5078 OEP_ONLY_CONST)
5079 && operand_equal_p (arg01,
5080 const_binop (PLUS_EXPR, arg2,
5081 build_int_cst (type, 1)),
5082 OEP_ONLY_CONST))
5084 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5085 fold_convert_loc (loc, TREE_TYPE (arg00),
5086 arg2));
5087 return pedantic_non_lvalue_loc (loc,
5088 fold_convert_loc (loc, type, tem));
5090 break;
5092 case LE_EXPR:
5093 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5094 as above. */
5095 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5096 OEP_ONLY_CONST)
5097 && operand_equal_p (arg01,
5098 const_binop (MINUS_EXPR, arg2,
5099 build_int_cst (type, 1)),
5100 OEP_ONLY_CONST))
5102 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5103 fold_convert_loc (loc, TREE_TYPE (arg00),
5104 arg2));
5105 return pedantic_non_lvalue_loc (loc,
5106 fold_convert_loc (loc, type, tem));
5108 break;
5110 case GT_EXPR:
5111 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5112 MAX_EXPR, to preserve the signedness of the comparison. */
5113 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5114 OEP_ONLY_CONST)
5115 && operand_equal_p (arg01,
5116 const_binop (MINUS_EXPR, arg2,
5117 build_int_cst (type, 1)),
5118 OEP_ONLY_CONST))
5120 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5121 fold_convert_loc (loc, TREE_TYPE (arg00),
5122 arg2));
5123 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5125 break;
5127 case GE_EXPR:
5128 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5129 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5130 OEP_ONLY_CONST)
5131 && operand_equal_p (arg01,
5132 const_binop (PLUS_EXPR, arg2,
5133 build_int_cst (type, 1)),
5134 OEP_ONLY_CONST))
5136 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5137 fold_convert_loc (loc, TREE_TYPE (arg00),
5138 arg2));
5139 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5141 break;
5142 case NE_EXPR:
5143 break;
5144 default:
5145 gcc_unreachable ();
5148 return NULL_TREE;
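/* Illustrative sketch (not part of GCC): two rows of the A op 0 ? A : -A
   table above, spot-checked for small integers, where signed zeros do not
   exist and the transformations always hold.  */
#if 0
#include <assert.h>
#include <stdlib.h>

int main (void)
{
  int a;
  for (a = -5; a <= 5; a++)
    {
      assert ((a > 0 ? a : -a) == abs (a));	/* A > 0 ? A : -A == abs (A) */
      assert ((a <= 0 ? a : -a) == -abs (a));	/* A <= 0 ? A : -A == -abs (A) */
    }
  return 0;
}
#endif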
5153 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5154 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5155 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5156 false) >= 2)
5157 #endif
5159 /* EXP is some logical combination of boolean tests. See if we can
5160 merge it into some range test. Return the new tree if so. */
5162 static tree
5163 fold_range_test (location_t loc, enum tree_code code, tree type,
5164 tree op0, tree op1)
5166 int or_op = (code == TRUTH_ORIF_EXPR
5167 || code == TRUTH_OR_EXPR);
5168 int in0_p, in1_p, in_p;
5169 tree low0, low1, low, high0, high1, high;
5170 bool strict_overflow_p = false;
5171 tree tem, lhs, rhs;
5172 const char * const warnmsg = G_("assuming signed overflow does not occur "
5173 "when simplifying range test");
5175 if (!INTEGRAL_TYPE_P (type))
5176 return 0;
5178 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5179 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5181 /* If this is an OR operation, invert both sides; we will invert
5182 again at the end. */
5183 if (or_op)
5184 in0_p = ! in0_p, in1_p = ! in1_p;
5186 /* If both expressions are the same, if we can merge the ranges, and we
5187 can build the range test, return it or it inverted. If one of the
5188 ranges is always true or always false, consider it to be the same
5189 expression as the other. */
5190 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5191 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5192 in1_p, low1, high1)
5193 && 0 != (tem = (build_range_check (loc, type,
5194 lhs != 0 ? lhs
5195 : rhs != 0 ? rhs : integer_zero_node,
5196 in_p, low, high))))
5198 if (strict_overflow_p)
5199 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5200 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5203 /* On machines where branches are expensive, if this is a
5204 short-circuited branch and the underlying object on both sides
5205 is the same, make a non-short-circuit operation. */
5206 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5207 && lhs != 0 && rhs != 0
5208 && (code == TRUTH_ANDIF_EXPR
5209 || code == TRUTH_ORIF_EXPR)
5210 && operand_equal_p (lhs, rhs, 0))
5212 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5213 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5214 which case we can't do this. */
5215 if (simple_operand_p (lhs))
5216 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5217 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5218 type, op0, op1);
5220 else if (!lang_hooks.decls.global_bindings_p ()
5221 && !CONTAINS_PLACEHOLDER_P (lhs))
5223 tree common = save_expr (lhs);
5225 if (0 != (lhs = build_range_check (loc, type, common,
5226 or_op ? ! in0_p : in0_p,
5227 low0, high0))
5228 && (0 != (rhs = build_range_check (loc, type, common,
5229 or_op ? ! in1_p : in1_p,
5230 low1, high1))))
5232 if (strict_overflow_p)
5233 fold_overflow_warning (warnmsg,
5234 WARN_STRICT_OVERFLOW_COMPARISON);
5235 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5236 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5237 type, lhs, rhs);
5242 return 0;
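/* Illustrative sketch (not part of GCC): the OR handling above inverts
   both ranges, merges them, and inverts the result; one such case checked
   by brute force.  */
#if 0
#include <assert.h>

int main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    assert ((x < 2 || x > 5) == !((unsigned) (x - 2) <= 3));
  return 0;
}
#endif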
5245 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5246 P-bit value. Arrange things so the extra bits will be set to zero if and
5247 only if C is sign-extended to its full width. If MASK is nonzero,
5248 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5250 static tree
5251 unextend (tree c, int p, int unsignedp, tree mask)
5253 tree type = TREE_TYPE (c);
5254 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5255 tree temp;
5257 if (p == modesize || unsignedp)
5258 return c;
5260 /* We work by getting just the sign bit into the low-order bit, then
5261 into the high-order bit, then sign-extend. We then XOR that value
5262 with C. */
5263 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5265 /* We must use a signed type in order to get an arithmetic right shift.
5266 However, we must also avoid introducing accidental overflows, so that
5267 a subsequent call to integer_zerop will work. Hence we must
5268 do the type conversion here. At this point, the constant is either
5269 zero or one, and the conversion to a signed type can never overflow.
5270 We could get an overflow if this conversion is done anywhere else. */
5271 if (TYPE_UNSIGNED (type))
5272 temp = fold_convert (signed_type_for (type), temp);
5274 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5275 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5276 if (mask != 0)
5277 temp = const_binop (BIT_AND_EXPR, temp,
5278 fold_convert (TREE_TYPE (c), mask));
5279 /* If necessary, convert the type back to match the type of C. */
5280 if (TYPE_UNSIGNED (type))
5281 temp = fold_convert (type, temp);
5283 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
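/* Illustrative sketch (not part of GCC): the shift/XOR trick above for a
   32-bit mode and P = 8, using plain integers.  The arithmetic right
   shift of a negative value assumes the usual two's-complement
   behaviour.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t unextend32 (uint32_t c, int p)
{
  uint32_t bit = (c >> (p - 1)) & 1;	/* sign bit of the P-bit value */
  int32_t temp = (int32_t) (bit << 31);	/* move it to the high-order bit */
  temp >>= 31 - p;			/* replicate it into bits P..31 */
  return c ^ (uint32_t) temp;		/* clear the copies iff C was sign-extended */
}

int main (void)
{
  assert (unextend32 (0xffffff80u, 8) == 0x80u);	/* sign-extended -128 */
  assert (unextend32 (0x0000007fu, 8) == 0x7fu);	/* non-negative: unchanged */
  return 0;
}
#endif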
5286 /* For an expression that has the form
5287 (A && B) || ~B
5289 (A || B) && ~B,
5290 we can drop one of the inner expressions and simplify to
5291 A || ~B
5293 A && ~B
5294 LOC is the location of the resulting expression. OP is the inner
5295 logical operation; the left-hand side in the examples above, while CMPOP
5296 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5297 removing a condition that guards another, as in
5298 (A != NULL && A->...) || A == NULL
5299 which we must not transform. If RHS_ONLY is true, only eliminate the
5300 right-most operand of the inner logical operation. */
5302 static tree
5303 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5304 bool rhs_only)
5306 tree type = TREE_TYPE (cmpop);
5307 enum tree_code code = TREE_CODE (cmpop);
5308 enum tree_code truthop_code = TREE_CODE (op);
5309 tree lhs = TREE_OPERAND (op, 0);
5310 tree rhs = TREE_OPERAND (op, 1);
5311 tree orig_lhs = lhs, orig_rhs = rhs;
5312 enum tree_code rhs_code = TREE_CODE (rhs);
5313 enum tree_code lhs_code = TREE_CODE (lhs);
5314 enum tree_code inv_code;
5316 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5317 return NULL_TREE;
5319 if (TREE_CODE_CLASS (code) != tcc_comparison)
5320 return NULL_TREE;
5322 if (rhs_code == truthop_code)
5324 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5325 if (newrhs != NULL_TREE)
5327 rhs = newrhs;
5328 rhs_code = TREE_CODE (rhs);
5331 if (lhs_code == truthop_code && !rhs_only)
5333 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5334 if (newlhs != NULL_TREE)
5336 lhs = newlhs;
5337 lhs_code = TREE_CODE (lhs);
5341 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5342 if (inv_code == rhs_code
5343 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5344 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5345 return lhs;
5346 if (!rhs_only && inv_code == lhs_code
5347 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5348 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5349 return rhs;
5350 if (rhs != orig_rhs || lhs != orig_lhs)
5351 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5352 lhs, rhs);
5353 return NULL_TREE;
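/* Illustrative sketch (not part of GCC): the (A && B) || ~B and
   (A || B) && ~B simplifications above, checked over all boolean
   combinations.  */
#if 0
#include <assert.h>

int main (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
	assert (((a && b) || !b) == (a || !b));
	assert (((a || b) && !b) == (a && !b));
      }
  return 0;
}
#endif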
5356 /* Find ways of folding logical expressions of LHS and RHS:
5357 Try to merge two comparisons to the same innermost item.
5358 Look for range tests like "ch >= '0' && ch <= '9'".
5359 Look for combinations of simple terms on machines with expensive branches
5360 and evaluate the RHS unconditionally.
5362 For example, if we have p->a == 2 && p->b == 4 and we can make an
5363 object large enough to span both A and B, we can do this with a comparison
5364 against the object ANDed with a mask.
5366 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5367 operations to do this with one comparison.
5369 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5370 function and the one above.
5372 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5373 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5375 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5376 two operands.
5378 We return the simplified tree or 0 if no optimization is possible. */
5380 static tree
5381 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5382 tree lhs, tree rhs)
5384 /* If this is the "or" of two comparisons, we can do something if
5385 the comparisons are NE_EXPR. If this is the "and", we can do something
5386 if the comparisons are EQ_EXPR. I.e.,
5387 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5389 WANTED_CODE is this operation code. For single bit fields, we can
5390 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5391 comparison for one-bit fields. */
5393 enum tree_code wanted_code;
5394 enum tree_code lcode, rcode;
5395 tree ll_arg, lr_arg, rl_arg, rr_arg;
5396 tree ll_inner, lr_inner, rl_inner, rr_inner;
5397 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5398 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5399 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5400 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5401 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5402 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5403 machine_mode lnmode, rnmode;
5404 tree ll_mask, lr_mask, rl_mask, rr_mask;
5405 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5406 tree l_const, r_const;
5407 tree lntype, rntype, result;
5408 HOST_WIDE_INT first_bit, end_bit;
5409 int volatilep;
5411 /* Start by getting the comparison codes. Fail if anything is volatile.
5412 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5413 it were surrounded with a NE_EXPR. */
5415 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5416 return 0;
5418 lcode = TREE_CODE (lhs);
5419 rcode = TREE_CODE (rhs);
5421 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5423 lhs = build2 (NE_EXPR, truth_type, lhs,
5424 build_int_cst (TREE_TYPE (lhs), 0));
5425 lcode = NE_EXPR;
5428 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5430 rhs = build2 (NE_EXPR, truth_type, rhs,
5431 build_int_cst (TREE_TYPE (rhs), 0));
5432 rcode = NE_EXPR;
5435 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5436 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5437 return 0;
5439 ll_arg = TREE_OPERAND (lhs, 0);
5440 lr_arg = TREE_OPERAND (lhs, 1);
5441 rl_arg = TREE_OPERAND (rhs, 0);
5442 rr_arg = TREE_OPERAND (rhs, 1);
5444 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5445 if (simple_operand_p (ll_arg)
5446 && simple_operand_p (lr_arg))
5448 if (operand_equal_p (ll_arg, rl_arg, 0)
5449 && operand_equal_p (lr_arg, rr_arg, 0))
5451 result = combine_comparisons (loc, code, lcode, rcode,
5452 truth_type, ll_arg, lr_arg);
5453 if (result)
5454 return result;
5456 else if (operand_equal_p (ll_arg, rr_arg, 0)
5457 && operand_equal_p (lr_arg, rl_arg, 0))
5459 result = combine_comparisons (loc, code, lcode,
5460 swap_tree_comparison (rcode),
5461 truth_type, ll_arg, lr_arg);
5462 if (result)
5463 return result;
5467 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5468 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5470 /* If the RHS can be evaluated unconditionally and its operands are
5471 simple, it wins to evaluate the RHS unconditionally on machines
5472 with expensive branches. In this case, this isn't a comparison
5473 that can be merged. */
5475 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5476 false) >= 2
5477 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5478 && simple_operand_p (rl_arg)
5479 && simple_operand_p (rr_arg))
5481 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5482 if (code == TRUTH_OR_EXPR
5483 && lcode == NE_EXPR && integer_zerop (lr_arg)
5484 && rcode == NE_EXPR && integer_zerop (rr_arg)
5485 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5486 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5487 return build2_loc (loc, NE_EXPR, truth_type,
5488 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5489 ll_arg, rl_arg),
5490 build_int_cst (TREE_TYPE (ll_arg), 0));
5492 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5493 if (code == TRUTH_AND_EXPR
5494 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5495 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5496 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5497 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5498 return build2_loc (loc, EQ_EXPR, truth_type,
5499 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5500 ll_arg, rl_arg),
5501 build_int_cst (TREE_TYPE (ll_arg), 0));
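/* Illustrative sketch (not part of GCC): the two conversions above,
   (a != 0) || (b != 0) into (a | b) != 0 and (a == 0) && (b == 0) into
   (a | b) == 0, checked by brute force over a few hypothetical values.  */
#if 0
#include <assert.h>

int main (void)
{
  static const int vals[] = { -7, -1, 0, 1, 42 };
  int i, j;
  for (i = 0; i < 5; i++)
    for (j = 0; j < 5; j++)
      {
	int a = vals[i], b = vals[j];
	assert ((a != 0 || b != 0) == ((a | b) != 0));
	assert ((a == 0 && b == 0) == ((a | b) == 0));
      }
  return 0;
}
#endif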
5504 /* See if the comparisons can be merged. Then get all the parameters for
5505 each side. */
5507 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5508 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5509 return 0;
5511 volatilep = 0;
5512 ll_inner = decode_field_reference (loc, ll_arg,
5513 &ll_bitsize, &ll_bitpos, &ll_mode,
5514 &ll_unsignedp, &volatilep, &ll_mask,
5515 &ll_and_mask);
5516 lr_inner = decode_field_reference (loc, lr_arg,
5517 &lr_bitsize, &lr_bitpos, &lr_mode,
5518 &lr_unsignedp, &volatilep, &lr_mask,
5519 &lr_and_mask);
5520 rl_inner = decode_field_reference (loc, rl_arg,
5521 &rl_bitsize, &rl_bitpos, &rl_mode,
5522 &rl_unsignedp, &volatilep, &rl_mask,
5523 &rl_and_mask);
5524 rr_inner = decode_field_reference (loc, rr_arg,
5525 &rr_bitsize, &rr_bitpos, &rr_mode,
5526 &rr_unsignedp, &volatilep, &rr_mask,
5527 &rr_and_mask);
5529 /* The inner operation on the lhs of each comparison must be the
5530 same if we are to be able to do anything.
5531 Then see if we have constants. If not, the same must be true for
5532 the rhs's. */
5533 if (volatilep || ll_inner == 0 || rl_inner == 0
5534 || ! operand_equal_p (ll_inner, rl_inner, 0))
5535 return 0;
5537 if (TREE_CODE (lr_arg) == INTEGER_CST
5538 && TREE_CODE (rr_arg) == INTEGER_CST)
5539 l_const = lr_arg, r_const = rr_arg;
5540 else if (lr_inner == 0 || rr_inner == 0
5541 || ! operand_equal_p (lr_inner, rr_inner, 0))
5542 return 0;
5543 else
5544 l_const = r_const = 0;
5546 /* If either comparison code is not correct for our logical operation,
5547 fail. However, we can convert a one-bit comparison against zero into
5548 the opposite comparison against that bit being set in the field. */
5550 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5551 if (lcode != wanted_code)
5553 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5555 /* Make the left operand unsigned, since we are only interested
5556 in the value of one bit. Otherwise we are doing the wrong
5557 thing below. */
5558 ll_unsignedp = 1;
5559 l_const = ll_mask;
5561 else
5562 return 0;
5565 /* This is analogous to the code for l_const above. */
5566 if (rcode != wanted_code)
5568 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5570 rl_unsignedp = 1;
5571 r_const = rl_mask;
5573 else
5574 return 0;
5577 /* See if we can find a mode that contains both fields being compared on
5578 the left. If we can't, fail. Otherwise, update all constants and masks
5579 to be relative to a field of that size. */
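/* An illustrative sketch (assuming a layout where both bit-fields land
   in one addressable unit): for
     struct s { unsigned a : 4; unsigned b : 4; };
   the pair of tests x.a == 3 && x.b == 5 can be done with a single load
   of the containing byte, one mask, and one comparison against the
   merged constant.  */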
5580 first_bit = MIN (ll_bitpos, rl_bitpos);
5581 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5582 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5583 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5584 volatilep);
5585 if (lnmode == VOIDmode)
5586 return 0;
5588 lnbitsize = GET_MODE_BITSIZE (lnmode);
5589 lnbitpos = first_bit & ~ (lnbitsize - 1);
5590 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5591 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5593 if (BYTES_BIG_ENDIAN)
5595 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5596 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5599 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5600 size_int (xll_bitpos));
5601 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5602 size_int (xrl_bitpos));
5604 if (l_const)
5606 l_const = fold_convert_loc (loc, lntype, l_const);
5607 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5608 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5609 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5610 fold_build1_loc (loc, BIT_NOT_EXPR,
5611 lntype, ll_mask))))
5613 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5615 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5618 if (r_const)
5620 r_const = fold_convert_loc (loc, lntype, r_const);
5621 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5622 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5623 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5624 fold_build1_loc (loc, BIT_NOT_EXPR,
5625 lntype, rl_mask))))
5627 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5629 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5633 /* If the right sides are not constant, do the same for them. Also,
5634 disallow this optimization if a size or signedness mismatch occurs
5635 between the left and right sides. */
5636 if (l_const == 0)
5638 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5639 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5640 /* Make sure the two fields on the right
5641 correspond to the left without being swapped. */
5642 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5643 return 0;
5645 first_bit = MIN (lr_bitpos, rr_bitpos);
5646 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5647 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5648 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5649 volatilep);
5650 if (rnmode == VOIDmode)
5651 return 0;
5653 rnbitsize = GET_MODE_BITSIZE (rnmode);
5654 rnbitpos = first_bit & ~ (rnbitsize - 1);
5655 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5656 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5658 if (BYTES_BIG_ENDIAN)
5660 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5661 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5664 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5665 rntype, lr_mask),
5666 size_int (xlr_bitpos));
5667 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5668 rntype, rr_mask),
5669 size_int (xrr_bitpos));
5671 /* Make a mask that corresponds to both fields being compared.
5672 Do this for both items being compared. If the operands are the
5673 same size and the bits being compared are in the same position
5674 then we can do this by masking both and comparing the masked
5675 results. */
5676 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5677 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5678 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5680 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5681 ll_unsignedp || rl_unsignedp);
5682 if (! all_ones_mask_p (ll_mask, lnbitsize))
5683 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5685 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5686 lr_unsignedp || rr_unsignedp);
5687 if (! all_ones_mask_p (lr_mask, rnbitsize))
5688 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5690 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5693 /* There is still another way we can do something: If both pairs of
5694 fields being compared are adjacent, we may be able to make a wider
5695 field containing them both.
5697 Note that we still must mask the lhs/rhs expressions. Furthermore,
5698 the mask must be shifted to account for the shift done by
5699 make_bit_field_ref. */
5700 if ((ll_bitsize + ll_bitpos == rl_bitpos
5701 && lr_bitsize + lr_bitpos == rr_bitpos)
5702 || (ll_bitpos == rl_bitpos + rl_bitsize
5703 && lr_bitpos == rr_bitpos + rr_bitsize))
5705 tree type;
5707 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5708 ll_bitsize + rl_bitsize,
5709 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5710 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5711 lr_bitsize + rr_bitsize,
5712 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5714 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5715 size_int (MIN (xll_bitpos, xrl_bitpos)));
5716 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5717 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5719 /* Convert to the smaller type before masking out unwanted bits. */
5720 type = lntype;
5721 if (lntype != rntype)
5723 if (lnbitsize > rnbitsize)
5725 lhs = fold_convert_loc (loc, rntype, lhs);
5726 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5727 type = rntype;
5729 else if (lnbitsize < rnbitsize)
5731 rhs = fold_convert_loc (loc, lntype, rhs);
5732 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5733 type = lntype;
5737 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5738 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5740 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5741 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5743 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5746 return 0;
5749 /* Handle the case of comparisons with constants. If there is something in
5750 common between the masks, those bits of the constants must be the same.
5751 If not, the condition is constant (false for an AND, true for an OR of
5752 not-equal tests). Test for this to avoid generating incorrect code below. */
5753 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5754 if (! integer_zerop (result)
5755 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5756 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5758 if (wanted_code == NE_EXPR)
5760 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5761 return constant_boolean_node (true, truth_type);
5763 else
5765 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5766 return constant_boolean_node (false, truth_type);
5770 /* Construct the expression we will return. First get the component
5771 reference we will make. Unless the mask is all ones the width of
5772 that field, perform the mask operation. Then compare with the
5773 merged constant. */
5774 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5775 ll_unsignedp || rl_unsignedp);
5777 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5778 if (! all_ones_mask_p (ll_mask, lnbitsize))
5779 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5781 return build2_loc (loc, wanted_code, truth_type, result,
5782 const_binop (BIT_IOR_EXPR, l_const, r_const));
5785 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5786 constant. */
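/* For instance, MAX (X, 0) == -1 is always false, MAX (X, 0) > -1 is
   always true, and MAX (X, 0) == 0 reduces to X <= 0; the cases below
   enumerate these simplifications for EQ_EXPR and GT_EXPR, and the
   remaining comparison codes are derived by inversion or by combining
   EQ and GT.  */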
5788 static tree
5789 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5790 tree op0, tree op1)
5792 tree arg0 = op0;
5793 enum tree_code op_code;
5794 tree comp_const;
5795 tree minmax_const;
5796 int consts_equal, consts_lt;
5797 tree inner;
5799 STRIP_SIGN_NOPS (arg0);
5801 op_code = TREE_CODE (arg0);
5802 minmax_const = TREE_OPERAND (arg0, 1);
5803 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5804 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5805 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5806 inner = TREE_OPERAND (arg0, 0);
5808 /* If something does not permit us to optimize, return NULL_TREE. */
5809 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5810 || TREE_CODE (comp_const) != INTEGER_CST
5811 || TREE_OVERFLOW (comp_const)
5812 || TREE_CODE (minmax_const) != INTEGER_CST
5813 || TREE_OVERFLOW (minmax_const))
5814 return NULL_TREE;
5816 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5817 and GT_EXPR, doing the rest with recursive calls using logical
5818 simplifications. */
5819 switch (code)
5821 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5823 tree tem
5824 = optimize_minmax_comparison (loc,
5825 invert_tree_comparison (code, false),
5826 type, op0, op1);
5827 if (tem)
5828 return invert_truthvalue_loc (loc, tem);
5829 return NULL_TREE;
5832 case GE_EXPR:
5833 return
5834 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5835 optimize_minmax_comparison
5836 (loc, EQ_EXPR, type, arg0, comp_const),
5837 optimize_minmax_comparison
5838 (loc, GT_EXPR, type, arg0, comp_const));
5840 case EQ_EXPR:
5841 if (op_code == MAX_EXPR && consts_equal)
5842 /* MAX (X, 0) == 0 -> X <= 0 */
5843 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5845 else if (op_code == MAX_EXPR && consts_lt)
5846 /* MAX (X, 0) == 5 -> X == 5 */
5847 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5849 else if (op_code == MAX_EXPR)
5850 /* MAX (X, 0) == -1 -> false */
5851 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5853 else if (consts_equal)
5854 /* MIN (X, 0) == 0 -> X >= 0 */
5855 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5857 else if (consts_lt)
5858 /* MIN (X, 0) == 5 -> false */
5859 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5861 else
5862 /* MIN (X, 0) == -1 -> X == -1 */
5863 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5865 case GT_EXPR:
5866 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5867 /* MAX (X, 0) > 0 -> X > 0
5868 MAX (X, 0) > 5 -> X > 5 */
5869 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5871 else if (op_code == MAX_EXPR)
5872 /* MAX (X, 0) > -1 -> true */
5873 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5875 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5876 /* MIN (X, 0) > 0 -> false
5877 MIN (X, 0) > 5 -> false */
5878 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5880 else
5881 /* MIN (X, 0) > -1 -> X > -1 */
5882 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5884 default:
5885 return NULL_TREE;
5889 /* T is an integer expression that is being multiplied, divided, or taken a
5890 modulus (CODE says which and what kind of divide or modulus) by a
5891 constant C. See if we can eliminate that operation by folding it with
5892 other operations already in T. WIDE_TYPE, if non-null, is a type that
5893 should be used for the computation if wider than our type.
5895 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5896 (X * 2) + (Y * 4). We must, however, be assured that either the original
5897 expression would not overflow or that overflow is undefined for the type
5898 in the language in question.
5900 If we return a non-null expression, it is an equivalent form of the
5901 original computation, but need not be in the original type.
5903 We set *STRICT_OVERFLOW_P to true if the return value depends on
5904 signed overflow being undefined. Otherwise we do not change
5905 *STRICT_OVERFLOW_P. */
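/* A quick check of the example above: with X = 3 and Y = 5,
   ((X * 8) + (Y * 16)) / 4 = (24 + 80) / 4 = 26, which matches
   (X * 2) + (Y * 4) = 6 + 20 = 26.  */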
5907 static tree
5908 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5909 bool *strict_overflow_p)
5911 /* To avoid exponential search depth, refuse to allow recursion past
5912 three levels. Beyond that (1) it's highly unlikely that we'll find
5913 something interesting and (2) we've probably processed it before
5914 when we built the inner expression. */
5916 static int depth;
5917 tree ret;
5919 if (depth > 3)
5920 return NULL;
5922 depth++;
5923 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5924 depth--;
5926 return ret;
5929 static tree
5930 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5931 bool *strict_overflow_p)
5933 tree type = TREE_TYPE (t);
5934 enum tree_code tcode = TREE_CODE (t);
5935 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5936 > GET_MODE_SIZE (TYPE_MODE (type)))
5937 ? wide_type : type);
5938 tree t1, t2;
5939 int same_p = tcode == code;
5940 tree op0 = NULL_TREE, op1 = NULL_TREE;
5941 bool sub_strict_overflow_p;
5943 /* Don't deal with constants of zero here; they confuse the code below. */
5944 if (integer_zerop (c))
5945 return NULL_TREE;
5947 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5948 op0 = TREE_OPERAND (t, 0);
5950 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5951 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5953 /* Note that we need not handle conditional operations here since fold
5954 already handles those cases. So just do arithmetic here. */
5955 switch (tcode)
5957 case INTEGER_CST:
5958 /* For a constant, we can always simplify if we are a multiply
5959 or (for divide and modulus) if it is a multiple of our constant. */
5960 if (code == MULT_EXPR
5961 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5962 return const_binop (code, fold_convert (ctype, t),
5963 fold_convert (ctype, c));
5964 break;
5966 CASE_CONVERT: case NON_LVALUE_EXPR:
5967 /* If op0 is an expression ... */
5968 if ((COMPARISON_CLASS_P (op0)
5969 || UNARY_CLASS_P (op0)
5970 || BINARY_CLASS_P (op0)
5971 || VL_EXP_CLASS_P (op0)
5972 || EXPRESSION_CLASS_P (op0))
5973 /* ... and has wrapping overflow, and its type is smaller
5974 than ctype, then we cannot pass through as widening. */
5975 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5976 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5977 && (TYPE_PRECISION (ctype)
5978 > TYPE_PRECISION (TREE_TYPE (op0))))
5979 /* ... or this is a truncation (t is narrower than op0),
5980 then we cannot pass through this narrowing. */
5981 || (TYPE_PRECISION (type)
5982 < TYPE_PRECISION (TREE_TYPE (op0)))
5983 /* ... or signedness changes for division or modulus,
5984 then we cannot pass through this conversion. */
5985 || (code != MULT_EXPR
5986 && (TYPE_UNSIGNED (ctype)
5987 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5988 /* ... or has undefined overflow while the converted to
5989 type has not, we cannot do the operation in the inner type
5990 as that would introduce undefined overflow. */
5991 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5992 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
5993 && !TYPE_OVERFLOW_UNDEFINED (type))))
5994 break;
5996 /* Pass the constant down and see if we can make a simplification. If
5997 we can, replace this expression with the inner simplification for
5998 possible later conversion to our or some other type. */
5999 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6000 && TREE_CODE (t2) == INTEGER_CST
6001 && !TREE_OVERFLOW (t2)
6002 && (0 != (t1 = extract_muldiv (op0, t2, code,
6003 code == MULT_EXPR
6004 ? ctype : NULL_TREE,
6005 strict_overflow_p))))
6006 return t1;
6007 break;
6009 case ABS_EXPR:
6010 /* If widening the type changes it from signed to unsigned, then we
6011 must avoid building ABS_EXPR itself as unsigned. */
6012 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6014 tree cstype = (*signed_type_for) (ctype);
6015 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6016 != 0)
6018 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6019 return fold_convert (ctype, t1);
6021 break;
6023 /* If the constant is negative, we cannot simplify this. */
6024 if (tree_int_cst_sgn (c) == -1)
6025 break;
6026 /* FALLTHROUGH */
6027 case NEGATE_EXPR:
6028 /* For division and modulus, type can't be unsigned, as e.g.
6029 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6030 For signed types, even with wrapping overflow, this is fine. */
6031 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6032 break;
6033 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6034 != 0)
6035 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6036 break;
6038 case MIN_EXPR: case MAX_EXPR:
6039 /* If widening the type changes the signedness, then we can't perform
6040 this optimization as that changes the result. */
6041 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6042 break;
6044 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6045 sub_strict_overflow_p = false;
6046 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6047 &sub_strict_overflow_p)) != 0
6048 && (t2 = extract_muldiv (op1, c, code, wide_type,
6049 &sub_strict_overflow_p)) != 0)
6051 if (tree_int_cst_sgn (c) < 0)
6052 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6053 if (sub_strict_overflow_p)
6054 *strict_overflow_p = true;
6055 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6056 fold_convert (ctype, t2));
6058 break;
6060 case LSHIFT_EXPR: case RSHIFT_EXPR:
6061 /* If the second operand is constant, this is a multiplication
6062 or floor division, by a power of two, so we can treat it that
6063 way unless the multiplier or divisor overflows. Signed
6064 left-shift overflow is implementation-defined rather than
6065 undefined in C90, so do not convert signed left shift into
6066 multiplication. */
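/* E.g. for unsigned X, X << 3 is handled as X * 8 and X >> 2 as the
   floor division X / 4, so a surrounding multiplication, division or
   modulus by a constant can be folded through the shift.  */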
6067 if (TREE_CODE (op1) == INTEGER_CST
6068 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6069 /* const_binop may not detect overflow correctly,
6070 so check for it explicitly here. */
6071 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6072 && 0 != (t1 = fold_convert (ctype,
6073 const_binop (LSHIFT_EXPR,
6074 size_one_node,
6075 op1)))
6076 && !TREE_OVERFLOW (t1))
6077 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6078 ? MULT_EXPR : FLOOR_DIV_EXPR,
6079 ctype,
6080 fold_convert (ctype, op0),
6081 t1),
6082 c, code, wide_type, strict_overflow_p);
6083 break;
6085 case PLUS_EXPR: case MINUS_EXPR:
6086 /* See if we can eliminate the operation on both sides. If we can, we
6087 can return a new PLUS or MINUS. If we can't, the only remaining
6088 cases where we can do anything are if the second operand is a
6089 constant. */
6090 sub_strict_overflow_p = false;
6091 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6092 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6093 if (t1 != 0 && t2 != 0
6094 && (code == MULT_EXPR
6095 /* If not multiplication, we can only do this if both operands
6096 are divisible by c. */
6097 || (multiple_of_p (ctype, op0, c)
6098 && multiple_of_p (ctype, op1, c))))
6100 if (sub_strict_overflow_p)
6101 *strict_overflow_p = true;
6102 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6103 fold_convert (ctype, t2));
6106 /* If this was a subtraction, negate OP1 and set it to be an addition.
6107 This simplifies the logic below. */
6108 if (tcode == MINUS_EXPR)
6110 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6111 /* If OP1 was not easily negatable, the constant may be OP0. */
6112 if (TREE_CODE (op0) == INTEGER_CST)
6114 tree tem = op0;
6115 op0 = op1;
6116 op1 = tem;
6117 tem = t1;
6118 t1 = t2;
6119 t2 = tem;
6123 if (TREE_CODE (op1) != INTEGER_CST)
6124 break;
6126 /* If either OP1 or C are negative, this optimization is not safe for
6127 some of the division and remainder types while for others we need
6128 to change the code. */
6129 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6131 if (code == CEIL_DIV_EXPR)
6132 code = FLOOR_DIV_EXPR;
6133 else if (code == FLOOR_DIV_EXPR)
6134 code = CEIL_DIV_EXPR;
6135 else if (code != MULT_EXPR
6136 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6137 break;
6140 /* If it's a multiply or a division/modulus operation of a multiple
6141 of our constant, do the operation and verify it doesn't overflow. */
6142 if (code == MULT_EXPR
6143 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6145 op1 = const_binop (code, fold_convert (ctype, op1),
6146 fold_convert (ctype, c));
6147 /* We allow the constant to overflow with wrapping semantics. */
6148 if (op1 == 0
6149 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6150 break;
6152 else
6153 break;
6155 /* If we have an unsigned type, we cannot widen the operation since it
6156 will change the result if the original computation overflowed. */
6157 if (TYPE_UNSIGNED (ctype) && ctype != type)
6158 break;
6160 /* If we were able to eliminate our operation from the first side,
6161 apply our operation to the second side and reform the PLUS. */
6162 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6163 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6165 /* The last case is if we are a multiply. In that case, we can
6166 apply the distributive law to commute the multiply and addition
6167 if the multiplication of the constants doesn't overflow
6168 and overflow is defined. With undefined overflow
6169 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6170 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6171 return fold_build2 (tcode, ctype,
6172 fold_build2 (code, ctype,
6173 fold_convert (ctype, op0),
6174 fold_convert (ctype, c)),
6175 op1);
6177 break;
6179 case MULT_EXPR:
6180 /* We have a special case here if we are doing something like
6181 (C * 8) % 4 since we know that's zero. */
6182 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6183 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6184 /* If the multiplication can overflow we cannot optimize this. */
6185 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6186 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6187 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6189 *strict_overflow_p = true;
6190 return omit_one_operand (type, integer_zero_node, op0);
6193 /* ... fall through ... */
6195 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6196 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6197 /* If we can extract our operation from the LHS, do so and return a
6198 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6199 do something only if the second operand is a constant. */
6200 if (same_p
6201 && (t1 = extract_muldiv (op0, c, code, wide_type,
6202 strict_overflow_p)) != 0)
6203 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6204 fold_convert (ctype, op1));
6205 else if (tcode == MULT_EXPR && code == MULT_EXPR
6206 && (t1 = extract_muldiv (op1, c, code, wide_type,
6207 strict_overflow_p)) != 0)
6208 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6209 fold_convert (ctype, t1));
6210 else if (TREE_CODE (op1) != INTEGER_CST)
6211 return 0;
6213 /* If these are the same operation types, we can associate them
6214 assuming no overflow. */
6215 if (tcode == code)
6217 bool overflow_p = false;
6218 bool overflow_mul_p;
6219 signop sign = TYPE_SIGN (ctype);
6220 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6221 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6222 if (overflow_mul_p
6223 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6224 overflow_p = true;
6225 if (!overflow_p)
6226 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6227 wide_int_to_tree (ctype, mul));
6230 /* If these operations "cancel" each other, we have the main
6231 optimizations of this pass, which occur when either constant is a
6232 multiple of the other, in which case we replace this with an
6233 operation of either CODE or TCODE.
6235 If we have an unsigned type, we cannot do this since it will change
6236 the result if the original computation overflowed. */
6237 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6238 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6239 || (tcode == MULT_EXPR
6240 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6241 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6242 && code != MULT_EXPR)))
6244 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6246 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6247 *strict_overflow_p = true;
6248 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6249 fold_convert (ctype,
6250 const_binop (TRUNC_DIV_EXPR,
6251 op1, c)));
6253 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6255 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6256 *strict_overflow_p = true;
6257 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6258 fold_convert (ctype,
6259 const_binop (TRUNC_DIV_EXPR,
6260 c, op1)));
6263 break;
6265 default:
6266 break;
6269 return 0;
6272 /* Return a node which has the indicated constant VALUE (either 0 or
6273 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6274 and is of the indicated TYPE. */
6276 tree
6277 constant_boolean_node (bool value, tree type)
6279 if (type == integer_type_node)
6280 return value ? integer_one_node : integer_zero_node;
6281 else if (type == boolean_type_node)
6282 return value ? boolean_true_node : boolean_false_node;
6283 else if (TREE_CODE (type) == VECTOR_TYPE)
6284 return build_vector_from_val (type,
6285 build_int_cst (TREE_TYPE (type),
6286 value ? -1 : 0));
6287 else
6288 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6292 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6293 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6294 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6295 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6296 COND is the first argument to CODE; otherwise (as in the example
6297 given here), it is the second argument. TYPE is the type of the
6298 original expression. Return NULL_TREE if no simplification is
6299 possible. */
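/* For instance, 10 + (b ? 2 : 3) becomes b ? 12 : 13; pushing the
   operation into both arms pays off when at least one arm then folds
   to a constant.  */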
6301 static tree
6302 fold_binary_op_with_conditional_arg (location_t loc,
6303 enum tree_code code,
6304 tree type, tree op0, tree op1,
6305 tree cond, tree arg, int cond_first_p)
6307 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6308 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6309 tree test, true_value, false_value;
6310 tree lhs = NULL_TREE;
6311 tree rhs = NULL_TREE;
6312 enum tree_code cond_code = COND_EXPR;
6314 if (TREE_CODE (cond) == COND_EXPR
6315 || TREE_CODE (cond) == VEC_COND_EXPR)
6317 test = TREE_OPERAND (cond, 0);
6318 true_value = TREE_OPERAND (cond, 1);
6319 false_value = TREE_OPERAND (cond, 2);
6320 /* If this operand throws an exception, then it does not make
6321 sense to try to perform a logical or arithmetic operation
6322 involving it. */
6323 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6324 lhs = true_value;
6325 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6326 rhs = false_value;
6328 else
6330 tree testtype = TREE_TYPE (cond);
6331 test = cond;
6332 true_value = constant_boolean_node (true, testtype);
6333 false_value = constant_boolean_node (false, testtype);
6336 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6337 cond_code = VEC_COND_EXPR;
6339 /* This transformation is only worthwhile if we don't have to wrap ARG
6340 in a SAVE_EXPR and the operation can be simplified without recursing
6341 on at least one of the branches once it's pushed inside the COND_EXPR. */
6342 if (!TREE_CONSTANT (arg)
6343 && (TREE_SIDE_EFFECTS (arg)
6344 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6345 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6346 return NULL_TREE;
6348 arg = fold_convert_loc (loc, arg_type, arg);
6349 if (lhs == 0)
6351 true_value = fold_convert_loc (loc, cond_type, true_value);
6352 if (cond_first_p)
6353 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6354 else
6355 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6357 if (rhs == 0)
6359 false_value = fold_convert_loc (loc, cond_type, false_value);
6360 if (cond_first_p)
6361 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6362 else
6363 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6366 /* Check that we have simplified at least one of the branches. */
6367 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6368 return NULL_TREE;
6370 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6374 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6376 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6377 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6378 ADDEND is the same as X.
6380 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6381 and finite. The problematic cases are when X is zero, and its mode
6382 has signed zeros. In the case of rounding towards -infinity,
6383 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6384 modes, X + 0 is not the same as X because -0 + 0 is 0. */
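/* Concretely: with X = -0.0, X + 0.0 is +0.0 under the default
   rounding mode, so X + 0.0 -> X is invalid when the sign of zero
   matters; X - 0.0 -> X remains valid unless rounding towards
   -infinity is in effect, where 0.0 - 0.0 yields -0.0.  */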
6386 bool
6387 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6389 if (!real_zerop (addend))
6390 return false;
6392 /* Don't allow the fold with -fsignaling-nans. */
6393 if (HONOR_SNANS (element_mode (type)))
6394 return false;
6396 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6397 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6398 return true;
6400 /* In a vector or complex, we would need to check the sign of all zeros. */
6401 if (TREE_CODE (addend) != REAL_CST)
6402 return false;
6404 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6405 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6406 negate = !negate;
6408 /* The mode has signed zeros, and we have to honor their sign.
6409 In this situation, there is only one case we can return true for.
6410 X - 0 is the same as X unless rounding towards -infinity is
6411 supported. */
6412 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6415 /* Subroutine of fold() that checks comparisons of built-in math
6416 functions against real constants.
6418 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6419 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6420 is the type of the result and ARG0 and ARG1 are the operands of the
6421 comparison. ARG1 must be a TREE_REAL_CST.
6423 The function returns the constant folded tree if a simplification
6424 can be made, and NULL_TREE otherwise. */
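/* Typical results when NaNs can be ignored: sqrt (x) > 2.0 folds to
   x > 4.0, and sqrt (x) < -1.0 folds to constant false, since sqrt
   never returns a negative value.  */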
6426 static tree
6427 fold_mathfn_compare (location_t loc,
6428 enum built_in_function fcode, enum tree_code code,
6429 tree type, tree arg0, tree arg1)
6431 REAL_VALUE_TYPE c;
6433 if (BUILTIN_SQRT_P (fcode))
6435 tree arg = CALL_EXPR_ARG (arg0, 0);
6436 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6438 c = TREE_REAL_CST (arg1);
6439 if (REAL_VALUE_NEGATIVE (c))
6441 /* sqrt(x) < y is always false, if y is negative. */
6442 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6443 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6445 /* sqrt(x) > y is always true, if y is negative and we
6446 don't care about NaNs, i.e. negative values of x. */
6447 if (code == NE_EXPR || !HONOR_NANS (mode))
6448 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6450 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6451 return fold_build2_loc (loc, GE_EXPR, type, arg,
6452 build_real (TREE_TYPE (arg), dconst0));
6454 else if (code == GT_EXPR || code == GE_EXPR)
6456 REAL_VALUE_TYPE c2;
6458 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6459 real_convert (&c2, mode, &c2);
6461 if (REAL_VALUE_ISINF (c2))
6463 /* sqrt(x) > y is x == +Inf, when y is very large. */
6464 if (HONOR_INFINITIES (mode))
6465 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6466 build_real (TREE_TYPE (arg), c2));
6468 /* sqrt(x) > y is always false, when y is very large
6469 and we don't care about infinities. */
6470 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6473 /* sqrt(x) > c is the same as x > c*c. */
6474 return fold_build2_loc (loc, code, type, arg,
6475 build_real (TREE_TYPE (arg), c2));
6477 else if (code == LT_EXPR || code == LE_EXPR)
6479 REAL_VALUE_TYPE c2;
6481 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6482 real_convert (&c2, mode, &c2);
6484 if (REAL_VALUE_ISINF (c2))
6486 /* sqrt(x) < y is always true, when y is a very large
6487 value and we don't care about NaNs or Infinities. */
6488 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6489 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6491 /* sqrt(x) < y is x != +Inf when y is very large and we
6492 don't care about NaNs. */
6493 if (! HONOR_NANS (mode))
6494 return fold_build2_loc (loc, NE_EXPR, type, arg,
6495 build_real (TREE_TYPE (arg), c2));
6497 /* sqrt(x) < y is x >= 0 when y is very large and we
6498 don't care about Infinities. */
6499 if (! HONOR_INFINITIES (mode))
6500 return fold_build2_loc (loc, GE_EXPR, type, arg,
6501 build_real (TREE_TYPE (arg), dconst0));
6503 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6504 arg = save_expr (arg);
6505 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6506 fold_build2_loc (loc, GE_EXPR, type, arg,
6507 build_real (TREE_TYPE (arg),
6508 dconst0)),
6509 fold_build2_loc (loc, NE_EXPR, type, arg,
6510 build_real (TREE_TYPE (arg),
6511 c2)));
6514 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6515 if (! HONOR_NANS (mode))
6516 return fold_build2_loc (loc, code, type, arg,
6517 build_real (TREE_TYPE (arg), c2));
6519 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6520 arg = save_expr (arg);
6521 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6522 fold_build2_loc (loc, GE_EXPR, type, arg,
6523 build_real (TREE_TYPE (arg),
6524 dconst0)),
6525 fold_build2_loc (loc, code, type, arg,
6526 build_real (TREE_TYPE (arg),
6527 c2)));
6531 return NULL_TREE;
6534 /* Subroutine of fold() that optimizes comparisons against Infinities,
6535 either +Inf or -Inf.
6537 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6538 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6539 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6541 The function returns the constant folded tree if a simplification
6542 can be made, and NULL_TREE otherwise. */
6544 static tree
6545 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6546 tree arg0, tree arg1)
6548 machine_mode mode;
6549 REAL_VALUE_TYPE max;
6550 tree temp;
6551 bool neg;
6553 mode = TYPE_MODE (TREE_TYPE (arg0));
6555 /* For negative infinity swap the sense of the comparison. */
6556 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6557 if (neg)
6558 code = swap_tree_comparison (code);
6560 switch (code)
6562 case GT_EXPR:
6563 /* x > +Inf is always false, if we ignore sNaNs. */
6564 if (HONOR_SNANS (mode))
6565 return NULL_TREE;
6566 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6568 case LE_EXPR:
6569 /* x <= +Inf is always true, if we don't care about NaNs. */
6570 if (! HONOR_NANS (mode))
6571 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6573 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6574 arg0 = save_expr (arg0);
6575 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6577 case EQ_EXPR:
6578 case GE_EXPR:
6579 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6580 real_maxval (&max, neg, mode);
6581 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6582 arg0, build_real (TREE_TYPE (arg0), max));
6584 case LT_EXPR:
6585 /* x < +Inf is always equal to x <= DBL_MAX. */
6586 real_maxval (&max, neg, mode);
6587 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6588 arg0, build_real (TREE_TYPE (arg0), max));
6590 case NE_EXPR:
6591 /* x != +Inf is always equal to !(x > DBL_MAX). */
6592 real_maxval (&max, neg, mode);
6593 if (! HONOR_NANS (mode))
6594 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6595 arg0, build_real (TREE_TYPE (arg0), max));
6597 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6598 arg0, build_real (TREE_TYPE (arg0), max));
6599 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6601 default:
6602 break;
6605 return NULL_TREE;
6608 /* Subroutine of fold() that optimizes comparisons of a division by
6609 a nonzero integer constant against an integer constant, i.e.
6610 X/C1 op C2.
6612 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6613 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6614 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6616 The function returns the constant folded tree if a simplification
6617 can be made, and NULL_TREE otherwise. */
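/* Worked example for unsigned X: X / 4 == 3 holds exactly for X in
   [12, 15], so below LO becomes prod = 4 * 3 = 12 and HI becomes
   prod + (4 - 1) = 15, and the comparison is rewritten as a range
   check.  */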
6619 static tree
6620 fold_div_compare (location_t loc,
6621 enum tree_code code, tree type, tree arg0, tree arg1)
6623 tree prod, tmp, hi, lo;
6624 tree arg00 = TREE_OPERAND (arg0, 0);
6625 tree arg01 = TREE_OPERAND (arg0, 1);
6626 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6627 bool neg_overflow = false;
6628 bool overflow;
6630 /* We have to do this the hard way to detect unsigned overflow.
6631 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6632 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6633 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6634 neg_overflow = false;
6636 if (sign == UNSIGNED)
6638 tmp = int_const_binop (MINUS_EXPR, arg01,
6639 build_int_cst (TREE_TYPE (arg01), 1));
6640 lo = prod;
6642 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6643 val = wi::add (prod, tmp, sign, &overflow);
6644 hi = force_fit_type (TREE_TYPE (arg00), val,
6645 -1, overflow | TREE_OVERFLOW (prod));
6647 else if (tree_int_cst_sgn (arg01) >= 0)
6649 tmp = int_const_binop (MINUS_EXPR, arg01,
6650 build_int_cst (TREE_TYPE (arg01), 1));
6651 switch (tree_int_cst_sgn (arg1))
6653 case -1:
6654 neg_overflow = true;
6655 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6656 hi = prod;
6657 break;
6659 case 0:
6660 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6661 hi = tmp;
6662 break;
6664 case 1:
6665 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6666 lo = prod;
6667 break;
6669 default:
6670 gcc_unreachable ();
6673 else
6675 /* A negative divisor reverses the relational operators. */
6676 code = swap_tree_comparison (code);
6678 tmp = int_const_binop (PLUS_EXPR, arg01,
6679 build_int_cst (TREE_TYPE (arg01), 1));
6680 switch (tree_int_cst_sgn (arg1))
6682 case -1:
6683 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6684 lo = prod;
6685 break;
6687 case 0:
6688 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6689 lo = tmp;
6690 break;
6692 case 1:
6693 neg_overflow = true;
6694 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6695 hi = prod;
6696 break;
6698 default:
6699 gcc_unreachable ();
6703 switch (code)
6705 case EQ_EXPR:
6706 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6707 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6708 if (TREE_OVERFLOW (hi))
6709 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6710 if (TREE_OVERFLOW (lo))
6711 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6712 return build_range_check (loc, type, arg00, 1, lo, hi);
6714 case NE_EXPR:
6715 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6716 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6717 if (TREE_OVERFLOW (hi))
6718 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6719 if (TREE_OVERFLOW (lo))
6720 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6721 return build_range_check (loc, type, arg00, 0, lo, hi);
6723 case LT_EXPR:
6724 if (TREE_OVERFLOW (lo))
6726 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6727 return omit_one_operand_loc (loc, type, tmp, arg00);
6729 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6731 case LE_EXPR:
6732 if (TREE_OVERFLOW (hi))
6734 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6735 return omit_one_operand_loc (loc, type, tmp, arg00);
6737 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6739 case GT_EXPR:
6740 if (TREE_OVERFLOW (hi))
6742 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6743 return omit_one_operand_loc (loc, type, tmp, arg00);
6745 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6747 case GE_EXPR:
6748 if (TREE_OVERFLOW (lo))
6750 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6751 return omit_one_operand_loc (loc, type, tmp, arg00);
6753 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6755 default:
6756 break;
6759 return NULL_TREE;
6763 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6764 equality/inequality test, then return a simplified form of the test
6765 using a sign test. Otherwise return NULL. TYPE is the desired
6766 result type. */
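/* E.g. for a 32-bit unsigned A, (A & 0x80000000) != 0 becomes
   (int) A < 0, and (A & 0x80000000) == 0 becomes (int) A >= 0.  */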
6768 static tree
6769 fold_single_bit_test_into_sign_test (location_t loc,
6770 enum tree_code code, tree arg0, tree arg1,
6771 tree result_type)
6773 /* If this is testing a single bit, we can optimize the test. */
6774 if ((code == NE_EXPR || code == EQ_EXPR)
6775 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6776 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6778 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6779 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6780 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6782 if (arg00 != NULL_TREE
6783 /* This is only a win if casting to a signed type is cheap,
6784 i.e. when arg00's type is not a partial mode. */
6785 && TYPE_PRECISION (TREE_TYPE (arg00))
6786 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6788 tree stype = signed_type_for (TREE_TYPE (arg00));
6789 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6790 result_type,
6791 fold_convert_loc (loc, stype, arg00),
6792 build_int_cst (stype, 0));
6796 return NULL_TREE;
6799 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6800 equality/inequality test, then return a simplified form of
6801 the test using shifts and logical operations. Otherwise return
6802 NULL. TYPE is the desired result type. */
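/* E.g. (A & 8) != 0 becomes (A >> 3) & 1, and (A & 8) == 0 becomes
   ((A >> 3) ^ 1) & 1; when the tested bit is the sign bit, the
   sign-test form above is preferred instead.  */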
6804 tree
6805 fold_single_bit_test (location_t loc, enum tree_code code,
6806 tree arg0, tree arg1, tree result_type)
6808 /* If this is testing a single bit, we can optimize the test. */
6809 if ((code == NE_EXPR || code == EQ_EXPR)
6810 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6811 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6813 tree inner = TREE_OPERAND (arg0, 0);
6814 tree type = TREE_TYPE (arg0);
6815 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6816 machine_mode operand_mode = TYPE_MODE (type);
6817 int ops_unsigned;
6818 tree signed_type, unsigned_type, intermediate_type;
6819 tree tem, one;
6821 /* First, see if we can fold the single bit test into a sign-bit
6822 test. */
6823 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6824 result_type);
6825 if (tem)
6826 return tem;
6828 /* Otherwise we have (A & C) != 0 where C is a single bit,
6829 convert that into ((A >> C2) & 1), where C2 = log2(C).
6830 Similarly for (A & C) == 0. */
6832 /* If INNER is a right shift of a constant and it plus BITNUM does
6833 not overflow, adjust BITNUM and INNER. */
6834 if (TREE_CODE (inner) == RSHIFT_EXPR
6835 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6836 && bitnum < TYPE_PRECISION (type)
6837 && wi::ltu_p (TREE_OPERAND (inner, 1),
6838 TYPE_PRECISION (type) - bitnum))
6840 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6841 inner = TREE_OPERAND (inner, 0);
6844 /* If we are going to be able to omit the AND below, we must do our
6845 operations as unsigned. If we must use the AND, we have a choice.
6846 Normally unsigned is faster, but for some machines signed is. */
6847 #ifdef LOAD_EXTEND_OP
6848 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6849 && !flag_syntax_only) ? 0 : 1;
6850 #else
6851 ops_unsigned = 1;
6852 #endif
6854 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6855 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6856 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6857 inner = fold_convert_loc (loc, intermediate_type, inner);
6859 if (bitnum != 0)
6860 inner = build2 (RSHIFT_EXPR, intermediate_type,
6861 inner, size_int (bitnum));
6863 one = build_int_cst (intermediate_type, 1);
6865 if (code == EQ_EXPR)
6866 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6868 /* Put the AND last so it can combine with more things. */
6869 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6871 /* Make sure to return the proper type. */
6872 inner = fold_convert_loc (loc, result_type, inner);
6874 return inner;
6876 return NULL_TREE;
6879 /* Check whether we are allowed to reorder operands arg0 and arg1,
6880 such that the evaluation of arg1 occurs before arg0. */
6882 static bool
6883 reorder_operands_p (const_tree arg0, const_tree arg1)
6885 if (! flag_evaluation_order)
6886 return true;
6887 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6888 return true;
6889 return ! TREE_SIDE_EFFECTS (arg0)
6890 && ! TREE_SIDE_EFFECTS (arg1);
6893 /* Test whether it is preferable to swap two operands, ARG0 and
6894 ARG1, for example because ARG0 is an integer constant and ARG1
6895 isn't. If REORDER is true, only recommend swapping if we can
6896 evaluate the operands in reverse order. */
6898 bool
6899 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6901 if (CONSTANT_CLASS_P (arg1))
6902 return 0;
6903 if (CONSTANT_CLASS_P (arg0))
6904 return 1;
6906 STRIP_NOPS (arg0);
6907 STRIP_NOPS (arg1);
6909 if (TREE_CONSTANT (arg1))
6910 return 0;
6911 if (TREE_CONSTANT (arg0))
6912 return 1;
6914 if (reorder && flag_evaluation_order
6915 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6916 return 0;
6918 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6919 for commutative and comparison operators. Ensuring a canonical
6920 form allows the optimizers to find additional redundancies without
6921 having to explicitly check for both orderings. */
6922 if (TREE_CODE (arg0) == SSA_NAME
6923 && TREE_CODE (arg1) == SSA_NAME
6924 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6925 return 1;
6927 /* Put SSA_NAMEs last. */
6928 if (TREE_CODE (arg1) == SSA_NAME)
6929 return 0;
6930 if (TREE_CODE (arg0) == SSA_NAME)
6931 return 1;
6933 /* Put variables last. */
6934 if (DECL_P (arg1))
6935 return 0;
6936 if (DECL_P (arg0))
6937 return 1;
6939 return 0;
6942 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6943 ARG0 is extended to a wider type. */
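/* E.g. if ARG0 is (int) s for a 16-bit signed short s, then
   ARG0 == 100000 is known to be false and ARG0 < 100000 known to be
   true, because 100000 lies above the range of the narrower type.  */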
6945 static tree
6946 fold_widened_comparison (location_t loc, enum tree_code code,
6947 tree type, tree arg0, tree arg1)
6949 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6950 tree arg1_unw;
6951 tree shorter_type, outer_type;
6952 tree min, max;
6953 bool above, below;
6955 if (arg0_unw == arg0)
6956 return NULL_TREE;
6957 shorter_type = TREE_TYPE (arg0_unw);
6959 #ifdef HAVE_canonicalize_funcptr_for_compare
6960 /* Disable this optimization if we're casting a function pointer
6961 type on targets that require function pointer canonicalization. */
6962 if (HAVE_canonicalize_funcptr_for_compare
6963 && TREE_CODE (shorter_type) == POINTER_TYPE
6964 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6965 return NULL_TREE;
6966 #endif
6968 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6969 return NULL_TREE;
6971 arg1_unw = get_unwidened (arg1, NULL_TREE);
6973 /* If possible, express the comparison in the shorter mode. */
6974 if ((code == EQ_EXPR || code == NE_EXPR
6975 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6976 && (TREE_TYPE (arg1_unw) == shorter_type
6977 || ((TYPE_PRECISION (shorter_type)
6978 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6979 && (TYPE_UNSIGNED (shorter_type)
6980 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6981 || (TREE_CODE (arg1_unw) == INTEGER_CST
6982 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6983 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6984 && int_fits_type_p (arg1_unw, shorter_type))))
6985 return fold_build2_loc (loc, code, type, arg0_unw,
6986 fold_convert_loc (loc, shorter_type, arg1_unw));
6988 if (TREE_CODE (arg1_unw) != INTEGER_CST
6989 || TREE_CODE (shorter_type) != INTEGER_TYPE
6990 || !int_fits_type_p (arg1_unw, shorter_type))
6991 return NULL_TREE;
6993 /* If we are comparing with an integer that does not fit into the range
6994 of the shorter type, the result is known. */
6995 outer_type = TREE_TYPE (arg1_unw);
6996 min = lower_bound_in_type (outer_type, shorter_type);
6997 max = upper_bound_in_type (outer_type, shorter_type);
6999 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7000 max, arg1_unw));
7001 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7002 arg1_unw, min));
7004 switch (code)
7006 case EQ_EXPR:
7007 if (above || below)
7008 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7009 break;
7011 case NE_EXPR:
7012 if (above || below)
7013 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7014 break;
7016 case LT_EXPR:
7017 case LE_EXPR:
7018 if (above)
7019 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7020 else if (below)
7021 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7023 case GT_EXPR:
7024 case GE_EXPR:
7025 if (above)
7026 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7027 else if (below)
7028 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7030 default:
7031 break;
7034 return NULL_TREE;
7037 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
7038 conversion applied to ARG0 changes only its signedness. */
7040 static tree
7041 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7042 tree arg0, tree arg1)
7044 tree arg0_inner;
7045 tree inner_type, outer_type;
7047 if (!CONVERT_EXPR_P (arg0))
7048 return NULL_TREE;
7050 outer_type = TREE_TYPE (arg0);
7051 arg0_inner = TREE_OPERAND (arg0, 0);
7052 inner_type = TREE_TYPE (arg0_inner);
7054 #ifdef HAVE_canonicalize_funcptr_for_compare
7055 /* Disable this optimization if we're casting a function pointer
7056 type on targets that require function pointer canonicalization. */
7057 if (HAVE_canonicalize_funcptr_for_compare
7058 && TREE_CODE (inner_type) == POINTER_TYPE
7059 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7060 return NULL_TREE;
7061 #endif
7063 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7064 return NULL_TREE;
7066 if (TREE_CODE (arg1) != INTEGER_CST
7067 && !(CONVERT_EXPR_P (arg1)
7068 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7069 return NULL_TREE;
7071 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7072 && code != NE_EXPR
7073 && code != EQ_EXPR)
7074 return NULL_TREE;
7076 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7077 return NULL_TREE;
7079 if (TREE_CODE (arg1) == INTEGER_CST)
7080 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7081 TREE_OVERFLOW (arg1));
7082 else
7083 arg1 = fold_convert_loc (loc, inner_type, arg1);
7085 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7089 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7090 means A >= Y && A != MAX, but in this case we know that
7091 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
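/* The key observation: A < X implies A != MAX, so A + 1 cannot wrap
   and A + 1 > Y is exactly A >= Y.  E.g. with A = 3, both A + 1 > 4
   and A >= 4 are false, while A + 1 > 3 and A >= 3 are both true.  */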
7093 static tree
7094 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7096 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7098 if (TREE_CODE (bound) == LT_EXPR)
7099 a = TREE_OPERAND (bound, 0);
7100 else if (TREE_CODE (bound) == GT_EXPR)
7101 a = TREE_OPERAND (bound, 1);
7102 else
7103 return NULL_TREE;
7105 typea = TREE_TYPE (a);
7106 if (!INTEGRAL_TYPE_P (typea)
7107 && !POINTER_TYPE_P (typea))
7108 return NULL_TREE;
7110 if (TREE_CODE (ineq) == LT_EXPR)
7112 a1 = TREE_OPERAND (ineq, 1);
7113 y = TREE_OPERAND (ineq, 0);
7115 else if (TREE_CODE (ineq) == GT_EXPR)
7117 a1 = TREE_OPERAND (ineq, 0);
7118 y = TREE_OPERAND (ineq, 1);
7120 else
7121 return NULL_TREE;
7123 if (TREE_TYPE (a1) != typea)
7124 return NULL_TREE;
7126 if (POINTER_TYPE_P (typea))
7128 /* Convert the pointer types into integers before taking the difference. */
7129 tree ta = fold_convert_loc (loc, ssizetype, a);
7130 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7131 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7133 else
7134 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7136 if (!diff || !integer_onep (diff))
7137 return NULL_TREE;
7139 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7142 /* Fold a sum or difference of at least one multiplication.
7143 Returns the folded tree or NULL if no simplification could be made. */
7145 static tree
7146 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7147 tree arg0, tree arg1)
7149 tree arg00, arg01, arg10, arg11;
7150 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7152 /* (A * C) +- (B * C) -> (A+-B) * C.
7153 (A * C) +- A -> A * (C+-1).
7154 We are most concerned about the case where C is a constant,
7155 but other combinations show up during loop reduction. Since
7156 it is not difficult, try all four possibilities. */
7158 if (TREE_CODE (arg0) == MULT_EXPR)
7160 arg00 = TREE_OPERAND (arg0, 0);
7161 arg01 = TREE_OPERAND (arg0, 1);
7163 else if (TREE_CODE (arg0) == INTEGER_CST)
7165 arg00 = build_one_cst (type);
7166 arg01 = arg0;
7168 else
7170 /* We cannot generate constant 1 for fract. */
7171 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7172 return NULL_TREE;
7173 arg00 = arg0;
7174 arg01 = build_one_cst (type);
7176 if (TREE_CODE (arg1) == MULT_EXPR)
7178 arg10 = TREE_OPERAND (arg1, 0);
7179 arg11 = TREE_OPERAND (arg1, 1);
7181 else if (TREE_CODE (arg1) == INTEGER_CST)
7183 arg10 = build_one_cst (type);
7184 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7185 the purpose of this canonicalization. */
7186 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7187 && negate_expr_p (arg1)
7188 && code == PLUS_EXPR)
7190 arg11 = negate_expr (arg1);
7191 code = MINUS_EXPR;
7193 else
7194 arg11 = arg1;
7196 else
7198 /* We cannot generate constant 1 for fract. */
7199 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7200 return NULL_TREE;
7201 arg10 = arg1;
7202 arg11 = build_one_cst (type);
7204 same = NULL_TREE;
7206 if (operand_equal_p (arg01, arg11, 0))
7207 same = arg01, alt0 = arg00, alt1 = arg10;
7208 else if (operand_equal_p (arg00, arg10, 0))
7209 same = arg00, alt0 = arg01, alt1 = arg11;
7210 else if (operand_equal_p (arg00, arg11, 0))
7211 same = arg00, alt0 = arg01, alt1 = arg10;
7212 else if (operand_equal_p (arg01, arg10, 0))
7213 same = arg01, alt0 = arg00, alt1 = arg11;
7215 /* No identical multiplicands; see if we can find a common
7216 power-of-two factor in non-power-of-two multiplies. This
7217 can help in multi-dimensional array access. */
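/* E.g. i * 12 + j * 4 is rewritten as (i * 3 + j) * 4: the common
   power-of-two factor 4 is pulled out, and the remaining factor
   12 / 4 = 3 stays with the other operand.  */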
7218 else if (tree_fits_shwi_p (arg01)
7219 && tree_fits_shwi_p (arg11))
7221 HOST_WIDE_INT int01, int11, tmp;
7222 bool swap = false;
7223 tree maybe_same;
7224 int01 = tree_to_shwi (arg01);
7225 int11 = tree_to_shwi (arg11);
7227 /* Move min of absolute values to int11. */
7228 if (absu_hwi (int01) < absu_hwi (int11))
7230 tmp = int01, int01 = int11, int11 = tmp;
7231 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7232 maybe_same = arg01;
7233 swap = true;
7235 else
7236 maybe_same = arg11;
7238 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7239 /* The remainder should not be a constant, otherwise we
7240 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7241 increase the number of multiplications necessary. */
7242 && TREE_CODE (arg10) != INTEGER_CST)
7244 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7245 build_int_cst (TREE_TYPE (arg00),
7246 int01 / int11));
7247 alt1 = arg10;
7248 same = maybe_same;
7249 if (swap)
7250 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
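/* An illustrative instance of the power-of-two case above:
   i*12 + j*4  becomes  (i*3 + j) * 4,  since 4 is a power of two
   and 12 % 4 == 0.  */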
7254 if (same)
7255 return fold_build2_loc (loc, MULT_EXPR, type,
7256 fold_build2_loc (loc, code, type,
7257 fold_convert_loc (loc, type, alt0),
7258 fold_convert_loc (loc, type, alt1)),
7259 fold_convert_loc (loc, type, same));
7261 return NULL_TREE;
7264 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7265 specified by EXPR into the buffer PTR of length LEN bytes.
7266 Return the number of bytes placed in the buffer, or zero
7267 upon failure. */
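/* As an illustration (assuming 8-bit bytes and a 4-byte integer
   mode): encoding the INTEGER_CST 0x01020304 stores the bytes
   04 03 02 01 for a little-endian target and 01 02 03 04 for a
   big-endian one.  */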
7269 static int
7270 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7272 tree type = TREE_TYPE (expr);
7273 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7274 int byte, offset, word, words;
7275 unsigned char value;
7277 if ((off == -1 && total_bytes > len)
7278 || off >= total_bytes)
7279 return 0;
7280 if (off == -1)
7281 off = 0;
7282 words = total_bytes / UNITS_PER_WORD;
7284 for (byte = 0; byte < total_bytes; byte++)
7286 int bitpos = byte * BITS_PER_UNIT;
7287 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7288 number of bytes. */
7289 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7291 if (total_bytes > UNITS_PER_WORD)
7293 word = byte / UNITS_PER_WORD;
7294 if (WORDS_BIG_ENDIAN)
7295 word = (words - 1) - word;
7296 offset = word * UNITS_PER_WORD;
7297 if (BYTES_BIG_ENDIAN)
7298 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7299 else
7300 offset += byte % UNITS_PER_WORD;
7302 else
7303 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7304 if (offset >= off
7305 && offset - off < len)
7306 ptr[offset - off] = value;
7308 return MIN (len, total_bytes - off);
7312 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7313 specified by EXPR into the buffer PTR of length LEN bytes.
7314 Return the number of bytes placed in the buffer, or zero
7315 upon failure. */
7317 static int
7318 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7320 tree type = TREE_TYPE (expr);
7321 machine_mode mode = TYPE_MODE (type);
7322 int total_bytes = GET_MODE_SIZE (mode);
7323 FIXED_VALUE_TYPE value;
7324 tree i_value, i_type;
7326 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7327 return 0;
7329 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7331 if (NULL_TREE == i_type
7332 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7333 return 0;
7335 value = TREE_FIXED_CST (expr);
7336 i_value = double_int_to_tree (i_type, value.data);
7338 return native_encode_int (i_value, ptr, len, off);
7342 /* Subroutine of native_encode_expr. Encode the REAL_CST
7343 specified by EXPR into the buffer PTR of length LEN bytes.
7344 Return the number of bytes placed in the buffer, or zero
7345 upon failure. */
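/* For example (assuming IEEE double with little-endian word order):
   real_to_target represents 1.0 as the 32-bit words 0x00000000 and
   0x3ff00000, which the loop below then serializes into PTR in the
   target's byte order.  */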
7347 static int
7348 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7350 tree type = TREE_TYPE (expr);
7351 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7352 int byte, offset, word, words, bitpos;
7353 unsigned char value;
7355 /* There are always 32 bits in each long, no matter the size of
7356 the host's long. We handle floating point representations with
7357 up to 192 bits. */
7358 long tmp[6];
7360 if ((off == -1 && total_bytes > len)
7361 || off >= total_bytes)
7362 return 0;
7363 if (off == -1)
7364 off = 0;
7365 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7367 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7369 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7370 bitpos += BITS_PER_UNIT)
7372 byte = (bitpos / BITS_PER_UNIT) & 3;
7373 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7375 if (UNITS_PER_WORD < 4)
7377 word = byte / UNITS_PER_WORD;
7378 if (WORDS_BIG_ENDIAN)
7379 word = (words - 1) - word;
7380 offset = word * UNITS_PER_WORD;
7381 if (BYTES_BIG_ENDIAN)
7382 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7383 else
7384 offset += byte % UNITS_PER_WORD;
7386 else
7387 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7388 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7389 if (offset >= off
7390 && offset - off < len)
7391 ptr[offset - off] = value;
7393 return MIN (len, total_bytes - off);
7396 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7397 specified by EXPR into the buffer PTR of length LEN bytes.
7398 Return the number of bytes placed in the buffer, or zero
7399 upon failure. */
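/* E.g. for a complex double (16 bytes, assuming IEEE doubles) this
   emits the 8 bytes of the real part followed by the 8 bytes of the
   imaginary part, adjusting OFF so that a partial read may start
   inside the imaginary part.  */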
7401 static int
7402 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7404 int rsize, isize;
7405 tree part;
7407 part = TREE_REALPART (expr);
7408 rsize = native_encode_expr (part, ptr, len, off);
7409 if (off == -1
7410 && rsize == 0)
7411 return 0;
7412 part = TREE_IMAGPART (expr);
7413 if (off != -1)
7414 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7415 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7416 if (off == -1
7417 && isize != rsize)
7418 return 0;
7419 return rsize + isize;
7423 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7424 specified by EXPR into the buffer PTR of length LEN bytes.
7425 Return the number of bytes placed in the buffer, or zero
7426 upon failure. */
7428 static int
7429 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7431 unsigned i, count;
7432 int size, offset;
7433 tree itype, elem;
7435 offset = 0;
7436 count = VECTOR_CST_NELTS (expr);
7437 itype = TREE_TYPE (TREE_TYPE (expr));
7438 size = GET_MODE_SIZE (TYPE_MODE (itype));
7439 for (i = 0; i < count; i++)
7441 if (off >= size)
7443 off -= size;
7444 continue;
7446 elem = VECTOR_CST_ELT (expr, i);
7447 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7448 if ((off == -1 && res != size)
7449 || res == 0)
7450 return 0;
7451 offset += res;
7452 if (offset >= len)
7453 return offset;
7454 if (off != -1)
7455 off = 0;
7457 return offset;
7461 /* Subroutine of native_encode_expr. Encode the STRING_CST
7462 specified by EXPR into the buffer PTR of length LEN bytes.
7463 Return the number of bytes placed in the buffer, or zero
7464 upon failure. */
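/* For instance, encoding the STRING_CST "hi" (TREE_STRING_LENGTH 3,
   counting the terminating NUL as the C front end builds it) held
   in a char[8] array emits 'h', 'i', '\0' and five zero bytes of
   padding.  */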
7466 static int
7467 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7469 tree type = TREE_TYPE (expr);
7470 HOST_WIDE_INT total_bytes;
7472 if (TREE_CODE (type) != ARRAY_TYPE
7473 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7474 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7475 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7476 return 0;
7477 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7478 if ((off == -1 && total_bytes > len)
7479 || off >= total_bytes)
7480 return 0;
7481 if (off == -1)
7482 off = 0;
7483 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7485 int written = 0;
7486 if (off < TREE_STRING_LENGTH (expr))
7488 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7489 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7491 memset (ptr + written, 0,
7492 MIN (total_bytes - written, len - written));
7494 else
7495 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7496 return MIN (total_bytes - off, len);
7500 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7501 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7502 buffer PTR of length LEN bytes. If OFF is not -1 then start
7503 the encoding at byte offset OFF and encode at most LEN bytes.
7504 Return the number of bytes placed in the buffer, or zero upon failure. */
7506 int
7507 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7509 switch (TREE_CODE (expr))
7511 case INTEGER_CST:
7512 return native_encode_int (expr, ptr, len, off);
7514 case REAL_CST:
7515 return native_encode_real (expr, ptr, len, off);
7517 case FIXED_CST:
7518 return native_encode_fixed (expr, ptr, len, off);
7520 case COMPLEX_CST:
7521 return native_encode_complex (expr, ptr, len, off);
7523 case VECTOR_CST:
7524 return native_encode_vector (expr, ptr, len, off);
7526 case STRING_CST:
7527 return native_encode_string (expr, ptr, len, off);
7529 default:
7530 return 0;
7535 /* Subroutine of native_interpret_expr. Interpret the contents of
7536 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7537 If the buffer cannot be interpreted, return NULL_TREE. */
7539 static tree
7540 native_interpret_int (tree type, const unsigned char *ptr, int len)
7542 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7544 if (total_bytes > len
7545 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7546 return NULL_TREE;
7548 wide_int result = wi::from_buffer (ptr, total_bytes);
7550 return wide_int_to_tree (type, result);
7554 /* Subroutine of native_interpret_expr. Interpret the contents of
7555 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7556 If the buffer cannot be interpreted, return NULL_TREE. */
7558 static tree
7559 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7561 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7562 double_int result;
7563 FIXED_VALUE_TYPE fixed_value;
7565 if (total_bytes > len
7566 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7567 return NULL_TREE;
7569 result = double_int::from_buffer (ptr, total_bytes);
7570 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7572 return build_fixed (type, fixed_value);
7576 /* Subroutine of native_interpret_expr. Interpret the contents of
7577 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7578 If the buffer cannot be interpreted, return NULL_TREE. */
7580 static tree
7581 native_interpret_real (tree type, const unsigned char *ptr, int len)
7583 machine_mode mode = TYPE_MODE (type);
7584 int total_bytes = GET_MODE_SIZE (mode);
7585 int byte, offset, word, words, bitpos;
7586 unsigned char value;
7587 /* There are always 32 bits in each long, no matter the size of
7588 the host's long. We handle floating point representations with
7589 up to 192 bits. */
7590 REAL_VALUE_TYPE r;
7591 long tmp[6];
7593 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7594 if (total_bytes > len || total_bytes > 24)
7595 return NULL_TREE;
7596 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7598 memset (tmp, 0, sizeof (tmp));
7599 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7600 bitpos += BITS_PER_UNIT)
7602 byte = (bitpos / BITS_PER_UNIT) & 3;
7603 if (UNITS_PER_WORD < 4)
7605 word = byte / UNITS_PER_WORD;
7606 if (WORDS_BIG_ENDIAN)
7607 word = (words - 1) - word;
7608 offset = word * UNITS_PER_WORD;
7609 if (BYTES_BIG_ENDIAN)
7610 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7611 else
7612 offset += byte % UNITS_PER_WORD;
7614 else
7615 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7616 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7618 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7621 real_from_target (&r, tmp, mode);
7622 return build_real (type, r);
7626 /* Subroutine of native_interpret_expr. Interpret the contents of
7627 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7628 If the buffer cannot be interpreted, return NULL_TREE. */
7630 static tree
7631 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7633 tree etype, rpart, ipart;
7634 int size;
7636 etype = TREE_TYPE (type);
7637 size = GET_MODE_SIZE (TYPE_MODE (etype));
7638 if (size * 2 > len)
7639 return NULL_TREE;
7640 rpart = native_interpret_expr (etype, ptr, size);
7641 if (!rpart)
7642 return NULL_TREE;
7643 ipart = native_interpret_expr (etype, ptr+size, size);
7644 if (!ipart)
7645 return NULL_TREE;
7646 return build_complex (type, rpart, ipart);
7650 /* Subroutine of native_interpret_expr. Interpret the contents of
7651 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7652 If the buffer cannot be interpreted, return NULL_TREE. */
7654 static tree
7655 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7657 tree etype, elem;
7658 int i, size, count;
7659 tree *elements;
7661 etype = TREE_TYPE (type);
7662 size = GET_MODE_SIZE (TYPE_MODE (etype));
7663 count = TYPE_VECTOR_SUBPARTS (type);
7664 if (size * count > len)
7665 return NULL_TREE;
7667 elements = XALLOCAVEC (tree, count);
7668 for (i = count - 1; i >= 0; i--)
7670 elem = native_interpret_expr (etype, ptr+(i*size), size);
7671 if (!elem)
7672 return NULL_TREE;
7673 elements[i] = elem;
7675 return build_vector (type, elements);
7679 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7680 the buffer PTR of length LEN as a constant of type TYPE. For
7681 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7682 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7683 return NULL_TREE. */
7685 tree
7686 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7688 switch (TREE_CODE (type))
7690 case INTEGER_TYPE:
7691 case ENUMERAL_TYPE:
7692 case BOOLEAN_TYPE:
7693 case POINTER_TYPE:
7694 case REFERENCE_TYPE:
7695 return native_interpret_int (type, ptr, len);
7697 case REAL_TYPE:
7698 return native_interpret_real (type, ptr, len);
7700 case FIXED_POINT_TYPE:
7701 return native_interpret_fixed (type, ptr, len);
7703 case COMPLEX_TYPE:
7704 return native_interpret_complex (type, ptr, len);
7706 case VECTOR_TYPE:
7707 return native_interpret_vector (type, ptr, len);
7709 default:
7710 return NULL_TREE;
7714 /* Returns true if we can interpret the contents of a native encoding
7715 as TYPE. */
7717 static bool
7718 can_native_interpret_type_p (tree type)
7720 switch (TREE_CODE (type))
7722 case INTEGER_TYPE:
7723 case ENUMERAL_TYPE:
7724 case BOOLEAN_TYPE:
7725 case POINTER_TYPE:
7726 case REFERENCE_TYPE:
7727 case FIXED_POINT_TYPE:
7728 case REAL_TYPE:
7729 case COMPLEX_TYPE:
7730 case VECTOR_TYPE:
7731 return true;
7732 default:
7733 return false;
7737 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7738 TYPE at compile-time. If we're unable to perform the conversion
7739 return NULL_TREE. */
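/* A sketch of the round trip (assuming 32-bit IEEE float and int):
   VIEW_CONVERT_EXPR <int> (1.0f) is folded by encoding the REAL_CST
   into BUFFER and re-interpreting the bytes, yielding 0x3f800000.  */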
7741 static tree
7742 fold_view_convert_expr (tree type, tree expr)
7744 /* We support up to 512-bit values (for V8DFmode). */
7745 unsigned char buffer[64];
7746 int len;
7748 /* Check that the host and target are sane. */
7749 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7750 return NULL_TREE;
7752 len = native_encode_expr (expr, buffer, sizeof (buffer));
7753 if (len == 0)
7754 return NULL_TREE;
7756 return native_interpret_expr (type, buffer, len);
7759 /* Build an expression for the address of T. Folds away INDIRECT_REF
7760 to avoid confusing the gimplify process. */
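/* For example,  &*p  folds back to plain  p,  and  &MEM_REF[p, 0]
   likewise folds to  p,  instead of wrapping a new ADDR_EXPR around
   the dereference.  */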
7762 tree
7763 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7765 /* The size of the object is not relevant when talking about its address. */
7766 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7767 t = TREE_OPERAND (t, 0);
7769 if (TREE_CODE (t) == INDIRECT_REF)
7771 t = TREE_OPERAND (t, 0);
7773 if (TREE_TYPE (t) != ptrtype)
7774 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7776 else if (TREE_CODE (t) == MEM_REF
7777 && integer_zerop (TREE_OPERAND (t, 1)))
7778 return TREE_OPERAND (t, 0);
7779 else if (TREE_CODE (t) == MEM_REF
7780 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7781 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7782 TREE_OPERAND (t, 0),
7783 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7784 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7786 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7788 if (TREE_TYPE (t) != ptrtype)
7789 t = fold_convert_loc (loc, ptrtype, t);
7791 else
7792 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7794 return t;
7797 /* Build an expression for the address of T. */
7799 tree
7800 build_fold_addr_expr_loc (location_t loc, tree t)
7802 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7804 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7807 /* Fold a unary expression of code CODE and type TYPE with operand
7808 OP0. Return the folded expression if folding is successful.
7809 Otherwise, return NULL_TREE. */
7811 tree
7812 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7814 tree tem;
7815 tree arg0;
7816 enum tree_code_class kind = TREE_CODE_CLASS (code);
7818 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7819 && TREE_CODE_LENGTH (code) == 1);
7821 arg0 = op0;
7822 if (arg0)
7824 if (CONVERT_EXPR_CODE_P (code)
7825 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7827 /* Don't use STRIP_NOPS, because signedness of argument type
7828 matters. */
7829 STRIP_SIGN_NOPS (arg0);
7831 else
7833 /* Strip any conversions that don't change the mode. This
7834 is safe for every expression, except for a comparison
7835 expression because its signedness is derived from its
7836 operands.
7838 Note that this is done as an internal manipulation within
7839 the constant folder, in order to find the simplest
7840 representation of the arguments so that their form can be
7841 studied. In any case, the appropriate type conversions
7842 should be put back in the tree that will get out of the
7843 constant folder. */
7844 STRIP_NOPS (arg0);
7847 if (CONSTANT_CLASS_P (arg0))
7849 tree tem = const_unop (code, type, arg0);
7850 if (tem)
7852 if (TREE_TYPE (tem) != type)
7853 tem = fold_convert_loc (loc, type, tem);
7854 return tem;
7859 tem = generic_simplify (loc, code, type, op0);
7860 if (tem)
7861 return tem;
7863 if (TREE_CODE_CLASS (code) == tcc_unary)
7865 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7866 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7867 fold_build1_loc (loc, code, type,
7868 fold_convert_loc (loc, TREE_TYPE (op0),
7869 TREE_OPERAND (arg0, 1))));
7870 else if (TREE_CODE (arg0) == COND_EXPR)
7872 tree arg01 = TREE_OPERAND (arg0, 1);
7873 tree arg02 = TREE_OPERAND (arg0, 2);
7874 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7875 arg01 = fold_build1_loc (loc, code, type,
7876 fold_convert_loc (loc,
7877 TREE_TYPE (op0), arg01));
7878 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7879 arg02 = fold_build1_loc (loc, code, type,
7880 fold_convert_loc (loc,
7881 TREE_TYPE (op0), arg02));
7882 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7883 arg01, arg02);
7885 /* If this was a conversion, and all we did was to move it
7886 inside the COND_EXPR, bring it back out. But leave it if
7887 it is a conversion from integer to integer and the
7888 result precision is no wider than a word since such a
7889 conversion is cheap and may be optimized away by combine,
7890 while it couldn't if it were outside the COND_EXPR. Then return
7891 so we don't get into an infinite recursion loop taking the
7892 conversion out and then back in. */
7894 if ((CONVERT_EXPR_CODE_P (code)
7895 || code == NON_LVALUE_EXPR)
7896 && TREE_CODE (tem) == COND_EXPR
7897 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7898 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7899 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7900 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7901 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7902 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7903 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7904 && (INTEGRAL_TYPE_P
7905 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7906 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7907 || flag_syntax_only))
7908 tem = build1_loc (loc, code, type,
7909 build3 (COND_EXPR,
7910 TREE_TYPE (TREE_OPERAND
7911 (TREE_OPERAND (tem, 1), 0)),
7912 TREE_OPERAND (tem, 0),
7913 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7914 TREE_OPERAND (TREE_OPERAND (tem, 2),
7915 0)));
7916 return tem;
7920 switch (code)
7922 case NON_LVALUE_EXPR:
7923 if (!maybe_lvalue_p (op0))
7924 return fold_convert_loc (loc, type, op0);
7925 return NULL_TREE;
7927 CASE_CONVERT:
7928 case FLOAT_EXPR:
7929 case FIX_TRUNC_EXPR:
7930 if (COMPARISON_CLASS_P (op0))
7932 /* If we have (type) (a CMP b) and type is an integral type, return
7933 new expression involving the new type. Canonicalize
7934 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7935 non-integral type.
7936 Do not fold the result as that would not simplify further; also,
7937 folding again results in recursion. */
7938 if (TREE_CODE (type) == BOOLEAN_TYPE)
7939 return build2_loc (loc, TREE_CODE (op0), type,
7940 TREE_OPERAND (op0, 0),
7941 TREE_OPERAND (op0, 1));
7942 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7943 && TREE_CODE (type) != VECTOR_TYPE)
7944 return build3_loc (loc, COND_EXPR, type, op0,
7945 constant_boolean_node (true, type),
7946 constant_boolean_node (false, type));
7949 /* Handle (T *)&A.B.C for A being of type T and B and C
7950 living at offset zero. This occurs frequently in
7951 C++ upcasting and then accessing the base. */
7952 if (TREE_CODE (op0) == ADDR_EXPR
7953 && POINTER_TYPE_P (type)
7954 && handled_component_p (TREE_OPERAND (op0, 0)))
7956 HOST_WIDE_INT bitsize, bitpos;
7957 tree offset;
7958 machine_mode mode;
7959 int unsignedp, volatilep;
7960 tree base = TREE_OPERAND (op0, 0);
7961 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7962 &mode, &unsignedp, &volatilep, false);
7963 /* If the reference was to a (constant) zero offset, we can use
7964 the address of the base if it has the same base type
7965 as the result type and the pointer type is unqualified. */
7966 if (! offset && bitpos == 0
7967 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7968 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7969 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7970 return fold_convert_loc (loc, type,
7971 build_fold_addr_expr_loc (loc, base));
7974 if (TREE_CODE (op0) == MODIFY_EXPR
7975 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7976 /* Detect assigning a bitfield. */
7977 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7978 && DECL_BIT_FIELD
7979 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7981 /* Don't leave an assignment inside a conversion
7982 unless assigning a bitfield. */
7983 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7984 /* First do the assignment, then return converted constant. */
7985 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7986 TREE_NO_WARNING (tem) = 1;
7987 TREE_USED (tem) = 1;
7988 return tem;
7991 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7992 constants (if x has signed type, the sign bit cannot be set
7993 in c). This folds extension into the BIT_AND_EXPR.
7994 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7995 very likely don't have maximal range for their precision and this
7996 transformation effectively doesn't preserve non-maximal ranges. */
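/* E.g. (with 32-bit int and 64-bit long assumed):
   (long) (x & 0xff)  can become  (long) x & 0xff,  because the
   mask clears the sign bit and the extension therefore commutes.  */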
7997 if (TREE_CODE (type) == INTEGER_TYPE
7998 && TREE_CODE (op0) == BIT_AND_EXPR
7999 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8001 tree and_expr = op0;
8002 tree and0 = TREE_OPERAND (and_expr, 0);
8003 tree and1 = TREE_OPERAND (and_expr, 1);
8004 int change = 0;
8006 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8007 || (TYPE_PRECISION (type)
8008 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8009 change = 1;
8010 else if (TYPE_PRECISION (TREE_TYPE (and1))
8011 <= HOST_BITS_PER_WIDE_INT
8012 && tree_fits_uhwi_p (and1))
8014 unsigned HOST_WIDE_INT cst;
8016 cst = tree_to_uhwi (and1);
8017 cst &= HOST_WIDE_INT_M1U
8018 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8019 change = (cst == 0);
8020 #ifdef LOAD_EXTEND_OP
8021 if (change
8022 && !flag_syntax_only
8023 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8024 == ZERO_EXTEND))
8026 tree uns = unsigned_type_for (TREE_TYPE (and0));
8027 and0 = fold_convert_loc (loc, uns, and0);
8028 and1 = fold_convert_loc (loc, uns, and1);
8030 #endif
8032 if (change)
8034 tem = force_fit_type (type, wi::to_widest (and1), 0,
8035 TREE_OVERFLOW (and1));
8036 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8037 fold_convert_loc (loc, type, and0), tem);
8041 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8042 when one of the new casts will fold away. Conservatively we assume
8043 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8044 if (POINTER_TYPE_P (type)
8045 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8046 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8047 && !upc_shared_type_p (TREE_TYPE (type))
8048 && !upc_shared_type_p (TREE_TYPE (
8049 TREE_TYPE (TREE_OPERAND (arg0, 0))))
8050 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8051 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8052 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8054 tree arg00 = TREE_OPERAND (arg0, 0);
8055 tree arg01 = TREE_OPERAND (arg0, 1);
8057 return fold_build_pointer_plus_loc
8058 (loc, fold_convert_loc (loc, type, arg00), arg01);
8061 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8062 of the same precision, and X is an integer type not narrower than
8063 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8064 if (INTEGRAL_TYPE_P (type)
8065 && TREE_CODE (op0) == BIT_NOT_EXPR
8066 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8067 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8068 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8070 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8071 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8072 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8073 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8074 fold_convert_loc (loc, type, tem));
8077 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8078 type of X and Y (integer types only). */
8079 if (INTEGRAL_TYPE_P (type)
8080 && TREE_CODE (op0) == MULT_EXPR
8081 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8082 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8084 /* Be careful not to introduce new overflows. */
8085 tree mult_type;
8086 if (TYPE_OVERFLOW_WRAPS (type))
8087 mult_type = type;
8088 else
8089 mult_type = unsigned_type_for (type);
8091 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8093 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8094 fold_convert_loc (loc, mult_type,
8095 TREE_OPERAND (op0, 0)),
8096 fold_convert_loc (loc, mult_type,
8097 TREE_OPERAND (op0, 1)));
8098 return fold_convert_loc (loc, type, tem);
8102 return NULL_TREE;
8104 case VIEW_CONVERT_EXPR:
8105 if (TREE_CODE (op0) == MEM_REF)
8106 return fold_build2_loc (loc, MEM_REF, type,
8107 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8109 return NULL_TREE;
8111 case NEGATE_EXPR:
8112 tem = fold_negate_expr (loc, arg0);
8113 if (tem)
8114 return fold_convert_loc (loc, type, tem);
8115 return NULL_TREE;
8117 case ABS_EXPR:
8118 /* Convert fabs((double)float) into (double)fabsf(float). */
8119 if (TREE_CODE (arg0) == NOP_EXPR
8120 && TREE_CODE (type) == REAL_TYPE)
8122 tree targ0 = strip_float_extensions (arg0);
8123 if (targ0 != arg0)
8124 return fold_convert_loc (loc, type,
8125 fold_build1_loc (loc, ABS_EXPR,
8126 TREE_TYPE (targ0),
8127 targ0));
8129 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8130 else if (TREE_CODE (arg0) == ABS_EXPR)
8131 return arg0;
8133 /* Strip sign ops from argument. */
8134 if (TREE_CODE (type) == REAL_TYPE)
8136 tem = fold_strip_sign_ops (arg0);
8137 if (tem)
8138 return fold_build1_loc (loc, ABS_EXPR, type,
8139 fold_convert_loc (loc, type, tem));
8141 return NULL_TREE;
8143 case CONJ_EXPR:
8144 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8145 return fold_convert_loc (loc, type, arg0);
8146 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8148 tree itype = TREE_TYPE (type);
8149 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8150 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8151 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8152 negate_expr (ipart));
8154 if (TREE_CODE (arg0) == CONJ_EXPR)
8155 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8156 return NULL_TREE;
8158 case BIT_NOT_EXPR:
8159 /* Convert ~ (-A) to A - 1. */
8160 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8161 return fold_build2_loc (loc, MINUS_EXPR, type,
8162 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8163 build_int_cst (type, 1));
8164 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8165 else if (INTEGRAL_TYPE_P (type)
8166 && ((TREE_CODE (arg0) == MINUS_EXPR
8167 && integer_onep (TREE_OPERAND (arg0, 1)))
8168 || (TREE_CODE (arg0) == PLUS_EXPR
8169 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8171 /* Perform the negation in ARG0's type and only then convert
8172 to TYPE so as to avoid introducing undefined behavior. */
8173 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8174 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8175 TREE_OPERAND (arg0, 0));
8176 return fold_convert_loc (loc, type, t);
8178 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8179 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8180 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8181 fold_convert_loc (loc, type,
8182 TREE_OPERAND (arg0, 0)))))
8183 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8184 fold_convert_loc (loc, type,
8185 TREE_OPERAND (arg0, 1)));
8186 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8187 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8188 fold_convert_loc (loc, type,
8189 TREE_OPERAND (arg0, 1)))))
8190 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8191 fold_convert_loc (loc, type,
8192 TREE_OPERAND (arg0, 0)), tem);
8194 return NULL_TREE;
8196 case TRUTH_NOT_EXPR:
8197 /* Note that the operand of this must be an int
8198 and its values must be 0 or 1.
8199 ("true" is a fixed value perhaps depending on the language,
8200 but we don't handle values other than 1 correctly yet.) */
8201 tem = fold_truth_not_expr (loc, arg0);
8202 if (!tem)
8203 return NULL_TREE;
8204 return fold_convert_loc (loc, type, tem);
8206 case REALPART_EXPR:
8207 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8208 return fold_convert_loc (loc, type, arg0);
8209 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8211 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8212 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8213 fold_build1_loc (loc, REALPART_EXPR, itype,
8214 TREE_OPERAND (arg0, 0)),
8215 fold_build1_loc (loc, REALPART_EXPR, itype,
8216 TREE_OPERAND (arg0, 1)));
8217 return fold_convert_loc (loc, type, tem);
8219 if (TREE_CODE (arg0) == CONJ_EXPR)
8221 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8222 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8223 TREE_OPERAND (arg0, 0));
8224 return fold_convert_loc (loc, type, tem);
8226 if (TREE_CODE (arg0) == CALL_EXPR)
8228 tree fn = get_callee_fndecl (arg0);
8229 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8230 switch (DECL_FUNCTION_CODE (fn))
8232 CASE_FLT_FN (BUILT_IN_CEXPI):
8233 fn = mathfn_built_in (type, BUILT_IN_COS);
8234 if (fn)
8235 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8236 break;
8238 default:
8239 break;
8242 return NULL_TREE;
8244 case IMAGPART_EXPR:
8245 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8246 return build_zero_cst (type);
8247 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8249 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8250 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8251 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8252 TREE_OPERAND (arg0, 0)),
8253 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8254 TREE_OPERAND (arg0, 1)));
8255 return fold_convert_loc (loc, type, tem);
8257 if (TREE_CODE (arg0) == CONJ_EXPR)
8259 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8260 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8261 return fold_convert_loc (loc, type, negate_expr (tem));
8263 if (TREE_CODE (arg0) == CALL_EXPR)
8265 tree fn = get_callee_fndecl (arg0);
8266 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8267 switch (DECL_FUNCTION_CODE (fn))
8269 CASE_FLT_FN (BUILT_IN_CEXPI):
8270 fn = mathfn_built_in (type, BUILT_IN_SIN);
8271 if (fn)
8272 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8273 break;
8275 default:
8276 break;
8279 return NULL_TREE;
8281 case INDIRECT_REF:
8282 /* Fold *&X to X if X is an lvalue. */
8283 if (TREE_CODE (op0) == ADDR_EXPR)
8285 tree op00 = TREE_OPERAND (op0, 0);
8286 if ((TREE_CODE (op00) == VAR_DECL
8287 || TREE_CODE (op00) == PARM_DECL
8288 || TREE_CODE (op00) == RESULT_DECL)
8289 && !TREE_READONLY (op00))
8290 return op00;
8292 return NULL_TREE;
8294 default:
8295 return NULL_TREE;
8296 } /* switch (code) */
8300 /* If the operation was a conversion do _not_ mark a resulting constant
8301 with TREE_OVERFLOW if the original constant was not. These conversions
8302 have implementation defined behavior and retaining the TREE_OVERFLOW
8303 flag here would confuse later passes such as VRP. */
8304 tree
8305 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8306 tree type, tree op0)
8308 tree res = fold_unary_loc (loc, code, type, op0);
8309 if (res
8310 && TREE_CODE (res) == INTEGER_CST
8311 && TREE_CODE (op0) == INTEGER_CST
8312 && CONVERT_EXPR_CODE_P (code))
8313 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8315 return res;
8318 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8319 operands OP0 and OP1. LOC is the location of the resulting expression.
8320 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8321 Return the folded expression if folding is successful. Otherwise,
8322 return NULL_TREE. */
8323 static tree
8324 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8325 tree arg0, tree arg1, tree op0, tree op1)
8327 tree tem;
8329 /* We only do these simplifications if we are optimizing. */
8330 if (!optimize)
8331 return NULL_TREE;
8333 /* Check for things like (A || B) && (A || C). We can convert this
8334 to A || (B && C). Note that either operator can be any of the four
8335 truth and/or operations and the transformation will still be
8336 valid. Also note that we only care about order for the
8337 ANDIF and ORIF operators. If B contains side effects, this
8338 might change the truth-value of A. */
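/* A concrete instance (with B and C free of side effects):
   (a || b) && (a || c)  becomes  a || (b && c).  */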
8339 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8340 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8341 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8342 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8343 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8344 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8346 tree a00 = TREE_OPERAND (arg0, 0);
8347 tree a01 = TREE_OPERAND (arg0, 1);
8348 tree a10 = TREE_OPERAND (arg1, 0);
8349 tree a11 = TREE_OPERAND (arg1, 1);
8350 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8351 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8352 && (code == TRUTH_AND_EXPR
8353 || code == TRUTH_OR_EXPR));
8355 if (operand_equal_p (a00, a10, 0))
8356 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8357 fold_build2_loc (loc, code, type, a01, a11));
8358 else if (commutative && operand_equal_p (a00, a11, 0))
8359 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8360 fold_build2_loc (loc, code, type, a01, a10));
8361 else if (commutative && operand_equal_p (a01, a10, 0))
8362 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8363 fold_build2_loc (loc, code, type, a00, a11));
8365 /* This case is tricky because we must either have commutative
8366 operators or else A10 must not have side-effects. */
8368 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8369 && operand_equal_p (a01, a11, 0))
8370 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8371 fold_build2_loc (loc, code, type, a00, a10),
8372 a01);
8375 /* See if we can build a range comparison. */
8376 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8377 return tem;
8379 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8380 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8382 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8383 if (tem)
8384 return fold_build2_loc (loc, code, type, tem, arg1);
8387 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8388 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8390 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8391 if (tem)
8392 return fold_build2_loc (loc, code, type, arg0, tem);
8395 /* Check for the possibility of merging component references. If our
8396 lhs is another similar operation, try to merge its rhs with our
8397 rhs. Then try to merge our lhs and rhs. */
8398 if (TREE_CODE (arg0) == code
8399 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8400 TREE_OPERAND (arg0, 1), arg1)))
8401 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8403 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8404 return tem;
8406 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8407 && (code == TRUTH_AND_EXPR
8408 || code == TRUTH_ANDIF_EXPR
8409 || code == TRUTH_OR_EXPR
8410 || code == TRUTH_ORIF_EXPR))
8412 enum tree_code ncode, icode;
8414 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8415 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8416 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8418 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8419 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8420 We don't want to pack more than two leaves into a non-IF AND/OR
8421 expression.
8422 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8423 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8424 If the inner right-hand side of the left-hand operand has
8425 side-effects, or isn't simple, then we can't add to it,
8426 as otherwise we might destroy the if-sequence. */
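/* Concretely (assuming a, b and c are simple operands that cannot
   trap):  (a && b) && c  is rewritten as  a && (b AND c),  where
   the inner AND is the non-short-circuiting TRUTH_AND_EXPR.  */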
8427 if (TREE_CODE (arg0) == icode
8428 && simple_operand_p_2 (arg1)
8429 /* Needed for sequence points to handle trapping, and
8430 side-effects. */
8431 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8433 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8434 arg1);
8435 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8436 tem);
8438 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8439 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8440 else if (TREE_CODE (arg1) == icode
8441 && simple_operand_p_2 (arg0)
8442 /* Needed for sequence points to handle trapping, and
8443 side-effects. */
8444 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8446 tem = fold_build2_loc (loc, ncode, type,
8447 arg0, TREE_OPERAND (arg1, 0));
8448 return fold_build2_loc (loc, icode, type, tem,
8449 TREE_OPERAND (arg1, 1));
8451 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8452 into (A OR B).
8453 For sequence point consistency, we need to check for trapping,
8454 and side-effects. */
8455 else if (code == icode && simple_operand_p_2 (arg0)
8456 && simple_operand_p_2 (arg1))
8457 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8460 return NULL_TREE;
8463 /* Fold a binary expression of code CODE and type TYPE with operands
8464 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8465 Return the folded expression if folding is successful. Otherwise,
8466 return NULL_TREE. */
8468 static tree
8469 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8471 enum tree_code compl_code;
8473 if (code == MIN_EXPR)
8474 compl_code = MAX_EXPR;
8475 else if (code == MAX_EXPR)
8476 compl_code = MIN_EXPR;
8477 else
8478 gcc_unreachable ();
8480 /* MIN (MAX (a, b), b) == b. */
8481 if (TREE_CODE (op0) == compl_code
8482 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8483 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8485 /* MIN (MAX (b, a), b) == b. */
8486 if (TREE_CODE (op0) == compl_code
8487 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8488 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8489 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8491 /* MIN (a, MAX (a, b)) == a. */
8492 if (TREE_CODE (op1) == compl_code
8493 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8494 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8495 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8497 /* MIN (a, MAX (b, a)) == a. */
8498 if (TREE_CODE (op1) == compl_code
8499 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8500 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8501 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8503 return NULL_TREE;
8506 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8507 by changing CODE to reduce the magnitude of constants involved in
8508 ARG0 of the comparison.
8509 Returns a canonicalized comparison tree if a simplification was
8510 possible, otherwise returns NULL_TREE.
8511 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8512 valid if signed overflow is undefined. */
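/* Two illustrative rewrites from the cases below:  5 <= x  becomes
   x > 4  after the swap, and  a + 2 > x  becomes  a + 1 >= x  (the
   latter only when signed overflow is undefined).  */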
8514 static tree
8515 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8516 tree arg0, tree arg1,
8517 bool *strict_overflow_p)
8519 enum tree_code code0 = TREE_CODE (arg0);
8520 tree t, cst0 = NULL_TREE;
8521 int sgn0;
8522 bool swap = false;
8524 /* Match A +- CST code arg1 and CST code arg1. We can change the
8525 first form only if overflow is undefined. */
8526 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8527 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8528 /* In principle pointers also have undefined overflow behavior,
8529 but that causes problems elsewhere. */
8530 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8531 && (code0 == MINUS_EXPR
8532 || code0 == PLUS_EXPR)
8533 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8534 || code0 == INTEGER_CST))
8535 return NULL_TREE;
8537 /* Identify the constant in arg0 and its sign. */
8538 if (code0 == INTEGER_CST)
8539 cst0 = arg0;
8540 else
8541 cst0 = TREE_OPERAND (arg0, 1);
8542 sgn0 = tree_int_cst_sgn (cst0);
8544 /* Overflowed constants and zero will cause problems. */
8545 if (integer_zerop (cst0)
8546 || TREE_OVERFLOW (cst0))
8547 return NULL_TREE;
8549 /* See if we can reduce the magnitude of the constant in
8550 arg0 by changing the comparison code. */
8551 if (code0 == INTEGER_CST)
8553 /* CST <= arg1 -> CST-1 < arg1. */
8554 if (code == LE_EXPR && sgn0 == 1)
8555 code = LT_EXPR;
8556 /* -CST < arg1 -> -CST-1 <= arg1. */
8557 else if (code == LT_EXPR && sgn0 == -1)
8558 code = LE_EXPR;
8559 /* CST > arg1 -> CST-1 >= arg1. */
8560 else if (code == GT_EXPR && sgn0 == 1)
8561 code = GE_EXPR;
8562 /* -CST >= arg1 -> -CST-1 > arg1. */
8563 else if (code == GE_EXPR && sgn0 == -1)
8564 code = GT_EXPR;
8565 else
8566 return NULL_TREE;
8567 /* arg1 code' CST' might be more canonical. */
8568 swap = true;
8570 else
8572 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8573 if (code == LT_EXPR
8574 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8575 code = LE_EXPR;
8576 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8577 else if (code == GT_EXPR
8578 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8579 code = GE_EXPR;
8580 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8581 else if (code == LE_EXPR
8582 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8583 code = LT_EXPR;
8584 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8585 else if (code == GE_EXPR
8586 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8587 code = GT_EXPR;
8588 else
8589 return NULL_TREE;
8590 *strict_overflow_p = true;
8593 /* Now build the constant reduced in magnitude. But not if that
8594 would produce one outside of its type's range. */
8595 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8596 && ((sgn0 == 1
8597 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8598 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8599 || (sgn0 == -1
8600 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8601 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8602 /* We cannot swap the comparison here as that would cause us to
8603 endlessly recurse. */
8604 return NULL_TREE;
8606 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8607 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8608 if (code0 != INTEGER_CST)
8609 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8610 t = fold_convert (TREE_TYPE (arg1), t);
8612 /* If swapping might yield a more canonical form, do so. */
8613 if (swap)
8614 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8615 else
8616 return fold_build2_loc (loc, code, type, t, arg1);
8619 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8620 overflow further. Try to decrease the magnitude of constants involved
8621 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8622 and put sole constants at the second argument position.
8623 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8625 static tree
8626 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8627 tree arg0, tree arg1)
8629 tree t;
8630 bool strict_overflow_p;
8631 const char * const warnmsg = G_("assuming signed overflow does not occur "
8632 "when reducing constant in comparison");
8634 /* Try canonicalization by simplifying arg0. */
8635 strict_overflow_p = false;
8636 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8637 &strict_overflow_p);
8638 if (t)
8640 if (strict_overflow_p)
8641 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8642 return t;
8645 /* Try canonicalization by simplifying arg1 using the swapped
8646 comparison. */
8647 code = swap_tree_comparison (code);
8648 strict_overflow_p = false;
8649 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8650 &strict_overflow_p);
8651 if (t && strict_overflow_p)
8652 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8653 return t;
8656 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8657 space. This is used to avoid issuing overflow warnings for
8658 expressions like &p->x which cannot wrap. */
8660 static bool
8661 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8663 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8664 return true;
8666 if (bitpos < 0)
8667 return true;
8669 wide_int wi_offset;
8670 int precision = TYPE_PRECISION (TREE_TYPE (base));
8671 if (offset == NULL_TREE)
8672 wi_offset = wi::zero (precision);
8673 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8674 return true;
8675 else
8676 wi_offset = offset;
8678 bool overflow;
8679 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8680 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8681 if (overflow)
8682 return true;
8684 if (!wi::fits_uhwi_p (total))
8685 return true;
8687 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8688 if (size <= 0)
8689 return true;
8691 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8692 array. */
8693 if (TREE_CODE (base) == ADDR_EXPR)
8695 HOST_WIDE_INT base_size;
8697 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8698 if (base_size > 0 && size < base_size)
8699 size = base_size;
8702 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8705 /* Return the HOST_WIDE_INT least significant bits of T, an
8706 INTEGER_CST of sizetype kind. This makes sure to properly
8707 sign-extend the constant. */
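/* E.g. with 32-bit precision and a 64-bit HOST_WIDE_INT (both
   assumptions), a low element of 0xffffffff comes back as -1.  */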
8709 static HOST_WIDE_INT
8710 size_low_cst (const_tree t)
8712 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8713 int prec = TYPE_PRECISION (TREE_TYPE (t));
8714 if (prec < HOST_BITS_PER_WIDE_INT)
8715 return sext_hwi (w, prec);
8716 return w;
8719 /* Subroutine of fold_binary. This routine performs all of the
8720 transformations that are common to the equality/inequality
8721 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8722 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8723 fold_binary should call fold_binary rather than this function. Fold a comparison with
8724 tree code CODE and type TYPE with operands OP0 and OP1. Return
8725 the folded comparison or NULL_TREE. */
8727 static tree
8728 fold_comparison (location_t loc, enum tree_code code, tree type,
8729 tree op0, tree op1)
8731 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8732 tree arg0, arg1, tem;
8734 arg0 = op0;
8735 arg1 = op1;
8737 STRIP_SIGN_NOPS (arg0);
8738 STRIP_SIGN_NOPS (arg1);
8740 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
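/* E.g.  x + 5 < 10  becomes  x < 5  (always for ==/!=, and for the
   ordering codes only when signed overflow is undefined).  */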
8741 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8742 && (equality_code
8743 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8744 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8745 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8746 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8747 && TREE_CODE (arg1) == INTEGER_CST
8748 && !TREE_OVERFLOW (arg1))
8750 const enum tree_code
8751 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8752 tree const1 = TREE_OPERAND (arg0, 1);
8753 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8754 tree variable = TREE_OPERAND (arg0, 0);
8755 tree new_const = int_const_binop (reverse_op, const2, const1);
8757 /* If the constant operation overflowed this can be
8758 simplified as a comparison against INT_MAX/INT_MIN. */
8759 if (TREE_OVERFLOW (new_const)
8760 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8762 int const1_sgn = tree_int_cst_sgn (const1);
8763 enum tree_code code2 = code;
8765 /* Get the sign of the constant on the lhs if the
8766 operation were VARIABLE + CONST1. */
8767 if (TREE_CODE (arg0) == MINUS_EXPR)
8768 const1_sgn = -const1_sgn;
8770 /* The sign of the constant determines if we overflowed
8771 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8772 Canonicalize to the INT_MIN overflow by swapping the comparison
8773 if necessary. */
8774 if (const1_sgn == -1)
8775 code2 = swap_tree_comparison (code);
8777 /* We now can look at the canonicalized case
8778 VARIABLE + 1 CODE2 INT_MIN
8779 and decide on the result. */
8780 switch (code2)
8782 case EQ_EXPR:
8783 case LT_EXPR:
8784 case LE_EXPR:
8785 return
8786 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8788 case NE_EXPR:
8789 case GE_EXPR:
8790 case GT_EXPR:
8791 return
8792 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8794 default:
8795 gcc_unreachable ();
8798 else
8800 if (!equality_code)
8801 fold_overflow_warning ("assuming signed overflow does not occur "
8802 "when changing X +- C1 cmp C2 to "
8803 "X cmp C2 -+ C1",
8804 WARN_STRICT_OVERFLOW_COMPARISON);
8805 return fold_build2_loc (loc, code, type, variable, new_const);
8809 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8810 if (TREE_CODE (arg0) == MINUS_EXPR
8811 && equality_code
8812 && integer_zerop (arg1))
8814 /* ??? The transformation is valid for the other operators if overflow
8815 is undefined for the type, but performing it here badly interacts
8816 with the transformation in fold_cond_expr_with_comparison which
8817 attempts to synthesize ABS_EXPR. */
8818 if (!equality_code)
8819 fold_overflow_warning ("assuming signed overflow does not occur "
8820 "when changing X - Y cmp 0 to X cmp Y",
8821 WARN_STRICT_OVERFLOW_COMPARISON);
8822 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8823 TREE_OPERAND (arg0, 1));
8826 /* For comparisons of pointers we can decompose them to a compile time
8827 comparison of the base objects and the offsets into the object.
8828 This requires at least one operand being an ADDR_EXPR or a
8829 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
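/* E.g. for  int a[4],  the comparison  &a[1] > &a[0]  decomposes to
   equal bases with bit positions 32 and 0 (assuming 32-bit int) and
   folds to true via the constant_boolean_node cases below.  */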
8830 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8831 && (TREE_CODE (arg0) == ADDR_EXPR
8832 || TREE_CODE (arg1) == ADDR_EXPR
8833 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8834 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8836 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8837 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8838 machine_mode mode;
8839 int volatilep, unsignedp;
8840 bool indirect_base0 = false, indirect_base1 = false;
8842 /* Get base and offset for the access. Strip ADDR_EXPR for
8843 get_inner_reference, but put it back by stripping INDIRECT_REF
8844 off the base object if possible. indirect_baseN will be true
8845 if baseN is not an address but refers to the object itself. */
8846 base0 = arg0;
8847 if (TREE_CODE (arg0) == ADDR_EXPR)
8849 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8850 &bitsize, &bitpos0, &offset0, &mode,
8851 &unsignedp, &volatilep, false);
8852 if (TREE_CODE (base0) == INDIRECT_REF)
8853 base0 = TREE_OPERAND (base0, 0);
8854 else
8855 indirect_base0 = true;
8857 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8859 base0 = TREE_OPERAND (arg0, 0);
8860 STRIP_SIGN_NOPS (base0);
8861 if (TREE_CODE (base0) == ADDR_EXPR)
8863 base0 = TREE_OPERAND (base0, 0);
8864 indirect_base0 = true;
8866 offset0 = TREE_OPERAND (arg0, 1);
8867 if (tree_fits_shwi_p (offset0))
8869 HOST_WIDE_INT off = size_low_cst (offset0);
8870 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8871 * BITS_PER_UNIT)
8872 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8874 bitpos0 = off * BITS_PER_UNIT;
8875 offset0 = NULL_TREE;
8880 base1 = arg1;
8881 if (TREE_CODE (arg1) == ADDR_EXPR)
8883 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8884 &bitsize, &bitpos1, &offset1, &mode,
8885 &unsignedp, &volatilep, false);
8886 if (TREE_CODE (base1) == INDIRECT_REF)
8887 base1 = TREE_OPERAND (base1, 0);
8888 else
8889 indirect_base1 = true;
8891 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8893 base1 = TREE_OPERAND (arg1, 0);
8894 STRIP_SIGN_NOPS (base1);
8895 if (TREE_CODE (base1) == ADDR_EXPR)
8897 base1 = TREE_OPERAND (base1, 0);
8898 indirect_base1 = true;
8900 offset1 = TREE_OPERAND (arg1, 1);
8901 if (tree_fits_shwi_p (offset1))
8903 HOST_WIDE_INT off = size_low_cst (offset1);
8904 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8905 * BITS_PER_UNIT)
8906 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8908 bitpos1 = off * BITS_PER_UNIT;
8909 offset1 = NULL_TREE;
8914 /* A local variable can never be pointed to by
8915 the default SSA name of an incoming parameter. */
8916 if ((TREE_CODE (arg0) == ADDR_EXPR
8917 && indirect_base0
8918 && TREE_CODE (base0) == VAR_DECL
8919 && auto_var_in_fn_p (base0, current_function_decl)
8920 && !indirect_base1
8921 && TREE_CODE (base1) == SSA_NAME
8922 && SSA_NAME_IS_DEFAULT_DEF (base1)
8923 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8924 || (TREE_CODE (arg1) == ADDR_EXPR
8925 && indirect_base1
8926 && TREE_CODE (base1) == VAR_DECL
8927 && auto_var_in_fn_p (base1, current_function_decl)
8928 && !indirect_base0
8929 && TREE_CODE (base0) == SSA_NAME
8930 && SSA_NAME_IS_DEFAULT_DEF (base0)
8931 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8933 if (code == NE_EXPR)
8934 return constant_boolean_node (1, type);
8935 else if (code == EQ_EXPR)
8936 return constant_boolean_node (0, type);
8938 /* If we have equivalent bases we might be able to simplify. */
8939 else if (indirect_base0 == indirect_base1
8940 && operand_equal_p (base0, base1, 0))
8942 /* We can fold this expression to a constant if the non-constant
8943 offset parts are equal. */
8944 if ((offset0 == offset1
8945 || (offset0 && offset1
8946 && operand_equal_p (offset0, offset1, 0)))
8947 && (code == EQ_EXPR
8948 || code == NE_EXPR
8949 || (indirect_base0 && DECL_P (base0))
8950 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8953 if (!equality_code
8954 && bitpos0 != bitpos1
8955 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8956 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8957 fold_overflow_warning (("assuming pointer wraparound does not "
8958 "occur when comparing P +- C1 with "
8959 "P +- C2"),
8960 WARN_STRICT_OVERFLOW_CONDITIONAL);
8962 switch (code)
8964 case EQ_EXPR:
8965 return constant_boolean_node (bitpos0 == bitpos1, type);
8966 case NE_EXPR:
8967 return constant_boolean_node (bitpos0 != bitpos1, type);
8968 case LT_EXPR:
8969 return constant_boolean_node (bitpos0 < bitpos1, type);
8970 case LE_EXPR:
8971 return constant_boolean_node (bitpos0 <= bitpos1, type);
8972 case GE_EXPR:
8973 return constant_boolean_node (bitpos0 >= bitpos1, type);
8974 case GT_EXPR:
8975 return constant_boolean_node (bitpos0 > bitpos1, type);
8976 default:;
8979 /* We can simplify the comparison to a comparison of the variable
8980 offset parts if the constant offset parts are equal.
8981 Be careful to use signed sizetype here because otherwise we
8982 mess with array offsets in the wrong way. This is possible
8983 because pointer arithmetic is restricted to retain within an
8984 object and overflow on pointer differences is undefined as of
8985 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8986 else if (bitpos0 == bitpos1
8987 && (equality_code
8988 || (indirect_base0 && DECL_P (base0))
8989 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8991 /* By converting to signed sizetype we cover middle-end pointer
8992 arithmetic, which operates on unsigned pointer types of sizetype's
8993 size, and ARRAY_REF offsets, which are properly sign or
8994 zero extended from their type in case it is narrower than
8995 sizetype. */
8996 if (offset0 == NULL_TREE)
8997 offset0 = build_int_cst (ssizetype, 0);
8998 else
8999 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9000 if (offset1 == NULL_TREE)
9001 offset1 = build_int_cst (ssizetype, 0);
9002 else
9003 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9005 if (!equality_code
9006 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9007 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9008 fold_overflow_warning (("assuming pointer wraparound does not "
9009 "occur when comparing P +- C1 with "
9010 "P +- C2"),
9011 WARN_STRICT_OVERFLOW_COMPARISON);
9013 return fold_build2_loc (loc, code, type, offset0, offset1);
9016 /* For non-equal bases we can simplify if they are addresses of
9017 declarations with different addresses. */
9018 else if (indirect_base0 && indirect_base1
9019 /* We know that !operand_equal_p (base0, base1, 0)
9020 because the if condition was false. But make
9021 sure the two decls are not the same. */
9022 && base0 != base1
9023 && TREE_CODE (arg0) == ADDR_EXPR
9024 && TREE_CODE (arg1) == ADDR_EXPR
9025 && DECL_P (base0)
9026 && DECL_P (base1)
9027 /* Watch for aliases. */
9028 && (!decl_in_symtab_p (base0)
9029 || !decl_in_symtab_p (base1)
9030 || !symtab_node::get_create (base0)->equal_address_to
9031 (symtab_node::get_create (base1))))
9033 if (code == EQ_EXPR)
9034 return omit_two_operands_loc (loc, type, boolean_false_node,
9035 arg0, arg1);
9036 else if (code == NE_EXPR)
9037 return omit_two_operands_loc (loc, type, boolean_true_node,
9038 arg0, arg1);
9040 /* For equal offsets we can simplify to a comparison of the
9041 base addresses. */
9042 else if (bitpos0 == bitpos1
9043 && (indirect_base0
9044 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9045 && (indirect_base1
9046 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9047 && ((offset0 == offset1)
9048 || (offset0 && offset1
9049 && operand_equal_p (offset0, offset1, 0))))
9051 if (indirect_base0)
9052 base0 = build_fold_addr_expr_loc (loc, base0);
9053 if (indirect_base1)
9054 base1 = build_fold_addr_expr_loc (loc, base1);
9055 return fold_build2_loc (loc, code, type, base0, base1);
9059 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9060 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9061 the resulting offset is smaller in absolute value than the
9062 original one and has the same sign. */
9063 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9064 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9065 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9066 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9067 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9068 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9069 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9070 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9072 tree const1 = TREE_OPERAND (arg0, 1);
9073 tree const2 = TREE_OPERAND (arg1, 1);
9074 tree variable1 = TREE_OPERAND (arg0, 0);
9075 tree variable2 = TREE_OPERAND (arg1, 0);
9076 tree cst;
9077 const char * const warnmsg = G_("assuming signed overflow does not "
9078 "occur when combining constants around "
9079 "a comparison");
9081 /* Put the constant on the side where it doesn't overflow and is
9082 of lower absolute value and of the same sign as before. */
9083 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9084 ? MINUS_EXPR : PLUS_EXPR,
9085 const2, const1);
9086 if (!TREE_OVERFLOW (cst)
9087 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9088 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9090 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9091 return fold_build2_loc (loc, code, type,
9092 variable1,
9093 fold_build2_loc (loc, TREE_CODE (arg1),
9094 TREE_TYPE (arg1),
9095 variable2, cst));
9098 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9099 ? MINUS_EXPR : PLUS_EXPR,
9100 const1, const2);
9101 if (!TREE_OVERFLOW (cst)
9102 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9103 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9105 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9106 return fold_build2_loc (loc, code, type,
9107 fold_build2_loc (loc, TREE_CODE (arg0),
9108 TREE_TYPE (arg0),
9109 variable1, cst),
9110 variable2);
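/* Worked example (illustrative, signed int x and y with undefined
   overflow): "x + 10 < y + 3" folds to "x + 7 < y".  The combined
   constant 7 is smaller in absolute value than 10 and has the same
   sign, so the rewrite cannot introduce a new overflow.  */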
9114 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9115 signed arithmetic case. That form is created by the compiler
9116 often enough for folding it to be of value. One example is in
9117 computing loop trip counts after Operator Strength Reduction. */
9118 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9119 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9120 && TREE_CODE (arg0) == MULT_EXPR
9121 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9122 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9123 && integer_zerop (arg1))
9125 tree const1 = TREE_OPERAND (arg0, 1);
9126 tree const2 = arg1; /* zero */
9127 tree variable1 = TREE_OPERAND (arg0, 0);
9128 enum tree_code cmp_code = code;
9130 /* Handle unfolded multiplication by zero. */
9131 if (integer_zerop (const1))
9132 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9134 fold_overflow_warning (("assuming signed overflow does not occur when "
9135 "eliminating multiplication in comparison "
9136 "with zero"),
9137 WARN_STRICT_OVERFLOW_COMPARISON);
9139 /* If const1 is negative we swap the sense of the comparison. */
9140 if (tree_int_cst_sgn (const1) < 0)
9141 cmp_code = swap_tree_comparison (cmp_code);
9143 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
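/* For example (illustrative, signed int x): "x * 8 < 0" folds to
   "x < 0", and "x * -8 < 0" folds to "x > 0", the comparison sense
   being swapped for a negative multiplier.  Both rely on signed
   overflow being undefined.  */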
9146 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9147 if (tem)
9148 return tem;
9150 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9152 tree targ0 = strip_float_extensions (arg0);
9153 tree targ1 = strip_float_extensions (arg1);
9154 tree newtype = TREE_TYPE (targ0);
9156 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9157 newtype = TREE_TYPE (targ1);
9159 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9160 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9161 return fold_build2_loc (loc, code, type,
9162 fold_convert_loc (loc, newtype, targ0),
9163 fold_convert_loc (loc, newtype, targ1));
9165 /* (-a) CMP (-b) -> b CMP a */
9166 if (TREE_CODE (arg0) == NEGATE_EXPR
9167 && TREE_CODE (arg1) == NEGATE_EXPR)
9168 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9169 TREE_OPERAND (arg0, 0));
9171 if (TREE_CODE (arg1) == REAL_CST)
9173 REAL_VALUE_TYPE cst;
9174 cst = TREE_REAL_CST (arg1);
9176 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9177 if (TREE_CODE (arg0) == NEGATE_EXPR)
9178 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9179 TREE_OPERAND (arg0, 0),
9180 build_real (TREE_TYPE (arg1),
9181 real_value_negate (&cst)));
9183 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9184 /* a CMP (-0) -> a CMP 0 */
9185 if (REAL_VALUE_MINUS_ZERO (cst))
9186 return fold_build2_loc (loc, code, type, arg0,
9187 build_real (TREE_TYPE (arg1), dconst0));
9189 /* x != NaN is always true, other ops are always false. */
9190 if (REAL_VALUE_ISNAN (cst)
9191 && ! HONOR_SNANS (arg1))
9193 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9194 return omit_one_operand_loc (loc, type, tem, arg0);
9197 /* Fold comparisons against infinity. */
9198 if (REAL_VALUE_ISINF (cst)
9199 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9201 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9202 if (tem != NULL_TREE)
9203 return tem;
9207 /* If this is a comparison of a real constant with a PLUS_EXPR
9208 or a MINUS_EXPR of a real constant, we can convert it into a
9209 comparison with a revised real constant as long as no overflow
9210 occurs when unsafe_math_optimizations are enabled. */
9211 if (flag_unsafe_math_optimizations
9212 && TREE_CODE (arg1) == REAL_CST
9213 && (TREE_CODE (arg0) == PLUS_EXPR
9214 || TREE_CODE (arg0) == MINUS_EXPR)
9215 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9216 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9217 ? MINUS_EXPR : PLUS_EXPR,
9218 arg1, TREE_OPERAND (arg0, 1)))
9219 && !TREE_OVERFLOW (tem))
9220 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9222 /* Likewise, we can simplify a comparison of a real constant with
9223 a MINUS_EXPR whose first operand is also a real constant, i.e.
9224 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9225 floating-point types only if -fassociative-math is set. */
9226 if (flag_associative_math
9227 && TREE_CODE (arg1) == REAL_CST
9228 && TREE_CODE (arg0) == MINUS_EXPR
9229 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9230 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9231 arg1))
9232 && !TREE_OVERFLOW (tem))
9233 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9234 TREE_OPERAND (arg0, 1), tem);
9236 /* Fold comparisons against built-in math functions. */
9237 if (TREE_CODE (arg1) == REAL_CST
9238 && flag_unsafe_math_optimizations
9239 && ! flag_errno_math)
9241 enum built_in_function fcode = builtin_mathfn_code (arg0);
9243 if (fcode != END_BUILTINS)
9245 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9246 if (tem != NULL_TREE)
9247 return tem;
9252 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9253 && CONVERT_EXPR_P (arg0))
9255 /* If we are widening one operand of an integer comparison,
9256 see if the other operand is similarly being widened. Perhaps we
9257 can do the comparison in the narrower type. */
9258 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9259 if (tem)
9260 return tem;
9262 /* Or if we are changing signedness. */
9263 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9264 if (tem)
9265 return tem;
9268 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9269 constant, we can simplify it. */
9270 if (TREE_CODE (arg1) == INTEGER_CST
9271 && (TREE_CODE (arg0) == MIN_EXPR
9272 || TREE_CODE (arg0) == MAX_EXPR)
9273 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9275 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9276 if (tem)
9277 return tem;
9280 /* Simplify comparison of something with itself. (For IEEE
9281 floating-point, we can only do some of these simplifications.) */
9282 if (operand_equal_p (arg0, arg1, 0))
9284 switch (code)
9286 case EQ_EXPR:
9287 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9288 || ! HONOR_NANS (arg0))
9289 return constant_boolean_node (1, type);
9290 break;
9292 case GE_EXPR:
9293 case LE_EXPR:
9294 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9295 || ! HONOR_NANS (arg0))
9296 return constant_boolean_node (1, type);
9297 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9299 case NE_EXPR:
9300 /* For NE, we can only do this simplification if the type is integer
9301 or we don't honor IEEE floating point NaNs. */
9302 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9303 && HONOR_NANS (arg0))
9304 break;
9305 /* ... fall through ... */
9306 case GT_EXPR:
9307 case LT_EXPR:
9308 return constant_boolean_node (0, type);
9309 default:
9310 gcc_unreachable ();
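/* Illustrative cases of the switch above: for integral x, "x == x"
   and "x <= x" fold to 1 and "x < x" folds to 0.  For a double d with
   NaNs honored, "d == d" and "d != d" are left alone, "d >= d"
   becomes "d == d", and "d < d" still folds to 0 (false even for
   NaN).  */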
9314 /* If we are comparing an expression that just has comparisons
9315 of two integer values, arithmetic expressions of those comparisons,
9316 and constants, we can simplify it. There are only three cases
9317 to check: the two values can either be equal, the first can be
9318 greater, or the second can be greater. Fold the expression for
9319 those three values. Since each value must be 0 or 1, we have
9320 eight possibilities, each of which corresponds to the constant 0
9321 or 1 or one of the six possible comparisons.
9323 This handles common cases like (a > b) == 0 but also handles
9324 expressions like ((x > y) - (y > x)) > 0, which supposedly
9325 occur in macroized code. */
9327 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9329 tree cval1 = 0, cval2 = 0;
9330 int save_p = 0;
9332 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9333 /* Don't handle degenerate cases here; they should already
9334 have been handled anyway. */
9335 && cval1 != 0 && cval2 != 0
9336 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9337 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9338 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9339 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9340 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9341 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9342 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9344 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9345 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9347 /* We can't just pass T to eval_subst in case cval1 or cval2
9348 was the same as ARG1. */
9350 tree high_result
9351 = fold_build2_loc (loc, code, type,
9352 eval_subst (loc, arg0, cval1, maxval,
9353 cval2, minval),
9354 arg1);
9355 tree equal_result
9356 = fold_build2_loc (loc, code, type,
9357 eval_subst (loc, arg0, cval1, maxval,
9358 cval2, maxval),
9359 arg1);
9360 tree low_result
9361 = fold_build2_loc (loc, code, type,
9362 eval_subst (loc, arg0, cval1, minval,
9363 cval2, maxval),
9364 arg1);
9366 /* All three of these results should be 0 or 1. Confirm they are.
9367 Then use those values to select the proper code to use. */
9369 if (TREE_CODE (high_result) == INTEGER_CST
9370 && TREE_CODE (equal_result) == INTEGER_CST
9371 && TREE_CODE (low_result) == INTEGER_CST)
9373 /* Make a 3-bit mask with the high-order bit being the
9374 value for `>', the next for `=', and the low for `<'. */
9375 switch ((integer_onep (high_result) * 4)
9376 + (integer_onep (equal_result) * 2)
9377 + integer_onep (low_result))
9379 case 0:
9380 /* Always false. */
9381 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9382 case 1:
9383 code = LT_EXPR;
9384 break;
9385 case 2:
9386 code = EQ_EXPR;
9387 break;
9388 case 3:
9389 code = LE_EXPR;
9390 break;
9391 case 4:
9392 code = GT_EXPR;
9393 break;
9394 case 5:
9395 code = NE_EXPR;
9396 break;
9397 case 6:
9398 code = GE_EXPR;
9399 break;
9400 case 7:
9401 /* Always true. */
9402 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9405 if (save_p)
9407 tem = save_expr (build2 (code, type, cval1, cval2));
9408 SET_EXPR_LOCATION (tem, loc);
9409 return tem;
9411 return fold_build2_loc (loc, code, type, cval1, cval2);
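/* Two illustrative instances of the 3-bit mask scheme: for
   "(a > b) == 0" the high/equal/low results are 0/1/1, mask 3, so the
   expression folds to "a <= b"; for "((x > y) - (y > x)) > 0" they
   are 1/0/0, mask 4, giving "x > y".  */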
9416 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9417 into a single range test. */
9418 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9419 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9420 && TREE_CODE (arg1) == INTEGER_CST
9421 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9422 && !integer_zerop (TREE_OPERAND (arg0, 1))
9423 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9424 && !TREE_OVERFLOW (arg1))
9426 tem = fold_div_compare (loc, code, type, arg0, arg1);
9427 if (tem != NULL_TREE)
9428 return tem;
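/* For example (illustrative): "x / 3 == 2" holds exactly for x in
   [6, 8] under truncating division, so it becomes a range test,
   typically of the form (unsigned) (x - 6) <= 2.  */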
9431 /* Fold ~X op ~Y as Y op X. */
9432 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9433 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9435 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9436 return fold_build2_loc (loc, code, type,
9437 fold_convert_loc (loc, cmp_type,
9438 TREE_OPERAND (arg1, 0)),
9439 TREE_OPERAND (arg0, 0));
9442 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9443 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9444 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9446 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9447 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9448 TREE_OPERAND (arg0, 0),
9449 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9450 fold_convert_loc (loc, cmp_type, arg1)));
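/* Illustrative instances of the two BIT_NOT folds above: "~x < ~y"
   becomes "y < x" (~ is strictly decreasing), and "~x < 5" becomes
   "x > ~5", i.e. "x > -6" for a signed type.  */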
9453 return NULL_TREE;
9457 /* Subroutine of fold_binary. Optimize complex multiplications of the
9458 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9459 argument EXPR represents the expression "z" of type TYPE. */
9461 static tree
9462 fold_mult_zconjz (location_t loc, tree type, tree expr)
9464 tree itype = TREE_TYPE (type);
9465 tree rpart, ipart, tem;
9467 if (TREE_CODE (expr) == COMPLEX_EXPR)
9469 rpart = TREE_OPERAND (expr, 0);
9470 ipart = TREE_OPERAND (expr, 1);
9472 else if (TREE_CODE (expr) == COMPLEX_CST)
9474 rpart = TREE_REALPART (expr);
9475 ipart = TREE_IMAGPART (expr);
9477 else
9479 expr = save_expr (expr);
9480 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9481 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9484 rpart = save_expr (rpart);
9485 ipart = save_expr (ipart);
9486 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9487 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9488 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9489 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9490 build_zero_cst (itype));
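/* Illustrative sketch, not part of the original sources: the effect
   of fold_mult_zconjz written out by hand for a GNU C complex
   integer; the function name is made up.  */

static int _Complex
zconjz_example (int _Complex z)
{
  int r = __real__ z, i = __imag__ z;
  /* z * conj (z) has real part r*r + i*i and imaginary part zero,
     so no cross-term multiplications are needed.  */
  return r * r + i * i;
}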
9494 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9495 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9496 guarantees that P and N have the same least significant log2(M) bits.
9497 N is not otherwise constrained. In particular, N is not normalized to
9498 0 <= N < M as is common. In general, the precise value of P is unknown.
9499 M is chosen as large as possible such that constant N can be determined.
9501 Returns M and sets *RESIDUE to N.
9503 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9504 account. This is not always possible due to PR 35705.
9507 static unsigned HOST_WIDE_INT
9508 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9509 bool allow_func_align)
9511 enum tree_code code;
9513 *residue = 0;
9515 code = TREE_CODE (expr);
9516 if (code == ADDR_EXPR)
9518 unsigned int bitalign;
9519 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9520 *residue /= BITS_PER_UNIT;
9521 return bitalign / BITS_PER_UNIT;
9523 else if (code == POINTER_PLUS_EXPR)
9525 tree op0, op1;
9526 unsigned HOST_WIDE_INT modulus;
9527 enum tree_code inner_code;
9529 op0 = TREE_OPERAND (expr, 0);
9530 STRIP_NOPS (op0);
9531 modulus = get_pointer_modulus_and_residue (op0, residue,
9532 allow_func_align);
9534 op1 = TREE_OPERAND (expr, 1);
9535 STRIP_NOPS (op1);
9536 inner_code = TREE_CODE (op1);
9537 if (inner_code == INTEGER_CST)
9539 *residue += TREE_INT_CST_LOW (op1);
9540 return modulus;
9542 else if (inner_code == MULT_EXPR)
9544 op1 = TREE_OPERAND (op1, 1);
9545 if (TREE_CODE (op1) == INTEGER_CST)
9547 unsigned HOST_WIDE_INT align;
9549 /* Compute the greatest power-of-2 divisor of op1. */
9550 align = TREE_INT_CST_LOW (op1);
9551 align &= -align;
9553 /* If align is non-zero and less than *modulus, replace
9554 *modulus with align. If align is 0, then either op1 is 0
9555 or the greatest power-of-2 divisor of op1 doesn't fit in an
9556 unsigned HOST_WIDE_INT. In either case, no additional
9557 constraint is imposed. */
9558 if (align)
9559 modulus = MIN (modulus, align);
9561 return modulus;
9566 /* If we get here, we were unable to determine anything useful about the
9567 expression. */
9568 return 1;
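/* Worked example (illustrative): for a 16-byte-aligned object "a",
   the pointer &a p+ (i * 4) p+ 1 yields modulus 4 and residue 1;
   whatever i is, the low two address bits are known to be 01.  */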
9571 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9572 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9574 static bool
9575 vec_cst_ctor_to_array (tree arg, tree *elts)
9577 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9579 if (TREE_CODE (arg) == VECTOR_CST)
9581 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9582 elts[i] = VECTOR_CST_ELT (arg, i);
9584 else if (TREE_CODE (arg) == CONSTRUCTOR)
9586 constructor_elt *elt;
9588 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9589 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9590 return false;
9591 else
9592 elts[i] = elt->value;
9594 else
9595 return false;
9596 for (; i < nelts; i++)
9597 elts[i]
9598 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9599 return true;
9602 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9603 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9604 NULL_TREE otherwise. */
9606 static tree
9607 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9609 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9610 tree *elts;
9611 bool need_ctor = false;
9613 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9614 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9615 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9616 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9617 return NULL_TREE;
9619 elts = XALLOCAVEC (tree, nelts * 3);
9620 if (!vec_cst_ctor_to_array (arg0, elts)
9621 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9622 return NULL_TREE;
9624 for (i = 0; i < nelts; i++)
9626 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9627 need_ctor = true;
9628 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9631 if (need_ctor)
9633 vec<constructor_elt, va_gc> *v;
9634 vec_alloc (v, nelts);
9635 for (i = 0; i < nelts; i++)
9636 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9637 return build_constructor (type, v);
9639 else
9640 return build_vector (type, &elts[2 * nelts]);
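/* Illustrative sketch, not from the original sources: the selector
   convention used above, written as plain C with made-up names.
   Indices 0..nelts-1 pick from the first input and nelts..2*nelts-1
   from the second, so for nelts == 4 the selector {0, 4, 1, 5}
   interleaves the low halves of the two inputs.  */

static void
vec_perm_example (const int *arg0, const int *arg1, int *out,
		  unsigned int nelts, const unsigned char *sel)
{
  for (unsigned int i = 0; i < nelts; i++)
    out[i] = sel[i] < nelts ? arg0[sel[i]] : arg1[sel[i] - nelts];
}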
9643 /* Try to fold a pointer difference of type TYPE between two address expressions of
9644 array references AREF0 and AREF1 using location LOC. Return a
9645 simplified expression for the difference or NULL_TREE. */
9647 static tree
9648 fold_addr_of_array_ref_difference (location_t loc, tree type,
9649 tree aref0, tree aref1)
9651 tree base0 = TREE_OPERAND (aref0, 0);
9652 tree base1 = TREE_OPERAND (aref1, 0);
9653 tree base_offset = build_int_cst (type, 0);
9655 /* If the bases are array references as well, recurse. If the bases
9656 are pointer indirections, compute the difference of the pointers.
9657 If the bases are equal, we are set. */
9658 if ((TREE_CODE (base0) == ARRAY_REF
9659 && TREE_CODE (base1) == ARRAY_REF
9660 && (base_offset
9661 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9662 || (INDIRECT_REF_P (base0)
9663 && INDIRECT_REF_P (base1)
9664 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9665 TREE_OPERAND (base0, 0),
9666 TREE_OPERAND (base1, 0))))
9667 || operand_equal_p (base0, base1, 0))
9669 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9670 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9671 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9672 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9673 return fold_build2_loc (loc, PLUS_EXPR, type,
9674 base_offset,
9675 fold_build2_loc (loc, MULT_EXPR, type,
9676 diff, esz));
9678 return NULL_TREE;
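/* For example (illustrative, assuming 4-byte int): for "int a[10]"
   the byte-offset difference computed here for &a[i] - &a[j] folds to
   (i - j) * 4; after the frontend divides by the element size, the
   C-level pointer difference is i - j.  The recursion above handles
   nested cases such as &a[i][k] - &a[j][k].  */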
9681 /* If the real or vector real constant CST of type TYPE has an exact
9682 inverse, return it, else return NULL. */
9684 tree
9685 exact_inverse (tree type, tree cst)
9687 REAL_VALUE_TYPE r;
9688 tree unit_type, *elts;
9689 machine_mode mode;
9690 unsigned vec_nelts, i;
9692 switch (TREE_CODE (cst))
9694 case REAL_CST:
9695 r = TREE_REAL_CST (cst);
9697 if (exact_real_inverse (TYPE_MODE (type), &r))
9698 return build_real (type, r);
9700 return NULL_TREE;
9702 case VECTOR_CST:
9703 vec_nelts = VECTOR_CST_NELTS (cst);
9704 elts = XALLOCAVEC (tree, vec_nelts);
9705 unit_type = TREE_TYPE (type);
9706 mode = TYPE_MODE (unit_type);
9708 for (i = 0; i < vec_nelts; i++)
9710 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9711 if (!exact_real_inverse (mode, &r))
9712 return NULL_TREE;
9713 elts[i] = build_real (unit_type, r);
9716 return build_vector (type, elts);
9718 default:
9719 return NULL_TREE;
9723 /* Mask out the tz least significant bits of X of type TYPE where
9724 tz is the number of trailing zeroes in Y. */
9725 static wide_int
9726 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9728 int tz = wi::ctz (y);
9729 if (tz > 0)
9730 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9731 return x;
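/* Minimal illustration, not part of the original sources: the same
   masking on plain 32-bit unsigned integers, with a made-up name.
   For y == 24 (three trailing zero bits) this returns x & ~7u.  */

static unsigned int
mask_with_tz_example (unsigned int x, unsigned int y)
{
  if (y == 0)
    return x;			  /* No trailing-zero information.  */
  int tz = __builtin_ctz (y);	  /* Number of trailing zero bits.  */
  return x & ~((1u << tz) - 1u);  /* Clear the TZ low bits of X.  */
}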
9734 /* Return true when T is an address and is known to be nonzero.
9735 For floating point we further ensure that T is not denormal.
9736 Similar logic is present in nonzero_address in rtlanal.h.
9738 If the return value is based on the assumption that signed overflow
9739 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9740 change *STRICT_OVERFLOW_P. */
9742 static bool
9743 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9745 tree type = TREE_TYPE (t);
9746 enum tree_code code;
9748 /* Doing something useful for floating point would need more work. */
9749 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9750 return false;
9752 code = TREE_CODE (t);
9753 switch (TREE_CODE_CLASS (code))
9755 case tcc_unary:
9756 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9757 strict_overflow_p);
9758 case tcc_binary:
9759 case tcc_comparison:
9760 return tree_binary_nonzero_warnv_p (code, type,
9761 TREE_OPERAND (t, 0),
9762 TREE_OPERAND (t, 1),
9763 strict_overflow_p);
9764 case tcc_constant:
9765 case tcc_declaration:
9766 case tcc_reference:
9767 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9769 default:
9770 break;
9773 switch (code)
9775 case TRUTH_NOT_EXPR:
9776 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9777 strict_overflow_p);
9779 case TRUTH_AND_EXPR:
9780 case TRUTH_OR_EXPR:
9781 case TRUTH_XOR_EXPR:
9782 return tree_binary_nonzero_warnv_p (code, type,
9783 TREE_OPERAND (t, 0),
9784 TREE_OPERAND (t, 1),
9785 strict_overflow_p);
9787 case COND_EXPR:
9788 case CONSTRUCTOR:
9789 case OBJ_TYPE_REF:
9790 case ASSERT_EXPR:
9791 case ADDR_EXPR:
9792 case WITH_SIZE_EXPR:
9793 case SSA_NAME:
9794 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9796 case COMPOUND_EXPR:
9797 case MODIFY_EXPR:
9798 case BIND_EXPR:
9799 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9800 strict_overflow_p);
9802 case SAVE_EXPR:
9803 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9804 strict_overflow_p);
9806 case CALL_EXPR:
9808 tree fndecl = get_callee_fndecl (t);
9809 if (!fndecl) return false;
9810 if (flag_delete_null_pointer_checks && !flag_check_new
9811 && DECL_IS_OPERATOR_NEW (fndecl)
9812 && !TREE_NOTHROW (fndecl))
9813 return true;
9814 if (flag_delete_null_pointer_checks
9815 && lookup_attribute ("returns_nonnull",
9816 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9817 return true;
9818 return alloca_call_p (t);
9821 default:
9822 break;
9824 return false;
9827 /* Return true when T is an address and is known to be nonzero.
9828 Handle warnings about undefined signed overflow. */
9830 static bool
9831 tree_expr_nonzero_p (tree t)
9833 bool ret, strict_overflow_p;
9835 strict_overflow_p = false;
9836 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9837 if (strict_overflow_p)
9838 fold_overflow_warning (("assuming signed overflow does not occur when "
9839 "determining that expression is always "
9840 "non-zero"),
9841 WARN_STRICT_OVERFLOW_MISC);
9842 return ret;
9845 /* Fold a binary expression of code CODE and type TYPE with operands
9846 OP0 and OP1. LOC is the location of the resulting expression.
9847 Return the folded expression if folding is successful. Otherwise,
9848 return NULL_TREE. */
9850 tree
9851 fold_binary_loc (location_t loc,
9852 enum tree_code code, tree type, tree op0, tree op1)
9854 enum tree_code_class kind = TREE_CODE_CLASS (code);
9855 tree arg0, arg1, tem;
9856 tree t1 = NULL_TREE;
9857 bool strict_overflow_p;
9858 unsigned int prec;
9860 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9861 && TREE_CODE_LENGTH (code) == 2
9862 && op0 != NULL_TREE
9863 && op1 != NULL_TREE);
9865 arg0 = op0;
9866 arg1 = op1;
9868 /* Strip any conversions that don't change the mode. This is
9869 safe for every expression, except for a comparison expression
9870 because its signedness is derived from its operands. So, in
9871 the latter case, only strip conversions that don't change the
9872 signedness. MIN_EXPR/MAX_EXPR also need the signedness of their
9873 arguments preserved.
9875 Note that this is done as an internal manipulation within the
9876 constant folder, in order to find the simplest representation
9877 of the arguments so that their form can be studied. In any
9878 case, the appropriate type conversions should be put back in
9879 the tree that will get out of the constant folder. */
9881 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9883 STRIP_SIGN_NOPS (arg0);
9884 STRIP_SIGN_NOPS (arg1);
9886 else
9888 STRIP_NOPS (arg0);
9889 STRIP_NOPS (arg1);
9892 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9893 constant but we can't do arithmetic on them. */
9894 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9896 tem = const_binop (code, type, arg0, arg1);
9897 if (tem != NULL_TREE)
9899 if (TREE_TYPE (tem) != type)
9900 tem = fold_convert_loc (loc, type, tem);
9901 return tem;
9905 /* If this is a commutative operation, and ARG0 is a constant, move it
9906 to ARG1 to reduce the number of tests below. */
9907 if (commutative_tree_code (code)
9908 && tree_swap_operands_p (arg0, arg1, true))
9909 return fold_build2_loc (loc, code, type, op1, op0);
9911 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9912 to ARG1 to reduce the number of tests below. */
9913 if (kind == tcc_comparison
9914 && tree_swap_operands_p (arg0, arg1, true))
9915 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9917 tem = generic_simplify (loc, code, type, op0, op1);
9918 if (tem)
9919 return tem;
9921 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9923 First check for cases where an arithmetic operation is applied to a
9924 compound, conditional, or comparison operation. Push the arithmetic
9925 operation inside the compound or conditional to see if any folding
9926 can then be done. Convert comparison to conditional for this purpose.
9927 This also optimizes non-constant cases that used to be done in
9928 expand_expr.
9930 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9931 one of the operands is a comparison and the other is a comparison, a
9932 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9933 code below would make the expression more complex. Change it to a
9934 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9935 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9937 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9938 || code == EQ_EXPR || code == NE_EXPR)
9939 && TREE_CODE (type) != VECTOR_TYPE
9940 && ((truth_value_p (TREE_CODE (arg0))
9941 && (truth_value_p (TREE_CODE (arg1))
9942 || (TREE_CODE (arg1) == BIT_AND_EXPR
9943 && integer_onep (TREE_OPERAND (arg1, 1)))))
9944 || (truth_value_p (TREE_CODE (arg1))
9945 && (truth_value_p (TREE_CODE (arg0))
9946 || (TREE_CODE (arg0) == BIT_AND_EXPR
9947 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9949 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9950 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9951 : TRUTH_XOR_EXPR,
9952 boolean_type_node,
9953 fold_convert_loc (loc, boolean_type_node, arg0),
9954 fold_convert_loc (loc, boolean_type_node, arg1));
9956 if (code == EQ_EXPR)
9957 tem = invert_truthvalue_loc (loc, tem);
9959 return fold_convert_loc (loc, type, tem);
9962 if (TREE_CODE_CLASS (code) == tcc_binary
9963 || TREE_CODE_CLASS (code) == tcc_comparison)
9965 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9967 tem = fold_build2_loc (loc, code, type,
9968 fold_convert_loc (loc, TREE_TYPE (op0),
9969 TREE_OPERAND (arg0, 1)), op1);
9970 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9971 tem);
9973 if (TREE_CODE (arg1) == COMPOUND_EXPR
9974 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9976 tem = fold_build2_loc (loc, code, type, op0,
9977 fold_convert_loc (loc, TREE_TYPE (op1),
9978 TREE_OPERAND (arg1, 1)));
9979 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9980 tem);
9983 if (TREE_CODE (arg0) == COND_EXPR
9984 || TREE_CODE (arg0) == VEC_COND_EXPR
9985 || COMPARISON_CLASS_P (arg0))
9987 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9988 arg0, arg1,
9989 /*cond_first_p=*/1);
9990 if (tem != NULL_TREE)
9991 return tem;
9994 if (TREE_CODE (arg1) == COND_EXPR
9995 || TREE_CODE (arg1) == VEC_COND_EXPR
9996 || COMPARISON_CLASS_P (arg1))
9998 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9999 arg1, arg0,
10000 /*cond_first_p=*/0);
10001 if (tem != NULL_TREE)
10002 return tem;
10006 switch (code)
10008 case MEM_REF:
10009 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10010 if (TREE_CODE (arg0) == ADDR_EXPR
10011 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10013 tree iref = TREE_OPERAND (arg0, 0);
10014 return fold_build2 (MEM_REF, type,
10015 TREE_OPERAND (iref, 0),
10016 int_const_binop (PLUS_EXPR, arg1,
10017 TREE_OPERAND (iref, 1)));
10020 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10021 if (TREE_CODE (arg0) == ADDR_EXPR
10022 && handled_component_p (TREE_OPERAND (arg0, 0)))
10024 tree base;
10025 HOST_WIDE_INT coffset;
10026 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10027 &coffset);
10028 if (!base)
10029 return NULL_TREE;
10030 return fold_build2 (MEM_REF, type,
10031 build_fold_addr_expr (base),
10032 int_const_binop (PLUS_EXPR, arg1,
10033 size_int (coffset)));
10036 return NULL_TREE;
10038 case POINTER_PLUS_EXPR:
10039 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10040 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10041 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10042 return fold_convert_loc (loc, type,
10043 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10044 fold_convert_loc (loc, sizetype,
10045 arg1),
10046 fold_convert_loc (loc, sizetype,
10047 arg0)));
10049 return NULL_TREE;
10051 case PLUS_EXPR:
10052 /* Disable further optimizations involving UPC shared pointers,
10053 because integers are not interoperable with shared pointers. */
10054 if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10055 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10056 || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
10057 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
10058 return NULL_TREE;
10060 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10062 /* X + (X / CST) * -CST is X % CST. */
10063 if (TREE_CODE (arg1) == MULT_EXPR
10064 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10065 && operand_equal_p (arg0,
10066 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10068 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10069 tree cst1 = TREE_OPERAND (arg1, 1);
10070 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10071 cst1, cst0);
10072 if (sum && integer_zerop (sum))
10073 return fold_convert_loc (loc, type,
10074 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10075 TREE_TYPE (arg0), arg0,
10076 cst0));
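/* For example (illustrative): "x + (x / 16) * -16" folds to
   "x % 16", since the constants 16 and -16 sum to zero.  */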
10080 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10081 one. Make sure the type is not saturating and has the signedness of
10082 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10083 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10084 if ((TREE_CODE (arg0) == MULT_EXPR
10085 || TREE_CODE (arg1) == MULT_EXPR)
10086 && !TYPE_SATURATING (type)
10087 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10088 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10089 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10091 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10092 if (tem)
10093 return tem;
10096 if (! FLOAT_TYPE_P (type))
10098 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10099 with a constant, and the two constants have no bits in common,
10100 we should treat this as a BIT_IOR_EXPR since this may produce more
10101 simplifications. */
10102 if (TREE_CODE (arg0) == BIT_AND_EXPR
10103 && TREE_CODE (arg1) == BIT_AND_EXPR
10104 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10105 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10106 && wi::bit_and (TREE_OPERAND (arg0, 1),
10107 TREE_OPERAND (arg1, 1)) == 0)
10109 code = BIT_IOR_EXPR;
10110 goto bit_ior;
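/* For example (illustrative): "(x & 0xF0) + (y & 0x0F)" is handled
   as "(x & 0xF0) | (y & 0x0F)" because the masks share no bits, so
   the addition can never carry.  */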
10113 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10114 (plus (plus (mult) (mult)) (foo)) so that we can
10115 take advantage of the factoring cases below. */
10116 if (ANY_INTEGRAL_TYPE_P (type)
10117 && TYPE_OVERFLOW_WRAPS (type)
10118 && (((TREE_CODE (arg0) == PLUS_EXPR
10119 || TREE_CODE (arg0) == MINUS_EXPR)
10120 && TREE_CODE (arg1) == MULT_EXPR)
10121 || ((TREE_CODE (arg1) == PLUS_EXPR
10122 || TREE_CODE (arg1) == MINUS_EXPR)
10123 && TREE_CODE (arg0) == MULT_EXPR)))
10125 tree parg0, parg1, parg, marg;
10126 enum tree_code pcode;
10128 if (TREE_CODE (arg1) == MULT_EXPR)
10129 parg = arg0, marg = arg1;
10130 else
10131 parg = arg1, marg = arg0;
10132 pcode = TREE_CODE (parg);
10133 parg0 = TREE_OPERAND (parg, 0);
10134 parg1 = TREE_OPERAND (parg, 1);
10135 STRIP_NOPS (parg0);
10136 STRIP_NOPS (parg1);
10138 if (TREE_CODE (parg0) == MULT_EXPR
10139 && TREE_CODE (parg1) != MULT_EXPR)
10140 return fold_build2_loc (loc, pcode, type,
10141 fold_build2_loc (loc, PLUS_EXPR, type,
10142 fold_convert_loc (loc, type,
10143 parg0),
10144 fold_convert_loc (loc, type,
10145 marg)),
10146 fold_convert_loc (loc, type, parg1));
10147 if (TREE_CODE (parg0) != MULT_EXPR
10148 && TREE_CODE (parg1) == MULT_EXPR)
10149 return
10150 fold_build2_loc (loc, PLUS_EXPR, type,
10151 fold_convert_loc (loc, type, parg0),
10152 fold_build2_loc (loc, pcode, type,
10153 fold_convert_loc (loc, type, marg),
10154 fold_convert_loc (loc, type,
10155 parg1)));
10158 else
10160 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10161 to __complex__ ( x, y ). This is not the same for SNaNs or
10162 if signed zeros are involved. */
10163 if (!HONOR_SNANS (element_mode (arg0))
10164 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10165 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10167 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10168 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10169 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10170 bool arg0rz = false, arg0iz = false;
10171 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10172 || (arg0i && (arg0iz = real_zerop (arg0i))))
10174 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10175 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10176 if (arg0rz && arg1i && real_zerop (arg1i))
10178 tree rp = arg1r ? arg1r
10179 : build1 (REALPART_EXPR, rtype, arg1);
10180 tree ip = arg0i ? arg0i
10181 : build1 (IMAGPART_EXPR, rtype, arg0);
10182 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10184 else if (arg0iz && arg1r && real_zerop (arg1r))
10186 tree rp = arg0r ? arg0r
10187 : build1 (REALPART_EXPR, rtype, arg0);
10188 tree ip = arg1i ? arg1i
10189 : build1 (IMAGPART_EXPR, rtype, arg1);
10190 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10195 if (flag_unsafe_math_optimizations
10196 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10197 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10198 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10199 return tem;
10201 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10202 We associate floats only if the user has specified
10203 -fassociative-math. */
10204 if (flag_associative_math
10205 && TREE_CODE (arg1) == PLUS_EXPR
10206 && TREE_CODE (arg0) != MULT_EXPR)
10208 tree tree10 = TREE_OPERAND (arg1, 0);
10209 tree tree11 = TREE_OPERAND (arg1, 1);
10210 if (TREE_CODE (tree11) == MULT_EXPR
10211 && TREE_CODE (tree10) == MULT_EXPR)
10213 tree tree0;
10214 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10215 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10218 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10219 We associate floats only if the user has specified
10220 -fassociative-math. */
10221 if (flag_associative_math
10222 && TREE_CODE (arg0) == PLUS_EXPR
10223 && TREE_CODE (arg1) != MULT_EXPR)
10225 tree tree00 = TREE_OPERAND (arg0, 0);
10226 tree tree01 = TREE_OPERAND (arg0, 1);
10227 if (TREE_CODE (tree01) == MULT_EXPR
10228 && TREE_CODE (tree00) == MULT_EXPR)
10230 tree tree0;
10231 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10232 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10237 bit_rotate:
10238 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10239 is a rotate of A by C1 bits. */
10240 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10241 is a rotate of A by B bits. */
10243 enum tree_code code0, code1;
10244 tree rtype;
10245 code0 = TREE_CODE (arg0);
10246 code1 = TREE_CODE (arg1);
10247 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10248 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10249 && operand_equal_p (TREE_OPERAND (arg0, 0),
10250 TREE_OPERAND (arg1, 0), 0)
10251 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10252 TYPE_UNSIGNED (rtype))
10253 /* Only create rotates in complete modes. Other cases are not
10254 expanded properly. */
10255 && (element_precision (rtype)
10256 == element_precision (TYPE_MODE (rtype))))
10258 tree tree01, tree11;
10259 enum tree_code code01, code11;
10261 tree01 = TREE_OPERAND (arg0, 1);
10262 tree11 = TREE_OPERAND (arg1, 1);
10263 STRIP_NOPS (tree01);
10264 STRIP_NOPS (tree11);
10265 code01 = TREE_CODE (tree01);
10266 code11 = TREE_CODE (tree11);
10267 if (code01 == INTEGER_CST
10268 && code11 == INTEGER_CST
10269 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10270 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10272 tem = build2_loc (loc, LROTATE_EXPR,
10273 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10274 TREE_OPERAND (arg0, 0),
10275 code0 == LSHIFT_EXPR ? tree01 : tree11);
10276 return fold_convert_loc (loc, type, tem);
10278 else if (code11 == MINUS_EXPR)
10280 tree tree110, tree111;
10281 tree110 = TREE_OPERAND (tree11, 0);
10282 tree111 = TREE_OPERAND (tree11, 1);
10283 STRIP_NOPS (tree110);
10284 STRIP_NOPS (tree111);
10285 if (TREE_CODE (tree110) == INTEGER_CST
10286 && 0 == compare_tree_int (tree110,
10287 element_precision
10288 (TREE_TYPE (TREE_OPERAND
10289 (arg0, 0))))
10290 && operand_equal_p (tree01, tree111, 0))
10291 return
10292 fold_convert_loc (loc, type,
10293 build2 ((code0 == LSHIFT_EXPR
10294 ? LROTATE_EXPR
10295 : RROTATE_EXPR),
10296 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10297 TREE_OPERAND (arg0, 0), tree01));
10299 else if (code01 == MINUS_EXPR)
10301 tree tree010, tree011;
10302 tree010 = TREE_OPERAND (tree01, 0);
10303 tree011 = TREE_OPERAND (tree01, 1);
10304 STRIP_NOPS (tree010);
10305 STRIP_NOPS (tree011);
10306 if (TREE_CODE (tree010) == INTEGER_CST
10307 && 0 == compare_tree_int (tree010,
10308 element_precision
10309 (TREE_TYPE (TREE_OPERAND
10310 (arg0, 0))))
10311 && operand_equal_p (tree11, tree011, 0))
10312 return fold_convert_loc
10313 (loc, type,
10314 build2 ((code0 != LSHIFT_EXPR
10315 ? LROTATE_EXPR
10316 : RROTATE_EXPR),
10317 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10318 TREE_OPERAND (arg0, 0), tree11));
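/* Illustrative (unsigned 32-bit x): "(x << 3) + (x >> 29)" is
   recognized as x rotated left by 3, and "(x << b) + (x >> (32 - b))"
   as a rotate by b, matching the constant and MINUS_EXPR patterns
   above.  */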
10323 associate:
10324 /* In most languages, we can't associate operations on floats through
10325 parentheses. Rather than remember where the parentheses were, we
10326 don't associate floats at all, unless the user has specified
10327 -fassociative-math.
10328 And, we need to make sure type is not saturating. */
10330 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10331 && !TYPE_SATURATING (type))
10333 tree var0, con0, lit0, minus_lit0;
10334 tree var1, con1, lit1, minus_lit1;
10335 tree atype = type;
10336 bool ok = true;
10338 /* Split both trees into variables, constants, and literals. Then
10339 associate each group together, the constants with literals,
10340 then the result with variables. This increases the chances of
10341 literals being recombined later and of generating relocatable
10342 expressions for the sum of a constant and literal. */
10343 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10344 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10345 code == MINUS_EXPR);
10347 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10348 if (code == MINUS_EXPR)
10349 code = PLUS_EXPR;
10351 /* With undefined overflow prefer doing association in a type
10352 which wraps on overflow, if that is one of the operand types. */
10353 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10354 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10356 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10357 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10358 atype = TREE_TYPE (arg0);
10359 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10360 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10361 atype = TREE_TYPE (arg1);
10362 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10365 /* With undefined overflow we can only associate constants with one
10366 variable, and constants whose association doesn't overflow. */
10367 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10368 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10370 if (var0 && var1)
10372 tree tmp0 = var0;
10373 tree tmp1 = var1;
10375 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10376 tmp0 = TREE_OPERAND (tmp0, 0);
10377 if (CONVERT_EXPR_P (tmp0)
10378 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10379 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10380 <= TYPE_PRECISION (atype)))
10381 tmp0 = TREE_OPERAND (tmp0, 0);
10382 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10383 tmp1 = TREE_OPERAND (tmp1, 0);
10384 if (CONVERT_EXPR_P (tmp1)
10385 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10386 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10387 <= TYPE_PRECISION (atype)))
10388 tmp1 = TREE_OPERAND (tmp1, 0);
10389 /* The only case we can still associate with two variables
10390 is if they are the same, modulo negation and bit-pattern
10391 preserving conversions. */
10392 if (!operand_equal_p (tmp0, tmp1, 0))
10393 ok = false;
10397 /* Only do something if we found more than two objects. Otherwise,
10398 nothing has changed and we risk infinite recursion. */
10399 if (ok
10400 && (2 < ((var0 != 0) + (var1 != 0)
10401 + (con0 != 0) + (con1 != 0)
10402 + (lit0 != 0) + (lit1 != 0)
10403 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10405 bool any_overflows = false;
10406 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10407 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10408 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10409 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10410 var0 = associate_trees (loc, var0, var1, code, atype);
10411 con0 = associate_trees (loc, con0, con1, code, atype);
10412 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10413 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10414 code, atype);
10416 /* Preserve the MINUS_EXPR if the negative part of the literal is
10417 greater than the positive part. Otherwise, the multiplicative
10418 folding code (i.e. extract_muldiv) may be fooled in case
10419 unsigned constants are subtracted, like in the following
10420 example: ((X*2 + 4) - 8U)/2. */
10421 if (minus_lit0 && lit0)
10423 if (TREE_CODE (lit0) == INTEGER_CST
10424 && TREE_CODE (minus_lit0) == INTEGER_CST
10425 && tree_int_cst_lt (lit0, minus_lit0))
10427 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10428 MINUS_EXPR, atype);
10429 lit0 = 0;
10431 else
10433 lit0 = associate_trees (loc, lit0, minus_lit0,
10434 MINUS_EXPR, atype);
10435 minus_lit0 = 0;
10439 /* Don't introduce overflows through reassociation. */
10440 if (!any_overflows
10441 && ((lit0 && TREE_OVERFLOW_P (lit0))
10442 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10443 return NULL_TREE;
10445 if (minus_lit0)
10447 if (con0 == 0)
10448 return
10449 fold_convert_loc (loc, type,
10450 associate_trees (loc, var0, minus_lit0,
10451 MINUS_EXPR, atype));
10452 else
10454 con0 = associate_trees (loc, con0, minus_lit0,
10455 MINUS_EXPR, atype);
10456 return
10457 fold_convert_loc (loc, type,
10458 associate_trees (loc, var0, con0,
10459 PLUS_EXPR, atype));
10463 con0 = associate_trees (loc, con0, lit0, code, atype);
10464 return
10465 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10466 code, atype));
10470 return NULL_TREE;
10472 case MINUS_EXPR:
10473 /* Pointer simplifications for subtraction, simple reassociations. */
10474 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10476 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10477 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10478 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10480 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10481 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10482 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10483 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10484 return fold_build2_loc (loc, PLUS_EXPR, type,
10485 fold_build2_loc (loc, MINUS_EXPR, type,
10486 arg00, arg10),
10487 fold_build2_loc (loc, MINUS_EXPR, type,
10488 arg01, arg11));
10490 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10491 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10493 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10494 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10495 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10496 fold_convert_loc (loc, type, arg1));
10497 if (tmp)
10498 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10500 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10501 simplifies. */
10502 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10504 tree arg10 = fold_convert_loc (loc, type,
10505 TREE_OPERAND (arg1, 0));
10506 tree arg11 = fold_convert_loc (loc, type,
10507 TREE_OPERAND (arg1, 1));
10508 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10509 fold_convert_loc (loc, type, arg0),
10510 arg10);
10511 if (tmp)
10512 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10516 /* Disable further optimizations involving UPC shared pointers,
10517 because integers are not interoperable with shared pointers.
10518 (The test below also detects pointer difference between
10519 shared pointers, which cannot be folded.) */
10521 if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10522 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10523 return NULL_TREE;
10525 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10526 if (TREE_CODE (arg0) == NEGATE_EXPR
10527 && negate_expr_p (arg1)
10528 && reorder_operands_p (arg0, arg1))
10529 return fold_build2_loc (loc, MINUS_EXPR, type,
10530 fold_convert_loc (loc, type,
10531 negate_expr (arg1)),
10532 fold_convert_loc (loc, type,
10533 TREE_OPERAND (arg0, 0)));
10535 /* X - (X / Y) * Y is X % Y. */
10536 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10537 && TREE_CODE (arg1) == MULT_EXPR
10538 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10539 && operand_equal_p (arg0,
10540 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10541 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10542 TREE_OPERAND (arg1, 1), 0))
10543 return
10544 fold_convert_loc (loc, type,
10545 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10546 arg0, TREE_OPERAND (arg1, 1)));
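/* For example (illustrative): "x - (x / 8) * 8" folds to "x % 8";
   the divisor and the multiplier must be the identical tree.  */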
10548 if (! FLOAT_TYPE_P (type))
10550 /* Fold A - (A & B) into ~B & A. */
10551 if (!TREE_SIDE_EFFECTS (arg0)
10552 && TREE_CODE (arg1) == BIT_AND_EXPR)
10554 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10556 tree arg10 = fold_convert_loc (loc, type,
10557 TREE_OPERAND (arg1, 0));
10558 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10559 fold_build1_loc (loc, BIT_NOT_EXPR,
10560 type, arg10),
10561 fold_convert_loc (loc, type, arg0));
10563 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10565 tree arg11 = fold_convert_loc (loc,
10566 type, TREE_OPERAND (arg1, 1));
10567 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10568 fold_build1_loc (loc, BIT_NOT_EXPR,
10569 type, arg11),
10570 fold_convert_loc (loc, type, arg0));
10574 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10575 any power of 2 minus 1. */
10576 if (TREE_CODE (arg0) == BIT_AND_EXPR
10577 && TREE_CODE (arg1) == BIT_AND_EXPR
10578 && operand_equal_p (TREE_OPERAND (arg0, 0),
10579 TREE_OPERAND (arg1, 0), 0))
10581 tree mask0 = TREE_OPERAND (arg0, 1);
10582 tree mask1 = TREE_OPERAND (arg1, 1);
10583 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10585 if (operand_equal_p (tem, mask1, 0))
10587 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10588 TREE_OPERAND (arg0, 0), mask1);
10589 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10594 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10595 __complex__ ( x, -y ). This is not the same for SNaNs or if
10596 signed zeros are involved. */
10597 if (!HONOR_SNANS (element_mode (arg0))
10598 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10599 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10601 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10602 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10603 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10604 bool arg0rz = false, arg0iz = false;
10605 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10606 || (arg0i && (arg0iz = real_zerop (arg0i))))
10608 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10609 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10610 if (arg0rz && arg1i && real_zerop (arg1i))
10612 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10613 arg1r ? arg1r
10614 : build1 (REALPART_EXPR, rtype, arg1));
10615 tree ip = arg0i ? arg0i
10616 : build1 (IMAGPART_EXPR, rtype, arg0);
10617 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10619 else if (arg0iz && arg1r && real_zerop (arg1r))
10621 tree rp = arg0r ? arg0r
10622 : build1 (REALPART_EXPR, rtype, arg0);
10623 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10624 arg1i ? arg1i
10625 : build1 (IMAGPART_EXPR, rtype, arg1));
10626 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10631 /* A - B -> A + (-B) if B is easily negatable. */
10632 if (negate_expr_p (arg1)
10633 && !TYPE_OVERFLOW_SANITIZED (type)
10634 && ((FLOAT_TYPE_P (type)
10635 /* Avoid this transformation if B is a positive REAL_CST. */
10636 && (TREE_CODE (arg1) != REAL_CST
10637 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10638 || INTEGRAL_TYPE_P (type)))
10639 return fold_build2_loc (loc, PLUS_EXPR, type,
10640 fold_convert_loc (loc, type, arg0),
10641 fold_convert_loc (loc, type,
10642 negate_expr (arg1)));
10644 /* Try folding difference of addresses. */
10646 HOST_WIDE_INT diff;
10648 if ((TREE_CODE (arg0) == ADDR_EXPR
10649 || TREE_CODE (arg1) == ADDR_EXPR)
10650 && ptr_difference_const (arg0, arg1, &diff))
10651 return build_int_cst_type (type, diff);
10654 /* Fold &a[i] - &a[j] to i-j. */
10655 if (TREE_CODE (arg0) == ADDR_EXPR
10656 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10657 && TREE_CODE (arg1) == ADDR_EXPR
10658 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10660 tree tem = fold_addr_of_array_ref_difference (loc, type,
10661 TREE_OPERAND (arg0, 0),
10662 TREE_OPERAND (arg1, 0));
10663 if (tem)
10664 return tem;
10667 if (FLOAT_TYPE_P (type)
10668 && flag_unsafe_math_optimizations
10669 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10670 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10671 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10672 return tem;
10674 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10675 one. Make sure the type is not saturating and has the signedness of
10676 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10677 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10678 if ((TREE_CODE (arg0) == MULT_EXPR
10679 || TREE_CODE (arg1) == MULT_EXPR)
10680 && !TYPE_SATURATING (type)
10681 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10682 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10683 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10685 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10686 if (tem)
10687 return tem;
10690 goto associate;
10692 case MULT_EXPR:
10693 /* (-A) * (-B) -> A * B */
10694 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10695 return fold_build2_loc (loc, MULT_EXPR, type,
10696 fold_convert_loc (loc, type,
10697 TREE_OPERAND (arg0, 0)),
10698 fold_convert_loc (loc, type,
10699 negate_expr (arg1)));
10700 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10701 return fold_build2_loc (loc, MULT_EXPR, type,
10702 fold_convert_loc (loc, type,
10703 negate_expr (arg0)),
10704 fold_convert_loc (loc, type,
10705 TREE_OPERAND (arg1, 0)));
10707 if (! FLOAT_TYPE_P (type))
10709 /* Transform x * -C into -x * C if x is easily negatable. */
10710 if (TREE_CODE (arg1) == INTEGER_CST
10711 && tree_int_cst_sgn (arg1) == -1
10712 && negate_expr_p (arg0)
10713 && (tem = negate_expr (arg1)) != arg1
10714 && !TREE_OVERFLOW (tem))
10715 return fold_build2_loc (loc, MULT_EXPR, type,
10716 fold_convert_loc (loc, type,
10717 negate_expr (arg0)),
10718 tem);
10720 /* (a * (1 << b)) is (a << b) */
10721 if (TREE_CODE (arg1) == LSHIFT_EXPR
10722 && integer_onep (TREE_OPERAND (arg1, 0)))
10723 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10724 TREE_OPERAND (arg1, 1));
10725 if (TREE_CODE (arg0) == LSHIFT_EXPR
10726 && integer_onep (TREE_OPERAND (arg0, 0)))
10727 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10728 TREE_OPERAND (arg0, 1));
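/* Illustrative sketch (editorial addition): source-level effect of the
   two folds above, for unsigned operands:

     unsigned f (unsigned a, unsigned b) { return a * (1u << b); }

   is folded to the equivalent of  return a << b;  and likewise for
   the commuted form (1u << b) * a.  */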
10730 /* (A + A) * C -> A * 2 * C */
10731 if (TREE_CODE (arg0) == PLUS_EXPR
10732 && TREE_CODE (arg1) == INTEGER_CST
10733 && operand_equal_p (TREE_OPERAND (arg0, 0),
10734 TREE_OPERAND (arg0, 1), 0))
10735 return fold_build2_loc (loc, MULT_EXPR, type,
10736 omit_one_operand_loc (loc, type,
10737 TREE_OPERAND (arg0, 0),
10738 TREE_OPERAND (arg0, 1)),
10739 fold_build2_loc (loc, MULT_EXPR, type,
10740 build_int_cst (type, 2), arg1));
10742 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10743 sign-changing only. */
10744 if (TREE_CODE (arg1) == INTEGER_CST
10745 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10746 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10747 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10749 strict_overflow_p = false;
10750 if (TREE_CODE (arg1) == INTEGER_CST
10751 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10752 &strict_overflow_p)))
10754 if (strict_overflow_p)
10755 fold_overflow_warning (("assuming signed overflow does not "
10756 "occur when simplifying "
10757 "multiplication"),
10758 WARN_STRICT_OVERFLOW_MISC);
10759 return fold_convert_loc (loc, type, tem);
10762 /* Optimize z * conj(z) for integer complex numbers. */
10763 if (TREE_CODE (arg0) == CONJ_EXPR
10764 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10765 return fold_mult_zconjz (loc, type, arg1);
10766 if (TREE_CODE (arg1) == CONJ_EXPR
10767 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10768 return fold_mult_zconjz (loc, type, arg0);
10770 else
10772 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10773 the result for floating-point types due to rounding, so it is applied
10774 only if -fassociative-math was specified. */
10775 if (flag_associative_math
10776 && TREE_CODE (arg0) == RDIV_EXPR
10777 && TREE_CODE (arg1) == REAL_CST
10778 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10780 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10781 arg1);
10782 if (tem)
10783 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10784 TREE_OPERAND (arg0, 1));
10787 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10788 if (operand_equal_p (arg0, arg1, 0))
10790 tree tem = fold_strip_sign_ops (arg0);
10791 if (tem != NULL_TREE)
10793 tem = fold_convert_loc (loc, type, tem);
10794 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10798 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10799 This is not the same for NaNs or if signed zeros are
10800 involved. */
10801 if (!HONOR_NANS (arg0)
10802 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10803 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10804 && TREE_CODE (arg1) == COMPLEX_CST
10805 && real_zerop (TREE_REALPART (arg1)))
10807 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10808 if (real_onep (TREE_IMAGPART (arg1)))
10809 return
10810 fold_build2_loc (loc, COMPLEX_EXPR, type,
10811 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10812 rtype, arg0)),
10813 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10814 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10815 return
10816 fold_build2_loc (loc, COMPLEX_EXPR, type,
10817 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10818 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10819 rtype, arg0)));
10822 /* Optimize z * conj(z) for floating point complex numbers.
10823 Guarded by flag_unsafe_math_optimizations as non-finite
10824 imaginary components don't produce scalar results. */
10825 if (flag_unsafe_math_optimizations
10826 && TREE_CODE (arg0) == CONJ_EXPR
10827 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10828 return fold_mult_zconjz (loc, type, arg1);
10829 if (flag_unsafe_math_optimizations
10830 && TREE_CODE (arg1) == CONJ_EXPR
10831 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10832 return fold_mult_zconjz (loc, type, arg0);
10834 if (flag_unsafe_math_optimizations)
10836 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10837 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10839 /* Optimizations of root(...)*root(...). */
10840 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10842 tree rootfn, arg;
10843 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10844 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10846 /* Optimize sqrt(x)*sqrt(x) as x. */
10847 if (BUILTIN_SQRT_P (fcode0)
10848 && operand_equal_p (arg00, arg10, 0)
10849 && ! HONOR_SNANS (element_mode (type)))
10850 return arg00;
10852 /* Optimize root(x)*root(y) as root(x*y). */
10853 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10854 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10855 return build_call_expr_loc (loc, rootfn, 1, arg);
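/* Illustrative sketch (editorial addition): with
   -funsafe-math-optimizations, sqrt (x) * sqrt (x) folds directly to x
   (given no signaling NaNs), and sqrt (x) * sqrt (y) becomes
   sqrt (x * y), saving one call.  */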
10858 /* Optimize expN(x)*expN(y) as expN(x+y). */
10859 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10861 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10862 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10863 CALL_EXPR_ARG (arg0, 0),
10864 CALL_EXPR_ARG (arg1, 0));
10865 return build_call_expr_loc (loc, expfn, 1, arg);
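/* Illustrative sketch (editorial addition): under the same flag,

     double f (double x, double y) { return exp (x) * exp (y); }

   folds to the equivalent of  return exp (x + y);  -- one call
   instead of two.  */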
10868 /* Optimizations of pow(...)*pow(...). */
10869 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10870 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10871 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10873 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10874 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10875 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10876 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10878 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10879 if (operand_equal_p (arg01, arg11, 0))
10881 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10882 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10883 arg00, arg10);
10884 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10887 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10888 if (operand_equal_p (arg00, arg10, 0))
10890 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10891 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10892 arg01, arg11);
10893 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10897 /* Optimize tan(x)*cos(x) as sin(x). */
10898 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10899 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10900 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10901 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10902 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10903 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10904 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10905 CALL_EXPR_ARG (arg1, 0), 0))
10907 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10909 if (sinfn != NULL_TREE)
10910 return build_call_expr_loc (loc, sinfn, 1,
10911 CALL_EXPR_ARG (arg0, 0));
10914 /* Optimize x*pow(x,c) as pow(x,c+1). */
10915 if (fcode1 == BUILT_IN_POW
10916 || fcode1 == BUILT_IN_POWF
10917 || fcode1 == BUILT_IN_POWL)
10919 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10920 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10921 if (TREE_CODE (arg11) == REAL_CST
10922 && !TREE_OVERFLOW (arg11)
10923 && operand_equal_p (arg0, arg10, 0))
10925 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10926 REAL_VALUE_TYPE c;
10927 tree arg;
10929 c = TREE_REAL_CST (arg11);
10930 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10931 arg = build_real (type, c);
10932 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10936 /* Optimize pow(x,c)*x as pow(x,c+1). */
10937 if (fcode0 == BUILT_IN_POW
10938 || fcode0 == BUILT_IN_POWF
10939 || fcode0 == BUILT_IN_POWL)
10941 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10942 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10943 if (TREE_CODE (arg01) == REAL_CST
10944 && !TREE_OVERFLOW (arg01)
10945 && operand_equal_p (arg1, arg00, 0))
10947 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10948 REAL_VALUE_TYPE c;
10949 tree arg;
10951 c = TREE_REAL_CST (arg01);
10952 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10953 arg = build_real (type, c);
10954 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10958 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10959 if (!in_gimple_form
10960 && optimize
10961 && operand_equal_p (arg0, arg1, 0))
10963 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10965 if (powfn)
10967 tree arg = build_real (type, dconst2);
10968 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
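/* Illustrative sketch (editorial addition): the canonicalization above
   rewrites x * x as pow (x, 2.0) only so that later pattern matching
   sees one canonical form; as the comment notes, the builtin expander
   emits x * x again, so no libm call is introduced.  */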
10973 goto associate;
10975 case BIT_IOR_EXPR:
10976 bit_ior:
10977 /* ~X | X is -1. */
10978 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10979 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10981 t1 = build_zero_cst (type);
10982 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10983 return omit_one_operand_loc (loc, type, t1, arg1);
10986 /* X | ~X is -1. */
10987 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10988 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10990 t1 = build_zero_cst (type);
10991 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10992 return omit_one_operand_loc (loc, type, t1, arg0);
10995 /* Canonicalize (X & C1) | C2. */
10996 if (TREE_CODE (arg0) == BIT_AND_EXPR
10997 && TREE_CODE (arg1) == INTEGER_CST
10998 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11000 int width = TYPE_PRECISION (type), w;
11001 wide_int c1 = TREE_OPERAND (arg0, 1);
11002 wide_int c2 = arg1;
11004 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11005 if ((c1 & c2) == c1)
11006 return omit_one_operand_loc (loc, type, arg1,
11007 TREE_OPERAND (arg0, 0));
11009 wide_int msk = wi::mask (width, false,
11010 TYPE_PRECISION (TREE_TYPE (arg1)));
11012 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11013 if (msk.and_not (c1 | c2) == 0)
11014 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11015 TREE_OPERAND (arg0, 0), arg1);
11017 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11018 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11019 mode which allows further optimizations. */
11020 c1 &= msk;
11021 c2 &= msk;
11022 wide_int c3 = c1.and_not (c2);
11023 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11025 wide_int mask = wi::mask (w, false,
11026 TYPE_PRECISION (type));
11027 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11029 c3 = mask;
11030 break;
11034 if (c3 != c1)
11035 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11036 fold_build2_loc (loc, BIT_AND_EXPR, type,
11037 TREE_OPERAND (arg0, 0),
11038 wide_int_to_tree (type,
11039 c3)),
11040 arg1);
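/* Worked example (editorial addition): for unsigned x,

     (x & 0x3f) | 0x0f  ==>  (x & 0x30) | 0x0f

   since C1 &= ~C2 drops the bits C2 already provides.  When the
   minimized C1 would destroy a mode-sized mask (e.g. C1 == 0xff),
   the loop above keeps C1 intact instead.  */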
11043 /* (X & ~Y) | (~X & Y) is X ^ Y */
11044 if (TREE_CODE (arg0) == BIT_AND_EXPR
11045 && TREE_CODE (arg1) == BIT_AND_EXPR)
11047 tree a0, a1, l0, l1, n0, n1;
11049 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11050 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11052 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11053 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11055 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11056 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11058 if ((operand_equal_p (n0, a0, 0)
11059 && operand_equal_p (n1, a1, 0))
11060 || (operand_equal_p (n0, a1, 0)
11061 && operand_equal_p (n1, a0, 0)))
11062 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11065 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11066 if (t1 != NULL_TREE)
11067 return t1;
11069 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11071 This results in more efficient code for machines without a NAND
11072 instruction. Combine will canonicalize to the first form
11073 which will allow use of NAND instructions provided by the
11074 backend if they exist. */
11075 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11076 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11078 return
11079 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11080 build2 (BIT_AND_EXPR, type,
11081 fold_convert_loc (loc, type,
11082 TREE_OPERAND (arg0, 0)),
11083 fold_convert_loc (loc, type,
11084 TREE_OPERAND (arg1, 0))));
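/* Illustrative sketch (editorial addition): by this De Morgan rewrite,

     int f (int a, int b) { return ~a | ~b; }

   folds to the equivalent of  return ~(a & b);  which combine can
   later match to a single NAND on targets that have one.  */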
11087 /* See if this can be simplified into a rotate first. If that
11088 is unsuccessful continue in the association code. */
11089 goto bit_rotate;
11091 case BIT_XOR_EXPR:
11092 /* ~X ^ X is -1. */
11093 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11094 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11096 t1 = build_zero_cst (type);
11097 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11098 return omit_one_operand_loc (loc, type, t1, arg1);
11101 /* X ^ ~X is -1. */
11102 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11103 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11105 t1 = build_zero_cst (type);
11106 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11107 return omit_one_operand_loc (loc, type, t1, arg0);
11110 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11111 with a constant, and the two constants have no bits in common,
11112 we should treat this as a BIT_IOR_EXPR since this may produce more
11113 simplifications. */
11114 if (TREE_CODE (arg0) == BIT_AND_EXPR
11115 && TREE_CODE (arg1) == BIT_AND_EXPR
11116 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11117 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11118 && wi::bit_and (TREE_OPERAND (arg0, 1),
11119 TREE_OPERAND (arg1, 1)) == 0)
11121 code = BIT_IOR_EXPR;
11122 goto bit_ior;
11125 /* (X | Y) ^ X -> Y & ~X. */
11126 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11127 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11129 tree t2 = TREE_OPERAND (arg0, 1);
11130 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11131 arg1);
11132 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11133 fold_convert_loc (loc, type, t2),
11134 fold_convert_loc (loc, type, t1));
11135 return t1;
11138 /* (Y | X) ^ X -> Y & ~X. */
11139 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11140 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11142 tree t2 = TREE_OPERAND (arg0, 0);
11143 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11144 arg1);
11145 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11146 fold_convert_loc (loc, type, t2),
11147 fold_convert_loc (loc, type, t1));
11148 return t1;
11151 /* X ^ (X | Y) -> Y & ~X. */
11152 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11153 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11155 tree t2 = TREE_OPERAND (arg1, 1);
11156 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11157 arg0);
11158 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11159 fold_convert_loc (loc, type, t2),
11160 fold_convert_loc (loc, type, t1));
11161 return t1;
11164 /* X ^ (Y | X) -> Y & ~X. */
11165 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11166 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11168 tree t2 = TREE_OPERAND (arg1, 0);
11169 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11170 arg0);
11171 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11172 fold_convert_loc (loc, type, t2),
11173 fold_convert_loc (loc, type, t1));
11174 return t1;
11177 /* Convert ~X ^ ~Y to X ^ Y. */
11178 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11179 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11180 return fold_build2_loc (loc, code, type,
11181 fold_convert_loc (loc, type,
11182 TREE_OPERAND (arg0, 0)),
11183 fold_convert_loc (loc, type,
11184 TREE_OPERAND (arg1, 0)));
11186 /* Convert ~X ^ C to X ^ ~C. */
11187 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11188 && TREE_CODE (arg1) == INTEGER_CST)
11189 return fold_build2_loc (loc, code, type,
11190 fold_convert_loc (loc, type,
11191 TREE_OPERAND (arg0, 0)),
11192 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11194 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11195 if (TREE_CODE (arg0) == BIT_AND_EXPR
11196 && INTEGRAL_TYPE_P (type)
11197 && integer_onep (TREE_OPERAND (arg0, 1))
11198 && integer_onep (arg1))
11199 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11200 build_zero_cst (TREE_TYPE (arg0)));
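/* Illustrative sketch (editorial addition): the fold above turns the
   low-bit flip

     (x & 1) ^ 1

   into the equivalent test  (x & 1) == 0,  i.e. an even-parity check
   that comparison folding can simplify further.  */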
11202 /* Fold (X & Y) ^ Y as ~X & Y. */
11203 if (TREE_CODE (arg0) == BIT_AND_EXPR
11204 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11206 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11207 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11208 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11209 fold_convert_loc (loc, type, arg1));
11211 /* Fold (X & Y) ^ X as ~Y & X. */
11212 if (TREE_CODE (arg0) == BIT_AND_EXPR
11213 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11214 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11216 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11217 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11218 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11219 fold_convert_loc (loc, type, arg1));
11221 /* Fold X ^ (X & Y) as X & ~Y. */
11222 if (TREE_CODE (arg1) == BIT_AND_EXPR
11223 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11225 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11226 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11227 fold_convert_loc (loc, type, arg0),
11228 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11230 /* Fold X ^ (Y & X) as ~Y & X. */
11231 if (TREE_CODE (arg1) == BIT_AND_EXPR
11232 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11233 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11235 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11236 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11237 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11238 fold_convert_loc (loc, type, arg0));
11241 /* See if this can be simplified into a rotate first. If that
11242 is unsuccessful continue in the association code. */
11243 goto bit_rotate;
11245 case BIT_AND_EXPR:
11246 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11247 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11248 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11249 || (TREE_CODE (arg0) == EQ_EXPR
11250 && integer_zerop (TREE_OPERAND (arg0, 1))))
11251 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11252 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11254 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11255 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11256 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11257 || (TREE_CODE (arg1) == EQ_EXPR
11258 && integer_zerop (TREE_OPERAND (arg1, 1))))
11259 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11260 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11262 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11263 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11264 && INTEGRAL_TYPE_P (type)
11265 && integer_onep (TREE_OPERAND (arg0, 1))
11266 && integer_onep (arg1))
11268 tree tem2;
11269 tem = TREE_OPERAND (arg0, 0);
11270 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11271 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11272 tem, tem2);
11273 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11274 build_zero_cst (TREE_TYPE (tem)));
11276 /* Fold ~X & 1 as (X & 1) == 0. */
11277 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11278 && INTEGRAL_TYPE_P (type)
11279 && integer_onep (arg1))
11281 tree tem2;
11282 tem = TREE_OPERAND (arg0, 0);
11283 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11284 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11285 tem, tem2);
11286 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11287 build_zero_cst (TREE_TYPE (tem)));
11289 /* Fold !X & 1 as X == 0. */
11290 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11291 && integer_onep (arg1))
11293 tem = TREE_OPERAND (arg0, 0);
11294 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11295 build_zero_cst (TREE_TYPE (tem)));
11298 /* Fold (X ^ Y) & Y as ~X & Y. */
11299 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11300 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11302 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11303 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11304 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11305 fold_convert_loc (loc, type, arg1));
11307 /* Fold (X ^ Y) & X as ~Y & X. */
11308 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11309 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11310 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11312 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11313 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11314 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11315 fold_convert_loc (loc, type, arg1));
11317 /* Fold X & (X ^ Y) as X & ~Y. */
11318 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11319 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11321 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11322 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11323 fold_convert_loc (loc, type, arg0),
11324 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11326 /* Fold X & (Y ^ X) as ~Y & X. */
11327 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11328 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11329 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11331 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11332 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11333 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11334 fold_convert_loc (loc, type, arg0));
11337 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11338 multiple of 1 << CST. */
11339 if (TREE_CODE (arg1) == INTEGER_CST)
11341 wide_int cst1 = arg1;
11342 wide_int ncst1 = -cst1;
11343 if ((cst1 & ncst1) == ncst1
11344 && multiple_of_p (type, arg0,
11345 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11346 return fold_convert_loc (loc, type, arg0);
11349 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11350 bits from CST2. */
11351 if (TREE_CODE (arg1) == INTEGER_CST
11352 && TREE_CODE (arg0) == MULT_EXPR
11353 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11355 wide_int warg1 = arg1;
11356 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11358 if (masked == 0)
11359 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11360 arg0, arg1);
11361 else if (masked != warg1)
11363 /* Avoid the transform if arg1 is a mask of some
11364 mode which allows further optimizations. */
11365 int pop = wi::popcount (warg1);
11366 if (!(pop >= BITS_PER_UNIT
11367 && exact_log2 (pop) != -1
11368 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11369 return fold_build2_loc (loc, code, type, op0,
11370 wide_int_to_tree (type, masked));
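/* Worked example (editorial addition): since x * 4 always has its two
   low bits clear,

     (x * 4) & 3  ==>  0              (all of CST2 is known zero)
     (x * 4) & 7  ==>  (x * 4) & 4    (the known-zero bits drop out)

   unless CST2 is itself a mode-sized mask, which is left alone.  */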
11374 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11375 ((A & N) + B) & M -> (A + B) & M
11376 Similarly if (N & M) == 0,
11377 ((A | N) + B) & M -> (A + B) & M
11378 and for - instead of + (or unary - instead of +)
11379 and/or ^ instead of |.
11380 If B is constant and (B & M) == 0, fold into A & M. */
11381 if (TREE_CODE (arg1) == INTEGER_CST)
11383 wide_int cst1 = arg1;
11384 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11385 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11386 && (TREE_CODE (arg0) == PLUS_EXPR
11387 || TREE_CODE (arg0) == MINUS_EXPR
11388 || TREE_CODE (arg0) == NEGATE_EXPR)
11389 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11390 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11392 tree pmop[2];
11393 int which = 0;
11394 wide_int cst0;
11396 /* Now we know that arg0 is (C + D) or (C - D) or
11397 -C and arg1 (M) is == (1LL << cst) - 1.
11398 Store C into PMOP[0] and D into PMOP[1]. */
11399 pmop[0] = TREE_OPERAND (arg0, 0);
11400 pmop[1] = NULL;
11401 if (TREE_CODE (arg0) != NEGATE_EXPR)
11403 pmop[1] = TREE_OPERAND (arg0, 1);
11404 which = 1;
11407 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11408 which = -1;
11410 for (; which >= 0; which--)
11411 switch (TREE_CODE (pmop[which]))
11413 case BIT_AND_EXPR:
11414 case BIT_IOR_EXPR:
11415 case BIT_XOR_EXPR:
11416 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11417 != INTEGER_CST)
11418 break;
11419 cst0 = TREE_OPERAND (pmop[which], 1);
11420 cst0 &= cst1;
11421 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11423 if (cst0 != cst1)
11424 break;
11426 else if (cst0 != 0)
11427 break;
11428 /* If C or D is of the form (A & N) where
11429 (N & M) == M, or of the form (A | N) or
11430 (A ^ N) where (N & M) == 0, replace it with A. */
11431 pmop[which] = TREE_OPERAND (pmop[which], 0);
11432 break;
11433 case INTEGER_CST:
11434 /* If C or D is a constant N where (N & M) == 0, it can be
11435 omitted (assumed 0). */
11436 if ((TREE_CODE (arg0) == PLUS_EXPR
11437 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11438 && (cst1 & pmop[which]) == 0)
11439 pmop[which] = NULL;
11440 break;
11441 default:
11442 break;
11445 /* Only build anything new if we optimized one or both arguments
11446 above. */
11447 if (pmop[0] != TREE_OPERAND (arg0, 0)
11448 || (TREE_CODE (arg0) != NEGATE_EXPR
11449 && pmop[1] != TREE_OPERAND (arg0, 1)))
11451 tree utype = TREE_TYPE (arg0);
11452 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11454 /* Perform the operations in a type that has defined
11455 overflow behavior. */
11456 utype = unsigned_type_for (TREE_TYPE (arg0));
11457 if (pmop[0] != NULL)
11458 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11459 if (pmop[1] != NULL)
11460 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11463 if (TREE_CODE (arg0) == NEGATE_EXPR)
11464 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11465 else if (TREE_CODE (arg0) == PLUS_EXPR)
11467 if (pmop[0] != NULL && pmop[1] != NULL)
11468 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11469 pmop[0], pmop[1]);
11470 else if (pmop[0] != NULL)
11471 tem = pmop[0];
11472 else if (pmop[1] != NULL)
11473 tem = pmop[1];
11474 else
11475 return build_int_cst (type, 0);
11477 else if (pmop[0] == NULL)
11478 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11479 else
11480 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11481 pmop[0], pmop[1]);
11482 /* TEM is now the new binary +, - or unary - replacement. */
11483 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11484 fold_convert_loc (loc, utype, arg1));
11485 return fold_convert_loc (loc, type, tem);
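/* Worked example (editorial addition): with M == 0xf (a low-bit mask)
   and N == 0xff, so that (N & M) == M, the fold above gives

     ((a & 0xff) + b) & 0xf  ==>  (a + b) & 0xf

   with the addition done in an unsigned type when the original type
   does not wrap on overflow.  */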
11490 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11491 if (t1 != NULL_TREE)
11492 return t1;
11493 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11494 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11495 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11497 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11499 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11500 if (mask == -1)
11501 return
11502 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11505 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11507 This results in more efficient code for machines without a NOR
11508 instruction. Combine will canonicalize to the first form
11509 which will allow use of NOR instructions provided by the
11510 backend if they exist. */
11511 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11512 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11514 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11515 build2 (BIT_IOR_EXPR, type,
11516 fold_convert_loc (loc, type,
11517 TREE_OPERAND (arg0, 0)),
11518 fold_convert_loc (loc, type,
11519 TREE_OPERAND (arg1, 0))));
11522 /* If arg0 is derived from the address of an object or function, we may
11523 be able to fold this expression using the object or function's
11524 alignment. */
11525 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11527 unsigned HOST_WIDE_INT modulus, residue;
11528 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11530 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11531 integer_onep (arg1));
11533 /* This works because modulus is a power of 2. If this weren't the
11534 case, we'd have to replace it by its greatest power-of-2
11535 divisor: modulus & -modulus. */
11536 if (low < modulus)
11537 return build_int_cst (type, residue & low);
11540 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11541 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11542 if the new mask might be further optimized. */
11543 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11544 || TREE_CODE (arg0) == RSHIFT_EXPR)
11545 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11546 && TREE_CODE (arg1) == INTEGER_CST
11547 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11548 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11549 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11550 < TYPE_PRECISION (TREE_TYPE (arg0))))
11552 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11553 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11554 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11555 tree shift_type = TREE_TYPE (arg0);
11557 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11558 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11559 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11560 && TYPE_PRECISION (TREE_TYPE (arg0))
11561 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11563 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11564 tree arg00 = TREE_OPERAND (arg0, 0);
11565 /* See if more bits can be proven as zero because of
11566 zero extension. */
11567 if (TREE_CODE (arg00) == NOP_EXPR
11568 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11570 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11571 if (TYPE_PRECISION (inner_type)
11572 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11573 && TYPE_PRECISION (inner_type) < prec)
11575 prec = TYPE_PRECISION (inner_type);
11576 /* See if we can shorten the right shift. */
11577 if (shiftc < prec)
11578 shift_type = inner_type;
11579 /* Otherwise X >> C1 is all zeros, so we'll optimize
11580 it into (X, 0) later on by making sure zerobits
11581 is all ones. */
11584 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11585 if (shiftc < prec)
11587 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11588 zerobits <<= prec - shiftc;
11590 /* For an arithmetic shift, if the sign bit could be set, zerobits
11591 can actually contain sign bits, so no transformation is
11592 possible unless MASK masks them all away. In that
11593 case the shift needs to be converted into a logical shift. */
11594 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11595 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11597 if ((mask & zerobits) == 0)
11598 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11599 else
11600 zerobits = 0;
11604 /* ((X << 16) & 0xff00) is (X, 0). */
11605 if ((mask & zerobits) == mask)
11606 return omit_one_operand_loc (loc, type,
11607 build_int_cst (type, 0), arg0);
11609 newmask = mask | zerobits;
11610 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11612 /* Only do the transformation if NEWMASK is some integer
11613 mode's mask. */
11614 for (prec = BITS_PER_UNIT;
11615 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11616 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11617 break;
11618 if (prec < HOST_BITS_PER_WIDE_INT
11619 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11621 tree newmaskt;
11623 if (shift_type != TREE_TYPE (arg0))
11625 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11626 fold_convert_loc (loc, shift_type,
11627 TREE_OPERAND (arg0, 0)),
11628 TREE_OPERAND (arg0, 1));
11629 tem = fold_convert_loc (loc, type, tem);
11631 else
11632 tem = op0;
11633 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11634 if (!tree_int_cst_equal (newmaskt, arg1))
11635 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11640 goto associate;
11642 case RDIV_EXPR:
11643 /* Don't touch a floating-point divide by zero unless the mode
11644 of the constant can represent infinity. */
11645 if (TREE_CODE (arg1) == REAL_CST
11646 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11647 && real_zerop (arg1))
11648 return NULL_TREE;
11650 /* (-A) / (-B) -> A / B */
11651 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11652 return fold_build2_loc (loc, RDIV_EXPR, type,
11653 TREE_OPERAND (arg0, 0),
11654 negate_expr (arg1));
11655 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11656 return fold_build2_loc (loc, RDIV_EXPR, type,
11657 negate_expr (arg0),
11658 TREE_OPERAND (arg1, 0));
11660 /* Convert A/B/C to A/(B*C). */
11661 if (flag_reciprocal_math
11662 && TREE_CODE (arg0) == RDIV_EXPR)
11663 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11664 fold_build2_loc (loc, MULT_EXPR, type,
11665 TREE_OPERAND (arg0, 1), arg1));
11667 /* Convert A/(B/C) to (A/B)*C. */
11668 if (flag_reciprocal_math
11669 && TREE_CODE (arg1) == RDIV_EXPR)
11670 return fold_build2_loc (loc, MULT_EXPR, type,
11671 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11672 TREE_OPERAND (arg1, 0)),
11673 TREE_OPERAND (arg1, 1));
11675 /* Convert C1/(X*C2) into (C1/C2)/X. */
11676 if (flag_reciprocal_math
11677 && TREE_CODE (arg1) == MULT_EXPR
11678 && TREE_CODE (arg0) == REAL_CST
11679 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11681 tree tem = const_binop (RDIV_EXPR, arg0,
11682 TREE_OPERAND (arg1, 1));
11683 if (tem)
11684 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11685 TREE_OPERAND (arg1, 0));
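/* Illustrative sketch (editorial addition): under -freciprocal-math the
   three folds above reassociate divisions, e.g.

     6.0 / (x * 3.0)  ==>  2.0 / x

   after the constant quotient 6.0 / 3.0 is computed at compile time.  */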
11688 if (flag_unsafe_math_optimizations)
11690 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11691 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11693 /* Optimize sin(x)/cos(x) as tan(x). */
11694 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11695 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11696 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11697 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11698 CALL_EXPR_ARG (arg1, 0), 0))
11700 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11702 if (tanfn != NULL_TREE)
11703 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11706 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11707 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11708 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11709 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11710 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11711 CALL_EXPR_ARG (arg1, 0), 0))
11713 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11715 if (tanfn != NULL_TREE)
11717 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11718 CALL_EXPR_ARG (arg0, 0));
11719 return fold_build2_loc (loc, RDIV_EXPR, type,
11720 build_real (type, dconst1), tmp);
11724 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11725 NaNs or Infinities. */
11726 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11727 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11728 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11730 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11731 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11733 if (! HONOR_NANS (arg00)
11734 && ! HONOR_INFINITIES (element_mode (arg00))
11735 && operand_equal_p (arg00, arg01, 0))
11737 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11739 if (cosfn != NULL_TREE)
11740 return build_call_expr_loc (loc, cosfn, 1, arg00);
11744 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11745 NaNs or Infinities. */
11746 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11747 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11748 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11750 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11751 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11753 if (! HONOR_NANS (arg00)
11754 && ! HONOR_INFINITIES (element_mode (arg00))
11755 && operand_equal_p (arg00, arg01, 0))
11757 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11759 if (cosfn != NULL_TREE)
11761 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11762 return fold_build2_loc (loc, RDIV_EXPR, type,
11763 build_real (type, dconst1),
11764 tmp);
11769 /* Optimize pow(x,c)/x as pow(x,c-1). */
11770 if (fcode0 == BUILT_IN_POW
11771 || fcode0 == BUILT_IN_POWF
11772 || fcode0 == BUILT_IN_POWL)
11774 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11775 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11776 if (TREE_CODE (arg01) == REAL_CST
11777 && !TREE_OVERFLOW (arg01)
11778 && operand_equal_p (arg1, arg00, 0))
11780 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11781 REAL_VALUE_TYPE c;
11782 tree arg;
11784 c = TREE_REAL_CST (arg01);
11785 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11786 arg = build_real (type, c);
11787 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11791 /* Optimize a/root(b/c) into a*root(c/b). */
11792 if (BUILTIN_ROOT_P (fcode1))
11794 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11796 if (TREE_CODE (rootarg) == RDIV_EXPR)
11798 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11799 tree b = TREE_OPERAND (rootarg, 0);
11800 tree c = TREE_OPERAND (rootarg, 1);
11802 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11804 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11805 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11809 /* Optimize x/expN(y) into x*expN(-y). */
11810 if (BUILTIN_EXPONENT_P (fcode1))
11812 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11813 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11814 arg1 = build_call_expr_loc (loc,
11815 expfn, 1,
11816 fold_convert_loc (loc, type, arg));
11817 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11820 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11821 if (fcode1 == BUILT_IN_POW
11822 || fcode1 == BUILT_IN_POWF
11823 || fcode1 == BUILT_IN_POWL)
11825 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11826 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11827 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11828 tree neg11 = fold_convert_loc (loc, type,
11829 negate_expr (arg11));
11830 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11831 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11834 return NULL_TREE;
11836 case TRUNC_DIV_EXPR:
11837 /* Optimize (X & (-A)) / A where A is a power of 2,
11838 to X >> log2(A) */
11839 if (TREE_CODE (arg0) == BIT_AND_EXPR
11840 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11841 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11843 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11844 arg1, TREE_OPERAND (arg0, 1));
11845 if (sum && integer_zerop (sum)) {
11846 tree pow2 = build_int_cst (integer_type_node,
11847 wi::exact_log2 (arg1));
11848 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11849 TREE_OPERAND (arg0, 0), pow2);
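/* Illustrative sketch (editorial addition): for signed int x,

     (x & -16) / 16  ==>  x >> 4

   since masking with -16 guarantees the dividend is an exact multiple
   of 16.  */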
11853 /* Fall through */
11855 case FLOOR_DIV_EXPR:
11856 /* Simplify A / (B << N) where A and B are positive and B is
11857 a power of 2, to A >> (N + log2(B)). */
11858 strict_overflow_p = false;
11859 if (TREE_CODE (arg1) == LSHIFT_EXPR
11860 && (TYPE_UNSIGNED (type)
11861 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11863 tree sval = TREE_OPERAND (arg1, 0);
11864 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11866 tree sh_cnt = TREE_OPERAND (arg1, 1);
11867 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11868 wi::exact_log2 (sval));
11870 if (strict_overflow_p)
11871 fold_overflow_warning (("assuming signed overflow does not "
11872 "occur when simplifying A / (B << N)"),
11873 WARN_STRICT_OVERFLOW_MISC);
11875 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11876 sh_cnt, pow2);
11877 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11878 fold_convert_loc (loc, type, arg0), sh_cnt);
11882 /* Fall through */
11884 case ROUND_DIV_EXPR:
11885 case CEIL_DIV_EXPR:
11886 case EXACT_DIV_EXPR:
11887 if (integer_zerop (arg1))
11888 return NULL_TREE;
11890 /* Convert -A / -B to A / B when the type is signed and overflow is
11891 undefined. */
11892 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11893 && TREE_CODE (arg0) == NEGATE_EXPR
11894 && negate_expr_p (arg1))
11896 if (INTEGRAL_TYPE_P (type))
11897 fold_overflow_warning (("assuming signed overflow does not occur "
11898 "when distributing negation across "
11899 "division"),
11900 WARN_STRICT_OVERFLOW_MISC);
11901 return fold_build2_loc (loc, code, type,
11902 fold_convert_loc (loc, type,
11903 TREE_OPERAND (arg0, 0)),
11904 fold_convert_loc (loc, type,
11905 negate_expr (arg1)));
11907 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11908 && TREE_CODE (arg1) == NEGATE_EXPR
11909 && negate_expr_p (arg0))
11911 if (INTEGRAL_TYPE_P (type))
11912 fold_overflow_warning (("assuming signed overflow does not occur "
11913 "when distributing negation across "
11914 "division"),
11915 WARN_STRICT_OVERFLOW_MISC);
11916 return fold_build2_loc (loc, code, type,
11917 fold_convert_loc (loc, type,
11918 negate_expr (arg0)),
11919 fold_convert_loc (loc, type,
11920 TREE_OPERAND (arg1, 0)));
11923 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11924 operation, EXACT_DIV_EXPR.
11926 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11927 At one time others generated faster code, but it's not clear if they
11928 still do after the last round of changes to the DIV code in expmed.c. */
11929 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11930 && multiple_of_p (type, arg0, arg1))
11931 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11933 strict_overflow_p = false;
11934 if (TREE_CODE (arg1) == INTEGER_CST
11935 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11936 &strict_overflow_p)))
11938 if (strict_overflow_p)
11939 fold_overflow_warning (("assuming signed overflow does not occur "
11940 "when simplifying division"),
11941 WARN_STRICT_OVERFLOW_MISC);
11942 return fold_convert_loc (loc, type, tem);
11945 return NULL_TREE;
11947 case CEIL_MOD_EXPR:
11948 case FLOOR_MOD_EXPR:
11949 case ROUND_MOD_EXPR:
11950 case TRUNC_MOD_EXPR:
11951 /* X % -Y is the same as X % Y. */
11952 if (code == TRUNC_MOD_EXPR
11953 && !TYPE_UNSIGNED (type)
11954 && TREE_CODE (arg1) == NEGATE_EXPR
11955 && !TYPE_OVERFLOW_TRAPS (type))
11956 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11957 fold_convert_loc (loc, type,
11958 TREE_OPERAND (arg1, 0)));
11960 strict_overflow_p = false;
11961 if (TREE_CODE (arg1) == INTEGER_CST
11962 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11963 &strict_overflow_p)))
11965 if (strict_overflow_p)
11966 fold_overflow_warning (("assuming signed overflow does not occur "
11967 "when simplifying modulus"),
11968 WARN_STRICT_OVERFLOW_MISC);
11969 return fold_convert_loc (loc, type, tem);
11972 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11973 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11974 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11975 && (TYPE_UNSIGNED (type)
11976 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11978 tree c = arg1;
11979 /* Also optimize A % (C << N) where C is a power of 2,
11980 to A & ((C << N) - 1). */
11981 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11982 c = TREE_OPERAND (arg1, 0);
11984 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11986 tree mask
11987 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11988 build_int_cst (TREE_TYPE (arg1), 1));
11989 if (strict_overflow_p)
11990 fold_overflow_warning (("assuming signed overflow does not "
11991 "occur when simplifying "
11992 "X % (power of two)"),
11993 WARN_STRICT_OVERFLOW_MISC);
11994 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11995 fold_convert_loc (loc, type, arg0),
11996 fold_convert_loc (loc, type, mask));
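/* Illustrative sketch (editorial addition): for unsigned x,

     x % 16         ==>  x & 15
     x % (4u << n)  ==>  x & ((4u << n) - 1)

   replacing a division by a single AND.  */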
12000 return NULL_TREE;
12002 case LROTATE_EXPR:
12003 case RROTATE_EXPR:
12004 case RSHIFT_EXPR:
12005 case LSHIFT_EXPR:
12006 /* Since a negative shift count is not well-defined,
12007 don't try to compute it in the compiler. */
12008 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12009 return NULL_TREE;
12011 prec = element_precision (type);
12013 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12014 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12015 && tree_to_uhwi (arg1) < prec
12016 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12017 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12019 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12020 + tree_to_uhwi (arg1));
12022 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12023 being well defined. */
12024 if (low >= prec)
12026 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12027 low = low % prec;
12028 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12029 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12030 TREE_OPERAND (arg0, 0));
12031 else
12032 low = prec - 1;
12035 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12036 build_int_cst (TREE_TYPE (arg1), low));
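/* Illustrative sketch (editorial addition): for 32-bit unsigned x,

     (x << 3) << 5    ==>  x << 8
     (x << 20) << 20  ==>  0        (total count reaches the precision)

   while rotates reduce the combined count modulo the precision.  */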
12039 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12040 into x & ((unsigned)-1 >> c) for unsigned types. */
12041 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12042 || (TYPE_UNSIGNED (type)
12043 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12044 && tree_fits_uhwi_p (arg1)
12045 && tree_to_uhwi (arg1) < prec
12046 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12047 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12049 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12050 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12051 tree lshift;
12052 tree arg00;
12054 if (low0 == low1)
12056 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12058 lshift = build_minus_one_cst (type);
12059 lshift = const_binop (code, lshift, arg1);
12061 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12065 /* If we have a rotate of a bit operation with the rotate count and
12066 the second operand of the bit operation both constant,
12067 permute the two operations. */
12068 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12069 && (TREE_CODE (arg0) == BIT_AND_EXPR
12070 || TREE_CODE (arg0) == BIT_IOR_EXPR
12071 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12072 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12073 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12074 fold_build2_loc (loc, code, type,
12075 TREE_OPERAND (arg0, 0), arg1),
12076 fold_build2_loc (loc, code, type,
12077 TREE_OPERAND (arg0, 1), arg1));
12079 /* Two consecutive rotates adding up to some integer
12080 multiple of the precision of the type can be ignored. */
12081 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12082 && TREE_CODE (arg0) == RROTATE_EXPR
12083 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12084 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12085 prec) == 0)
12086 return TREE_OPERAND (arg0, 0);
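/* Illustrative sketch (editorial addition): on a 32-bit type, rotating
   right by 13 and then by 19 restores the original value, so the pair
   of rotates folds away entirely.  */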
12088 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12089 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12090 if the latter can be further optimized. */
12091 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12092 && TREE_CODE (arg0) == BIT_AND_EXPR
12093 && TREE_CODE (arg1) == INTEGER_CST
12094 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12096 tree mask = fold_build2_loc (loc, code, type,
12097 fold_convert_loc (loc, type,
12098 TREE_OPERAND (arg0, 1)),
12099 arg1);
12100 tree shift = fold_build2_loc (loc, code, type,
12101 fold_convert_loc (loc, type,
12102 TREE_OPERAND (arg0, 0)),
12103 arg1);
12104 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12105 if (tem)
12106 return tem;
12109 return NULL_TREE;
12111 case MIN_EXPR:
12112 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12113 if (tem)
12114 return tem;
12115 goto associate;
12117 case MAX_EXPR:
12118 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12119 if (tem)
12120 return tem;
12121 goto associate;
12123 case TRUTH_ANDIF_EXPR:
12124 /* Note that the operands of this must be ints
12125 and their values must be 0 or 1.
12126 ("true" is a fixed value perhaps depending on the language.) */
12127 /* If first arg is constant zero, return it. */
12128 if (integer_zerop (arg0))
12129 return fold_convert_loc (loc, type, arg0);
12130 case TRUTH_AND_EXPR:
12131 /* If either arg is constant true, drop it. */
12132 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12133 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12134 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12135 /* Preserve sequence points. */
12136 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12137 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12138 /* If second arg is constant zero, result is zero, but first arg
12139 must be evaluated. */
12140 if (integer_zerop (arg1))
12141 return omit_one_operand_loc (loc, type, arg1, arg0);
12142 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12143 case will be handled here. */
12144 if (integer_zerop (arg0))
12145 return omit_one_operand_loc (loc, type, arg0, arg1);
12147 /* !X && X is always false. */
12148 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12149 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12150 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12151 /* X && !X is always false. */
12152 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12153 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12154 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12156 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12157 means A >= Y && A != MAX, but in this case we know that
12158 A < X <= MAX. */
12160 if (!TREE_SIDE_EFFECTS (arg0)
12161 && !TREE_SIDE_EFFECTS (arg1))
12163 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12164 if (tem && !operand_equal_p (tem, arg0, 0))
12165 return fold_build2_loc (loc, code, type, tem, arg1);
12167 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12168 if (tem && !operand_equal_p (tem, arg1, 0))
12169 return fold_build2_loc (loc, code, type, arg0, tem);
12172 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12173 != NULL_TREE)
12174 return tem;
12176 return NULL_TREE;
12178 case TRUTH_ORIF_EXPR:
12179 /* Note that the operands of this must be ints
12180 and their values must be 0 or true.
12181 ("true" is a fixed value perhaps depending on the language.) */
12182 /* If first arg is constant true, return it. */
12183 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12184 return fold_convert_loc (loc, type, arg0);
12185 case TRUTH_OR_EXPR:
12186 /* If either arg is constant zero, drop it. */
12187 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12188 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12189 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12190 /* Preserve sequence points. */
12191 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12192 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12193 /* If second arg is constant true, result is true, but we must
12194 evaluate first arg. */
12195 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12196 return omit_one_operand_loc (loc, type, arg1, arg0);
12197 /* Likewise for first arg, but note this only occurs here for
12198 TRUTH_OR_EXPR. */
12199 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12200 return omit_one_operand_loc (loc, type, arg0, arg1);
12202 /* !X || X is always true. */
12203 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12204 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12205 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12206 /* X || !X is always true. */
12207 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12208 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12209 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12211 /* (X && !Y) || (!X && Y) is X ^ Y */
12212 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12213 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12215 tree a0, a1, l0, l1, n0, n1;
12217 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12218 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12220 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12221 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12223 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12224 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12226 if ((operand_equal_p (n0, a0, 0)
12227 && operand_equal_p (n1, a1, 0))
12228 || (operand_equal_p (n0, a1, 0)
12229 && operand_equal_p (n1, a0, 0)))
12230 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12233 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12234 != NULL_TREE)
12235 return tem;
12237 return NULL_TREE;
12239 case TRUTH_XOR_EXPR:
12240 /* If the second arg is constant zero, drop it. */
12241 if (integer_zerop (arg1))
12242 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12243 /* If the second arg is constant true, this is a logical inversion. */
12244 if (integer_onep (arg1))
12246 tem = invert_truthvalue_loc (loc, arg0);
12247 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12249 /* Identical arguments cancel to zero. */
12250 if (operand_equal_p (arg0, arg1, 0))
12251 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12253 /* !X ^ X is always true. */
12254 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12255 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12256 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12258 /* X ^ !X is always true. */
12259 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12260 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12261 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12263 return NULL_TREE;
12265 case EQ_EXPR:
12266 case NE_EXPR:
12267 STRIP_NOPS (arg0);
12268 STRIP_NOPS (arg1);
12270 tem = fold_comparison (loc, code, type, op0, op1);
12271 if (tem != NULL_TREE)
12272 return tem;
12274 /* bool_var != 0 becomes bool_var. */
12275 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12276 && code == NE_EXPR)
12277 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12279 /* bool_var == 1 becomes bool_var. */
12280 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12281 && code == EQ_EXPR)
12282 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12284 /* bool_var != 1 becomes !bool_var. */
12285 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12286 && code == NE_EXPR)
12287 return fold_convert_loc (loc, type,
12288 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12289 TREE_TYPE (arg0), arg0));
12291 /* bool_var == 0 becomes !bool_var. */
12292 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12293 && code == EQ_EXPR)
12294 return fold_convert_loc (loc, type,
12295 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12296 TREE_TYPE (arg0), arg0));
12298 /* !exp != 0 becomes !exp */
12299 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12300 && code == NE_EXPR)
12301 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12303 /* If this is an equality comparison of the address of two non-weak,
12304 unaliased symbols, neither of which is extern (since we do not
12305 have access to attributes for externs), then we know the result. */
12306 if (TREE_CODE (arg0) == ADDR_EXPR
12307 && DECL_P (TREE_OPERAND (arg0, 0))
12308 && TREE_CODE (arg1) == ADDR_EXPR
12309 && DECL_P (TREE_OPERAND (arg1, 0)))
12311 int equal;
12313 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12314 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12315 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12316 ->equal_address_to (symtab_node::get_create
12317 (TREE_OPERAND (arg1, 0)));
12318 else
12319 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12320 if (equal != 2)
12321 return constant_boolean_node (equal
12322 ? code == EQ_EXPR : code != EQ_EXPR,
12323 type);
12326 /* Similarly for a NEGATE_EXPR. */
12327 if (TREE_CODE (arg0) == NEGATE_EXPR
12328 && TREE_CODE (arg1) == INTEGER_CST
12329 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12330 arg1)))
12331 && TREE_CODE (tem) == INTEGER_CST
12332 && !TREE_OVERFLOW (tem))
12333 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12335 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12336 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12337 && TREE_CODE (arg1) == INTEGER_CST
12338 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12339 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12340 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12341 fold_convert_loc (loc,
12342 TREE_TYPE (arg0),
12343 arg1),
12344 TREE_OPERAND (arg0, 1)));
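/* Worked example (editorial addition): the BIT_XOR fold above gives

     (x ^ 4) == 5  ==>  x == 1

   because 4 ^ 5 == 1 is computed at compile time.  */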
12346 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12347 if ((TREE_CODE (arg0) == PLUS_EXPR
12348 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12349 || TREE_CODE (arg0) == MINUS_EXPR)
12350 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12351 0)),
12352 arg1, 0)
12353 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12354 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12356 tree val = TREE_OPERAND (arg0, 1);
12357 return omit_two_operands_loc (loc, type,
12358 fold_build2_loc (loc, code, type,
12359 val,
12360 build_int_cst (TREE_TYPE (val),
12361 0)),
12362 TREE_OPERAND (arg0, 0), arg1);
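/* E.g. x + y == x becomes y == 0; this holds even for wrapping
   arithmetic since adding y is invertible, and omit_two_operands
   preserves any side effects of the dropped operands. */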
12365 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12366 if (TREE_CODE (arg0) == MINUS_EXPR
12367 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12368 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12369 1)),
12370 arg1, 0)
12371 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12373 return omit_two_operands_loc (loc, type,
12374 code == NE_EXPR
12375 ? boolean_true_node : boolean_false_node,
12376 TREE_OPERAND (arg0, 1), arg1);
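/* E.g. 7 - x == x would require 7 == 2*x, which no integer x
   satisfies (even modulo 2^N) because 7 is odd; so != folds to
   true and == folds to false. */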
12379 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12380 if (TREE_CODE (arg0) == ABS_EXPR
12381 && (integer_zerop (arg1) || real_zerop (arg1)))
12382 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12384 /* If this is an EQ or NE comparison with zero and ARG0 is
12385 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12386 two operations, but the latter can be done in one less insn
12387 on machines that have only two-operand insns or on which a
12388 constant cannot be the first operand. */
12389 if (TREE_CODE (arg0) == BIT_AND_EXPR
12390 && integer_zerop (arg1))
12392 tree arg00 = TREE_OPERAND (arg0, 0);
12393 tree arg01 = TREE_OPERAND (arg0, 1);
12394 if (TREE_CODE (arg00) == LSHIFT_EXPR
12395 && integer_onep (TREE_OPERAND (arg00, 0)))
12397 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12398 arg01, TREE_OPERAND (arg00, 1));
12399 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12400 build_int_cst (TREE_TYPE (arg0), 1));
12401 return fold_build2_loc (loc, code, type,
12402 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12403 arg1);
12405 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12406 && integer_onep (TREE_OPERAND (arg01, 0)))
12408 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12409 arg00, TREE_OPERAND (arg01, 1));
12410 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12411 build_int_cst (TREE_TYPE (arg0), 1));
12412 return fold_build2_loc (loc, code, type,
12413 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12414 arg1);
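/* E.g. ((1 << n) & b) != 0 becomes ((b >> n) & 1) != 0, which no
   longer needs the constant 1 in the first operand position. */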
12418 /* If this is an NE or EQ comparison of zero against the result of a
12419 signed MOD operation whose second operand is a power of 2, make
12420 the MOD operation unsigned since it is simpler and equivalent. */
12421 if (integer_zerop (arg1)
12422 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12423 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12424 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12425 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12426 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12427 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12429 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12430 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12431 fold_convert_loc (loc, newtype,
12432 TREE_OPERAND (arg0, 0)),
12433 fold_convert_loc (loc, newtype,
12434 TREE_OPERAND (arg0, 1)));
12436 return fold_build2_loc (loc, code, type, newmod,
12437 fold_convert_loc (loc, newtype, arg1));
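/* E.g. assuming 32-bit int, x % 8 == 0 becomes
   (unsigned) x % 8U == 0; both test whether the low three bits of
   x are zero (x == -8 gives 0 under either form). */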
12440 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12441 C1 is a valid shift constant, and C2 is a power of two, i.e.
12442 a single bit. */
12443 if (TREE_CODE (arg0) == BIT_AND_EXPR
12444 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12445 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12446 == INTEGER_CST
12447 && integer_pow2p (TREE_OPERAND (arg0, 1))
12448 && integer_zerop (arg1))
12450 tree itype = TREE_TYPE (arg0);
12451 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12452 prec = TYPE_PRECISION (itype);
12454 /* Check for a valid shift count. */
12455 if (wi::ltu_p (arg001, prec))
12457 tree arg01 = TREE_OPERAND (arg0, 1);
12458 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12459 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12460 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12461 can be rewritten as (X & (C2 << C1)) != 0. */
12462 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12464 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12465 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12466 return fold_build2_loc (loc, code, type, tem,
12467 fold_convert_loc (loc, itype, arg1));
12469 /* Otherwise, for signed (arithmetic) shifts,
12470 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12471 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12472 else if (!TYPE_UNSIGNED (itype))
12473 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12474 arg000, build_int_cst (itype, 0));
12475 /* Otherwise, for unsigned (logical) shifts,
12476 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12477 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12478 else
12479 return omit_one_operand_loc (loc, type,
12480 code == EQ_EXPR ? integer_one_node
12481 : integer_zero_node,
12482 arg000);
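/* Examples of the three branches above, assuming 32-bit int:
   ((x >> 3) & 4) != 0 becomes (x & (4 << 3)) != 0, i.e.
   (x & 32) != 0; on signed x, ((x >> 30) & 4) != 0 becomes x < 0
   because the arithmetic shift replicates the sign bit; on
   unsigned x the same test folds to (x, false). */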
12486 /* If we have (A & C) == C where C is a power of 2, convert this into
12487 (A & C) != 0. Similarly for NE_EXPR. */
12488 if (TREE_CODE (arg0) == BIT_AND_EXPR
12489 && integer_pow2p (TREE_OPERAND (arg0, 1))
12490 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12491 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12492 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12493 integer_zero_node));
12495 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12496 bit, then fold the expression into A < 0 or A >= 0. */
12497 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12498 if (tem)
12499 return tem;
12501 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12502 Similarly for NE_EXPR. */
12503 if (TREE_CODE (arg0) == BIT_AND_EXPR
12504 && TREE_CODE (arg1) == INTEGER_CST
12505 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12507 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12508 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12509 TREE_OPERAND (arg0, 1));
12510 tree dandnotc
12511 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12512 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12513 notc);
12514 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12515 if (integer_nonzerop (dandnotc))
12516 return omit_one_operand_loc (loc, type, rslt, arg0);
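/* E.g. (a & 0xf) == 0x10 folds to 0 (and != 0x10 to 1), because
   bit 4 of the right-hand side can never survive the mask 0xf. */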
12519 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12520 Similarly for NE_EXPR. */
12521 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12522 && TREE_CODE (arg1) == INTEGER_CST
12523 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12525 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12526 tree candnotd
12527 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12528 TREE_OPERAND (arg0, 1),
12529 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12530 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12531 if (integer_nonzerop (candnotd))
12532 return omit_one_operand_loc (loc, type, rslt, arg0);
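/* E.g. (a | 4) == 3 folds to 0: bit 2 is always set on the left
   but clear in 3 (here C & ~D is 4 & ~3, which is nonzero). */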
12535 /* If this is a comparison of a field, we may be able to simplify it. */
12536 if ((TREE_CODE (arg0) == COMPONENT_REF
12537 || TREE_CODE (arg0) == BIT_FIELD_REF)
12538 /* Handle the constant case even without -O
12539 to make sure the warnings are given. */
12540 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12542 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12543 if (t1)
12544 return t1;
12547 /* Optimize comparisons of strlen vs zero to a compare of the
12548 first character of the string vs zero. To wit,
12549 strlen(ptr) == 0 => *ptr == 0
12550 strlen(ptr) != 0 => *ptr != 0
12551 Other cases should reduce to one of these two (or a constant)
12552 due to the return value of strlen being unsigned. */
12553 if (TREE_CODE (arg0) == CALL_EXPR
12554 && integer_zerop (arg1))
12556 tree fndecl = get_callee_fndecl (arg0);
12558 if (fndecl
12559 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12560 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12561 && call_expr_nargs (arg0) == 1
12562 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12564 tree iref = build_fold_indirect_ref_loc (loc,
12565 CALL_EXPR_ARG (arg0, 0));
12566 return fold_build2_loc (loc, code, type, iref,
12567 build_int_cst (TREE_TYPE (iref), 0));
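/* E.g. strlen (p) == 0 becomes *p == 0, avoiding a scan of the
   whole string when only emptiness is being tested. */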
12571 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12572 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12573 if (TREE_CODE (arg0) == RSHIFT_EXPR
12574 && integer_zerop (arg1)
12575 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12577 tree arg00 = TREE_OPERAND (arg0, 0);
12578 tree arg01 = TREE_OPERAND (arg0, 1);
12579 tree itype = TREE_TYPE (arg00);
12580 if (wi::eq_p (arg01, element_precision (itype) - 1))
12582 if (TYPE_UNSIGNED (itype))
12584 itype = signed_type_for (itype);
12585 arg00 = fold_convert_loc (loc, itype, arg00);
12587 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12588 type, arg00, build_zero_cst (itype));
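/* E.g. for 32-bit int, (x >> 31) != 0 becomes x < 0; an unsigned
   operand is first converted to the corresponding signed type so
   that the sign test is meaningful. */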
12592 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12593 if (integer_zerop (arg1)
12594 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12595 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12596 TREE_OPERAND (arg0, 1));
12598 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12599 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12600 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12601 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12602 build_zero_cst (TREE_TYPE (arg0)));
12603 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12604 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12605 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12606 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12607 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12608 build_zero_cst (TREE_TYPE (arg0)));
12610 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12611 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12612 && TREE_CODE (arg1) == INTEGER_CST
12613 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12614 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12615 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12616 TREE_OPERAND (arg0, 1), arg1));
12618 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12619 (X & C) == 0 when C is a single bit. */
12620 if (TREE_CODE (arg0) == BIT_AND_EXPR
12621 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12622 && integer_zerop (arg1)
12623 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12625 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12626 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12627 TREE_OPERAND (arg0, 1));
12628 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12629 type, tem,
12630 fold_convert_loc (loc, TREE_TYPE (arg0),
12631 arg1));
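/* E.g. (~x & 8) == 0 becomes (x & 8) != 0: the single bit is
   clear in ~x exactly when it is set in x. */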
12634 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12635 constant C is a power of two, i.e. a single bit. */
12636 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12637 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12638 && integer_zerop (arg1)
12639 && integer_pow2p (TREE_OPERAND (arg0, 1))
12640 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12641 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12643 tree arg00 = TREE_OPERAND (arg0, 0);
12644 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12645 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12648 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12649 when C is a power of two, i.e. a single bit. */
12650 if (TREE_CODE (arg0) == BIT_AND_EXPR
12651 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12652 && integer_zerop (arg1)
12653 && integer_pow2p (TREE_OPERAND (arg0, 1))
12654 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12655 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12657 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12658 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12659 arg000, TREE_OPERAND (arg0, 1));
12660 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12661 tem, build_int_cst (TREE_TYPE (tem), 0));
12664 if (integer_zerop (arg1)
12665 && tree_expr_nonzero_p (arg0))
12667 tree res = constant_boolean_node (code == NE_EXPR, type);
12668 return omit_one_operand_loc (loc, type, res, arg0);
12671 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12672 if (TREE_CODE (arg0) == NEGATE_EXPR
12673 && TREE_CODE (arg1) == NEGATE_EXPR)
12674 return fold_build2_loc (loc, code, type,
12675 TREE_OPERAND (arg0, 0),
12676 fold_convert_loc (loc, TREE_TYPE (arg0),
12677 TREE_OPERAND (arg1, 0)));
12679 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12680 if (TREE_CODE (arg0) == BIT_AND_EXPR
12681 && TREE_CODE (arg1) == BIT_AND_EXPR)
12683 tree arg00 = TREE_OPERAND (arg0, 0);
12684 tree arg01 = TREE_OPERAND (arg0, 1);
12685 tree arg10 = TREE_OPERAND (arg1, 0);
12686 tree arg11 = TREE_OPERAND (arg1, 1);
12687 tree itype = TREE_TYPE (arg0);
12689 if (operand_equal_p (arg01, arg11, 0))
12690 return fold_build2_loc (loc, code, type,
12691 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12692 fold_build2_loc (loc,
12693 BIT_XOR_EXPR, itype,
12694 arg00, arg10),
12695 arg01),
12696 build_zero_cst (itype));
12698 if (operand_equal_p (arg01, arg10, 0))
12699 return fold_build2_loc (loc, code, type,
12700 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12701 fold_build2_loc (loc,
12702 BIT_XOR_EXPR, itype,
12703 arg00, arg11),
12704 arg01),
12705 build_zero_cst (itype));
12707 if (operand_equal_p (arg00, arg11, 0))
12708 return fold_build2_loc (loc, code, type,
12709 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12710 fold_build2_loc (loc,
12711 BIT_XOR_EXPR, itype,
12712 arg01, arg10),
12713 arg00),
12714 build_zero_cst (itype));
12716 if (operand_equal_p (arg00, arg10, 0))
12717 return fold_build2_loc (loc, code, type,
12718 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12719 fold_build2_loc (loc,
12720 BIT_XOR_EXPR, itype,
12721 arg01, arg11),
12722 arg00),
12723 build_zero_cst (itype));
12726 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12727 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12729 tree arg00 = TREE_OPERAND (arg0, 0);
12730 tree arg01 = TREE_OPERAND (arg0, 1);
12731 tree arg10 = TREE_OPERAND (arg1, 0);
12732 tree arg11 = TREE_OPERAND (arg1, 1);
12733 tree itype = TREE_TYPE (arg0);
12735 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12736 operand_equal_p guarantees no side-effects so we don't need
12737 to use omit_one_operand on Z. */
12738 if (operand_equal_p (arg01, arg11, 0))
12739 return fold_build2_loc (loc, code, type, arg00,
12740 fold_convert_loc (loc, TREE_TYPE (arg00),
12741 arg10));
12742 if (operand_equal_p (arg01, arg10, 0))
12743 return fold_build2_loc (loc, code, type, arg00,
12744 fold_convert_loc (loc, TREE_TYPE (arg00),
12745 arg11));
12746 if (operand_equal_p (arg00, arg11, 0))
12747 return fold_build2_loc (loc, code, type, arg01,
12748 fold_convert_loc (loc, TREE_TYPE (arg01),
12749 arg10));
12750 if (operand_equal_p (arg00, arg10, 0))
12751 return fold_build2_loc (loc, code, type, arg01,
12752 fold_convert_loc (loc, TREE_TYPE (arg01),
12753 arg11));
12755 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12756 if (TREE_CODE (arg01) == INTEGER_CST
12757 && TREE_CODE (arg11) == INTEGER_CST)
12759 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12760 fold_convert_loc (loc, itype, arg11));
12761 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12762 return fold_build2_loc (loc, code, type, tem,
12763 fold_convert_loc (loc, itype, arg10));
12767 /* Attempt to simplify equality/inequality comparisons of complex
12768 values. Only lower the comparison if the result is known or
12769 can be simplified to a single scalar comparison. */
12770 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12771 || TREE_CODE (arg0) == COMPLEX_CST)
12772 && (TREE_CODE (arg1) == COMPLEX_EXPR
12773 || TREE_CODE (arg1) == COMPLEX_CST))
12775 tree real0, imag0, real1, imag1;
12776 tree rcond, icond;
12778 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12780 real0 = TREE_OPERAND (arg0, 0);
12781 imag0 = TREE_OPERAND (arg0, 1);
12783 else
12785 real0 = TREE_REALPART (arg0);
12786 imag0 = TREE_IMAGPART (arg0);
12789 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12791 real1 = TREE_OPERAND (arg1, 0);
12792 imag1 = TREE_OPERAND (arg1, 1);
12794 else
12796 real1 = TREE_REALPART (arg1);
12797 imag1 = TREE_IMAGPART (arg1);
12800 rcond = fold_binary_loc (loc, code, type, real0, real1);
12801 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12803 if (integer_zerop (rcond))
12805 if (code == EQ_EXPR)
12806 return omit_two_operands_loc (loc, type, boolean_false_node,
12807 imag0, imag1);
12808 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12810 else
12812 if (code == NE_EXPR)
12813 return omit_two_operands_loc (loc, type, boolean_true_node,
12814 imag0, imag1);
12815 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12819 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12820 if (icond && TREE_CODE (icond) == INTEGER_CST)
12822 if (integer_zerop (icond))
12824 if (code == EQ_EXPR)
12825 return omit_two_operands_loc (loc, type, boolean_false_node,
12826 real0, real1);
12827 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12829 else
12831 if (code == NE_EXPR)
12832 return omit_two_operands_loc (loc, type, boolean_true_node,
12833 real0, real1);
12834 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12839 return NULL_TREE;
12841 case LT_EXPR:
12842 case GT_EXPR:
12843 case LE_EXPR:
12844 case GE_EXPR:
12845 tem = fold_comparison (loc, code, type, op0, op1);
12846 if (tem != NULL_TREE)
12847 return tem;
12849 /* Transform comparisons of the form X +- C CMP X. */
12850 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12851 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12852 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12853 && !HONOR_SNANS (arg0))
12854 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12855 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12857 tree arg01 = TREE_OPERAND (arg0, 1);
12858 enum tree_code code0 = TREE_CODE (arg0);
12859 int is_positive;
12861 if (TREE_CODE (arg01) == REAL_CST)
12862 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12863 else
12864 is_positive = tree_int_cst_sgn (arg01);
12866 /* (X - c) > X becomes false. */
12867 if (code == GT_EXPR
12868 && ((code0 == MINUS_EXPR && is_positive >= 0)
12869 || (code0 == PLUS_EXPR && is_positive <= 0)))
12871 if (TREE_CODE (arg01) == INTEGER_CST
12872 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12873 fold_overflow_warning (("assuming signed overflow does not "
12874 "occur when assuming that (X - c) > X "
12875 "is always false"),
12876 WARN_STRICT_OVERFLOW_ALL);
12877 return constant_boolean_node (0, type);
12880 /* Likewise (X + c) < X becomes false. */
12881 if (code == LT_EXPR
12882 && ((code0 == PLUS_EXPR && is_positive >= 0)
12883 || (code0 == MINUS_EXPR && is_positive <= 0)))
12885 if (TREE_CODE (arg01) == INTEGER_CST
12886 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12887 fold_overflow_warning (("assuming signed overflow does not "
12888 "occur when assuming that "
12889 "(X + c) < X is always false"),
12890 WARN_STRICT_OVERFLOW_ALL);
12891 return constant_boolean_node (0, type);
12894 /* Convert (X - c) <= X to true. */
12895 if (!HONOR_NANS (arg1)
12896 && code == LE_EXPR
12897 && ((code0 == MINUS_EXPR && is_positive >= 0)
12898 || (code0 == PLUS_EXPR && is_positive <= 0)))
12900 if (TREE_CODE (arg01) == INTEGER_CST
12901 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12902 fold_overflow_warning (("assuming signed overflow does not "
12903 "occur when assuming that "
12904 "(X - c) <= X is always true"),
12905 WARN_STRICT_OVERFLOW_ALL);
12906 return constant_boolean_node (1, type);
12909 /* Convert (X + c) >= X to true. */
12910 if (!HONOR_NANS (arg1)
12911 && code == GE_EXPR
12912 && ((code0 == PLUS_EXPR && is_positive >= 0)
12913 || (code0 == MINUS_EXPR && is_positive <= 0)))
12915 if (TREE_CODE (arg01) == INTEGER_CST
12916 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12917 fold_overflow_warning (("assuming signed overflow does not "
12918 "occur when assuming that "
12919 "(X + c) >= X is always true"),
12920 WARN_STRICT_OVERFLOW_ALL);
12921 return constant_boolean_node (1, type);
12924 if (TREE_CODE (arg01) == INTEGER_CST)
12926 /* Convert X + c > X and X - c < X to true for integers. */
12927 if (code == GT_EXPR
12928 && ((code0 == PLUS_EXPR && is_positive > 0)
12929 || (code0 == MINUS_EXPR && is_positive < 0)))
12931 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12932 fold_overflow_warning (("assuming signed overflow does "
12933 "not occur when assuming that "
12934 "(X + c) > X is always true"),
12935 WARN_STRICT_OVERFLOW_ALL);
12936 return constant_boolean_node (1, type);
12939 if (code == LT_EXPR
12940 && ((code0 == MINUS_EXPR && is_positive > 0)
12941 || (code0 == PLUS_EXPR && is_positive < 0)))
12943 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12944 fold_overflow_warning (("assuming signed overflow does "
12945 "not occur when assuming that "
12946 "(X - c) < X is always true"),
12947 WARN_STRICT_OVERFLOW_ALL);
12948 return constant_boolean_node (1, type);
12951 /* Convert X + c <= X and X - c >= X to false for integers. */
12952 if (code == LE_EXPR
12953 && ((code0 == PLUS_EXPR && is_positive > 0)
12954 || (code0 == MINUS_EXPR && is_positive < 0)))
12956 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12957 fold_overflow_warning (("assuming signed overflow does "
12958 "not occur when assuming that "
12959 "(X + c) <= X is always false"),
12960 WARN_STRICT_OVERFLOW_ALL);
12961 return constant_boolean_node (0, type);
12964 if (code == GE_EXPR
12965 && ((code0 == MINUS_EXPR && is_positive > 0)
12966 || (code0 == PLUS_EXPR && is_positive < 0)))
12968 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12969 fold_overflow_warning (("assuming signed overflow does "
12970 "not occur when assuming that "
12971 "(X - c) >= X is always false"),
12972 WARN_STRICT_OVERFLOW_ALL);
12973 return constant_boolean_node (0, type);
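/* E.g. for signed int, x + 1 > x folds to 1 and x + 1 <= x folds
   to 0, each case emitting a strict-overflow warning since the
   result relies on signed overflow being undefined. */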
12978 /* Comparisons with the highest or lowest possible integer of
12979 the specified precision will have known values. */
12981 tree arg1_type = TREE_TYPE (arg1);
12982 unsigned int prec = TYPE_PRECISION (arg1_type);
12984 if (TREE_CODE (arg1) == INTEGER_CST
12985 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12987 wide_int max = wi::max_value (arg1_type);
12988 wide_int signed_max = wi::max_value (prec, SIGNED);
12989 wide_int min = wi::min_value (arg1_type);
12991 if (wi::eq_p (arg1, max))
12992 switch (code)
12994 case GT_EXPR:
12995 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12997 case GE_EXPR:
12998 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13000 case LE_EXPR:
13001 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13003 case LT_EXPR:
13004 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13006 /* The GE_EXPR and LT_EXPR cases above are not normally
13007 reached because of previous transformations. */
13009 default:
13010 break;
13012 else if (wi::eq_p (arg1, max - 1))
13013 switch (code)
13015 case GT_EXPR:
13016 arg1 = const_binop (PLUS_EXPR, arg1,
13017 build_int_cst (TREE_TYPE (arg1), 1));
13018 return fold_build2_loc (loc, EQ_EXPR, type,
13019 fold_convert_loc (loc,
13020 TREE_TYPE (arg1), arg0),
13021 arg1);
13022 case LE_EXPR:
13023 arg1 = const_binop (PLUS_EXPR, arg1,
13024 build_int_cst (TREE_TYPE (arg1), 1));
13025 return fold_build2_loc (loc, NE_EXPR, type,
13026 fold_convert_loc (loc, TREE_TYPE (arg1),
13027 arg0),
13028 arg1);
13029 default:
13030 break;
13032 else if (wi::eq_p (arg1, min))
13033 switch (code)
13035 case LT_EXPR:
13036 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13038 case LE_EXPR:
13039 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13041 case GE_EXPR:
13042 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13044 case GT_EXPR:
13045 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13047 default:
13048 break;
13050 else if (wi::eq_p (arg1, min + 1))
13051 switch (code)
13053 case GE_EXPR:
13054 arg1 = const_binop (MINUS_EXPR, arg1,
13055 build_int_cst (TREE_TYPE (arg1), 1));
13056 return fold_build2_loc (loc, NE_EXPR, type,
13057 fold_convert_loc (loc,
13058 TREE_TYPE (arg1), arg0),
13059 arg1);
13060 case LT_EXPR:
13061 arg1 = const_binop (MINUS_EXPR, arg1,
13062 build_int_cst (TREE_TYPE (arg1), 1));
13063 return fold_build2_loc (loc, EQ_EXPR, type,
13064 fold_convert_loc (loc, TREE_TYPE (arg1),
13065 arg0),
13066 arg1);
13067 default:
13068 break;
13071 else if (wi::eq_p (arg1, signed_max)
13072 && TYPE_UNSIGNED (arg1_type)
13073 /* We will flip the signedness of the comparison operator
13074 associated with the mode of arg1, so the sign bit is
13075 specified by this mode. Check that arg1 is the signed
13076 max associated with this sign bit. */
13077 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13078 /* signed_type does not work on pointer types. */
13079 && INTEGRAL_TYPE_P (arg1_type))
13081 /* The following case also applies to X < signed_max+1
13082 and X >= signed_max+1 because of previous transformations. */
13083 if (code == LE_EXPR || code == GT_EXPR)
13085 tree st = signed_type_for (arg1_type);
13086 return fold_build2_loc (loc,
13087 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13088 type, fold_convert_loc (loc, st, arg0),
13089 build_int_cst (st, 0));
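/* E.g. for 32-bit unsigned x, x <= 0x7fffffff becomes
   (int) x >= 0 and x > 0x7fffffff becomes (int) x < 0, turning a
   comparison against a constant into a sign test. */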
13095 /* If we are comparing an ABS_EXPR with a constant, we can
13096 convert all the cases into explicit comparisons, but they may
13097 well not be faster than doing the ABS and one comparison.
13098 But ABS (X) <= C is a range comparison, which becomes a subtraction
13099 and a comparison, and is probably faster. */
13100 if (code == LE_EXPR
13101 && TREE_CODE (arg1) == INTEGER_CST
13102 && TREE_CODE (arg0) == ABS_EXPR
13103 && ! TREE_SIDE_EFFECTS (arg0)
13104 && (0 != (tem = negate_expr (arg1)))
13105 && TREE_CODE (tem) == INTEGER_CST
13106 && !TREE_OVERFLOW (tem))
13107 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13108 build2 (GE_EXPR, type,
13109 TREE_OPERAND (arg0, 0), tem),
13110 build2 (LE_EXPR, type,
13111 TREE_OPERAND (arg0, 0), arg1));
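/* E.g. abs (x) <= 5 becomes x >= -5 && x <= 5, built as a
   TRUTH_ANDIF_EXPR of the two endpoint comparisons. */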
13113 /* Convert ABS_EXPR<x> >= 0 to true. */
13114 strict_overflow_p = false;
13115 if (code == GE_EXPR
13116 && (integer_zerop (arg1)
13117 || (! HONOR_NANS (arg0)
13118 && real_zerop (arg1)))
13119 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13121 if (strict_overflow_p)
13122 fold_overflow_warning (("assuming signed overflow does not occur "
13123 "when simplifying comparison of "
13124 "absolute value and zero"),
13125 WARN_STRICT_OVERFLOW_CONDITIONAL);
13126 return omit_one_operand_loc (loc, type,
13127 constant_boolean_node (true, type),
13128 arg0);
13131 /* Convert ABS_EXPR<x> < 0 to false. */
13132 strict_overflow_p = false;
13133 if (code == LT_EXPR
13134 && (integer_zerop (arg1) || real_zerop (arg1))
13135 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13137 if (strict_overflow_p)
13138 fold_overflow_warning (("assuming signed overflow does not occur "
13139 "when simplifying comparison of "
13140 "absolute value and zero"),
13141 WARN_STRICT_OVERFLOW_CONDITIONAL);
13142 return omit_one_operand_loc (loc, type,
13143 constant_boolean_node (false, type),
13144 arg0);
13147 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13148 and similarly for >= into !=. */
13149 if ((code == LT_EXPR || code == GE_EXPR)
13150 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13151 && TREE_CODE (arg1) == LSHIFT_EXPR
13152 && integer_onep (TREE_OPERAND (arg1, 0)))
13153 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13154 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13155 TREE_OPERAND (arg1, 1)),
13156 build_zero_cst (TREE_TYPE (arg0)));
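/* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0 and
   x >= (1 << y) becomes (x >> y) != 0. */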
13158 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13159 otherwise Y might be >= # of bits in X's type and thus e.g.
13160 (unsigned char) (1 << Y) for Y == 15 might be 0.
13161 If the cast is widening, then 1 << Y should have unsigned type,
13162 otherwise if Y is number of bits in the signed shift type minus 1,
13163 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13164 Y == 31 might be 0xffffffff80000000. */
13165 if ((code == LT_EXPR || code == GE_EXPR)
13166 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13167 && CONVERT_EXPR_P (arg1)
13168 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13169 && (element_precision (TREE_TYPE (arg1))
13170 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13171 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13172 || (element_precision (TREE_TYPE (arg1))
13173 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13174 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13176 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13177 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13178 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13179 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13180 build_zero_cst (TREE_TYPE (arg0)));
13183 return NULL_TREE;
13185 case UNORDERED_EXPR:
13186 case ORDERED_EXPR:
13187 case UNLT_EXPR:
13188 case UNLE_EXPR:
13189 case UNGT_EXPR:
13190 case UNGE_EXPR:
13191 case UNEQ_EXPR:
13192 case LTGT_EXPR:
13193 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13195 t1 = fold_relational_const (code, type, arg0, arg1);
13196 if (t1 != NULL_TREE)
13197 return t1;
13200 /* If the first operand is NaN, the result is constant. */
13201 if (TREE_CODE (arg0) == REAL_CST
13202 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13203 && (code != LTGT_EXPR || ! flag_trapping_math))
13205 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13206 ? integer_zero_node
13207 : integer_one_node;
13208 return omit_one_operand_loc (loc, type, t1, arg1);
13211 /* If the second operand is NaN, the result is constant. */
13212 if (TREE_CODE (arg1) == REAL_CST
13213 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13214 && (code != LTGT_EXPR || ! flag_trapping_math))
13216 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13217 ? integer_zero_node
13218 : integer_one_node;
13219 return omit_one_operand_loc (loc, type, t1, arg0);
13222 /* Simplify unordered comparison of something with itself. */
13223 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13224 && operand_equal_p (arg0, arg1, 0))
13225 return constant_boolean_node (1, type);
13227 if (code == LTGT_EXPR
13228 && !flag_trapping_math
13229 && operand_equal_p (arg0, arg1, 0))
13230 return constant_boolean_node (0, type);
13232 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13234 tree targ0 = strip_float_extensions (arg0);
13235 tree targ1 = strip_float_extensions (arg1);
13236 tree newtype = TREE_TYPE (targ0);
13238 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13239 newtype = TREE_TYPE (targ1);
13241 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13242 return fold_build2_loc (loc, code, type,
13243 fold_convert_loc (loc, newtype, targ0),
13244 fold_convert_loc (loc, newtype, targ1));
13247 return NULL_TREE;
13249 case COMPOUND_EXPR:
13250 /* When pedantic, a compound expression can be neither an lvalue
13251 nor an integer constant expression. */
13252 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13253 return NULL_TREE;
13254 /* Don't let (0, 0) be a null pointer constant. */
13255 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13256 : fold_convert_loc (loc, type, arg1);
13257 return pedantic_non_lvalue_loc (loc, tem);
13259 case ASSERT_EXPR:
13260 /* An ASSERT_EXPR should never be passed to fold_binary. */
13261 gcc_unreachable ();
13263 default:
13264 return NULL_TREE;
13265 } /* switch (code) */
13268 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13269 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13270 of GOTO_EXPR. */
13272 static tree
13273 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13275 switch (TREE_CODE (*tp))
13277 case LABEL_EXPR:
13278 return *tp;
13280 case GOTO_EXPR:
13281 *walk_subtrees = 0;
13283 /* ... fall through ... */
13285 default:
13286 return NULL_TREE;
13290 /* Return whether the sub-tree ST contains a label which is accessible from
13291 outside the sub-tree. */
13293 static bool
13294 contains_label_p (tree st)
13296 return
13297 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13300 /* Fold a ternary expression of code CODE and type TYPE with operands
13301 OP0, OP1, and OP2. Return the folded expression if folding is
13302 successful. Otherwise, return NULL_TREE. */
13304 tree
13305 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13306 tree op0, tree op1, tree op2)
13308 tree tem;
13309 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13310 enum tree_code_class kind = TREE_CODE_CLASS (code);
13312 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13313 && TREE_CODE_LENGTH (code) == 3);
13315 /* If this is a commutative operation, and OP0 is a constant, move it
13316 to OP1 to reduce the number of tests below. */
13317 if (commutative_ternary_tree_code (code)
13318 && tree_swap_operands_p (op0, op1, true))
13319 return fold_build3_loc (loc, code, type, op1, op0, op2);
13321 tem = generic_simplify (loc, code, type, op0, op1, op2);
13322 if (tem)
13323 return tem;
13325 /* Strip any conversions that don't change the mode. This is safe
13326 for every expression, except for a comparison expression because
13327 its signedness is derived from its operands. So, in the latter
13328 case, only strip conversions that don't change the signedness.
13330 Note that this is done as an internal manipulation within the
13331 constant folder, in order to find the simplest representation of
13332 the arguments so that their form can be studied. In any cases,
13333 the appropriate type conversions should be put back in the tree
13334 that will get out of the constant folder. */
13335 if (op0)
13337 arg0 = op0;
13338 STRIP_NOPS (arg0);
13341 if (op1)
13343 arg1 = op1;
13344 STRIP_NOPS (arg1);
13347 if (op2)
13349 arg2 = op2;
13350 STRIP_NOPS (arg2);
13353 switch (code)
13355 case COMPONENT_REF:
13356 if (TREE_CODE (arg0) == CONSTRUCTOR
13357 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13359 unsigned HOST_WIDE_INT idx;
13360 tree field, value;
13361 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13362 if (field == arg1)
13363 return value;
13365 return NULL_TREE;
13367 case COND_EXPR:
13368 case VEC_COND_EXPR:
13369 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13370 so all simple results must be passed through pedantic_non_lvalue. */
13371 if (TREE_CODE (arg0) == INTEGER_CST)
13373 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13374 tem = integer_zerop (arg0) ? op2 : op1;
13375 /* Only optimize constant conditions when the selected branch
13376 has the same type as the COND_EXPR. This avoids optimizing
13377 away "c ? x : throw", where the throw has a void type.
13378 Avoid throwing away an operand that contains a label. */
13379 if ((!TREE_SIDE_EFFECTS (unused_op)
13380 || !contains_label_p (unused_op))
13381 && (! VOID_TYPE_P (TREE_TYPE (tem))
13382 || VOID_TYPE_P (type)))
13383 return pedantic_non_lvalue_loc (loc, tem);
13384 return NULL_TREE;
13386 else if (TREE_CODE (arg0) == VECTOR_CST)
13388 if ((TREE_CODE (arg1) == VECTOR_CST
13389 || TREE_CODE (arg1) == CONSTRUCTOR)
13390 && (TREE_CODE (arg2) == VECTOR_CST
13391 || TREE_CODE (arg2) == CONSTRUCTOR))
13393 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13394 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13395 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13396 for (i = 0; i < nelts; i++)
13398 tree val = VECTOR_CST_ELT (arg0, i);
13399 if (integer_all_onesp (val))
13400 sel[i] = i;
13401 else if (integer_zerop (val))
13402 sel[i] = nelts + i;
13403 else /* Currently unreachable. */
13404 return NULL_TREE;
13406 tree t = fold_vec_perm (type, arg1, arg2, sel);
13407 if (t != NULL_TREE)
13408 return t;
13412 /* If we have A op B ? A : C, we may be able to convert this to a
13413 simpler expression, depending on the operation and the values
13414 of B and C. Signed zeros prevent all of these transformations,
13415 for reasons given above each one.
13417 Also try swapping the arguments and inverting the conditional. */
13418 if (COMPARISON_CLASS_P (arg0)
13419 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13420 arg1, TREE_OPERAND (arg0, 1))
13421 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13423 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13424 if (tem)
13425 return tem;
13428 if (COMPARISON_CLASS_P (arg0)
13429 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13430 op2,
13431 TREE_OPERAND (arg0, 1))
13432 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13434 location_t loc0 = expr_location_or (arg0, loc);
13435 tem = fold_invert_truthvalue (loc0, arg0);
13436 if (tem && COMPARISON_CLASS_P (tem))
13438 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13439 if (tem)
13440 return tem;
13444 /* If the second operand is simpler than the third, swap them
13445 since that produces better jump optimization results. */
13446 if (truth_value_p (TREE_CODE (arg0))
13447 && tree_swap_operands_p (op1, op2, false))
13449 location_t loc0 = expr_location_or (arg0, loc);
13450 /* See if this can be inverted. If it can't, possibly because
13451 it was a floating-point inequality comparison, don't do
13452 anything. */
13453 tem = fold_invert_truthvalue (loc0, arg0);
13454 if (tem)
13455 return fold_build3_loc (loc, code, type, tem, op2, op1);
13458 /* Convert A ? 1 : 0 to simply A. */
13459 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13460 : (integer_onep (op1)
13461 && !VECTOR_TYPE_P (type)))
13462 && integer_zerop (op2)
13463 /* If we try to convert OP0 to our type, the
13464 call to fold will try to move the conversion inside
13465 a COND, which will recurse. In that case, the COND_EXPR
13466 is probably the best choice, so leave it alone. */
13467 && type == TREE_TYPE (arg0))
13468 return pedantic_non_lvalue_loc (loc, arg0);
13470 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13471 over COND_EXPR in cases such as floating point comparisons. */
13472 if (integer_zerop (op1)
13473 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13474 : (integer_onep (op2)
13475 && !VECTOR_TYPE_P (type)))
13476 && truth_value_p (TREE_CODE (arg0)))
13477 return pedantic_non_lvalue_loc (loc,
13478 fold_convert_loc (loc, type,
13479 invert_truthvalue_loc (loc,
13480 arg0)));
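/* E.g. c ? 1 : 0 folds to c itself when the types match, and
   c ? 0 : 1 folds to !c, which usually simplifies further than a
   conditional would. */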
13482 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13483 if (TREE_CODE (arg0) == LT_EXPR
13484 && integer_zerop (TREE_OPERAND (arg0, 1))
13485 && integer_zerop (op2)
13486 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13488 /* sign_bit_p looks through both zero and sign extensions,
13489 but for this optimization only sign extensions are
13490 usable. */
13491 tree tem2 = TREE_OPERAND (arg0, 0);
13492 while (tem != tem2)
13494 if (TREE_CODE (tem2) != NOP_EXPR
13495 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13497 tem = NULL_TREE;
13498 break;
13500 tem2 = TREE_OPERAND (tem2, 0);
13502 /* sign_bit_p only checks ARG1 bits within A's precision.
13503 If <sign bit of A> has wider type than A, bits outside
13504 of A's precision in <sign bit of A> need to be checked.
13505 If they are all 0, this optimization needs to be done
13506 in unsigned A's type; if they are all 1, in signed A's type;
13507 otherwise this can't be done. */
13508 if (tem
13509 && TYPE_PRECISION (TREE_TYPE (tem))
13510 < TYPE_PRECISION (TREE_TYPE (arg1))
13511 && TYPE_PRECISION (TREE_TYPE (tem))
13512 < TYPE_PRECISION (type))
13514 int inner_width, outer_width;
13515 tree tem_type;
13517 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13518 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13519 if (outer_width > TYPE_PRECISION (type))
13520 outer_width = TYPE_PRECISION (type);
13522 wide_int mask = wi::shifted_mask
13523 (inner_width, outer_width - inner_width, false,
13524 TYPE_PRECISION (TREE_TYPE (arg1)));
13526 wide_int common = mask & arg1;
13527 if (common == mask)
13529 tem_type = signed_type_for (TREE_TYPE (tem));
13530 tem = fold_convert_loc (loc, tem_type, tem);
13532 else if (common == 0)
13534 tem_type = unsigned_type_for (TREE_TYPE (tem));
13535 tem = fold_convert_loc (loc, tem_type, tem);
13537 else
13538 tem = NULL;
13541 if (tem)
13542 return
13543 fold_convert_loc (loc, type,
13544 fold_build2_loc (loc, BIT_AND_EXPR,
13545 TREE_TYPE (tem), tem,
13546 fold_convert_loc (loc,
13547 TREE_TYPE (tem),
13548 arg1)));
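/* E.g. for 32-bit int with suitable types, x < 0 ? 0x80000000 : 0
   becomes x & 0x80000000, selecting the sign bit without a branch. */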
13551 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13552 already handled above. */
13553 if (TREE_CODE (arg0) == BIT_AND_EXPR
13554 && integer_onep (TREE_OPERAND (arg0, 1))
13555 && integer_zerop (op2)
13556 && integer_pow2p (arg1))
13558 tree tem = TREE_OPERAND (arg0, 0);
13559 STRIP_NOPS (tem);
13560 if (TREE_CODE (tem) == RSHIFT_EXPR
13561 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13562 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13563 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13564 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13565 TREE_OPERAND (tem, 0), arg1);
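/* E.g. ((a >> 4) & 1) ? 16 : 0 becomes a & 16, since 16 == 1 << 4
   matches the bit being tested. */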
13568 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13569 is probably obsolete because the first operand should be a
13570 truth value (that's why we have the two cases above), but let's
13571 leave it in until we can confirm this for all front-ends. */
13572 if (integer_zerop (op2)
13573 && TREE_CODE (arg0) == NE_EXPR
13574 && integer_zerop (TREE_OPERAND (arg0, 1))
13575 && integer_pow2p (arg1)
13576 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13577 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13578 arg1, OEP_ONLY_CONST))
13579 return pedantic_non_lvalue_loc (loc,
13580 fold_convert_loc (loc, type,
13581 TREE_OPERAND (arg0, 0)));
13583 /* Disable the transformations below for vectors, since
13584 fold_binary_op_with_conditional_arg may undo them immediately,
13585 yielding an infinite loop. */
13586 if (code == VEC_COND_EXPR)
13587 return NULL_TREE;
13589 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13590 if (integer_zerop (op2)
13591 && truth_value_p (TREE_CODE (arg0))
13592 && truth_value_p (TREE_CODE (arg1))
13593 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13594 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13595 : TRUTH_ANDIF_EXPR,
13596 type, fold_convert_loc (loc, type, arg0), arg1);
13598 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13599 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13600 && truth_value_p (TREE_CODE (arg0))
13601 && truth_value_p (TREE_CODE (arg1))
13602 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13604 location_t loc0 = expr_location_or (arg0, loc);
13605 /* Only perform transformation if ARG0 is easily inverted. */
13606 tem = fold_invert_truthvalue (loc0, arg0);
13607 if (tem)
13608 return fold_build2_loc (loc, code == VEC_COND_EXPR
13609 ? BIT_IOR_EXPR
13610 : TRUTH_ORIF_EXPR,
13611 type, fold_convert_loc (loc, type, tem),
13612 arg1);
13615 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13616 if (integer_zerop (arg1)
13617 && truth_value_p (TREE_CODE (arg0))
13618 && truth_value_p (TREE_CODE (op2))
13619 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13621 location_t loc0 = expr_location_or (arg0, loc);
13622 /* Only perform transformation if ARG0 is easily inverted. */
13623 tem = fold_invert_truthvalue (loc0, arg0);
13624 if (tem)
13625 return fold_build2_loc (loc, code == VEC_COND_EXPR
13626 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13627 type, fold_convert_loc (loc, type, tem),
13628 op2);
13631 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13632 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13633 && truth_value_p (TREE_CODE (arg0))
13634 && truth_value_p (TREE_CODE (op2))
13635 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13636 return fold_build2_loc (loc, code == VEC_COND_EXPR
13637 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13638 type, fold_convert_loc (loc, type, arg0), op2);
13640 return NULL_TREE;
13642 case CALL_EXPR:
13643 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13644 of fold_ternary on them. */
13645 gcc_unreachable ();
13647 case BIT_FIELD_REF:
13648 if ((TREE_CODE (arg0) == VECTOR_CST
13649 || (TREE_CODE (arg0) == CONSTRUCTOR
13650 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13651 && (type == TREE_TYPE (TREE_TYPE (arg0))
13652 || (TREE_CODE (type) == VECTOR_TYPE
13653 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13655 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13656 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13657 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13658 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13660 if (n != 0
13661 && (idx % width) == 0
13662 && (n % width) == 0
13663 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13665 idx = idx / width;
13666 n = n / width;
13668 if (TREE_CODE (arg0) == VECTOR_CST)
13670 if (n == 1)
13671 return VECTOR_CST_ELT (arg0, idx);
13673 tree *vals = XALLOCAVEC (tree, n);
13674 for (unsigned i = 0; i < n; ++i)
13675 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13676 return build_vector (type, vals);
13679 /* Constructor elements can be subvectors. */
13680 unsigned HOST_WIDE_INT k = 1;
13681 if (CONSTRUCTOR_NELTS (arg0) != 0)
13683 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13684 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13685 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13688 /* We keep an exact subset of the constructor elements. */
13689 if ((idx % k) == 0 && (n % k) == 0)
13691 if (CONSTRUCTOR_NELTS (arg0) == 0)
13692 return build_constructor (type, NULL);
13693 idx /= k;
13694 n /= k;
13695 if (n == 1)
13697 if (idx < CONSTRUCTOR_NELTS (arg0))
13698 return CONSTRUCTOR_ELT (arg0, idx)->value;
13699 return build_zero_cst (type);
13702 vec<constructor_elt, va_gc> *vals;
13703 vec_alloc (vals, n);
13704 for (unsigned i = 0;
13705 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13706 ++i)
13707 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13708 CONSTRUCTOR_ELT
13709 (arg0, idx + i)->value);
13710 return build_constructor (type, vals);
13712 /* The bitfield references a single constructor element. */
13713 else if (idx + n <= (idx / k + 1) * k)
13715 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13716 return build_zero_cst (type);
13717 else if (n == k)
13718 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13719 else
13720 return fold_build3_loc (loc, code, type,
13721 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13722 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13727 /* A bit-field-ref that referenced the full argument can be stripped. */
13728 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13729 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13730 && integer_zerop (op2))
13731 return fold_convert_loc (loc, type, arg0);
13733 /* On constants we can use native encode/interpret to constant
13734 fold (nearly) all BIT_FIELD_REFs. */
13735 if (CONSTANT_CLASS_P (arg0)
13736 && can_native_interpret_type_p (type)
13737 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13738 /* This limitation should not be necessary; we just need to
13739 round this up to mode size. */
13740 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13741 /* Need bit-shifting of the buffer to relax the following. */
13742 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13744 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13745 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13746 unsigned HOST_WIDE_INT clen;
13747 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13748 /* ??? We cannot tell native_encode_expr to start at
13749 some random byte only. So limit us to a reasonable amount
13750 of work. */
13751 if (clen <= 4096)
13753 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13754 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13755 if (len > 0
13756 && len * BITS_PER_UNIT >= bitpos + bitsize)
13758 tree v = native_interpret_expr (type,
13759 b + bitpos / BITS_PER_UNIT,
13760 bitsize / BITS_PER_UNIT);
13761 if (v)
13762 return v;
13767 return NULL_TREE;
13769 case FMA_EXPR:
13770 /* For integers we can decompose the FMA if possible. */
13771 if (TREE_CODE (arg0) == INTEGER_CST
13772 && TREE_CODE (arg1) == INTEGER_CST)
13773 return fold_build2_loc (loc, PLUS_EXPR, type,
13774 const_binop (MULT_EXPR, arg0, arg1), arg2);
13775 if (integer_zerop (arg2))
13776 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13778 return fold_fma (loc, type, arg0, arg1, arg2);
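/* E.g. the integer FMA_EXPR <2, 3, c> folds to c + 6, and
   FMA_EXPR <a, b, 0> folds to a * b. */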
13780 case VEC_PERM_EXPR:
13781 if (TREE_CODE (arg2) == VECTOR_CST)
13783 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13784 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13785 unsigned char *sel2 = sel + nelts;
13786 bool need_mask_canon = false;
13787 bool need_mask_canon2 = false;
13788 bool all_in_vec0 = true;
13789 bool all_in_vec1 = true;
13790 bool maybe_identity = true;
13791 bool single_arg = (op0 == op1);
13792 bool changed = false;
13794 mask2 = 2 * nelts - 1;
13795 mask = single_arg ? (nelts - 1) : mask2;
13796 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13797 for (i = 0; i < nelts; i++)
13799 tree val = VECTOR_CST_ELT (arg2, i);
13800 if (TREE_CODE (val) != INTEGER_CST)
13801 return NULL_TREE;
13803 /* Make sure that the perm value is in an acceptable
13804 range. */
13805 wide_int t = val;
13806 need_mask_canon |= wi::gtu_p (t, mask);
13807 need_mask_canon2 |= wi::gtu_p (t, mask2);
13808 sel[i] = t.to_uhwi () & mask;
13809 sel2[i] = t.to_uhwi () & mask2;
13811 if (sel[i] < nelts)
13812 all_in_vec1 = false;
13813 else
13814 all_in_vec0 = false;
13816 if ((sel[i] & (nelts-1)) != i)
13817 maybe_identity = false;
13820 if (maybe_identity)
13822 if (all_in_vec0)
13823 return op0;
13824 if (all_in_vec1)
13825 return op1;
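/* E.g. with 4-element vectors, the selector {0,1,2,3} is the
   identity on the first operand, so the whole VEC_PERM_EXPR folds
   to op0; {4,5,6,7} likewise folds to op1. */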
13828 if (all_in_vec0)
13829 op1 = op0;
13830 else if (all_in_vec1)
13832 op0 = op1;
13833 for (i = 0; i < nelts; i++)
13834 sel[i] -= nelts;
13835 need_mask_canon = true;
13838 if ((TREE_CODE (op0) == VECTOR_CST
13839 || TREE_CODE (op0) == CONSTRUCTOR)
13840 && (TREE_CODE (op1) == VECTOR_CST
13841 || TREE_CODE (op1) == CONSTRUCTOR))
13843 tree t = fold_vec_perm (type, op0, op1, sel);
13844 if (t != NULL_TREE)
13845 return t;
13848 if (op0 == op1 && !single_arg)
13849 changed = true;
13851 /* Some targets are deficient and fail to expand a single
13852 argument permutation while still allowing an equivalent
13853 2-argument version. */
13854 if (need_mask_canon && arg2 == op2
13855 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13856 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13858 need_mask_canon = need_mask_canon2;
13859 sel = sel2;
13862 if (need_mask_canon && arg2 == op2)
13864 tree *tsel = XALLOCAVEC (tree, nelts);
13865 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13866 for (i = 0; i < nelts; i++)
13867 tsel[i] = build_int_cst (eltype, sel[i]);
13868 op2 = build_vector (TREE_TYPE (arg2), tsel);
13869 changed = true;
13872 if (changed)
13873 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13875 return NULL_TREE;
13877 default:
13878 return NULL_TREE;
13879 } /* switch (code) */
13882 /* Perform constant folding and related simplification of EXPR.
13883 The related simplifications include x*1 => x, x*0 => 0, etc.,
13884 and application of the associative law.
13885 NOP_EXPR conversions may be removed freely (as long as we
13886 are careful not to change the type of the overall expression).
13887 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13888 but we can constant-fold them if they have constant operands. */
13890 #ifdef ENABLE_FOLD_CHECKING
13891 # define fold(x) fold_1 (x)
13892 static tree fold_1 (tree);
13893 static
13894 #endif
13895 tree
13896 fold (tree expr)
13898 const tree t = expr;
13899 enum tree_code code = TREE_CODE (t);
13900 enum tree_code_class kind = TREE_CODE_CLASS (code);
13901 tree tem;
13902 location_t loc = EXPR_LOCATION (expr);
13904 /* Return right away if a constant. */
13905 if (kind == tcc_constant)
13906 return t;
13908 /* CALL_EXPR-like objects with variable numbers of operands are
13909 treated specially. */
13910 if (kind == tcc_vl_exp)
13912 if (code == CALL_EXPR)
13914 tem = fold_call_expr (loc, expr, false);
13915 return tem ? tem : expr;
13917 return expr;
13920 if (IS_EXPR_CODE_CLASS (kind))
13922 tree type = TREE_TYPE (t);
13923 tree op0, op1, op2;
13925 switch (TREE_CODE_LENGTH (code))
13927 case 1:
13928 op0 = TREE_OPERAND (t, 0);
13929 tem = fold_unary_loc (loc, code, type, op0);
13930 return tem ? tem : expr;
13931 case 2:
13932 op0 = TREE_OPERAND (t, 0);
13933 op1 = TREE_OPERAND (t, 1);
13934 tem = fold_binary_loc (loc, code, type, op0, op1);
13935 return tem ? tem : expr;
13936 case 3:
13937 op0 = TREE_OPERAND (t, 0);
13938 op1 = TREE_OPERAND (t, 1);
13939 op2 = TREE_OPERAND (t, 2);
13940 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13941 return tem ? tem : expr;
13942 default:
13943 break;
13947 switch (code)
13949 case ARRAY_REF:
13951 tree op0 = TREE_OPERAND (t, 0);
13952 tree op1 = TREE_OPERAND (t, 1);
13954 if (TREE_CODE (op1) == INTEGER_CST
13955 && TREE_CODE (op0) == CONSTRUCTOR
13956 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13958 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13959 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13960 unsigned HOST_WIDE_INT begin = 0;
13962 /* Find a matching index by means of a binary search. */
13963 while (begin != end)
13965 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13966 tree index = (*elts)[middle].index;
13968 if (TREE_CODE (index) == INTEGER_CST
13969 && tree_int_cst_lt (index, op1))
13970 begin = middle + 1;
13971 else if (TREE_CODE (index) == INTEGER_CST
13972 && tree_int_cst_lt (op1, index))
13973 end = middle;
13974 else if (TREE_CODE (index) == RANGE_EXPR
13975 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13976 begin = middle + 1;
13977 else if (TREE_CODE (index) == RANGE_EXPR
13978 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13979 end = middle;
13980 else
13981 return (*elts)[middle].value;
13985 return t;
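/* The binary search above assumes the CONSTRUCTOR indices are
   sorted; e.g. looking up index 5 in {[0]=a, [2...4]=b, [7]=c}
   probes the RANGE_EXPR [2...4], then the index 7, finds no
   match, and the ARRAY_REF is returned unfolded. */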
13988 /* Return a VECTOR_CST if possible. */
13989 case CONSTRUCTOR:
13991 tree type = TREE_TYPE (t);
13992 if (TREE_CODE (type) != VECTOR_TYPE)
13993 return t;
13995 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13996 unsigned HOST_WIDE_INT idx, pos = 0;
13997 tree value;
13999 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14001 if (!CONSTANT_CLASS_P (value))
14002 return t;
14003 if (TREE_CODE (value) == VECTOR_CST)
14005 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14006 vec[pos++] = VECTOR_CST_ELT (value, i);
14008 else
14009 vec[pos++] = value;
14011 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14012 vec[pos] = build_zero_cst (TREE_TYPE (type));
14014 return build_vector (type, vec);
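/* E.g. the constructor {1, 2} for a 4-element integer vector type
   folds to the VECTOR_CST <1, 2, 0, 0>; trailing elements missing
   from the constructor are filled with zeros. */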
14017 case CONST_DECL:
14018 return fold (DECL_INITIAL (t));
14020 default:
14021 return t;
14022 } /* switch (code) */
14025 #ifdef ENABLE_FOLD_CHECKING
14026 #undef fold
14028 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14029 hash_table<pointer_hash<const tree_node> > *);
14030 static void fold_check_failed (const_tree, const_tree);
14031 void print_fold_checksum (const_tree);
14033 /* When --enable-checking=fold, compute a digest of expr before
14034 and after the actual fold call to verify that fold did not
14035 accidentally change the original expr. */
14037 tree
14038 fold (tree expr)
14040 tree ret;
14041 struct md5_ctx ctx;
14042 unsigned char checksum_before[16], checksum_after[16];
14043 hash_table<pointer_hash<const tree_node> > ht (32);
14045 md5_init_ctx (&ctx);
14046 fold_checksum_tree (expr, &ctx, &ht);
14047 md5_finish_ctx (&ctx, checksum_before);
14048 ht.empty ();
14050 ret = fold_1 (expr);
14052 md5_init_ctx (&ctx);
14053 fold_checksum_tree (expr, &ctx, &ht);
14054 md5_finish_ctx (&ctx, checksum_after);
14056 if (memcmp (checksum_before, checksum_after, 16))
14057 fold_check_failed (expr, ret);
14059 return ret;
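/* A sketch of what this catches: if some subroutine of fold_1
   mutated the input tree in place (say, by overwriting one of its
   operands), the second digest would differ from the first and
   fold_check_failed would raise an internal error. */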
14062 void
14063 print_fold_checksum (const_tree expr)
14065 struct md5_ctx ctx;
14066 unsigned char checksum[16], cnt;
14067 hash_table<pointer_hash<const tree_node> > ht (32);
14069 md5_init_ctx (&ctx);
14070 fold_checksum_tree (expr, &ctx, &ht);
14071 md5_finish_ctx (&ctx, checksum);
14072 for (cnt = 0; cnt < 16; ++cnt)
14073 fprintf (stderr, "%02x", checksum[cnt]);
14074 putc ('\n', stderr);
14077 static void
14078 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14080 internal_error ("fold check: original tree changed by fold");
14083 static void
14084 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14085 hash_table<pointer_hash <const tree_node> > *ht)
14087 const tree_node **slot;
14088 enum tree_code code;
14089 union tree_node buf;
14090 int i, len;
14092 recursive_label:
14093 if (expr == NULL)
14094 return;
14095 slot = ht->find_slot (expr, INSERT);
14096 if (*slot != NULL)
14097 return;
14098 *slot = expr;
14099 code = TREE_CODE (expr);
14100 if (TREE_CODE_CLASS (code) == tcc_declaration
14101 && HAS_DECL_ASSEMBLER_NAME_P (expr))
14103 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
14104 memcpy ((char *) &buf, expr, tree_size (expr));
14105 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14106 buf.decl_with_vis.symtab_node = NULL;
14107 expr = (tree) &buf;
14109 else if (TREE_CODE_CLASS (code) == tcc_type
14110 && (TYPE_POINTER_TO (expr)
14111 || TYPE_REFERENCE_TO (expr)
14112 || TYPE_CACHED_VALUES_P (expr)
14113 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14114 || TYPE_NEXT_VARIANT (expr)))
14116 /* Allow these fields to be modified. */
14117 tree tmp;
14118 memcpy ((char *) &buf, expr, tree_size (expr));
14119 expr = tmp = (tree) &buf;
14120 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14121 TYPE_POINTER_TO (tmp) = NULL;
14122 TYPE_REFERENCE_TO (tmp) = NULL;
14123 TYPE_NEXT_VARIANT (tmp) = NULL;
14124 if (TYPE_CACHED_VALUES_P (tmp))
14126 TYPE_CACHED_VALUES_P (tmp) = 0;
14127 TYPE_CACHED_VALUES (tmp) = NULL;
14130 md5_process_bytes (expr, tree_size (expr), ctx);
14131 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14132 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14133 if (TREE_CODE_CLASS (code) != tcc_type
14134 && TREE_CODE_CLASS (code) != tcc_declaration
14135 && code != TREE_LIST
14136 && code != SSA_NAME
14137 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14138 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14139 switch (TREE_CODE_CLASS (code))
14141 case tcc_constant:
14142 switch (code)
14144 case STRING_CST:
14145 md5_process_bytes (TREE_STRING_POINTER (expr),
14146 TREE_STRING_LENGTH (expr), ctx);
14147 break;
14148 case COMPLEX_CST:
14149 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14150 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14151 break;
14152 case VECTOR_CST:
14153 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14154 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14155 break;
14156 default:
14157 break;
14159 break;
14160 case tcc_exceptional:
14161 switch (code)
14163 case TREE_LIST:
14164 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14165 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14166 expr = TREE_CHAIN (expr);
14167 goto recursive_label;
14168 break;
14169 case TREE_VEC:
14170 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14171 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14172 break;
14173 default:
14174 break;
14176 break;
14177 case tcc_expression:
14178 case tcc_reference:
14179 case tcc_comparison:
14180 case tcc_unary:
14181 case tcc_binary:
14182 case tcc_statement:
14183 case tcc_vl_exp:
14184 len = TREE_OPERAND_LENGTH (expr);
14185 for (i = 0; i < len; ++i)
14186 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14187 break;
14188 case tcc_declaration:
14189 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14190 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14191 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14193 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14194 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14195 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14196 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14197 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14200 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14202 if (TREE_CODE (expr) == FUNCTION_DECL)
14204 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14205 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14207 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14209 break;
14210 case tcc_type:
14211 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14212 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14213 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14214 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14215 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14216 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14217 if (INTEGRAL_TYPE_P (expr)
14218 || SCALAR_FLOAT_TYPE_P (expr))
14220 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14221 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14223 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14224 if (TREE_CODE (expr) == RECORD_TYPE
14225 || TREE_CODE (expr) == UNION_TYPE
14226 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14227 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14228 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14229 break;
14230 default:
14231 break;
14235 /* Helper function for outputting the checksum of a tree T. When
14236 debugging with gdb, you can "define mynext" to be "next" followed
14237 by "call debug_fold_checksum (op0)", then just trace down till the
14238 outputs differ. */
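/* A sketch of the gdb recipe described above (assumes a live gdb
   session on the compiler, with op0 in scope):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext  */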
14240 DEBUG_FUNCTION void
14241 debug_fold_checksum (const_tree t)
14243 int i;
14244 unsigned char checksum[16];
14245 struct md5_ctx ctx;
14246 hash_table<pointer_hash<const tree_node> > ht (32);
14248 md5_init_ctx (&ctx);
14249 fold_checksum_tree (t, &ctx, &ht);
14250 md5_finish_ctx (&ctx, checksum);
14251 ht.empty ();
14253 for (i = 0; i < 16; i++)
14254 fprintf (stderr, "%d ", checksum[i]);
14256 fprintf (stderr, "\n");
14259 #endif
14261 /* Fold a unary tree expression with code CODE of type TYPE with an
14262 operand OP0. LOC is the location of the resulting expression.
14263 Return a folded expression if successful. Otherwise, return a tree
14264 expression with code CODE of type TYPE with an operand OP0. */
14266 tree
14267 fold_build1_stat_loc (location_t loc,
14268 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14270 tree tem;
14271 #ifdef ENABLE_FOLD_CHECKING
14272 unsigned char checksum_before[16], checksum_after[16];
14273 struct md5_ctx ctx;
14274 hash_table<pointer_hash<const tree_node> > ht (32);
14276 md5_init_ctx (&ctx);
14277 fold_checksum_tree (op0, &ctx, &ht);
14278 md5_finish_ctx (&ctx, checksum_before);
14279 ht.empty ();
14280 #endif
14282 tem = fold_unary_loc (loc, code, type, op0);
14283 if (!tem)
14284 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14286 #ifdef ENABLE_FOLD_CHECKING
14287 md5_init_ctx (&ctx);
14288 fold_checksum_tree (op0, &ctx, &ht);
14289 md5_finish_ctx (&ctx, checksum_after);
14291 if (memcmp (checksum_before, checksum_after, 16))
14292 fold_check_failed (op0, tem);
14293 #endif
14294 return tem;
14297 /* Fold a binary tree expression with code CODE of type TYPE with
14298 operands OP0 and OP1. LOC is the location of the resulting
14299 expression. Return a folded expression if successful. Otherwise,
14300 return a tree expression with code CODE of type TYPE with operands
14301 OP0 and OP1. */
14303 tree
14304 fold_build2_stat_loc (location_t loc,
14305 enum tree_code code, tree type, tree op0, tree op1
14306 MEM_STAT_DECL)
14308 tree tem;
14309 #ifdef ENABLE_FOLD_CHECKING
14310 unsigned char checksum_before_op0[16],
14311 checksum_before_op1[16],
14312 checksum_after_op0[16],
14313 checksum_after_op1[16];
14314 struct md5_ctx ctx;
14315 hash_table<pointer_hash<const tree_node> > ht (32);
14317 md5_init_ctx (&ctx);
14318 fold_checksum_tree (op0, &ctx, &ht);
14319 md5_finish_ctx (&ctx, checksum_before_op0);
14320 ht.empty ();
14322 md5_init_ctx (&ctx);
14323 fold_checksum_tree (op1, &ctx, &ht);
14324 md5_finish_ctx (&ctx, checksum_before_op1);
14325 ht.empty ();
14326 #endif
14328 tem = fold_binary_loc (loc, code, type, op0, op1);
14329 if (!tem)
14330 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14332 #ifdef ENABLE_FOLD_CHECKING
14333 md5_init_ctx (&ctx);
14334 fold_checksum_tree (op0, &ctx, &ht);
14335 md5_finish_ctx (&ctx, checksum_after_op0);
14336 ht.empty ();
14338 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14339 fold_check_failed (op0, tem);
14341 md5_init_ctx (&ctx);
14342 fold_checksum_tree (op1, &ctx, &ht);
14343 md5_finish_ctx (&ctx, checksum_after_op1);
14345 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14346 fold_check_failed (op1, tem);
14347 #endif
14348 return tem;
14351 /* Fold a ternary tree expression with code CODE of type TYPE with
14352 operands OP0, OP1, and OP2. Return a folded expression if
14353 successful. Otherwise, return a tree expression with code CODE of
14354 type TYPE with operands OP0, OP1, and OP2. */
14356 tree
14357 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14358 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14360 tree tem;
14361 #ifdef ENABLE_FOLD_CHECKING
14362 unsigned char checksum_before_op0[16],
14363 checksum_before_op1[16],
14364 checksum_before_op2[16],
14365 checksum_after_op0[16],
14366 checksum_after_op1[16],
14367 checksum_after_op2[16];
14368 struct md5_ctx ctx;
14369 hash_table<pointer_hash<const tree_node> > ht (32);
14371 md5_init_ctx (&ctx);
14372 fold_checksum_tree (op0, &ctx, &ht);
14373 md5_finish_ctx (&ctx, checksum_before_op0);
14374 ht.empty ();
14376 md5_init_ctx (&ctx);
14377 fold_checksum_tree (op1, &ctx, &ht);
14378 md5_finish_ctx (&ctx, checksum_before_op1);
14379 ht.empty ();
14381 md5_init_ctx (&ctx);
14382 fold_checksum_tree (op2, &ctx, &ht);
14383 md5_finish_ctx (&ctx, checksum_before_op2);
14384 ht.empty ();
14385 #endif
14387 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14388 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14389 if (!tem)
14390 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14392 #ifdef ENABLE_FOLD_CHECKING
14393 md5_init_ctx (&ctx);
14394 fold_checksum_tree (op0, &ctx, &ht);
14395 md5_finish_ctx (&ctx, checksum_after_op0);
14396 ht.empty ();
14398 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14399 fold_check_failed (op0, tem);
14401 md5_init_ctx (&ctx);
14402 fold_checksum_tree (op1, &ctx, &ht);
14403 md5_finish_ctx (&ctx, checksum_after_op1);
14404 ht.empty ();
14406 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14407 fold_check_failed (op1, tem);
14409 md5_init_ctx (&ctx);
14410 fold_checksum_tree (op2, &ctx, &ht);
14411 md5_finish_ctx (&ctx, checksum_after_op2);
14413 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14414 fold_check_failed (op2, tem);
14415 #endif
14416 return tem;
14419 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
14420 arguments in ARGARRAY, and a null static chain.
14421 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14422 of type TYPE from the given operands as constructed by build_call_array. */
14424 tree
14425 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14426 int nargs, tree *argarray)
14428 tree tem;
14429 #ifdef ENABLE_FOLD_CHECKING
14430 unsigned char checksum_before_fn[16],
14431 checksum_before_arglist[16],
14432 checksum_after_fn[16],
14433 checksum_after_arglist[16];
14434 struct md5_ctx ctx;
14435 hash_table<pointer_hash<const tree_node> > ht (32);
14436 int i;
14438 md5_init_ctx (&ctx);
14439 fold_checksum_tree (fn, &ctx, &ht);
14440 md5_finish_ctx (&ctx, checksum_before_fn);
14441 ht.empty ();
14443 md5_init_ctx (&ctx);
14444 for (i = 0; i < nargs; i++)
14445 fold_checksum_tree (argarray[i], &ctx, &ht);
14446 md5_finish_ctx (&ctx, checksum_before_arglist);
14447 ht.empty ();
14448 #endif
14450 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14451 if (!tem)
14452 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14454 #ifdef ENABLE_FOLD_CHECKING
14455 md5_init_ctx (&ctx);
14456 fold_checksum_tree (fn, &ctx, &ht);
14457 md5_finish_ctx (&ctx, checksum_after_fn);
14458 ht.empty ();
14460 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14461 fold_check_failed (fn, tem);
14463 md5_init_ctx (&ctx);
14464 for (i = 0; i < nargs; i++)
14465 fold_checksum_tree (argarray[i], &ctx, &ht);
14466 md5_finish_ctx (&ctx, checksum_after_arglist);
14468 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14469 fold_check_failed (NULL_TREE, tem);
14470 #endif
14471 return tem;
14474 /* Perform constant folding and related simplification of initializer
14475 expression EXPR. These behave identically to "fold_buildN" but ignore
14476 potential run-time traps and exceptions that fold must preserve. */
14478 #define START_FOLD_INIT \
14479 int saved_signaling_nans = flag_signaling_nans;\
14480 int saved_trapping_math = flag_trapping_math;\
14481 int saved_rounding_math = flag_rounding_math;\
14482 int saved_trapv = flag_trapv;\
14483 int saved_folding_initializer = folding_initializer;\
14484 flag_signaling_nans = 0;\
14485 flag_trapping_math = 0;\
14486 flag_rounding_math = 0;\
14487 flag_trapv = 0;\
14488 folding_initializer = 1;
14490 #define END_FOLD_INIT \
14491 flag_signaling_nans = saved_signaling_nans;\
14492 flag_trapping_math = saved_trapping_math;\
14493 flag_rounding_math = saved_rounding_math;\
14494 flag_trapv = saved_trapv;\
14495 folding_initializer = saved_folding_initializer;
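/* Illustrative sketch (not from the original source; A and B are
   hypothetical operand trees): the initializer wrappers below expand
   to the pattern

     tree result;
     START_FOLD_INIT;
     result = fold_build2_loc (loc, PLUS_EXPR, type, a, b);
     END_FOLD_INIT;

   saving and clearing the floating-point and trapping flags around
   the fold so that, e.g., -frounding-math does not prevent folding a
   static initializer such as "static double d = 1.0 / 3.0;".  */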
14497 tree
14498 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14499 tree type, tree op)
14501 tree result;
14502 START_FOLD_INIT;
14504 result = fold_build1_loc (loc, code, type, op);
14506 END_FOLD_INIT;
14507 return result;
14510 tree
14511 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14512 tree type, tree op0, tree op1)
14514 tree result;
14515 START_FOLD_INIT;
14517 result = fold_build2_loc (loc, code, type, op0, op1);
14519 END_FOLD_INIT;
14520 return result;
14523 tree
14524 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14525 int nargs, tree *argarray)
14527 tree result;
14528 START_FOLD_INIT;
14530 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14532 END_FOLD_INIT;
14533 return result;
14536 #undef START_FOLD_INIT
14537 #undef END_FOLD_INIT
14539 /* Determine if the first argument is a multiple of the second argument.
14540 Return 0 if it is not, or if we cannot easily determine it to be.
14542 An example of the sort of thing we care about (at this point; this routine
14543 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14544 fold cases do now) is discovering that
14546 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14548 is a multiple of
14550 SAVE_EXPR (J * 8)
14552 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14554 This code also handles discovering that
14556 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14558 is a multiple of 8 so we don't have to worry about dealing with a
14559 possible remainder.
14561 Note that we *look* inside a SAVE_EXPR only to determine how it was
14562 calculated; it is not safe for fold to do much of anything else with the
14563 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14564 at run time. For example, the latter example above *cannot* be implemented
14565 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14566 evaluation time of the original SAVE_EXPR is not necessarily the same at
14567 the time the new expression is evaluated. The only optimization of this
14568 sort that would be valid is changing
14570 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14572 divided by 8 to
14574 SAVE_EXPR (I) * SAVE_EXPR (J)
14576 (where the same SAVE_EXPR (J) is used in the original and the
14577 transformed version). */
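/* Worked examples (illustrative, not from the original comment):
   given the cases below, J * 8 + 16 is recognized as a multiple of 8
   through the PLUS_EXPR and MULT_EXPR cases, while J * 8 + 4 is not,
   since the constant 4 fails the INTEGER_CST test against 8.  */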
14579 int
14580 multiple_of_p (tree type, const_tree top, const_tree bottom)
14582 if (operand_equal_p (top, bottom, 0))
14583 return 1;
14585 if (TREE_CODE (type) != INTEGER_TYPE)
14586 return 0;
14588 switch (TREE_CODE (top))
14590 case BIT_AND_EXPR:
14591 /* Bitwise and provides a power of two multiple. If the mask is
14592 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14593 if (!integer_pow2p (bottom))
14594 return 0;
14595 /* FALLTHRU */
14597 case MULT_EXPR:
14598 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14599 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14601 case PLUS_EXPR:
14602 case MINUS_EXPR:
14603 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14604 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14606 case LSHIFT_EXPR:
14607 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14609 tree op1, t1;
14611 op1 = TREE_OPERAND (top, 1);
14612 /* const_binop may not detect overflow correctly,
14613 so check for it explicitly here. */
14614 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14615 && 0 != (t1 = fold_convert (type,
14616 const_binop (LSHIFT_EXPR,
14617 size_one_node,
14618 op1)))
14619 && !TREE_OVERFLOW (t1))
14620 return multiple_of_p (type, t1, bottom);
14622 return 0;
14624 case NOP_EXPR:
14625 /* Can't handle conversions from non-integral or wider integral type. */
14626 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14627 || (TYPE_PRECISION (type)
14628 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14629 return 0;
14631 /* .. fall through ... */
14633 case SAVE_EXPR:
14634 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14636 case COND_EXPR:
14637 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14638 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14640 case INTEGER_CST:
14641 if (TREE_CODE (bottom) != INTEGER_CST
14642 || integer_zerop (bottom)
14643 || (TYPE_UNSIGNED (type)
14644 && (tree_int_cst_sgn (top) < 0
14645 || tree_int_cst_sgn (bottom) < 0)))
14646 return 0;
14647 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14648 SIGNED);
14650 default:
14651 return 0;
14655 /* Return true if CODE or TYPE is known to be non-negative. */
14657 static bool
14658 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14660 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14661 && truth_value_p (code))
14662 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14663 have a signed:1 type (where the values are -1 and 0). */
14664 return true;
14665 return false;
14668 /* Return true if (CODE OP0) is known to be non-negative. If the return
14669 value is based on the assumption that signed overflow is undefined,
14670 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14671 *STRICT_OVERFLOW_P. */
14673 bool
14674 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14675 bool *strict_overflow_p)
14677 if (TYPE_UNSIGNED (type))
14678 return true;
14680 switch (code)
14682 case ABS_EXPR:
14683 /* We can't return 1 if flag_wrapv is set because
14684 ABS_EXPR<INT_MIN> = INT_MIN. */
14685 if (!INTEGRAL_TYPE_P (type))
14686 return true;
14687 if (TYPE_OVERFLOW_UNDEFINED (type))
14689 *strict_overflow_p = true;
14690 return true;
14692 break;
14694 case NON_LVALUE_EXPR:
14695 case FLOAT_EXPR:
14696 case FIX_TRUNC_EXPR:
14697 return tree_expr_nonnegative_warnv_p (op0,
14698 strict_overflow_p);
14700 CASE_CONVERT:
14702 tree inner_type = TREE_TYPE (op0);
14703 tree outer_type = type;
14705 if (TREE_CODE (outer_type) == REAL_TYPE)
14707 if (TREE_CODE (inner_type) == REAL_TYPE)
14708 return tree_expr_nonnegative_warnv_p (op0,
14709 strict_overflow_p);
14710 if (INTEGRAL_TYPE_P (inner_type))
14712 if (TYPE_UNSIGNED (inner_type))
14713 return true;
14714 return tree_expr_nonnegative_warnv_p (op0,
14715 strict_overflow_p);
14718 else if (INTEGRAL_TYPE_P (outer_type))
14720 if (TREE_CODE (inner_type) == REAL_TYPE)
14721 return tree_expr_nonnegative_warnv_p (op0,
14722 strict_overflow_p);
14723 if (INTEGRAL_TYPE_P (inner_type))
14724 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14725 && TYPE_UNSIGNED (inner_type);
14728 break;
14730 default:
14731 return tree_simple_nonnegative_warnv_p (code, type);
14734 /* We don't know sign of `t', so be conservative and return false. */
14735 return false;
14738 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14739 value is based on the assumption that signed overflow is undefined,
14740 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14741 *STRICT_OVERFLOW_P. */
14743 bool
14744 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14745 tree op1, bool *strict_overflow_p)
14747 if (TYPE_UNSIGNED (type))
14748 return true;
14750 switch (code)
14752 case POINTER_PLUS_EXPR:
14753 case PLUS_EXPR:
14754 if (FLOAT_TYPE_P (type))
14755 return (tree_expr_nonnegative_warnv_p (op0,
14756 strict_overflow_p)
14757 && tree_expr_nonnegative_warnv_p (op1,
14758 strict_overflow_p));
14760 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14761 both unsigned and at least 2 bits shorter than the result. */
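/* E.g. (illustrative): two uint8_t values zero-extended into a
   32-bit int sum to at most 255 + 255 = 510, so the result is
   non-negative; here prec = MAX (8, 8) + 1 = 9 < 32.  */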
14762 if (TREE_CODE (type) == INTEGER_TYPE
14763 && TREE_CODE (op0) == NOP_EXPR
14764 && TREE_CODE (op1) == NOP_EXPR)
14766 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14767 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14768 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14769 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14771 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14772 TYPE_PRECISION (inner2)) + 1;
14773 return prec < TYPE_PRECISION (type);
14776 break;
14778 case MULT_EXPR:
14779 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14781 /* x * x is always non-negative for floating point x
14782 or without overflow. */
14783 if (operand_equal_p (op0, op1, 0)
14784 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14785 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14787 if (ANY_INTEGRAL_TYPE_P (type)
14788 && TYPE_OVERFLOW_UNDEFINED (type))
14789 *strict_overflow_p = true;
14790 return true;
14794 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14795 both unsigned and the sum of their precisions is less than that of the result. */
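/* E.g. (illustrative): two uint8_t values zero-extended into a
   32-bit int multiply to at most 255 * 255 = 65025, which fits in
   16 bits, so precision0 + precision1 = 16 < 32 and the product is
   known non-negative.  */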
14796 if (TREE_CODE (type) == INTEGER_TYPE
14797 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14798 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14800 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14801 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14802 : TREE_TYPE (op0);
14803 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14804 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14805 : TREE_TYPE (op1);
14807 bool unsigned0 = TYPE_UNSIGNED (inner0);
14808 bool unsigned1 = TYPE_UNSIGNED (inner1);
14810 if (TREE_CODE (op0) == INTEGER_CST)
14811 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14813 if (TREE_CODE (op1) == INTEGER_CST)
14814 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14816 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14817 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14819 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14820 ? tree_int_cst_min_precision (op0, UNSIGNED)
14821 : TYPE_PRECISION (inner0);
14823 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14824 ? tree_int_cst_min_precision (op1, UNSIGNED)
14825 : TYPE_PRECISION (inner1);
14827 return precision0 + precision1 < TYPE_PRECISION (type);
14830 return false;
14832 case BIT_AND_EXPR:
14833 case MAX_EXPR:
14834 return (tree_expr_nonnegative_warnv_p (op0,
14835 strict_overflow_p)
14836 || tree_expr_nonnegative_warnv_p (op1,
14837 strict_overflow_p));
14839 case BIT_IOR_EXPR:
14840 case BIT_XOR_EXPR:
14841 case MIN_EXPR:
14842 case RDIV_EXPR:
14843 case TRUNC_DIV_EXPR:
14844 case CEIL_DIV_EXPR:
14845 case FLOOR_DIV_EXPR:
14846 case ROUND_DIV_EXPR:
14847 return (tree_expr_nonnegative_warnv_p (op0,
14848 strict_overflow_p)
14849 && tree_expr_nonnegative_warnv_p (op1,
14850 strict_overflow_p));
14852 case TRUNC_MOD_EXPR:
14853 case CEIL_MOD_EXPR:
14854 case FLOOR_MOD_EXPR:
14855 case ROUND_MOD_EXPR:
14856 return tree_expr_nonnegative_warnv_p (op0,
14857 strict_overflow_p);
14858 default:
14859 return tree_simple_nonnegative_warnv_p (code, type);
14862 /* We don't know sign of `t', so be conservative and return false. */
14863 return false;
14866 /* Return true if T is known to be non-negative. If the return
14867 value is based on the assumption that signed overflow is undefined,
14868 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14869 *STRICT_OVERFLOW_P. */
14871 bool
14872 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14874 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14875 return true;
14877 switch (TREE_CODE (t))
14879 case INTEGER_CST:
14880 return tree_int_cst_sgn (t) >= 0;
14882 case REAL_CST:
14883 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14885 case FIXED_CST:
14886 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14888 case COND_EXPR:
14889 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14890 strict_overflow_p)
14891 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14892 strict_overflow_p));
14893 default:
14894 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14895 TREE_TYPE (t));
14897 /* We don't know sign of `t', so be conservative and return false. */
14898 return false;
14901 /* Return true if T is known to be non-negative. If the return
14902 value is based on the assumption that signed overflow is undefined,
14903 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14904 *STRICT_OVERFLOW_P. */
14906 bool
14907 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14908 tree arg0, tree arg1, bool *strict_overflow_p)
14910 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14911 switch (DECL_FUNCTION_CODE (fndecl))
14913 CASE_FLT_FN (BUILT_IN_ACOS):
14914 CASE_FLT_FN (BUILT_IN_ACOSH):
14915 CASE_FLT_FN (BUILT_IN_CABS):
14916 CASE_FLT_FN (BUILT_IN_COSH):
14917 CASE_FLT_FN (BUILT_IN_ERFC):
14918 CASE_FLT_FN (BUILT_IN_EXP):
14919 CASE_FLT_FN (BUILT_IN_EXP10):
14920 CASE_FLT_FN (BUILT_IN_EXP2):
14921 CASE_FLT_FN (BUILT_IN_FABS):
14922 CASE_FLT_FN (BUILT_IN_FDIM):
14923 CASE_FLT_FN (BUILT_IN_HYPOT):
14924 CASE_FLT_FN (BUILT_IN_POW10):
14925 CASE_INT_FN (BUILT_IN_FFS):
14926 CASE_INT_FN (BUILT_IN_PARITY):
14927 CASE_INT_FN (BUILT_IN_POPCOUNT):
14928 CASE_INT_FN (BUILT_IN_CLZ):
14929 CASE_INT_FN (BUILT_IN_CLRSB):
14930 case BUILT_IN_BSWAP32:
14931 case BUILT_IN_BSWAP64:
14932 /* Always true. */
14933 return true;
14935 CASE_FLT_FN (BUILT_IN_SQRT):
14936 /* sqrt(-0.0) is -0.0. */
14937 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14938 return true;
14939 return tree_expr_nonnegative_warnv_p (arg0,
14940 strict_overflow_p);
14942 CASE_FLT_FN (BUILT_IN_ASINH):
14943 CASE_FLT_FN (BUILT_IN_ATAN):
14944 CASE_FLT_FN (BUILT_IN_ATANH):
14945 CASE_FLT_FN (BUILT_IN_CBRT):
14946 CASE_FLT_FN (BUILT_IN_CEIL):
14947 CASE_FLT_FN (BUILT_IN_ERF):
14948 CASE_FLT_FN (BUILT_IN_EXPM1):
14949 CASE_FLT_FN (BUILT_IN_FLOOR):
14950 CASE_FLT_FN (BUILT_IN_FMOD):
14951 CASE_FLT_FN (BUILT_IN_FREXP):
14952 CASE_FLT_FN (BUILT_IN_ICEIL):
14953 CASE_FLT_FN (BUILT_IN_IFLOOR):
14954 CASE_FLT_FN (BUILT_IN_IRINT):
14955 CASE_FLT_FN (BUILT_IN_IROUND):
14956 CASE_FLT_FN (BUILT_IN_LCEIL):
14957 CASE_FLT_FN (BUILT_IN_LDEXP):
14958 CASE_FLT_FN (BUILT_IN_LFLOOR):
14959 CASE_FLT_FN (BUILT_IN_LLCEIL):
14960 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14961 CASE_FLT_FN (BUILT_IN_LLRINT):
14962 CASE_FLT_FN (BUILT_IN_LLROUND):
14963 CASE_FLT_FN (BUILT_IN_LRINT):
14964 CASE_FLT_FN (BUILT_IN_LROUND):
14965 CASE_FLT_FN (BUILT_IN_MODF):
14966 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14967 CASE_FLT_FN (BUILT_IN_RINT):
14968 CASE_FLT_FN (BUILT_IN_ROUND):
14969 CASE_FLT_FN (BUILT_IN_SCALB):
14970 CASE_FLT_FN (BUILT_IN_SCALBLN):
14971 CASE_FLT_FN (BUILT_IN_SCALBN):
14972 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14973 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14974 CASE_FLT_FN (BUILT_IN_SINH):
14975 CASE_FLT_FN (BUILT_IN_TANH):
14976 CASE_FLT_FN (BUILT_IN_TRUNC):
14977 /* True if the 1st argument is nonnegative. */
14978 return tree_expr_nonnegative_warnv_p (arg0,
14979 strict_overflow_p);
14981 CASE_FLT_FN (BUILT_IN_FMAX):
14982 /* True if the 1st OR 2nd arguments are nonnegative. */
14983 return (tree_expr_nonnegative_warnv_p (arg0,
14984 strict_overflow_p)
14985 || (tree_expr_nonnegative_warnv_p (arg1,
14986 strict_overflow_p)));
14988 CASE_FLT_FN (BUILT_IN_FMIN):
14989 /* True if the 1st AND 2nd arguments are nonnegative. */
14990 return (tree_expr_nonnegative_warnv_p (arg0,
14991 strict_overflow_p)
14992 && (tree_expr_nonnegative_warnv_p (arg1,
14993 strict_overflow_p)));
14995 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14996 /* True if the 2nd argument is nonnegative. */
14997 return tree_expr_nonnegative_warnv_p (arg1,
14998 strict_overflow_p);
15000 CASE_FLT_FN (BUILT_IN_POWI):
15001 /* True if the 1st argument is nonnegative or the second
15002 argument is an even integer. */
15003 if (TREE_CODE (arg1) == INTEGER_CST
15004 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15005 return true;
15006 return tree_expr_nonnegative_warnv_p (arg0,
15007 strict_overflow_p);
15009 CASE_FLT_FN (BUILT_IN_POW):
15010 /* True if the 1st argument is nonnegative or the second
15011 argument is an even integer valued real. */
15012 if (TREE_CODE (arg1) == REAL_CST)
15014 REAL_VALUE_TYPE c;
15015 HOST_WIDE_INT n;
15017 c = TREE_REAL_CST (arg1);
15018 n = real_to_integer (&c);
15019 if ((n & 1) == 0)
15021 REAL_VALUE_TYPE cint;
15022 real_from_integer (&cint, VOIDmode, n, SIGNED);
15023 if (real_identical (&c, &cint))
15024 return true;
15027 return tree_expr_nonnegative_warnv_p (arg0,
15028 strict_overflow_p);
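/* Illustrative: with the even constant exponent 2.0, the case above
   treats pow (x, 2.0) as nonnegative, e.g. pow (-3.0, 2.0) == 9.0.  */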
15030 default:
15031 break;
15033 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15034 type);
15037 /* Return true if T is known to be non-negative. If the return
15038 value is based on the assumption that signed overflow is undefined,
15039 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15040 *STRICT_OVERFLOW_P. */
15042 static bool
15043 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15045 enum tree_code code = TREE_CODE (t);
15046 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15047 return true;
15049 switch (code)
15051 case TARGET_EXPR:
15053 tree temp = TARGET_EXPR_SLOT (t);
15054 t = TARGET_EXPR_INITIAL (t);
15056 /* If the initializer is non-void, then it's a normal expression
15057 that will be assigned to the slot. */
15058 if (!VOID_TYPE_P (t))
15059 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15061 /* Otherwise, the initializer sets the slot in some way. One common
15062 way is an assignment statement at the end of the initializer. */
15063 while (1)
15065 if (TREE_CODE (t) == BIND_EXPR)
15066 t = expr_last (BIND_EXPR_BODY (t));
15067 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15068 || TREE_CODE (t) == TRY_CATCH_EXPR)
15069 t = expr_last (TREE_OPERAND (t, 0));
15070 else if (TREE_CODE (t) == STATEMENT_LIST)
15071 t = expr_last (t);
15072 else
15073 break;
15075 if (TREE_CODE (t) == MODIFY_EXPR
15076 && TREE_OPERAND (t, 0) == temp)
15077 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15078 strict_overflow_p);
15080 return false;
15083 case CALL_EXPR:
15085 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15086 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15088 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15089 get_callee_fndecl (t),
15090 arg0,
15091 arg1,
15092 strict_overflow_p);
15094 case COMPOUND_EXPR:
15095 case MODIFY_EXPR:
15096 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15097 strict_overflow_p);
15098 case BIND_EXPR:
15099 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15100 strict_overflow_p);
15101 case SAVE_EXPR:
15102 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15103 strict_overflow_p);
15105 default:
15106 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15107 TREE_TYPE (t));
15110 /* We don't know sign of `t', so be conservative and return false. */
15111 return false;
15114 /* Return true if T is known to be non-negative. If the return
15115 value is based on the assumption that signed overflow is undefined,
15116 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15117 *STRICT_OVERFLOW_P. */
15119 bool
15120 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15122 enum tree_code code;
15123 if (t == error_mark_node)
15124 return false;
15126 code = TREE_CODE (t);
15127 switch (TREE_CODE_CLASS (code))
15129 case tcc_binary:
15130 case tcc_comparison:
15131 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15132 TREE_TYPE (t),
15133 TREE_OPERAND (t, 0),
15134 TREE_OPERAND (t, 1),
15135 strict_overflow_p);
15137 case tcc_unary:
15138 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15139 TREE_TYPE (t),
15140 TREE_OPERAND (t, 0),
15141 strict_overflow_p);
15143 case tcc_constant:
15144 case tcc_declaration:
15145 case tcc_reference:
15146 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15148 default:
15149 break;
15152 switch (code)
15154 case TRUTH_AND_EXPR:
15155 case TRUTH_OR_EXPR:
15156 case TRUTH_XOR_EXPR:
15157 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15158 TREE_TYPE (t),
15159 TREE_OPERAND (t, 0),
15160 TREE_OPERAND (t, 1),
15161 strict_overflow_p);
15162 case TRUTH_NOT_EXPR:
15163 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15164 TREE_TYPE (t),
15165 TREE_OPERAND (t, 0),
15166 strict_overflow_p);
15168 case COND_EXPR:
15169 case CONSTRUCTOR:
15170 case OBJ_TYPE_REF:
15171 case ASSERT_EXPR:
15172 case ADDR_EXPR:
15173 case WITH_SIZE_EXPR:
15174 case SSA_NAME:
15175 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15177 default:
15178 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15182 /* Return true if `t' is known to be non-negative. Handle warnings
15183 about undefined signed overflow. */
15185 bool
15186 tree_expr_nonnegative_p (tree t)
15188 bool ret, strict_overflow_p;
15190 strict_overflow_p = false;
15191 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15192 if (strict_overflow_p)
15193 fold_overflow_warning (("assuming signed overflow does not occur when "
15194 "determining that expression is always "
15195 "non-negative"),
15196 WARN_STRICT_OVERFLOW_MISC);
15197 return ret;
15201 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15202 For floating point we further ensure that T is not denormal.
15203 Similar logic is present in nonzero_address in rtlanal.h.
15205 If the return value is based on the assumption that signed overflow
15206 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15207 change *STRICT_OVERFLOW_P. */
15209 bool
15210 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15211 bool *strict_overflow_p)
15213 switch (code)
15215 case ABS_EXPR:
15216 return tree_expr_nonzero_warnv_p (op0,
15217 strict_overflow_p);
15219 case NOP_EXPR:
15221 tree inner_type = TREE_TYPE (op0);
15222 tree outer_type = type;
15224 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15225 && tree_expr_nonzero_warnv_p (op0,
15226 strict_overflow_p));
15228 break;
15230 case NON_LVALUE_EXPR:
15231 return tree_expr_nonzero_warnv_p (op0,
15232 strict_overflow_p);
15234 default:
15235 break;
15238 return false;
15241 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15242 For floating point we further ensure that T is not denormal.
15243 Similar logic is present in nonzero_address in rtlanal.h.
15245 If the return value is based on the assumption that signed overflow
15246 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15247 change *STRICT_OVERFLOW_P. */
15249 bool
15250 tree_binary_nonzero_warnv_p (enum tree_code code,
15251 tree type,
15252 tree op0,
15253 tree op1, bool *strict_overflow_p)
15255 bool sub_strict_overflow_p;
15256 switch (code)
15258 case POINTER_PLUS_EXPR:
15259 case PLUS_EXPR:
15260 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15262 /* In the presence of negative values it is hard
15263 to say anything definite. */
15264 sub_strict_overflow_p = false;
15265 if (!tree_expr_nonnegative_warnv_p (op0,
15266 &sub_strict_overflow_p)
15267 || !tree_expr_nonnegative_warnv_p (op1,
15268 &sub_strict_overflow_p))
15269 return false;
15270 /* One of the operands must be positive and the other non-negative. */
15271 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15272 overflows, on a twos-complement machine the sum of two
15273 nonnegative numbers can never be zero. */
15274 return (tree_expr_nonzero_warnv_p (op0,
15275 strict_overflow_p)
15276 || tree_expr_nonzero_warnv_p (op1,
15277 strict_overflow_p));
15279 break;
15281 case MULT_EXPR:
15282 if (TYPE_OVERFLOW_UNDEFINED (type))
15284 if (tree_expr_nonzero_warnv_p (op0,
15285 strict_overflow_p)
15286 && tree_expr_nonzero_warnv_p (op1,
15287 strict_overflow_p))
15289 *strict_overflow_p = true;
15290 return true;
15293 break;
15295 case MIN_EXPR:
15296 sub_strict_overflow_p = false;
15297 if (tree_expr_nonzero_warnv_p (op0,
15298 &sub_strict_overflow_p)
15299 && tree_expr_nonzero_warnv_p (op1,
15300 &sub_strict_overflow_p))
15302 if (sub_strict_overflow_p)
15303 *strict_overflow_p = true;
15305 break;
15307 case MAX_EXPR:
15308 sub_strict_overflow_p = false;
15309 if (tree_expr_nonzero_warnv_p (op0,
15310 &sub_strict_overflow_p))
15312 if (sub_strict_overflow_p)
15313 *strict_overflow_p = true;
15315 /* When both operands are nonzero, then MAX must be too. */
15316 if (tree_expr_nonzero_warnv_p (op1,
15317 strict_overflow_p))
15318 return true;
15320 /* MAX where operand 0 is positive is positive. */
15321 return tree_expr_nonnegative_warnv_p (op0,
15322 strict_overflow_p);
15324 /* MAX where operand 1 is positive is positive. */
15325 else if (tree_expr_nonzero_warnv_p (op1,
15326 &sub_strict_overflow_p)
15327 && tree_expr_nonnegative_warnv_p (op1,
15328 &sub_strict_overflow_p))
15330 if (sub_strict_overflow_p)
15331 *strict_overflow_p = true;
15332 return true;
15334 break;
15336 case BIT_IOR_EXPR:
15337 return (tree_expr_nonzero_warnv_p (op1,
15338 strict_overflow_p)
15339 || tree_expr_nonzero_warnv_p (op0,
15340 strict_overflow_p));
15342 default:
15343 break;
15346 return false;
15349 /* Return true when T is an address and is known to be nonzero.
15350 For floating point we further ensure that T is not denormal.
15351 Similar logic is present in nonzero_address in rtlanal.h.
15353 If the return value is based on the assumption that signed overflow
15354 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15355 change *STRICT_OVERFLOW_P. */
15357 bool
15358 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15360 bool sub_strict_overflow_p;
15361 switch (TREE_CODE (t))
15363 case INTEGER_CST:
15364 return !integer_zerop (t);
15366 case ADDR_EXPR:
15368 tree base = TREE_OPERAND (t, 0);
15370 if (!DECL_P (base))
15371 base = get_base_address (base);
15373 if (!base)
15374 return false;
15376 /* For objects in symbol table check if we know they are non-zero.
15377 Don't do anything for variables and functions before symtab is built;
15378 it is quite possible that they will be declared weak later. */
15379 if (DECL_P (base) && decl_in_symtab_p (base))
15381 struct symtab_node *symbol;
15383 symbol = symtab_node::get_create (base);
15384 if (symbol)
15385 return symbol->nonzero_address ();
15386 else
15387 return false;
15390 /* Function local objects are never NULL. */
15391 if (DECL_P (base)
15392 && (DECL_CONTEXT (base)
15393 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15394 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15395 return true;
15397 /* Constants are never weak. */
15398 if (CONSTANT_CLASS_P (base))
15399 return true;
15401 return false;
15404 case COND_EXPR:
15405 sub_strict_overflow_p = false;
15406 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15407 &sub_strict_overflow_p)
15408 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15409 &sub_strict_overflow_p))
15411 if (sub_strict_overflow_p)
15412 *strict_overflow_p = true;
15413 return true;
15415 break;
15417 default:
15418 break;
15420 return false;
15423 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15424 attempt to fold the expression to a constant without modifying TYPE,
15425 OP0 or OP1.
15427 If the expression could be simplified to a constant, then return
15428 the constant. If the expression would not be simplified to a
15429 constant, then return NULL_TREE. */
15431 tree
15432 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15434 tree tem = fold_binary (code, type, op0, op1);
15435 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15438 /* Given the components of a unary expression CODE, TYPE and OP0,
15439 attempt to fold the expression to a constant without modifying
15440 TYPE or OP0.
15442 If the expression could be simplified to a constant, then return
15443 the constant. If the expression would not be simplified to a
15444 constant, then return NULL_TREE. */
15446 tree
15447 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15449 tree tem = fold_unary (code, type, op0);
15450 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15453 /* If EXP represents referencing an element in a constant string
15454 (either via pointer arithmetic or array indexing), return the
15455 tree representing the value accessed, otherwise return NULL. */
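/* E.g. (illustrative): "abc"[1] and *("abc" + 1) both fold to the
   character constant 'b' in the element type of the string.  */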
15457 tree
15458 fold_read_from_constant_string (tree exp)
15460 if ((TREE_CODE (exp) == INDIRECT_REF
15461 || TREE_CODE (exp) == ARRAY_REF)
15462 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15464 tree exp1 = TREE_OPERAND (exp, 0);
15465 tree index;
15466 tree string;
15467 location_t loc = EXPR_LOCATION (exp);
15469 if (TREE_CODE (exp) == INDIRECT_REF)
15470 string = string_constant (exp1, &index);
15471 else
15473 tree low_bound = array_ref_low_bound (exp);
15474 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15476 /* Optimize the special-case of a zero lower bound.
15478 We convert the low_bound to sizetype to avoid some problems
15479 with constant folding. (E.g. suppose the lower bound is 1,
15480 and its mode is QI. Without the conversion, (ARRAY
15481 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15482 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15483 if (! integer_zerop (low_bound))
15484 index = size_diffop_loc (loc, index,
15485 fold_convert_loc (loc, sizetype, low_bound));
15487 string = exp1;
15490 if (string
15491 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15492 && TREE_CODE (string) == STRING_CST
15493 && TREE_CODE (index) == INTEGER_CST
15494 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15495 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15496 == MODE_INT)
15497 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15498 return build_int_cst_type (TREE_TYPE (exp),
15499 (TREE_STRING_POINTER (string)
15500 [TREE_INT_CST_LOW (index)]));
15502 return NULL;
15505 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15506 an integer constant, real, or fixed-point constant.
15508 TYPE is the type of the result. */
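/* E.g. (illustrative): negating the INTEGER_CST 5 yields -5; for the
   most negative value of a signed type, the wi::neg overflow below is
   propagated into TREE_OVERFLOW of the result.  */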
15510 static tree
15511 fold_negate_const (tree arg0, tree type)
15513 tree t = NULL_TREE;
15515 switch (TREE_CODE (arg0))
15517 case INTEGER_CST:
15519 bool overflow;
15520 wide_int val = wi::neg (arg0, &overflow);
15521 t = force_fit_type (type, val, 1,
15522 (overflow | TREE_OVERFLOW (arg0))
15523 && !TYPE_UNSIGNED (type));
15524 break;
15527 case REAL_CST:
15528 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15529 break;
15531 case FIXED_CST:
15533 FIXED_VALUE_TYPE f;
15534 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15535 &(TREE_FIXED_CST (arg0)), NULL,
15536 TYPE_SATURATING (type));
15537 t = build_fixed (type, f);
15538 /* Propagate overflow flags. */
15539 if (overflow_p | TREE_OVERFLOW (arg0))
15540 TREE_OVERFLOW (t) = 1;
15541 break;
15544 default:
15545 gcc_unreachable ();
15548 return t;
15551 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15552 an integer constant or real constant.
15554 TYPE is the type of the result. */
15556 tree
15557 fold_abs_const (tree arg0, tree type)
15559 tree t = NULL_TREE;
15561 switch (TREE_CODE (arg0))
15563 case INTEGER_CST:
15565 /* If the value is unsigned or non-negative, then the absolute value
15566 is the same as the ordinary value. */
15567 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15568 t = arg0;
15570 /* If the value is negative, then the absolute value is
15571 its negation. */
15572 else
15574 bool overflow;
15575 wide_int val = wi::neg (arg0, &overflow);
15576 t = force_fit_type (type, val, -1,
15577 overflow | TREE_OVERFLOW (arg0));
15580 break;
15582 case REAL_CST:
15583 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15584 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15585 else
15586 t = arg0;
15587 break;
15589 default:
15590 gcc_unreachable ();
15593 return t;
15596 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15597 constant. TYPE is the type of the result. */
15599 static tree
15600 fold_not_const (const_tree arg0, tree type)
15602 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15604 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15607 /* Given CODE, a relational operator, the target type, TYPE and two
15608 constant operands OP0 and OP1, return the result of the
15609 relational operation. If the result is not a compile time
15610 constant, then return NULL_TREE. */
15612 static tree
15613 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15615 int result, invert;
15617 /* From here on, the only cases we handle are when the result is
15618 known to be a constant. */
15620 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15622 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15623 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15625 /* Handle the cases where either operand is a NaN. */
15626 if (real_isnan (c0) || real_isnan (c1))
15628 switch (code)
15630 case EQ_EXPR:
15631 case ORDERED_EXPR:
15632 result = 0;
15633 break;
15635 case NE_EXPR:
15636 case UNORDERED_EXPR:
15637 case UNLT_EXPR:
15638 case UNLE_EXPR:
15639 case UNGT_EXPR:
15640 case UNGE_EXPR:
15641 case UNEQ_EXPR:
15642 result = 1;
15643 break;
15645 case LT_EXPR:
15646 case LE_EXPR:
15647 case GT_EXPR:
15648 case GE_EXPR:
15649 case LTGT_EXPR:
15650 if (flag_trapping_math)
15651 return NULL_TREE;
15652 result = 0;
15653 break;
15655 default:
15656 gcc_unreachable ();
15659 return constant_boolean_node (result, type);
15662 return constant_boolean_node (real_compare (code, c0, c1), type);
15665 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15667 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15668 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15669 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15672 /* Handle equality/inequality of complex constants. */
15673 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15675 tree rcond = fold_relational_const (code, type,
15676 TREE_REALPART (op0),
15677 TREE_REALPART (op1));
15678 tree icond = fold_relational_const (code, type,
15679 TREE_IMAGPART (op0),
15680 TREE_IMAGPART (op1));
15681 if (code == EQ_EXPR)
15682 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15683 else if (code == NE_EXPR)
15684 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15685 else
15686 return NULL_TREE;
15689 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15691 unsigned count = VECTOR_CST_NELTS (op0);
15692 tree *elts = XALLOCAVEC (tree, count);
15693 gcc_assert (VECTOR_CST_NELTS (op1) == count
15694 && TYPE_VECTOR_SUBPARTS (type) == count);
15696 for (unsigned i = 0; i < count; i++)
15698 tree elem_type = TREE_TYPE (type);
15699 tree elem0 = VECTOR_CST_ELT (op0, i);
15700 tree elem1 = VECTOR_CST_ELT (op1, i);
15702 tree tem = fold_relational_const (code, elem_type,
15703 elem0, elem1);
15705 if (tem == NULL_TREE)
15706 return NULL_TREE;
15708 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15711 return build_vector (type, elts);
15714 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15716 To compute GT, swap the arguments and do LT.
15717 To compute GE, do LT and invert the result.
15718 To compute LE, swap the arguments, do LT and invert the result.
15719 To compute NE, do EQ and invert the result.
15721 Therefore, the code below must handle only EQ and LT. */
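/* Worked example (illustrative): to fold 3 >= 5, GE_EXPR is inverted
   to LT_EXPR, tree_int_cst_lt (3, 5) yields 1, and the final
   "result ^= 1" below produces 0, i.e. boolean false.  */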
15723 if (code == LE_EXPR || code == GT_EXPR)
15725 tree tem = op0;
15726 op0 = op1;
15727 op1 = tem;
15728 code = swap_tree_comparison (code);
15731 /* Note that it is safe to invert for real values here because we
15732 have already handled the one case that it matters. */
15734 invert = 0;
15735 if (code == NE_EXPR || code == GE_EXPR)
15737 invert = 1;
15738 code = invert_tree_comparison (code, false);
15741 /* Compute a result for LT or EQ if the arguments permit;
15742 otherwise return NULL_TREE. */
15743 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15745 if (code == EQ_EXPR)
15746 result = tree_int_cst_equal (op0, op1);
15747 else
15748 result = tree_int_cst_lt (op0, op1);
15750 else
15751 return NULL_TREE;
15753 if (invert)
15754 result ^= 1;
15755 return constant_boolean_node (result, type);
15758 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15759 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15760 itself. */
15762 tree
15763 fold_build_cleanup_point_expr (tree type, tree expr)
15765 /* If the expression does not have side effects then we don't have to wrap
15766 it with a cleanup point expression. */
15767 if (!TREE_SIDE_EFFECTS (expr))
15768 return expr;
15770 /* If the expression is a return, check whether the expression inside the
15771 return, or the right-hand side of the modify expression inside the
15772 return, has side effects. If either has none, we don't need to wrap
15773 the expression in a cleanup point expression. Note we don't check the
15774 left-hand side of the modify because it should always be the return decl. */
15775 if (TREE_CODE (expr) == RETURN_EXPR)
15777 tree op = TREE_OPERAND (expr, 0);
15778 if (!op || !TREE_SIDE_EFFECTS (op))
15779 return expr;
15780 op = TREE_OPERAND (op, 1);
15781 if (!TREE_SIDE_EFFECTS (op))
15782 return expr;
15785 return build1 (CLEANUP_POINT_EXPR, type, expr);
15788 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15789 of an indirection through OP0, or NULL_TREE if no simplification is
15790 possible. */
15792 tree
15793 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15795 tree sub = op0;
15796 tree subtype;
15798 STRIP_NOPS (sub);
15799 subtype = TREE_TYPE (sub);
15800 if (!POINTER_TYPE_P (subtype))
15801 return NULL_TREE;
15803 if (TREE_CODE (sub) == ADDR_EXPR)
15805 tree op = TREE_OPERAND (sub, 0);
15806 tree optype = TREE_TYPE (op);
15807 /* *&CONST_DECL -> to the value of the const decl. */
15808 if (TREE_CODE (op) == CONST_DECL)
15809 return DECL_INITIAL (op);
15810 /* *&p => p; make sure to handle *&"str"[cst] here. */
15811 if (type == optype)
15813 tree fop = fold_read_from_constant_string (op);
15814 if (fop)
15815 return fop;
15816 else
15817 return op;
15819 /* *(foo *)&fooarray => fooarray[0] */
15820 else if (TREE_CODE (optype) == ARRAY_TYPE
15821 && type == TREE_TYPE (optype)
15822 && (!in_gimple_form
15823 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15825 tree type_domain = TYPE_DOMAIN (optype);
15826 tree min_val = size_zero_node;
15827 if (type_domain && TYPE_MIN_VALUE (type_domain))
15828 min_val = TYPE_MIN_VALUE (type_domain);
15829 if (in_gimple_form
15830 && TREE_CODE (min_val) != INTEGER_CST)
15831 return NULL_TREE;
15832 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15833 NULL_TREE, NULL_TREE);
15835 /* *(foo *)&complexfoo => __real__ complexfoo */
15836 else if (TREE_CODE (optype) == COMPLEX_TYPE
15837 && type == TREE_TYPE (optype))
15838 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15839 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15840 else if (TREE_CODE (optype) == VECTOR_TYPE
15841 && type == TREE_TYPE (optype))
15843 tree part_width = TYPE_SIZE (type);
15844 tree index = bitsize_int (0);
15845 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15849 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15850 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15852 tree op00 = TREE_OPERAND (sub, 0);
15853 tree op01 = TREE_OPERAND (sub, 1);
15855 STRIP_NOPS (op00);
15856 if (TREE_CODE (op00) == ADDR_EXPR)
15858 tree op00type;
15859 op00 = TREE_OPERAND (op00, 0);
15860 op00type = TREE_TYPE (op00);
15862 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15863 if (TREE_CODE (op00type) == VECTOR_TYPE
15864 && type == TREE_TYPE (op00type))
15866 HOST_WIDE_INT offset = tree_to_shwi (op01);
15867 tree part_width = TYPE_SIZE (type);
15868 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15869 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15870 tree index = bitsize_int (indexi);
15872 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15873 return fold_build3_loc (loc,
15874 BIT_FIELD_REF, type, op00,
15875 part_width, index);
15878 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15879 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15880 && type == TREE_TYPE (op00type))
15882 tree size = TYPE_SIZE_UNIT (type);
15883 if (tree_int_cst_equal (size, op01))
15884 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15886 /* ((foo *)&fooarray)[1] => fooarray[1] */
15887 else if (TREE_CODE (op00type) == ARRAY_TYPE
15888 && type == TREE_TYPE (op00type))
15890 tree type_domain = TYPE_DOMAIN (op00type);
15891 tree min_val = size_zero_node;
15892 if (type_domain && TYPE_MIN_VALUE (type_domain))
15893 min_val = TYPE_MIN_VALUE (type_domain);
15894 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15895 TYPE_SIZE_UNIT (type));
15896 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15897 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15898 NULL_TREE, NULL_TREE);
15903 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15904 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15905 && type == TREE_TYPE (TREE_TYPE (subtype))
15906 && (!in_gimple_form
15907 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15909 tree type_domain;
15910 tree min_val = size_zero_node;
15911 sub = build_fold_indirect_ref_loc (loc, sub);
15912 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15913 if (type_domain && TYPE_MIN_VALUE (type_domain))
15914 min_val = TYPE_MIN_VALUE (type_domain);
15915 if (in_gimple_form
15916 && TREE_CODE (min_val) != INTEGER_CST)
15917 return NULL_TREE;
15918 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15919 NULL_TREE);
15922 return NULL_TREE;
15925 /* Builds an expression for an indirection through T, simplifying some
15926 cases. */
15928 tree
15929 build_fold_indirect_ref_loc (location_t loc, tree t)
15931 tree type = TREE_TYPE (TREE_TYPE (t));
15932 tree sub = fold_indirect_ref_1 (loc, type, t);
15934 if (sub)
15935 return sub;
15937 return build1_loc (loc, INDIRECT_REF, type, t);
15940 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15942 tree
15943 fold_indirect_ref_loc (location_t loc, tree t)
15945 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15947 if (sub)
15948 return sub;
15949 else
15950 return t;
15953 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15954 whose result is ignored. The type of the returned tree need not be
15955 the same as the original expression. */
15957 tree
15958 fold_ignored_result (tree t)
15960 if (!TREE_SIDE_EFFECTS (t))
15961 return integer_zero_node;
15963 for (;;)
15964 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15966 case tcc_unary:
15967 t = TREE_OPERAND (t, 0);
15968 break;
15970 case tcc_binary:
15971 case tcc_comparison:
15972 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15973 t = TREE_OPERAND (t, 0);
15974 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15975 t = TREE_OPERAND (t, 1);
15976 else
15977 return t;
15978 break;
15980 case tcc_expression:
15981 switch (TREE_CODE (t))
15983 case COMPOUND_EXPR:
15984 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15985 return t;
15986 t = TREE_OPERAND (t, 0);
15987 break;
15989 case COND_EXPR:
15990 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15991 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15992 return t;
15993 t = TREE_OPERAND (t, 0);
15994 break;
15996 default:
15997 return t;
15999 break;
16001 default:
16002 return t;

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val &= ~(divisor - 1);
	  val += divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
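
/* Worked example (illustrative only, compiled out): for a power-of-two
   divisor the rounding above is pure bit manipulation.  With value 13
   and divisor 8, the constant path computes (13 & ~7) + 8 == 8 + 8
   == 16 (it runs only when 13 is not already a multiple), and the
   non-constant path emits the equivalent (13 + 7) & -8 == 16.  The
   hypothetical helper below mirrors the PLUS_EXPR/BIT_AND_EXPR
   sequence in plain integer arithmetic.  */
#if 0
static unsigned HOST_WIDE_INT
example_round_up_pow2 (unsigned HOST_WIDE_INT value,
		       unsigned HOST_WIDE_INT divisor)
{
  /* DIVISOR must be a power of two, as in the branch above.  */
  return (value + divisor - 1) & -divisor;
}
#endif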

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
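
/* Worked example (illustrative only, compiled out): rounding down to a
   power-of-two divisor is a single mask, e.g. 13 & -8 == 8.  The
   hypothetical helper below mirrors the BIT_AND_EXPR branch above.  */
#if 0
static unsigned HOST_WIDE_INT
example_round_down_pow2 (unsigned HOST_WIDE_INT value,
			 unsigned HOST_WIDE_INT divisor)
{
  /* DIVISOR must be a power of two.  */
  return value & -divisor;
}
#endif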

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
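
/* Example (illustrative only, compiled out): for EXP == &a.b[2] the
   core is &a, PBITPOS receives the constant bit offset of b[2] within
   a, and POFFSET any residual variable part (NULL_TREE here).  A
   pointer that is not an ADDR_EXPR comes back as its own core with
   zero offsets.  */
#if 0
static tree
example_split (tree exp, HOST_WIDE_INT *pbitpos, tree *poffset)
{
  return split_address_to_core_and_offset (exp, pbitpos, poffset);
}
#endif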

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
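
/* Worked example (illustrative only, compiled out): for int a[10],
   the addresses &a[3] and &a[1] share the core &a and differ only in
   constant bit positions, so *DIFF becomes (3 - 1) * sizeof (int)
   == 8 bytes and true is returned.  Addresses with different cores,
   or where only one side has a variable offset, yield false.  */
#if 0
static bool
example_ptr_diff (tree addr1, tree addr2)
{
  HOST_WIDE_INT diff;
  return ptr_difference_const (addr1, addr2, &diff) && diff == 8;
}
#endif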

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument. */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
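
/* Example (illustrative only, compiled out): when only the magnitude
   of the result matters, -x * y strips to x * y (unless
   sign-dependent rounding is honored), copysign (x, y) strips to x
   while preserving any side effects of y via omit_one_operand_loc,
   and an odd function such as sin (-x) strips to sin (x).  */
#if 0
static tree
example_strip_sign (tree exp)
{
  tree stripped = fold_strip_sign_ops (exp);
  /* NULL_TREE means no sign operation could be removed.  */
  return stripped ? stripped : exp;
}
#endif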

/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
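
/* Usage sketch (illustrative only, compiled out): POINTER_PLUS_EXPR
   requires a sizetype offset, which convert_to_ptrofftype_loc
   guarantees; offsetting PTR by a compile-time constant therefore
   reduces to the _hwi variant, as in the hypothetical helper below.  */
#if 0
static tree
example_advance_four_bytes (location_t loc, tree ptr)
{
  /* Builds and folds PTR p+ 4 with the offset already in sizetype.  */
  return fold_build_pointer_plus_hwi_loc (loc, ptr, 4);
}
#endif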