gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
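
/* As an illustration of the encoding: bit 0 is LT, bit 1 is EQ, bit 2
   is GT and bit 3 is UNORD, so the bitwise OR of two codes denotes the
   disjunction of the comparisons.  E.g. COMPCODE_LT | COMPCODE_EQ
   == COMPCODE_LE, and COMPCODE_UNORD | COMPCODE_EQ == COMPCODE_UNEQ.  */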
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
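
/* For example, for INTEGER_CST arguments 12 and 4 the quotient 3 is
   returned as an INTEGER_CST of ARG1's type, while for 13 and 4 the
   remainder is nonzero and the result is NULL_TREE.  */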
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
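
/* A sketch of typical usage (the folded expression and statement names
   here are generic placeholders):

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);

   i.e. the deferred -Wstrict-overflow diagnostic is only issued once
   the caller knows the folded result will actually be used.  */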
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
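
/* For example, sin is odd since sin(-x) == -sin(x), so -sin(x) can be
   folded to sin(-x); cos is even and is deliberately absent from the
   list above.  The rint family is only treated as odd when
   -frounding-math is off, because rounding in a possibly asymmetric
   dynamic rounding mode does not commute with negation.  */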
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
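
/* Examples: for signed ints a and b, negate_expr_p returns true for
   "a - b" (negatable as "b - a") and for "a >> 31" on 32-bit int
   (negatable as a logical shift), but false for an unsigned
   multiplication, which is rejected by the MULT_EXPR case above.  */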
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
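
/* Examples: -(~a) folds to a + 1; -(a - b) folds to b - a when signed
   zeros and sign-dependent rounding are not honored; and -(a >> 31)
   for 32-bit int a folds to (int) ((unsigned) a >> 31), since the
   arithmetic shift yields 0 or -1 and its negation, 0 or 1, is exactly
   the logical shift of the sign bit.  */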
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
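
/* A worked example: splitting IN = "x - 5" with CODE == PLUS_EXPR
   stores the INTEGER_CST 5 in *MINUS_LITP (it was subtracted), leaves
   *LITP and *CONP null, and returns "x" as the variable part.
   Splitting "~x" with PLUS_EXPR stores 1 in *MINUS_LITP and returns
   "-x", undoing the canonicalization of "-x - 1" to "~x".  */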
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
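
/* For example, with CODE == PLUS_EXPR, associating "-a" with "b + c"
   builds "(b + c) - a" rather than "-a + (b + c)", and associating
   "a - b" with a literal zero simply returns "a - b" converted to
   TYPE.  */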
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
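
/* For example, int_const_binop (PLUS_EXPR, ...) on the int constants 7
   and 5 yields the INTEGER_CST 12.  On INT_MAX and 1 the signed
   addition wraps in the wide-int arithmetic and the returned constant
   has TREE_OVERFLOW set, which callers check to detect undefined
   overflow.  */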
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
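
/* For example, folding (3 + 4i) * (1 + 2i) on integer complex
   constants uses the open-coded expansion above: real = 3*1 - 4*2
   = -5 and imag = 3*2 + 4*1 = 10, giving -5 + 10i.  Floating-point
   complex multiplication and division are instead delegated to MPC
   via do_mpc_arg2 so the results are correctly rounded.  */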
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE, where ARG0 is constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
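
/* For example, const_unop (BIT_NOT_EXPR, type, c) on an unsigned int
   zero yields 0xffffffff, and REDUC_PLUS_EXPR on the constant vector
   { 1, 2, 3, 4 } folds element by element through const_binop into the
   scalar 10.  */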
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
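
/* For example, size_binop_loc (loc, PLUS_EXPR, size_int (4),
   size_int (8)) folds immediately to the sizetype constant 12.
   Passing overflowable == -1 above means even unsigned sizetype
   wraparound sets TREE_OVERFLOW, so overflowing size computations are
   flagged rather than silently wrapped.  */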
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
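
/* A worked example: for sizetype constants ARG0 = 2 and ARG1 = 10 the
   result is 0 - (ssizetype) (10 - 2) = -8; subtracting in the unsigned
   type first and negating afterwards avoids an unsigned underflow
   being reinterpreted as a huge positive value.  */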
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
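
/* A worked example: folding (int) 1.0e30 for 32-bit int saturates to
   INT_MAX (2147483647) and sets TREE_OVERFLOW on the result; a NaN
   operand folds to 0, also with TREE_OVERFLOW set, matching the
   Java-style rules described above.  */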
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     If any fractional bits are nonzero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
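
/* A worked example: converting the fixed-point value -1.5 to int
   first arithmetic-shifts the underlying bits right by FBIT, giving
   -2; because the value is negative and had nonzero fractional bits,
   1 is added back, producing the round-toward-zero result -1.  */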
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
2012 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2013 to a floating point type. */
2015 static tree
2016 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2018 REAL_VALUE_TYPE value;
2019 tree t;
2021 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2022 t = build_real (type, value);
2024 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2025 return t;
2028 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2029 to another fixed-point type. */
2031 static tree
2032 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2034 FIXED_VALUE_TYPE value;
2035 tree t;
2036 bool overflow_p;
2038 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2039 TYPE_SATURATING (type));
2040 t = build_fixed (type, value);
2042 /* Propagate overflow flags. */
2043 if (overflow_p | TREE_OVERFLOW (arg1))
2044 TREE_OVERFLOW (t) = 1;
2045 return t;
2048 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2049 to a fixed-point type. */
2051 static tree
2052 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2054 FIXED_VALUE_TYPE value;
2055 tree t;
2056 bool overflow_p;
2057 double_int di;
2059 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2061 di.low = TREE_INT_CST_ELT (arg1, 0);
2062 if (TREE_INT_CST_NUNITS (arg1) == 1)
2063 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2064 else
2065 di.high = TREE_INT_CST_ELT (arg1, 1);
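  /* E.g. (illustrative) a one-element INTEGER_CST holding -1 yields
     di.low == -1 and di.high == -1 by sign extension, while 1 yields
     di.high == 0.  */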
2067 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2068 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2069 TYPE_SATURATING (type));
2070 t = build_fixed (type, value);
2072 /* Propagate overflow flags. */
2073 if (overflow_p | TREE_OVERFLOW (arg1))
2074 TREE_OVERFLOW (t) = 1;
2075 return t;
2078 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2079 to a fixed-point type. */
2081 static tree
2082 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2084 FIXED_VALUE_TYPE value;
2085 tree t;
2086 bool overflow_p;
2088 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2089 &TREE_REAL_CST (arg1),
2090 TYPE_SATURATING (type));
2091 t = build_fixed (type, value);
2093 /* Propagate overflow flags. */
2094 if (overflow_p | TREE_OVERFLOW (arg1))
2095 TREE_OVERFLOW (t) = 1;
2096 return t;
2099 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2100 type TYPE. If no simplification can be done return NULL_TREE. */
2102 static tree
2103 fold_convert_const (enum tree_code code, tree type, tree arg1)
2105 if (TREE_TYPE (arg1) == type)
2106 return arg1;
2108 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2109 || TREE_CODE (type) == OFFSET_TYPE)
2111 if (TREE_CODE (arg1) == INTEGER_CST)
2112 return fold_convert_const_int_from_int (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_int_from_real (code, type, arg1);
2115 else if (TREE_CODE (arg1) == FIXED_CST)
2116 return fold_convert_const_int_from_fixed (type, arg1);
2118 else if (TREE_CODE (type) == REAL_TYPE)
2120 if (TREE_CODE (arg1) == INTEGER_CST)
2121 return build_real_from_int_cst (type, arg1);
2122 else if (TREE_CODE (arg1) == REAL_CST)
2123 return fold_convert_const_real_from_real (type, arg1);
2124 else if (TREE_CODE (arg1) == FIXED_CST)
2125 return fold_convert_const_real_from_fixed (type, arg1);
2127 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2129 if (TREE_CODE (arg1) == FIXED_CST)
2130 return fold_convert_const_fixed_from_fixed (type, arg1);
2131 else if (TREE_CODE (arg1) == INTEGER_CST)
2132 return fold_convert_const_fixed_from_int (type, arg1);
2133 else if (TREE_CODE (arg1) == REAL_CST)
2134 return fold_convert_const_fixed_from_real (type, arg1);
2136 return NULL_TREE;
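/* Illustrative calls (hypothetical, not from the original source):

     fold_convert_const (FLOAT_EXPR, double_type_node, <INTEGER_CST 5>)
       -> <REAL_CST 5.0>
     fold_convert_const (NOP_EXPR, integer_type_node, <VAR_DECL x>)
       -> NULL_TREE, since only constant operands are simplified.  */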
2139 /* Construct a vector of zero elements of vector type TYPE. */
2141 static tree
2142 build_zero_vector (tree type)
2144 tree t;
2146 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2147 return build_vector_from_val (type, t);
2150 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2152 bool
2153 fold_convertible_p (const_tree type, const_tree arg)
2155 tree orig = TREE_TYPE (arg);
2157 if (type == orig)
2158 return true;
2160 if (TREE_CODE (arg) == ERROR_MARK
2161 || TREE_CODE (type) == ERROR_MARK
2162 || TREE_CODE (orig) == ERROR_MARK)
2163 return false;
2165 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2166 return true;
2168 switch (TREE_CODE (type))
2170 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2171 case POINTER_TYPE: case REFERENCE_TYPE:
2172 case OFFSET_TYPE:
2173 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2174 || TREE_CODE (orig) == OFFSET_TYPE)
2175 return true;
2176 return (TREE_CODE (orig) == VECTOR_TYPE
2177 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2179 case REAL_TYPE:
2180 case FIXED_POINT_TYPE:
2181 case COMPLEX_TYPE:
2182 case VECTOR_TYPE:
2183 case VOID_TYPE:
2184 return TREE_CODE (type) == TREE_CODE (orig);
2186 default:
2187 return false;
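/* E.g. (illustrative) fold_convertible_p is true for int -> long and
   int -> int *, which the scalar cases above can express as a single
   NOP_EXPR, but false for int -> struct conversions.  */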
2191 /* Convert expression ARG to type TYPE. Used by the middle-end for
2192 simple conversions in preference to calling the front-end's convert. */
2194 tree
2195 fold_convert_loc (location_t loc, tree type, tree arg)
2197 tree orig = TREE_TYPE (arg);
2198 tree tem;
2200 if (type == orig)
2201 return arg;
2203 if (TREE_CODE (arg) == ERROR_MARK
2204 || TREE_CODE (type) == ERROR_MARK
2205 || TREE_CODE (orig) == ERROR_MARK)
2206 return error_mark_node;
2208 switch (TREE_CODE (type))
2210 case POINTER_TYPE:
2211 case REFERENCE_TYPE:
2212 /* Handle conversions between pointers to different address spaces. */
2213 if (POINTER_TYPE_P (orig)
2214 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2215 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2216 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2217 /* fall through */
2219 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2220 case OFFSET_TYPE:
2221 if (TREE_CODE (arg) == INTEGER_CST)
2223 tem = fold_convert_const (NOP_EXPR, type, arg);
2224 if (tem != NULL_TREE)
2225 return tem;
2227 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2228 || TREE_CODE (orig) == OFFSET_TYPE)
2229 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2230 if (TREE_CODE (orig) == COMPLEX_TYPE)
2231 return fold_convert_loc (loc, type,
2232 fold_build1_loc (loc, REALPART_EXPR,
2233 TREE_TYPE (orig), arg));
2234 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2235 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2236 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2238 case REAL_TYPE:
2239 if (TREE_CODE (arg) == INTEGER_CST)
2241 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2242 if (tem != NULL_TREE)
2243 return tem;
2245 else if (TREE_CODE (arg) == REAL_CST)
2247 tem = fold_convert_const (NOP_EXPR, type, arg);
2248 if (tem != NULL_TREE)
2249 return tem;
2251 else if (TREE_CODE (arg) == FIXED_CST)
2253 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2258 switch (TREE_CODE (orig))
2260 case INTEGER_TYPE:
2261 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2262 case POINTER_TYPE: case REFERENCE_TYPE:
2263 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2265 case REAL_TYPE:
2266 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2268 case FIXED_POINT_TYPE:
2269 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2271 case COMPLEX_TYPE:
2272 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2273 return fold_convert_loc (loc, type, tem);
2275 default:
2276 gcc_unreachable ();
2279 case FIXED_POINT_TYPE:
2280 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2281 || TREE_CODE (arg) == REAL_CST)
2283 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2284 if (tem != NULL_TREE)
2285 goto fold_convert_exit;
2288 switch (TREE_CODE (orig))
2290 case FIXED_POINT_TYPE:
2291 case INTEGER_TYPE:
2292 case ENUMERAL_TYPE:
2293 case BOOLEAN_TYPE:
2294 case REAL_TYPE:
2295 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2297 case COMPLEX_TYPE:
2298 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2299 return fold_convert_loc (loc, type, tem);
2301 default:
2302 gcc_unreachable ();
2305 case COMPLEX_TYPE:
2306 switch (TREE_CODE (orig))
2308 case INTEGER_TYPE:
2309 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2310 case POINTER_TYPE: case REFERENCE_TYPE:
2311 case REAL_TYPE:
2312 case FIXED_POINT_TYPE:
2313 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2314 fold_convert_loc (loc, TREE_TYPE (type), arg),
2315 fold_convert_loc (loc, TREE_TYPE (type),
2316 integer_zero_node));
2317 case COMPLEX_TYPE:
2319 tree rpart, ipart;
2321 if (TREE_CODE (arg) == COMPLEX_EXPR)
2323 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2324 TREE_OPERAND (arg, 0));
2325 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2326 TREE_OPERAND (arg, 1));
2327 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2330 arg = save_expr (arg);
2331 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2332 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2333 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2334 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2335 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2338 default:
2339 gcc_unreachable ();
2342 case VECTOR_TYPE:
2343 if (integer_zerop (arg))
2344 return build_zero_vector (type);
2345 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2346 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2347 || TREE_CODE (orig) == VECTOR_TYPE);
2348 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2350 case VOID_TYPE:
2351 tem = fold_ignored_result (arg);
2352 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2354 default:
2355 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2356 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2357 gcc_unreachable ();
2359 fold_convert_exit:
2360 protected_set_expr_location_unshare (tem, loc);
2361 return tem;
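/* Example (illustrative): converting a complex value to a real type
   keeps only the real part, so for _Complex double c,

     fold_convert_loc (loc, double_type_node, c)

   folds to REALPART_EXPR <c> converted to double, per the
   COMPLEX_TYPE handling in the REAL_TYPE case above.  */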
2364 /* Return false if expr can be assumed not to be an lvalue, true
2365 otherwise. */
2367 static bool
2368 maybe_lvalue_p (const_tree x)
2370 /* We only need to wrap lvalue tree codes. */
2371 switch (TREE_CODE (x))
2373 case VAR_DECL:
2374 case PARM_DECL:
2375 case RESULT_DECL:
2376 case LABEL_DECL:
2377 case FUNCTION_DECL:
2378 case SSA_NAME:
2380 case COMPONENT_REF:
2381 case MEM_REF:
2382 case INDIRECT_REF:
2383 case ARRAY_REF:
2384 case ARRAY_RANGE_REF:
2385 case BIT_FIELD_REF:
2386 case OBJ_TYPE_REF:
2388 case REALPART_EXPR:
2389 case IMAGPART_EXPR:
2390 case PREINCREMENT_EXPR:
2391 case PREDECREMENT_EXPR:
2392 case SAVE_EXPR:
2393 case TRY_CATCH_EXPR:
2394 case WITH_CLEANUP_EXPR:
2395 case COMPOUND_EXPR:
2396 case MODIFY_EXPR:
2397 case TARGET_EXPR:
2398 case COND_EXPR:
2399 case BIND_EXPR:
2400 break;
2402 default:
2403 /* Assume the worst for front-end tree codes. */
2404 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2405 break;
2406 return false;
2409 return true;
2412 /* Return an expr equal to X but certainly not valid as an lvalue. */
2414 tree
2415 non_lvalue_loc (location_t loc, tree x)
2417 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2418 us. */
2419 if (in_gimple_form)
2420 return x;
2422 if (! maybe_lvalue_p (x))
2423 return x;
2424 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2427 /* When pedantic, return an expr equal to X but certainly not valid as a
2428 pedantic lvalue. Otherwise, return X. */
2430 static tree
2431 pedantic_non_lvalue_loc (location_t loc, tree x)
2433 return protected_set_expr_location_unshare (x, loc);
2436 /* Given a tree comparison code, return the code that is the logical inverse.
2437 It is generally not safe to do this for floating-point comparisons, except
2438 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2439 ERROR_MARK in this case. */
2441 enum tree_code
2442 invert_tree_comparison (enum tree_code code, bool honor_nans)
2444 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2445 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2446 return ERROR_MARK;
2448 switch (code)
2450 case EQ_EXPR:
2451 return NE_EXPR;
2452 case NE_EXPR:
2453 return EQ_EXPR;
2454 case GT_EXPR:
2455 return honor_nans ? UNLE_EXPR : LE_EXPR;
2456 case GE_EXPR:
2457 return honor_nans ? UNLT_EXPR : LT_EXPR;
2458 case LT_EXPR:
2459 return honor_nans ? UNGE_EXPR : GE_EXPR;
2460 case LE_EXPR:
2461 return honor_nans ? UNGT_EXPR : GT_EXPR;
2462 case LTGT_EXPR:
2463 return UNEQ_EXPR;
2464 case UNEQ_EXPR:
2465 return LTGT_EXPR;
2466 case UNGT_EXPR:
2467 return LE_EXPR;
2468 case UNGE_EXPR:
2469 return LT_EXPR;
2470 case UNLT_EXPR:
2471 return GE_EXPR;
2472 case UNLE_EXPR:
2473 return GT_EXPR;
2474 case ORDERED_EXPR:
2475 return UNORDERED_EXPR;
2476 case UNORDERED_EXPR:
2477 return ORDERED_EXPR;
2478 default:
2479 gcc_unreachable ();
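/* Example: with NaNs honored the inverse of LT_EXPR is UNGE_EXPR, not
   GE_EXPR, because !(a < b) must also hold when either operand is a
   NaN; and under -ftrapping-math the inversion of an ordering
   comparison is refused (ERROR_MARK above), since the quiet UN*
   comparisons would lose the trap on unordered operands.  */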
2483 /* Similar, but return the comparison that results if the operands are
2484 swapped. This is safe for floating-point. */
2486 enum tree_code
2487 swap_tree_comparison (enum tree_code code)
2489 switch (code)
2491 case EQ_EXPR:
2492 case NE_EXPR:
2493 case ORDERED_EXPR:
2494 case UNORDERED_EXPR:
2495 case LTGT_EXPR:
2496 case UNEQ_EXPR:
2497 return code;
2498 case GT_EXPR:
2499 return LT_EXPR;
2500 case GE_EXPR:
2501 return LE_EXPR;
2502 case LT_EXPR:
2503 return GT_EXPR;
2504 case LE_EXPR:
2505 return GE_EXPR;
2506 case UNGT_EXPR:
2507 return UNLT_EXPR;
2508 case UNGE_EXPR:
2509 return UNLE_EXPR;
2510 case UNLT_EXPR:
2511 return UNGT_EXPR;
2512 case UNLE_EXPR:
2513 return UNGE_EXPR;
2514 default:
2515 gcc_unreachable ();
2520 /* Convert a comparison tree code from an enum tree_code representation
2521 into a compcode bit-based encoding. This function is the inverse of
2522 compcode_to_comparison. */
2524 static enum comparison_code
2525 comparison_to_compcode (enum tree_code code)
2527 switch (code)
2529 case LT_EXPR:
2530 return COMPCODE_LT;
2531 case EQ_EXPR:
2532 return COMPCODE_EQ;
2533 case LE_EXPR:
2534 return COMPCODE_LE;
2535 case GT_EXPR:
2536 return COMPCODE_GT;
2537 case NE_EXPR:
2538 return COMPCODE_NE;
2539 case GE_EXPR:
2540 return COMPCODE_GE;
2541 case ORDERED_EXPR:
2542 return COMPCODE_ORD;
2543 case UNORDERED_EXPR:
2544 return COMPCODE_UNORD;
2545 case UNLT_EXPR:
2546 return COMPCODE_UNLT;
2547 case UNEQ_EXPR:
2548 return COMPCODE_UNEQ;
2549 case UNLE_EXPR:
2550 return COMPCODE_UNLE;
2551 case UNGT_EXPR:
2552 return COMPCODE_UNGT;
2553 case LTGT_EXPR:
2554 return COMPCODE_LTGT;
2555 case UNGE_EXPR:
2556 return COMPCODE_UNGE;
2557 default:
2558 gcc_unreachable ();
2562 /* Convert a compcode bit-based encoding of a comparison operator back
2563 to GCC's enum tree_code representation. This function is the
2564 inverse of comparison_to_compcode. */
2566 static enum tree_code
2567 compcode_to_comparison (enum comparison_code code)
2569 switch (code)
2571 case COMPCODE_LT:
2572 return LT_EXPR;
2573 case COMPCODE_EQ:
2574 return EQ_EXPR;
2575 case COMPCODE_LE:
2576 return LE_EXPR;
2577 case COMPCODE_GT:
2578 return GT_EXPR;
2579 case COMPCODE_NE:
2580 return NE_EXPR;
2581 case COMPCODE_GE:
2582 return GE_EXPR;
2583 case COMPCODE_ORD:
2584 return ORDERED_EXPR;
2585 case COMPCODE_UNORD:
2586 return UNORDERED_EXPR;
2587 case COMPCODE_UNLT:
2588 return UNLT_EXPR;
2589 case COMPCODE_UNEQ:
2590 return UNEQ_EXPR;
2591 case COMPCODE_UNLE:
2592 return UNLE_EXPR;
2593 case COMPCODE_UNGT:
2594 return UNGT_EXPR;
2595 case COMPCODE_LTGT:
2596 return LTGT_EXPR;
2597 case COMPCODE_UNGE:
2598 return UNGE_EXPR;
2599 default:
2600 gcc_unreachable ();
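/* In this bit-based encoding the codes compose bitwise: e.g.
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ, which is what lets
   combine_comparisons below merge two comparisons with a single AND
   or OR of their compcodes.  */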
2604 /* Return a tree for the comparison which is the combination of
2605 doing the AND or OR (depending on CODE) of the two operations LCODE
2606 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2607 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2608 if this makes the transformation invalid. */
2610 tree
2611 combine_comparisons (location_t loc,
2612 enum tree_code code, enum tree_code lcode,
2613 enum tree_code rcode, tree truth_type,
2614 tree ll_arg, tree lr_arg)
2616 bool honor_nans = HONOR_NANS (ll_arg);
2617 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2618 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2619 int compcode;
2621 switch (code)
2623 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2624 compcode = lcompcode & rcompcode;
2625 break;
2627 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2628 compcode = lcompcode | rcompcode;
2629 break;
2631 default:
2632 return NULL_TREE;
2635 if (!honor_nans)
2637 /* Eliminate unordered comparisons, as well as LTGT and ORD
2638 which are not used unless the mode has NaNs. */
2639 compcode &= ~COMPCODE_UNORD;
2640 if (compcode == COMPCODE_LTGT)
2641 compcode = COMPCODE_NE;
2642 else if (compcode == COMPCODE_ORD)
2643 compcode = COMPCODE_TRUE;
2645 else if (flag_trapping_math)
2647 /* Check that the original operation and the optimized ones will trap
2648 under the same condition. */
2649 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2650 && (lcompcode != COMPCODE_EQ)
2651 && (lcompcode != COMPCODE_ORD);
2652 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2653 && (rcompcode != COMPCODE_EQ)
2654 && (rcompcode != COMPCODE_ORD);
2655 bool trap = (compcode & COMPCODE_UNORD) == 0
2656 && (compcode != COMPCODE_EQ)
2657 && (compcode != COMPCODE_ORD);
2659 /* In a short-circuited boolean expression the LHS might be
2660 such that the RHS, if evaluated, will never trap. For
2661 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2662 if neither x nor y is NaN. (This is a mixed blessing: for
2663 example, the expression above will never trap, hence
2664 optimizing it to x < y would be invalid). */
2665 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2666 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2667 rtrap = false;
2669 /* If the comparison was short-circuited, and only the RHS
2670 trapped, we may now generate a spurious trap. */
2671 if (rtrap && !ltrap
2672 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2673 return NULL_TREE;
2675 /* If we changed the conditions that cause a trap, we lose. */
2676 if ((ltrap || rtrap) != trap)
2677 return NULL_TREE;
2680 if (compcode == COMPCODE_TRUE)
2681 return constant_boolean_node (true, truth_type);
2682 else if (compcode == COMPCODE_FALSE)
2683 return constant_boolean_node (false, truth_type);
2684 else
2686 enum tree_code tcode;
2688 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2689 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
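/* Usage sketch (illustrative): for integer operands x and y,

     combine_comparisons (loc, TRUTH_ANDIF_EXPR, LT_EXPR, EQ_EXPR,
			  boolean_type_node, x, y)

   computes COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE and so folds
   "x < y && x == y" to the constant false node.  */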
2693 /* Return nonzero if two operands (typically of the same tree node)
2694 are necessarily equal. If either argument has side-effects this
2695 function returns zero. FLAGS modifies behavior as follows:
2697 If OEP_ONLY_CONST is set, only return nonzero for constants.
2698 This function tests whether the operands are indistinguishable;
2699 it does not test whether they are equal using C's == operation.
2700 The distinction is important for IEEE floating point, because
2701 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2702 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2704 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2705 even though it may hold multiple values during a function.
2706 This is because a GCC tree node guarantees that nothing else is
2707 executed between the evaluation of its "operands" (which may often
2708 be evaluated in arbitrary order). Hence if the operands themselves
2709 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2710 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2711 unset means assuming isochronic (or instantaneous) tree equivalence.
2712 Unless comparing arbitrary expression trees, such as from different
2713 statements, this flag can usually be left unset.
2715 If OEP_PURE_SAME is set, then pure functions with identical arguments
2716 are considered the same. It is used when the caller has other ways
2717 to ensure that global memory is unchanged in between. */
2719 int
2720 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2722 /* If either is ERROR_MARK, they aren't equal. */
2723 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2724 || TREE_TYPE (arg0) == error_mark_node
2725 || TREE_TYPE (arg1) == error_mark_node)
2726 return 0;
2728 /* Similar, if either does not have a type (like a released SSA name),
2729 they aren't equal. */
2730 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2731 return 0;
2733 /* Check equality of integer constants before bailing out due to
2734 precision differences. */
2735 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2736 return tree_int_cst_equal (arg0, arg1);
2738 /* If both types don't have the same signedness, then we can't consider
2739 them equal. We must check this before the STRIP_NOPS calls
2740 because they may change the signedness of the arguments. As pointers
2741 strictly don't have a signedness, require either two pointers or
2742 two non-pointers as well. */
2743 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2744 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2745 return 0;
2747 /* We cannot consider pointers to different address spaces equal. */
2748 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2749 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2750 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2751 return 0;
2753 /* If both types don't have the same precision, then it is not safe
2754 to strip NOPs. */
2755 if (element_precision (TREE_TYPE (arg0))
2756 != element_precision (TREE_TYPE (arg1)))
2757 return 0;
2759 STRIP_NOPS (arg0);
2760 STRIP_NOPS (arg1);
2762 /* In case both args are comparisons but with different comparison
2763 code, try to swap the comparison operands of one arg to produce
2764 a match and compare that variant. */
2765 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2766 && COMPARISON_CLASS_P (arg0)
2767 && COMPARISON_CLASS_P (arg1))
2769 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2771 if (TREE_CODE (arg0) == swap_code)
2772 return operand_equal_p (TREE_OPERAND (arg0, 0),
2773 TREE_OPERAND (arg1, 1), flags)
2774 && operand_equal_p (TREE_OPERAND (arg0, 1),
2775 TREE_OPERAND (arg1, 0), flags);
2778 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2779 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2780 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2781 return 0;
2783 /* This is needed for conversions and for COMPONENT_REF.
2784 Might as well play it safe and always test this. */
2785 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2786 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2787 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2788 return 0;
2790 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2791 We don't care about side effects in that case because the SAVE_EXPR
2792 takes care of that for us. In all other cases, two expressions are
2793 equal if they have no side effects. If we have two identical
2794 expressions with side effects that should be treated the same due
2795 to the only side effects being identical SAVE_EXPR's, that will
2796 be detected in the recursive calls below.
2797 If we are taking an invariant address of two identical objects
2798 they are necessarily equal as well. */
2799 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2800 && (TREE_CODE (arg0) == SAVE_EXPR
2801 || (flags & OEP_CONSTANT_ADDRESS_OF)
2802 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2803 return 1;
2805 /* Next handle constant cases, those for which we can return 1 even
2806 if ONLY_CONST is set. */
2807 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2808 switch (TREE_CODE (arg0))
2810 case INTEGER_CST:
2811 return tree_int_cst_equal (arg0, arg1);
2813 case FIXED_CST:
2814 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2815 TREE_FIXED_CST (arg1));
2817 case REAL_CST:
2818 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2819 TREE_REAL_CST (arg1)))
2820 return 1;
2823 if (!HONOR_SIGNED_ZEROS (arg0))
2825 /* If we do not distinguish between signed and unsigned zero,
2826 consider them equal. */
2827 if (real_zerop (arg0) && real_zerop (arg1))
2828 return 1;
2830 return 0;
2832 case VECTOR_CST:
2834 unsigned i;
2836 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2837 return 0;
2839 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2841 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2842 VECTOR_CST_ELT (arg1, i), flags))
2843 return 0;
2845 return 1;
2848 case COMPLEX_CST:
2849 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2850 flags)
2851 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2852 flags));
2854 case STRING_CST:
2855 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2856 && ! memcmp (TREE_STRING_POINTER (arg0),
2857 TREE_STRING_POINTER (arg1),
2858 TREE_STRING_LENGTH (arg0)));
2860 case ADDR_EXPR:
2861 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2862 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2863 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2864 default:
2865 break;
2868 if (flags & OEP_ONLY_CONST)
2869 return 0;
2871 /* Define macros to test an operand from arg0 and arg1 for equality and a
2872 variant that allows null and views null as being different from any
2873 non-null value. In the latter case, if either is null, then both
2874 must be; otherwise, do the normal comparison. */
2875 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2876 TREE_OPERAND (arg1, N), flags)
2878 #define OP_SAME_WITH_NULL(N) \
2879 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2880 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2882 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2884 case tcc_unary:
2885 /* Two conversions are equal only if signedness and modes match. */
2886 switch (TREE_CODE (arg0))
2888 CASE_CONVERT:
2889 case FIX_TRUNC_EXPR:
2890 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2891 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2892 return 0;
2893 break;
2894 default:
2895 break;
2898 return OP_SAME (0);
2901 case tcc_comparison:
2902 case tcc_binary:
2903 if (OP_SAME (0) && OP_SAME (1))
2904 return 1;
2906 /* For commutative ops, allow the other order. */
2907 return (commutative_tree_code (TREE_CODE (arg0))
2908 && operand_equal_p (TREE_OPERAND (arg0, 0),
2909 TREE_OPERAND (arg1, 1), flags)
2910 && operand_equal_p (TREE_OPERAND (arg0, 1),
2911 TREE_OPERAND (arg1, 0), flags));
2913 case tcc_reference:
2914 /* If either of the pointer (or reference) expressions we are
2915 dereferencing contain a side effect, these cannot be equal,
2916 but their addresses can be. */
2917 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2918 && (TREE_SIDE_EFFECTS (arg0)
2919 || TREE_SIDE_EFFECTS (arg1)))
2920 return 0;
2922 switch (TREE_CODE (arg0))
2924 case INDIRECT_REF:
2925 if (!(flags & OEP_ADDRESS_OF)
2926 && (TYPE_ALIGN (TREE_TYPE (arg0))
2927 != TYPE_ALIGN (TREE_TYPE (arg1))))
2928 return 0;
2929 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2930 return OP_SAME (0);
2932 case REALPART_EXPR:
2933 case IMAGPART_EXPR:
2934 return OP_SAME (0);
2936 case TARGET_MEM_REF:
2937 case MEM_REF:
2938 /* Require equal access sizes, and similar pointer types.
2939 We can have incomplete types for array references of
2940 variable-sized arrays from the Fortran frontend
2941 though. Also verify the types are compatible. */
2942 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2943 || (TYPE_SIZE (TREE_TYPE (arg0))
2944 && TYPE_SIZE (TREE_TYPE (arg1))
2945 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2946 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2947 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2948 && ((flags & OEP_ADDRESS_OF)
2949 || (alias_ptr_types_compatible_p
2950 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2951 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2952 && (MR_DEPENDENCE_CLIQUE (arg0)
2953 == MR_DEPENDENCE_CLIQUE (arg1))
2954 && (MR_DEPENDENCE_BASE (arg0)
2955 == MR_DEPENDENCE_BASE (arg1))
2956 && (TYPE_ALIGN (TREE_TYPE (arg0))
2957 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2958 return 0;
2959 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2960 return (OP_SAME (0) && OP_SAME (1)
2961 /* TARGET_MEM_REFs require equal extra operands. */
2962 && (TREE_CODE (arg0) != TARGET_MEM_REF
2963 || (OP_SAME_WITH_NULL (2)
2964 && OP_SAME_WITH_NULL (3)
2965 && OP_SAME_WITH_NULL (4))));
2967 case ARRAY_REF:
2968 case ARRAY_RANGE_REF:
2969 /* Operands 2 and 3 may be null.
2970 Compare the array index by value first if it is constant, as we
2971 may have different types but the same value here. */
2972 if (!OP_SAME (0))
2973 return 0;
2974 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2975 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2976 TREE_OPERAND (arg1, 1))
2977 || OP_SAME (1))
2978 && OP_SAME_WITH_NULL (2)
2979 && OP_SAME_WITH_NULL (3));
2981 case COMPONENT_REF:
2982 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2983 may be NULL when we're called to compare MEM_EXPRs. */
2984 if (!OP_SAME_WITH_NULL (0)
2985 || !OP_SAME (1))
2986 return 0;
2987 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2988 return OP_SAME_WITH_NULL (2);
2990 case BIT_FIELD_REF:
2991 if (!OP_SAME (0))
2992 return 0;
2993 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2994 return OP_SAME (1) && OP_SAME (2);
2996 default:
2997 return 0;
3000 case tcc_expression:
3001 switch (TREE_CODE (arg0))
3003 case ADDR_EXPR:
3004 return operand_equal_p (TREE_OPERAND (arg0, 0),
3005 TREE_OPERAND (arg1, 0),
3006 flags | OEP_ADDRESS_OF);
3008 case TRUTH_NOT_EXPR:
3009 return OP_SAME (0);
3011 case TRUTH_ANDIF_EXPR:
3012 case TRUTH_ORIF_EXPR:
3013 return OP_SAME (0) && OP_SAME (1);
3015 case FMA_EXPR:
3016 case WIDEN_MULT_PLUS_EXPR:
3017 case WIDEN_MULT_MINUS_EXPR:
3018 if (!OP_SAME (2))
3019 return 0;
3020 /* The multiplication operands are commutative. */
3021 /* FALLTHRU */
3023 case TRUTH_AND_EXPR:
3024 case TRUTH_OR_EXPR:
3025 case TRUTH_XOR_EXPR:
3026 if (OP_SAME (0) && OP_SAME (1))
3027 return 1;
3029 /* Otherwise take into account this is a commutative operation. */
3030 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3031 TREE_OPERAND (arg1, 1), flags)
3032 && operand_equal_p (TREE_OPERAND (arg0, 1),
3033 TREE_OPERAND (arg1, 0), flags));
3035 case COND_EXPR:
3036 case VEC_COND_EXPR:
3037 case DOT_PROD_EXPR:
3038 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3040 default:
3041 return 0;
3044 case tcc_vl_exp:
3045 switch (TREE_CODE (arg0))
3047 case CALL_EXPR:
3048 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3049 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3050 /* If the two CALL_EXPRs are not both internal calls or both normal
3051 function calls, then they are not equal. */
3052 return 0;
3053 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3055 /* If the CALL_EXPRs call different internal functions, then they
3056 are not equal. */
3057 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3058 return 0;
3060 else
3062 /* If the CALL_EXPRs call different functions, then they are not
3063 equal. */
3064 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3065 flags))
3066 return 0;
3070 unsigned int cef = call_expr_flags (arg0);
3071 if (flags & OEP_PURE_SAME)
3072 cef &= ECF_CONST | ECF_PURE;
3073 else
3074 cef &= ECF_CONST;
3075 if (!cef)
3076 return 0;
3079 /* Now see if all the arguments are the same. */
3081 const_call_expr_arg_iterator iter0, iter1;
3082 const_tree a0, a1;
3083 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3084 a1 = first_const_call_expr_arg (arg1, &iter1);
3085 a0 && a1;
3086 a0 = next_const_call_expr_arg (&iter0),
3087 a1 = next_const_call_expr_arg (&iter1))
3088 if (! operand_equal_p (a0, a1, flags))
3089 return 0;
3091 /* If we get here and both argument lists are exhausted
3092 then the CALL_EXPRs are equal. */
3093 return ! (a0 || a1);
3095 default:
3096 return 0;
3099 case tcc_declaration:
3100 /* Consider __builtin_sqrt equal to sqrt. */
3101 return (TREE_CODE (arg0) == FUNCTION_DECL
3102 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3103 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3104 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3106 default:
3107 return 0;
3110 #undef OP_SAME
3111 #undef OP_SAME_WITH_NULL
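/* Illustrative behavior of operand_equal_p (hypothetical trees):

     operand_equal_p (a + b, b + a, 0)        -> 1 (commutative op)
     operand_equal_p (f () + 1, f () + 1, 0)  -> 0 (side effects)
     operand_equal_p (x, x, OEP_ONLY_CONST)   -> 0 for a VAR_DECL x,
       since only constants may compare equal under OEP_ONLY_CONST.  */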
3114 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3115 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3117 When in doubt, return 0. */
3119 static int
3120 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3122 int unsignedp1, unsignedpo;
3123 tree primarg0, primarg1, primother;
3124 unsigned int correct_width;
3126 if (operand_equal_p (arg0, arg1, 0))
3127 return 1;
3129 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3130 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3131 return 0;
3133 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3134 and see if the inner values are the same. This removes any
3135 signedness comparison, which doesn't matter here. */
3136 primarg0 = arg0, primarg1 = arg1;
3137 STRIP_NOPS (primarg0);
3138 STRIP_NOPS (primarg1);
3139 if (operand_equal_p (primarg0, primarg1, 0))
3140 return 1;
3142 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3143 actual comparison operand, ARG0.
3145 First throw away any conversions to wider types
3146 already present in the operands. */
3148 primarg1 = get_narrower (arg1, &unsignedp1);
3149 primother = get_narrower (other, &unsignedpo);
3151 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3152 if (unsignedp1 == unsignedpo
3153 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3154 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3156 tree type = TREE_TYPE (arg0);
3158 /* Make sure shorter operand is extended the right way
3159 to match the longer operand. */
3160 primarg1 = fold_convert (signed_or_unsigned_type_for
3161 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3163 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3164 return 1;
3167 return 0;
3170 /* See if ARG is an expression that is either a comparison or is performing
3171 arithmetic on comparisons. The comparisons must only be comparing
3172 two different values, which will be stored in *CVAL1 and *CVAL2; if
3173 they are nonzero it means that some operands have already been found.
3174 No variables may be used anywhere else in the expression except in the
3175 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3176 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3178 If this is true, return 1. Otherwise, return zero. */
3180 static int
3181 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3183 enum tree_code code = TREE_CODE (arg);
3184 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3186 /* We can handle some of the tcc_expression cases here. */
3187 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3188 tclass = tcc_unary;
3189 else if (tclass == tcc_expression
3190 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3191 || code == COMPOUND_EXPR))
3192 tclass = tcc_binary;
3194 else if (tclass == tcc_expression && code == SAVE_EXPR
3195 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3197 /* If we've already found a CVAL1 or CVAL2, this expression is
3198 too complex to handle. */
3199 if (*cval1 || *cval2)
3200 return 0;
3202 tclass = tcc_unary;
3203 *save_p = 1;
3206 switch (tclass)
3208 case tcc_unary:
3209 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3211 case tcc_binary:
3212 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3213 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3214 cval1, cval2, save_p));
3216 case tcc_constant:
3217 return 1;
3219 case tcc_expression:
3220 if (code == COND_EXPR)
3221 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3222 cval1, cval2, save_p)
3223 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3224 cval1, cval2, save_p)
3225 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3226 cval1, cval2, save_p));
3227 return 0;
3229 case tcc_comparison:
3230 /* First see if we can handle the first operand, then the second. For
3231 the second operand, we know *CVAL1 can't be zero. It must be that
3232 one side of the comparison is each of the values; test for the
3233 case where this isn't true by failing if the two operands
3234 are the same. */
3236 if (operand_equal_p (TREE_OPERAND (arg, 0),
3237 TREE_OPERAND (arg, 1), 0))
3238 return 0;
3240 if (*cval1 == 0)
3241 *cval1 = TREE_OPERAND (arg, 0);
3242 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3244 else if (*cval2 == 0)
3245 *cval2 = TREE_OPERAND (arg, 0);
3246 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3248 else
3249 return 0;
3251 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3253 else if (*cval2 == 0)
3254 *cval2 = TREE_OPERAND (arg, 1);
3255 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3257 else
3258 return 0;
3260 return 1;
3262 default:
3263 return 0;
3267 /* ARG is a tree that is known to contain just arithmetic operations and
3268 comparisons. Evaluate the operations in the tree substituting NEW0 for
3269 any occurrence of OLD0 as an operand of a comparison and likewise for
3270 NEW1 and OLD1. */
3272 static tree
3273 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3274 tree old1, tree new1)
3276 tree type = TREE_TYPE (arg);
3277 enum tree_code code = TREE_CODE (arg);
3278 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3280 /* We can handle some of the tcc_expression cases here. */
3281 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3282 tclass = tcc_unary;
3283 else if (tclass == tcc_expression
3284 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3285 tclass = tcc_binary;
3287 switch (tclass)
3289 case tcc_unary:
3290 return fold_build1_loc (loc, code, type,
3291 eval_subst (loc, TREE_OPERAND (arg, 0),
3292 old0, new0, old1, new1));
3294 case tcc_binary:
3295 return fold_build2_loc (loc, code, type,
3296 eval_subst (loc, TREE_OPERAND (arg, 0),
3297 old0, new0, old1, new1),
3298 eval_subst (loc, TREE_OPERAND (arg, 1),
3299 old0, new0, old1, new1));
3301 case tcc_expression:
3302 switch (code)
3304 case SAVE_EXPR:
3305 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3306 old1, new1);
3308 case COMPOUND_EXPR:
3309 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3310 old1, new1);
3312 case COND_EXPR:
3313 return fold_build3_loc (loc, code, type,
3314 eval_subst (loc, TREE_OPERAND (arg, 0),
3315 old0, new0, old1, new1),
3316 eval_subst (loc, TREE_OPERAND (arg, 1),
3317 old0, new0, old1, new1),
3318 eval_subst (loc, TREE_OPERAND (arg, 2),
3319 old0, new0, old1, new1));
3320 default:
3321 break;
3323 /* Fall through - ??? */
3325 case tcc_comparison:
3327 tree arg0 = TREE_OPERAND (arg, 0);
3328 tree arg1 = TREE_OPERAND (arg, 1);
3330 /* We need to check both for exact equality and tree equality. The
3331 former will be true if the operand has a side-effect. In that
3332 case, we know the operand occurred exactly once. */
3334 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3335 arg0 = new0;
3336 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3337 arg0 = new1;
3339 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3340 arg1 = new0;
3341 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3342 arg1 = new1;
3344 return fold_build2_loc (loc, code, type, arg0, arg1);
3347 default:
3348 return arg;
3352 /* Return a tree for the case when the result of an expression is RESULT
3353 converted to TYPE and OMITTED was previously an operand of the expression
3354 but is now not needed (e.g., we folded OMITTED * 0).
3356 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3357 the conversion of RESULT to TYPE. */
3359 tree
3360 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3362 tree t = fold_convert_loc (loc, type, result);
3364 /* If the resulting operand is an empty statement, just return the omitted
3365 statement cast to void. */
3366 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3367 return build1_loc (loc, NOP_EXPR, void_type_node,
3368 fold_ignored_result (omitted));
3370 if (TREE_SIDE_EFFECTS (omitted))
3371 return build2_loc (loc, COMPOUND_EXPR, type,
3372 fold_ignored_result (omitted), t);
3374 return non_lvalue_loc (loc, t);
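/* Example (illustrative): when folding "f () * 0", calling
   omit_one_operand_loc (loc, type, integer_zero_node, f_call) yields
   the COMPOUND_EXPR "f (), 0", preserving the call's side effects
   while the multiplication itself disappears.  */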
3377 /* Return a tree for the case when the result of an expression is RESULT
3378 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3379 of the expression but are now not needed.
3381 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3382 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3383 evaluated before OMITTED2. Otherwise, if neither has side effects,
3384 just do the conversion of RESULT to TYPE. */
3386 tree
3387 omit_two_operands_loc (location_t loc, tree type, tree result,
3388 tree omitted1, tree omitted2)
3390 tree t = fold_convert_loc (loc, type, result);
3392 if (TREE_SIDE_EFFECTS (omitted2))
3393 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3394 if (TREE_SIDE_EFFECTS (omitted1))
3395 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3397 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3401 /* Return a simplified tree node for the truth-negation of ARG. This
3402 never alters ARG itself. We assume that ARG is an operation that
3403 returns a truth value (0 or 1).
3405 FIXME: one would think we would fold the result, but it causes
3406 problems with the dominator optimizer. */
3408 static tree
3409 fold_truth_not_expr (location_t loc, tree arg)
3411 tree type = TREE_TYPE (arg);
3412 enum tree_code code = TREE_CODE (arg);
3413 location_t loc1, loc2;
3415 /* If this is a comparison, we can simply invert it, except for
3416 floating-point non-equality comparisons, in which case we just
3417 enclose a TRUTH_NOT_EXPR around what we have. */
3419 if (TREE_CODE_CLASS (code) == tcc_comparison)
3421 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3422 if (FLOAT_TYPE_P (op_type)
3423 && flag_trapping_math
3424 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3425 && code != NE_EXPR && code != EQ_EXPR)
3426 return NULL_TREE;
3428 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3429 if (code == ERROR_MARK)
3430 return NULL_TREE;
3432 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3433 TREE_OPERAND (arg, 1));
3436 switch (code)
3438 case INTEGER_CST:
3439 return constant_boolean_node (integer_zerop (arg), type);
3441 case TRUTH_AND_EXPR:
3442 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3443 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3444 return build2_loc (loc, TRUTH_OR_EXPR, type,
3445 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3446 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3448 case TRUTH_OR_EXPR:
3449 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3450 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3451 return build2_loc (loc, TRUTH_AND_EXPR, type,
3452 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3453 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3455 case TRUTH_XOR_EXPR:
3456 /* Here we can invert either operand. We invert the first operand
3457 unless the second operand is a TRUTH_NOT_EXPR in which case our
3458 result is the XOR of the first operand with the inside of the
3459 negation of the second operand. */
3461 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3462 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3463 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3464 else
3465 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3466 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3467 TREE_OPERAND (arg, 1));
3469 case TRUTH_ANDIF_EXPR:
3470 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3471 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3472 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3473 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3474 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3476 case TRUTH_ORIF_EXPR:
3477 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3478 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3479 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3480 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3481 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3483 case TRUTH_NOT_EXPR:
3484 return TREE_OPERAND (arg, 0);
3486 case COND_EXPR:
3488 tree arg1 = TREE_OPERAND (arg, 1);
3489 tree arg2 = TREE_OPERAND (arg, 2);
3491 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3492 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3494 /* A COND_EXPR may have a throw as one operand, which
3495 then has void type. Just leave void operands
3496 as they are. */
3497 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3498 VOID_TYPE_P (TREE_TYPE (arg1))
3499 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3500 VOID_TYPE_P (TREE_TYPE (arg2))
3501 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3504 case COMPOUND_EXPR:
3505 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3506 return build2_loc (loc, COMPOUND_EXPR, type,
3507 TREE_OPERAND (arg, 0),
3508 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3510 case NON_LVALUE_EXPR:
3511 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3512 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3514 CASE_CONVERT:
3515 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3516 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3518 /* ... fall through ... */
3520 case FLOAT_EXPR:
3521 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3522 return build1_loc (loc, TREE_CODE (arg), type,
3523 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3525 case BIT_AND_EXPR:
3526 if (!integer_onep (TREE_OPERAND (arg, 1)))
3527 return NULL_TREE;
3528 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3530 case SAVE_EXPR:
3531 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3533 case CLEANUP_POINT_EXPR:
3534 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3535 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3536 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3538 default:
3539 return NULL_TREE;
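/* Examples (illustrative) of the cases above:

     !(a && b) -> !a || !b                  (De Morgan)
     !(a < b)  -> a >= b                    (integer operands)
     !(a < b)  -> a unge b                  (floats, NaNs honored)

   and NULL_TREE for !(a < b) on floats under -ftrapping-math, where
   the inversion would be unsafe.  */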
3543 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3544 assume that ARG is an operation that returns a truth value (0 or 1
3545 for scalars, 0 or -1 for vectors). Return the folded expression if
3546 folding is successful. Otherwise, return NULL_TREE. */
3548 static tree
3549 fold_invert_truthvalue (location_t loc, tree arg)
3551 tree type = TREE_TYPE (arg);
3552 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3553 ? BIT_NOT_EXPR
3554 : TRUTH_NOT_EXPR,
3555 type, arg);
3558 /* Return a simplified tree node for the truth-negation of ARG. This
3559 never alters ARG itself. We assume that ARG is an operation that
3560 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3562 tree
3563 invert_truthvalue_loc (location_t loc, tree arg)
3565 if (TREE_CODE (arg) == ERROR_MARK)
3566 return arg;
3568 tree type = TREE_TYPE (arg);
3569 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3570 ? BIT_NOT_EXPR
3571 : TRUTH_NOT_EXPR,
3572 type, arg);
3575 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3576 operands are another bit-wise operation with a common input. If so,
3577 distribute the bit operations to save an operation and possibly two if
3578 constants are involved. For example, convert
3579 (A | B) & (A | C) into A | (B & C)
3580 Further simplification will occur if B and C are constants.
3582 If this optimization cannot be done, 0 will be returned. */
3584 static tree
3585 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3586 tree arg0, tree arg1)
3588 tree common;
3589 tree left, right;
3591 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3592 || TREE_CODE (arg0) == code
3593 || (TREE_CODE (arg0) != BIT_AND_EXPR
3594 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3595 return 0;
3597 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3599 common = TREE_OPERAND (arg0, 0);
3600 left = TREE_OPERAND (arg0, 1);
3601 right = TREE_OPERAND (arg1, 1);
3603 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3605 common = TREE_OPERAND (arg0, 0);
3606 left = TREE_OPERAND (arg0, 1);
3607 right = TREE_OPERAND (arg1, 0);
3609 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3611 common = TREE_OPERAND (arg0, 1);
3612 left = TREE_OPERAND (arg0, 0);
3613 right = TREE_OPERAND (arg1, 1);
3615 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3617 common = TREE_OPERAND (arg0, 1);
3618 left = TREE_OPERAND (arg0, 0);
3619 right = TREE_OPERAND (arg1, 0);
3621 else
3622 return 0;
3624 common = fold_convert_loc (loc, type, common);
3625 left = fold_convert_loc (loc, type, left);
3626 right = fold_convert_loc (loc, type, right);
3627 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3628 fold_build2_loc (loc, code, type, left, right));
3631 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3632 with code CODE. This optimization is unsafe. */
3633 static tree
3634 distribute_real_division (location_t loc, enum tree_code code, tree type,
3635 tree arg0, tree arg1)
3637 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3638 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3640 /* (A / C) +- (B / C) -> (A +- B) / C. */
3641 if (mul0 == mul1
3642 && operand_equal_p (TREE_OPERAND (arg0, 1),
3643 TREE_OPERAND (arg1, 1), 0))
3644 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3645 fold_build2_loc (loc, code, type,
3646 TREE_OPERAND (arg0, 0),
3647 TREE_OPERAND (arg1, 0)),
3648 TREE_OPERAND (arg0, 1));
3650 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3651 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3652 TREE_OPERAND (arg1, 0), 0)
3653 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3654 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3656 REAL_VALUE_TYPE r0, r1;
3657 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3658 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3659 if (!mul0)
3660 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3661 if (!mul1)
3662 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3663 real_arithmetic (&r0, code, &r0, &r1);
3664 return fold_build2_loc (loc, MULT_EXPR, type,
3665 TREE_OPERAND (arg0, 0),
3666 build_real (type, r0));
3669 return NULL_TREE;
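/* Example of the second, unsafe transform (illustrative): under
   -funsafe-math-optimizations, "x / 5.0 + x / 4.0" becomes "x * C"
   where C is the compile-time value of 1/5.0 + 1/4.0; the rounding
   of C may differ from that of the original pair of divisions,
   hence "unsafe".  */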
3672 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3673 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3675 static tree
3676 make_bit_field_ref (location_t loc, tree inner, tree type,
3677 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3679 tree result, bftype;
3681 if (bitpos == 0)
3683 tree size = TYPE_SIZE (TREE_TYPE (inner));
3684 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3685 || POINTER_TYPE_P (TREE_TYPE (inner)))
3686 && tree_fits_shwi_p (size)
3687 && tree_to_shwi (size) == bitsize)
3688 return fold_convert_loc (loc, type, inner);
3691 bftype = type;
3692 if (TYPE_PRECISION (bftype) != bitsize
3693 || TYPE_UNSIGNED (bftype) == !unsignedp)
3694 bftype = build_nonstandard_integer_type (bitsize, 0);
3696 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3697 size_int (bitsize), bitsize_int (bitpos));
3699 if (bftype != type)
3700 result = fold_convert_loc (loc, type, result);
3702 return result;
3705 /* Optimize a bit-field compare.
3707 There are two cases: First is a compare against a constant and the
3708 second is a comparison of two items where the fields are at the same
3709 bit position relative to the start of a chunk (byte, halfword, word)
3710 large enough to contain it. In these cases we can avoid the shift
3711 implicit in bitfield extractions.
3713 For constants, we emit a compare of the shifted constant with the
3714 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3715 compared. For two fields at the same position, we do the ANDs with the
3716 similar mask and compare the result of the ANDs.
3718 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3719 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3720 are the left and right operands of the comparison, respectively.
3722 If the optimization described above can be done, we return the resulting
3723 tree. Otherwise we return zero. */
3725 static tree
3726 optimize_bit_field_compare (location_t loc, enum tree_code code,
3727 tree compare_type, tree lhs, tree rhs)
3729 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3730 tree type = TREE_TYPE (lhs);
3731 tree unsigned_type;
3732 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3733 machine_mode lmode, rmode, nmode;
3734 int lunsignedp, runsignedp;
3735 int lvolatilep = 0, rvolatilep = 0;
3736 tree linner, rinner = NULL_TREE;
3737 tree mask;
3738 tree offset;
3740 /* Get all the information about the extractions being done. If the bit size
3741 is the same as the size of the underlying object, we aren't doing an
3742 extraction at all and so can do nothing. We also don't want to
3743 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3744 then will no longer be able to replace it. */
3745 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3746 &lunsignedp, &lvolatilep, false);
3747 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3748 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3749 return 0;
3751 if (!const_p)
3753 /* If this is not a constant, we can only do something if bit positions,
3754 sizes, and signedness are the same. */
3755 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3756 &runsignedp, &rvolatilep, false);
3758 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3759 || lunsignedp != runsignedp || offset != 0
3760 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3761 return 0;
3764 /* See if we can find a mode to refer to this field. We should be able to,
3765 but fail if we can't. */
3766 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3767 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3768 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3769 TYPE_ALIGN (TREE_TYPE (rinner))),
3770 word_mode, false);
3771 if (nmode == VOIDmode)
3772 return 0;
3774 /* Set signed and unsigned types of the precision of this mode for the
3775 shifts below. */
3776 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3778 /* Compute the bit position and size for the new reference and our offset
3779 within it. If the new reference is the same size as the original, we
3780 won't optimize anything, so return zero. */
3781 nbitsize = GET_MODE_BITSIZE (nmode);
3782 nbitpos = lbitpos & ~ (nbitsize - 1);
3783 lbitpos -= nbitpos;
3784 if (nbitsize == lbitsize)
3785 return 0;
3787 if (BYTES_BIG_ENDIAN)
3788 lbitpos = nbitsize - lbitsize - lbitpos;
3790 /* Make the mask to be used against the extracted field. */
3791 mask = build_int_cst_type (unsigned_type, -1);
3792 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3793 mask = const_binop (RSHIFT_EXPR, mask,
3794 size_int (nbitsize - lbitsize - lbitpos));
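  /* Worked example (illustrative): for a 3-bit field at bit position 5
     in a 32-bit word, NBITSIZE == 32, LBITSIZE == 3 and LBITPOS == 5;
     shifting -1 left by 29 and then logically right by 24 leaves
     MASK == 0xe0, i.e. exactly bits 5..7 set.  */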
3796 if (! const_p)
3797 /* If not comparing with constant, just rework the comparison
3798 and return. */
3799 return fold_build2_loc (loc, code, compare_type,
3800 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3801 make_bit_field_ref (loc, linner,
3802 unsigned_type,
3803 nbitsize, nbitpos,
3804 1),
3805 mask),
3806 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3807 make_bit_field_ref (loc, rinner,
3808 unsigned_type,
3809 nbitsize, nbitpos,
3810 1),
3811 mask));
3813 /* Otherwise, we are handling the constant case. See if the constant is too
3814 big for the field. Warn and return a tree for 0 (false) if so. We do
3815 this not only for its own sake, but to avoid having to test for this
3816 error case below. If we didn't, we might generate wrong code.
3818 For unsigned fields, the constant shifted right by the field length should
3819 be all zero. For signed fields, the high-order bits should agree with
3820 the sign bit. */
3822 if (lunsignedp)
3824 if (wi::lrshift (rhs, lbitsize) != 0)
3826 warning (0, "comparison is always %d due to width of bit-field",
3827 code == NE_EXPR);
3828 return constant_boolean_node (code == NE_EXPR, compare_type);
3831 else
3833 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3834 if (tem != 0 && tem != -1)
3836 warning (0, "comparison is always %d due to width of bit-field",
3837 code == NE_EXPR);
3838 return constant_boolean_node (code == NE_EXPR, compare_type);
3842 /* Single-bit compares should always be against zero. */
3843 if (lbitsize == 1 && ! integer_zerop (rhs))
3845 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3846 rhs = build_int_cst (type, 0);
3849 /* Make a new bitfield reference, shift the constant over the
3850 appropriate number of bits and mask it with the computed mask
3851 (in case this was a signed field). If we changed it, make a new one. */
3852 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3854 rhs = const_binop (BIT_AND_EXPR,
3855 const_binop (LSHIFT_EXPR,
3856 fold_convert_loc (loc, unsigned_type, rhs),
3857 size_int (lbitpos)),
3858 mask);
3860 lhs = build2_loc (loc, code, compare_type,
3861 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3862 return lhs;
3865 /* Subroutine for fold_truth_andor_1: decode a field reference.
3867 If EXP is a component reference, we return the innermost reference.
3869 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3870 set to the starting bit number.
3872 If the innermost field can be completely contained in a mode-sized
3873 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3875 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3876 otherwise it is not changed.
3878 *PUNSIGNEDP is set to the signedness of the field.
3880 *PMASK is set to the mask used. This is either contained in a
3881 BIT_AND_EXPR or derived from the width of the field.
3883 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3885 Return 0 if this is not a component reference or is one that we can't
3886 do anything with. */
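/* For instance (illustrative), called on the operand  q->a & 7  this
   strips the BIT_AND_EXPR, records 7 in *PAND_MASK, returns the inner
   reference to q->a, and leaves in *PMASK the 7 combined with the mask
   implied by the width of the field.  */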
3888 static tree
3889 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3890 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3891 int *punsignedp, int *pvolatilep,
3892 tree *pmask, tree *pand_mask)
3894 tree outer_type = 0;
3895 tree and_mask = 0;
3896 tree mask, inner, offset;
3897 tree unsigned_type;
3898 unsigned int precision;
3900 /* All the optimizations using this function assume integer fields.
3901 There are problems with FP fields since the type_for_size call
3902 below can fail for, e.g., XFmode. */
3903 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3904 return 0;
3906 /* We are interested in the bare arrangement of bits, so strip everything
3907 that doesn't affect the machine mode. However, record the type of the
3908 outermost expression if it may matter below. */
3909 if (CONVERT_EXPR_P (exp)
3910 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3911 outer_type = TREE_TYPE (exp);
3912 STRIP_NOPS (exp);
3914 if (TREE_CODE (exp) == BIT_AND_EXPR)
3916 and_mask = TREE_OPERAND (exp, 1);
3917 exp = TREE_OPERAND (exp, 0);
3918 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3919 if (TREE_CODE (and_mask) != INTEGER_CST)
3920 return 0;
3923 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3924 punsignedp, pvolatilep, false);
3925 if ((inner == exp && and_mask == 0)
3926 || *pbitsize < 0 || offset != 0
3927 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3928 return 0;
3930 /* If the number of bits in the reference is the same as the bitsize of
3931 the outer type, then the outer type gives the signedness. Otherwise
3932 (in case of a small bitfield) the signedness is unchanged. */
3933 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3934 *punsignedp = TYPE_UNSIGNED (outer_type);
3936 /* Compute the mask to access the bitfield. */
3937 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3938 precision = TYPE_PRECISION (unsigned_type);
3940 mask = build_int_cst_type (unsigned_type, -1);
3942 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3943 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3945 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3946 if (and_mask != 0)
3947 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3948 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3950 *pmask = mask;
3951 *pand_mask = and_mask;
3952 return inner;
3955 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3956 bit positions and MASK is SIGNED. */
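/* E.g. for SIZE == 3 the accepted MASK is wi::mask (3, false, prec),
   i.e. the low three bits set, and only when MASK's type is signed.  */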
3958 static int
3959 all_ones_mask_p (const_tree mask, unsigned int size)
3961 tree type = TREE_TYPE (mask);
3962 unsigned int precision = TYPE_PRECISION (type);
3964 /* If this function returns true when the type of the mask is
3965 UNSIGNED, then there will be errors. In particular see
3966 gcc.c-torture/execute/990326-1.c. There does not appear to be
3967 any documentation paper trail as to why this is so. But the pre
3968 wide-int worked with that restriction and it has been preserved
3969 here. */
3970 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3971 return false;
3973 return wi::mask (size, false, precision) == mask;
3976 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3977 represents the sign bit of EXP's type. If EXP represents a sign
3978 or zero extension, also test VAL against the unextended type.
3979 The return value is the (sub)expression whose sign bit is VAL,
3980 or NULL_TREE otherwise. */
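/* For example, in a 16-bit type only the value 0x8000 qualifies:
   wi::only_sign_bit_p requires exactly the top bit to be set.  */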
3982 tree
3983 sign_bit_p (tree exp, const_tree val)
3985 int width;
3986 tree t;
3988 /* Tree EXP must have an integral type. */
3989 t = TREE_TYPE (exp);
3990 if (! INTEGRAL_TYPE_P (t))
3991 return NULL_TREE;
3993 /* Tree VAL must be an integer constant. */
3994 if (TREE_CODE (val) != INTEGER_CST
3995 || TREE_OVERFLOW (val))
3996 return NULL_TREE;
3998 width = TYPE_PRECISION (t);
3999 if (wi::only_sign_bit_p (val, width))
4000 return exp;
4002 /* Handle extension from a narrower type. */
4003 if (TREE_CODE (exp) == NOP_EXPR
4004 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4005 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4007 return NULL_TREE;
4010 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4011 to be evaluated unconditionally. */
4013 static int
4014 simple_operand_p (const_tree exp)
4016 /* Strip any conversions that don't change the machine mode. */
4017 STRIP_NOPS (exp);
4019 return (CONSTANT_CLASS_P (exp)
4020 || TREE_CODE (exp) == SSA_NAME
4021 || (DECL_P (exp)
4022 && ! TREE_ADDRESSABLE (exp)
4023 && ! TREE_THIS_VOLATILE (exp)
4024 && ! DECL_NONLOCAL (exp)
4025 /* Don't regard global variables as simple. They may be
4026 allocated in ways unknown to the compiler (shared memory,
4027 #pragma weak, etc). */
4028 && ! TREE_PUBLIC (exp)
4029 && ! DECL_EXTERNAL (exp)
4030 /* Weakrefs are not safe to be read, since they can be NULL.
4031 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4032 have DECL_WEAK flag set. */
4033 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4034 /* Loading a static variable is unduly expensive, but global
4035 registers aren't expensive. */
4036 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4039 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4040 to be evaluated unconditionally.
4041 In addition to simple_operand_p, we assume that comparisons, conversions,
4042 and logic-not operations are simple, if their operands are simple, too. */
4044 static bool
4045 simple_operand_p_2 (tree exp)
4047 enum tree_code code;
4049 if (TREE_SIDE_EFFECTS (exp)
4050 || tree_could_trap_p (exp))
4051 return false;
4053 while (CONVERT_EXPR_P (exp))
4054 exp = TREE_OPERAND (exp, 0);
4056 code = TREE_CODE (exp);
4058 if (TREE_CODE_CLASS (code) == tcc_comparison)
4059 return (simple_operand_p (TREE_OPERAND (exp, 0))
4060 && simple_operand_p (TREE_OPERAND (exp, 1)));
4062 if (code == TRUTH_NOT_EXPR)
4063 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4065 return simple_operand_p (exp);
4069 /* The following functions are subroutines to fold_range_test and allow it to
4070 try to change a logical combination of comparisons into a range test.
4072 For example, both
4073 X == 2 || X == 3 || X == 4 || X == 5
4074 and
4075 X >= 2 && X <= 5
4076 are converted to
4077 (unsigned) (X - 2) <= 3
4079 We describe each set of comparisons as being either inside or outside
4080 a range, using a variable named like IN_P, and then describe the
4081 range with a lower and upper bound. If one of the bounds is omitted,
4082 it represents either the highest or lowest value of the type.
4084 In the comments below, we represent a range by two numbers in brackets
4085 preceded by a "+" to designate being inside that range, or a "-" to
4086 designate being outside that range, so the condition can be inverted by
4087 flipping the prefix. An omitted bound is represented by a "-". For
4088 example, "- [-, 10]" means being outside the range starting at the lowest
4089 possible value and ending at 10, in other words, being greater than 10.
4090 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4091 always false.
4093 We set up things so that the missing bounds are handled in a consistent
4094 manner so neither a missing bound nor "true" and "false" need to be
4095 handled using a special case. */
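/* The unsigned trick above works because the subtraction wraps: for
   X == 1, (unsigned) (1 - 2) is a huge value, far above 3, while every
   X in [2, 5] lands in [0, 3].  One unsigned comparison thus replaces
   a chain of four equality tests.  */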
4097 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4098 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4099 and UPPER1_P are nonzero if the respective argument is an upper bound
4100 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4101 must be specified for a comparison. ARG1 will be converted to ARG0's
4102 type if both are specified. */
4104 static tree
4105 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4106 tree arg1, int upper1_p)
4108 tree tem;
4109 int result;
4110 int sgn0, sgn1;
4112 /* If neither arg represents infinity, do the normal operation.
4113 Else, if not a comparison, return infinity. Else handle the special
4114 comparison rules. Note that most of the cases below won't occur, but
4115 are handled for consistency. */
4117 if (arg0 != 0 && arg1 != 0)
4119 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4120 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4121 STRIP_NOPS (tem);
4122 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4125 if (TREE_CODE_CLASS (code) != tcc_comparison)
4126 return 0;
4128 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4129 for neither. In real maths, we cannot assume open ended ranges are
4130 the same. But, this is computer arithmetic, where numbers are finite.
4131 We can therefore make the transformation of any unbounded range with
4132 the value Z, Z being greater than any representable number. This permits
4133 us to treat unbounded ranges as equal. */
4134 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4135 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4136 switch (code)
4138 case EQ_EXPR:
4139 result = sgn0 == sgn1;
4140 break;
4141 case NE_EXPR:
4142 result = sgn0 != sgn1;
4143 break;
4144 case LT_EXPR:
4145 result = sgn0 < sgn1;
4146 break;
4147 case LE_EXPR:
4148 result = sgn0 <= sgn1;
4149 break;
4150 case GT_EXPR:
4151 result = sgn0 > sgn1;
4152 break;
4153 case GE_EXPR:
4154 result = sgn0 >= sgn1;
4155 break;
4156 default:
4157 gcc_unreachable ();
4160 return constant_boolean_node (result, type);
4163 /* Helper routine for make_range. Perform one step for it, return
4164 new expression if the loop should continue or NULL_TREE if it should
4165 stop. */
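/* Worked example (assuming signed overflow is undefined):  X + 10 < 20
   first yields the range - [20, -] for X + 10; the PLUS_EXPR case then
   moves the constant into the bounds, leaving - [10, -] for X, which
   is simply X < 10.  */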
4167 tree
4168 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4169 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4170 bool *strict_overflow_p)
4172 tree arg0_type = TREE_TYPE (arg0);
4173 tree n_low, n_high, low = *p_low, high = *p_high;
4174 int in_p = *p_in_p, n_in_p;
4176 switch (code)
4178 case TRUTH_NOT_EXPR:
4179 /* We can only do something if the range is testing for zero. */
4180 if (low == NULL_TREE || high == NULL_TREE
4181 || ! integer_zerop (low) || ! integer_zerop (high))
4182 return NULL_TREE;
4183 *p_in_p = ! in_p;
4184 return arg0;
4186 case EQ_EXPR: case NE_EXPR:
4187 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4188 /* We can only do something if the range is testing for zero
4189 and if the second operand is an integer constant. Note that
4190 saying something is "in" the range we make is done by
4191 complementing IN_P, since IN_P is set for the initial case of
4192 being not equal to zero; "out" is leaving it alone. */
4193 if (low == NULL_TREE || high == NULL_TREE
4194 || ! integer_zerop (low) || ! integer_zerop (high)
4195 || TREE_CODE (arg1) != INTEGER_CST)
4196 return NULL_TREE;
4198 switch (code)
4200 case NE_EXPR: /* - [c, c] */
4201 low = high = arg1;
4202 break;
4203 case EQ_EXPR: /* + [c, c] */
4204 in_p = ! in_p, low = high = arg1;
4205 break;
4206 case GT_EXPR: /* - [-, c] */
4207 low = 0, high = arg1;
4208 break;
4209 case GE_EXPR: /* + [c, -] */
4210 in_p = ! in_p, low = arg1, high = 0;
4211 break;
4212 case LT_EXPR: /* - [c, -] */
4213 low = arg1, high = 0;
4214 break;
4215 case LE_EXPR: /* + [-, c] */
4216 in_p = ! in_p, low = 0, high = arg1;
4217 break;
4218 default:
4219 gcc_unreachable ();
4222 /* If this is an unsigned comparison, we also know that EXP is
4223 greater than or equal to zero. We base the range tests we make
4224 on that fact, so we record it here so we can parse existing
4225 range tests. We test arg0_type since often the return type
4226 of, e.g. EQ_EXPR, is boolean. */
4227 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4229 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4230 in_p, low, high, 1,
4231 build_int_cst (arg0_type, 0),
4232 NULL_TREE))
4233 return NULL_TREE;
4235 in_p = n_in_p, low = n_low, high = n_high;
4237 /* If the high bound is missing, but we have a nonzero low
4238 bound, reverse the range so it goes from zero to the low bound
4239 minus 1. */
4240 if (high == 0 && low && ! integer_zerop (low))
4242 in_p = ! in_p;
4243 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4244 build_int_cst (TREE_TYPE (low), 1), 0);
4245 low = build_int_cst (arg0_type, 0);
4249 *p_low = low;
4250 *p_high = high;
4251 *p_in_p = in_p;
4252 return arg0;
4254 case NEGATE_EXPR:
4255 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4256 low and high are non-NULL, then normalize will DTRT. */
4257 if (!TYPE_UNSIGNED (arg0_type)
4258 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4260 if (low == NULL_TREE)
4261 low = TYPE_MIN_VALUE (arg0_type);
4262 if (high == NULL_TREE)
4263 high = TYPE_MAX_VALUE (arg0_type);
4266 /* (-x) IN [a,b] -> x in [-b, -a] */
4267 n_low = range_binop (MINUS_EXPR, exp_type,
4268 build_int_cst (exp_type, 0),
4269 0, high, 1);
4270 n_high = range_binop (MINUS_EXPR, exp_type,
4271 build_int_cst (exp_type, 0),
4272 0, low, 0);
4273 if (n_high != 0 && TREE_OVERFLOW (n_high))
4274 return NULL_TREE;
4275 goto normalize;
4277 case BIT_NOT_EXPR:
4278 /* ~ X -> -X - 1 */
4279 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4280 build_int_cst (exp_type, 1));
4282 case PLUS_EXPR:
4283 case MINUS_EXPR:
4284 if (TREE_CODE (arg1) != INTEGER_CST)
4285 return NULL_TREE;
4287 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4288 move a constant to the other side. */
4289 if (!TYPE_UNSIGNED (arg0_type)
4290 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4291 return NULL_TREE;
4293 /* If EXP is signed, any overflow in the computation is undefined,
4294 so we don't worry about it so long as our computations on
4295 the bounds don't overflow. For unsigned, overflow is defined
4296 and this is exactly the right thing. */
4297 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4298 arg0_type, low, 0, arg1, 0);
4299 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4300 arg0_type, high, 1, arg1, 0);
4301 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4302 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4303 return NULL_TREE;
4305 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4306 *strict_overflow_p = true;
4308 normalize:
4309 /* Check for an unsigned range which has wrapped around the maximum
4310 value thus making n_high < n_low, and normalize it. */
4311 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4313 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4314 build_int_cst (TREE_TYPE (n_high), 1), 0);
4315 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4316 build_int_cst (TREE_TYPE (n_low), 1), 0);
4318 /* If the range is of the form +/- [ x+1, x ], we won't
4319 be able to normalize it. But then, it represents the
4320 whole range or the empty set, so make it
4321 +/- [ -, - ]. */
4322 if (tree_int_cst_equal (n_low, low)
4323 && tree_int_cst_equal (n_high, high))
4324 low = high = 0;
4325 else
4326 in_p = ! in_p;
4328 else
4329 low = n_low, high = n_high;
4331 *p_low = low;
4332 *p_high = high;
4333 *p_in_p = in_p;
4334 return arg0;
4336 CASE_CONVERT:
4337 case NON_LVALUE_EXPR:
4338 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4339 return NULL_TREE;
4341 if (! INTEGRAL_TYPE_P (arg0_type)
4342 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4343 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4344 return NULL_TREE;
4346 n_low = low, n_high = high;
4348 if (n_low != 0)
4349 n_low = fold_convert_loc (loc, arg0_type, n_low);
4351 if (n_high != 0)
4352 n_high = fold_convert_loc (loc, arg0_type, n_high);
4354 /* If we're converting arg0 from an unsigned type to exp's
4355 signed type, we will be doing the comparison as unsigned.
4356 The tests above have already verified that LOW and HIGH
4357 are both positive.
4359 So we have to ensure that we will handle large unsigned
4360 values the same way that the current signed bounds treat
4361 negative values. */
4363 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4365 tree high_positive;
4366 tree equiv_type;
4367 /* For fixed-point modes, we need to pass the saturating flag
4368 as the 2nd parameter. */
4369 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4370 equiv_type
4371 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4372 TYPE_SATURATING (arg0_type));
4373 else
4374 equiv_type
4375 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4377 /* A range without an upper bound is, naturally, unbounded.
4378 Since convert would have cropped a very large value, use
4379 the max value for the destination type. */
4380 high_positive
4381 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4382 : TYPE_MAX_VALUE (arg0_type);
4384 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4385 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4386 fold_convert_loc (loc, arg0_type,
4387 high_positive),
4388 build_int_cst (arg0_type, 1));
4390 /* If the low bound is specified, "and" the range with the
4391 range for which the original unsigned value will be
4392 positive. */
4393 if (low != 0)
4395 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4396 1, fold_convert_loc (loc, arg0_type,
4397 integer_zero_node),
4398 high_positive))
4399 return NULL_TREE;
4401 in_p = (n_in_p == in_p);
4403 else
4405 /* Otherwise, "or" the range with the range of the input
4406 that will be interpreted as negative. */
4407 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4408 1, fold_convert_loc (loc, arg0_type,
4409 integer_zero_node),
4410 high_positive))
4411 return NULL_TREE;
4413 in_p = (in_p != n_in_p);
4417 *p_low = n_low;
4418 *p_high = n_high;
4419 *p_in_p = in_p;
4420 return arg0;
4422 default:
4423 return NULL_TREE;
4427 /* Given EXP, a logical expression, set the range it is testing into
4428 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4429 actually being tested. *PLOW and *PHIGH will be made of the same
4430 type as the returned expression. If EXP is not a comparison, we
4431 will most likely not be returning a useful value and range. Set
4432 *STRICT_OVERFLOW_P to true if the return value is only valid
4433 because signed overflow is undefined; otherwise, do not change
4434 *STRICT_OVERFLOW_P. */
4436 tree
4437 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4438 bool *strict_overflow_p)
4440 enum tree_code code;
4441 tree arg0, arg1 = NULL_TREE;
4442 tree exp_type, nexp;
4443 int in_p;
4444 tree low, high;
4445 location_t loc = EXPR_LOCATION (exp);
4447 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4448 and see if we can refine the range. Some of the cases below may not
4449 happen, but it doesn't seem worth worrying about this. We "continue"
4450 the outer loop when we've changed something; otherwise we "break"
4451 the switch, which will "break" the while. */
4453 in_p = 0;
4454 low = high = build_int_cst (TREE_TYPE (exp), 0);
4456 while (1)
4458 code = TREE_CODE (exp);
4459 exp_type = TREE_TYPE (exp);
4460 arg0 = NULL_TREE;
4462 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4464 if (TREE_OPERAND_LENGTH (exp) > 0)
4465 arg0 = TREE_OPERAND (exp, 0);
4466 if (TREE_CODE_CLASS (code) == tcc_binary
4467 || TREE_CODE_CLASS (code) == tcc_comparison
4468 || (TREE_CODE_CLASS (code) == tcc_expression
4469 && TREE_OPERAND_LENGTH (exp) > 1))
4470 arg1 = TREE_OPERAND (exp, 1);
4472 if (arg0 == NULL_TREE)
4473 break;
4475 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4476 &high, &in_p, strict_overflow_p);
4477 if (nexp == NULL_TREE)
4478 break;
4479 exp = nexp;
4482 /* If EXP is a constant, we can evaluate whether this is true or false. */
4483 if (TREE_CODE (exp) == INTEGER_CST)
4485 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4486 exp, 0, low, 0))
4487 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4488 exp, 1, high, 1)));
4489 low = high = 0;
4490 exp = 0;
4493 *pin_p = in_p, *plow = low, *phigh = high;
4494 return exp;
4497 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4498 type, TYPE, return an expression to test if EXP is in (or out of, depending
4499 on IN_P) the range. Return 0 if the test couldn't be created. */
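/* E.g. with IN_P set, LOW == 2 and HIGH == 5 on a signed int EXP this
   builds  (unsigned int) (EXP - 2) <= 3;  with one bound missing it
   degenerates to a single <= or >= comparison against the other.  */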
4501 tree
4502 build_range_check (location_t loc, tree type, tree exp, int in_p,
4503 tree low, tree high)
4505 tree etype = TREE_TYPE (exp), value;
4507 #ifdef HAVE_canonicalize_funcptr_for_compare
4508 /* Disable this optimization for function pointer expressions
4509 on targets that require function pointer canonicalization. */
4510 if (HAVE_canonicalize_funcptr_for_compare
4511 && TREE_CODE (etype) == POINTER_TYPE
4512 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4513 return NULL_TREE;
4514 #endif
4516 if (! in_p)
4518 value = build_range_check (loc, type, exp, 1, low, high);
4519 if (value != 0)
4520 return invert_truthvalue_loc (loc, value);
4522 return 0;
4525 if (low == 0 && high == 0)
4526 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4528 if (low == 0)
4529 return fold_build2_loc (loc, LE_EXPR, type, exp,
4530 fold_convert_loc (loc, etype, high));
4532 if (high == 0)
4533 return fold_build2_loc (loc, GE_EXPR, type, exp,
4534 fold_convert_loc (loc, etype, low));
4536 if (operand_equal_p (low, high, 0))
4537 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4538 fold_convert_loc (loc, etype, low));
4540 if (integer_zerop (low))
4542 if (! TYPE_UNSIGNED (etype))
4544 etype = unsigned_type_for (etype);
4545 high = fold_convert_loc (loc, etype, high);
4546 exp = fold_convert_loc (loc, etype, exp);
4548 return build_range_check (loc, type, exp, 1, 0, high);
4551 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4552 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4554 int prec = TYPE_PRECISION (etype);
4556 if (wi::mask (prec - 1, false, prec) == high)
4558 if (TYPE_UNSIGNED (etype))
4560 tree signed_etype = signed_type_for (etype);
4561 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4562 etype
4563 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4564 else
4565 etype = signed_etype;
4566 exp = fold_convert_loc (loc, etype, exp);
4568 return fold_build2_loc (loc, GT_EXPR, type, exp,
4569 build_int_cst (etype, 0));
4573 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4574 This requires wrap-around arithmetic for the type of the expression.
4575 First make sure that arithmetic in this type is valid, then make sure
4576 that it wraps around. */
4577 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4578 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4579 TYPE_UNSIGNED (etype));
4581 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4583 tree utype, minv, maxv;
4585 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4586 for the type in question, as we rely on this here. */
4587 utype = unsigned_type_for (etype);
4588 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4589 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4590 build_int_cst (TREE_TYPE (maxv), 1), 1);
4591 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4593 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4594 minv, 1, maxv, 1)))
4595 etype = utype;
4596 else
4597 return 0;
4600 high = fold_convert_loc (loc, etype, high);
4601 low = fold_convert_loc (loc, etype, low);
4602 exp = fold_convert_loc (loc, etype, exp);
4604 value = const_binop (MINUS_EXPR, high, low);
4607 if (POINTER_TYPE_P (etype))
4609 if (value != 0 && !TREE_OVERFLOW (value))
4611 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4612 return build_range_check (loc, type,
4613 fold_build_pointer_plus_loc (loc, exp, low),
4614 1, build_int_cst (etype, 0), value);
4616 return 0;
4619 if (value != 0 && !TREE_OVERFLOW (value))
4620 return build_range_check (loc, type,
4621 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4622 1, build_int_cst (etype, 0), value);
4624 return 0;
4627 /* Return the predecessor of VAL in its type, handling the infinite case. */
4629 static tree
4630 range_predecessor (tree val)
4632 tree type = TREE_TYPE (val);
4634 if (INTEGRAL_TYPE_P (type)
4635 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4636 return 0;
4637 else
4638 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4639 build_int_cst (TREE_TYPE (val), 1), 0);
4642 /* Return the successor of VAL in its type, handling the infinite case. */
4644 static tree
4645 range_successor (tree val)
4647 tree type = TREE_TYPE (val);
4649 if (INTEGRAL_TYPE_P (type)
4650 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4651 return 0;
4652 else
4653 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4654 build_int_cst (TREE_TYPE (val), 1), 0);
4657 /* Given two ranges, see if we can merge them into one. Return 1 if we
4658 can, 0 if we can't. Set the output range into the specified parameters. */
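/* Illustration:  merging + [2, 9] with + [5, 12] (both "in") overlaps
   without either being a subset, giving + [5, 9]; merging + [2, 5]
   with + [7, 9] has no overlap, giving the always-false - [-, -].  */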
4660 bool
4661 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4662 tree high0, int in1_p, tree low1, tree high1)
4664 int no_overlap;
4665 int subset;
4666 int temp;
4667 tree tem;
4668 int in_p;
4669 tree low, high;
4670 int lowequal = ((low0 == 0 && low1 == 0)
4671 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4672 low0, 0, low1, 0)));
4673 int highequal = ((high0 == 0 && high1 == 0)
4674 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4675 high0, 1, high1, 1)));
4677 /* Make range 0 be the range that starts first, or ends last if they
4678 start at the same value. Swap them if it isn't. */
4679 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4680 low0, 0, low1, 0))
4681 || (lowequal
4682 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4683 high1, 1, high0, 1))))
4685 temp = in0_p, in0_p = in1_p, in1_p = temp;
4686 tem = low0, low0 = low1, low1 = tem;
4687 tem = high0, high0 = high1, high1 = tem;
4690 /* Now flag two cases, whether the ranges are disjoint or whether the
4691 second range is totally subsumed in the first. Note that the tests
4692 below are simplified by the ones above. */
4693 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4694 high0, 1, low1, 0));
4695 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4696 high1, 1, high0, 1));
4698 /* We now have four cases, depending on whether we are including or
4699 excluding the two ranges. */
4700 if (in0_p && in1_p)
4702 /* If they don't overlap, the result is false. If the second range
4703 is a subset it is the result. Otherwise, the range is from the start
4704 of the second to the end of the first. */
4705 if (no_overlap)
4706 in_p = 0, low = high = 0;
4707 else if (subset)
4708 in_p = 1, low = low1, high = high1;
4709 else
4710 in_p = 1, low = low1, high = high0;
4713 else if (in0_p && ! in1_p)
4715 /* If they don't overlap, the result is the first range. If they are
4716 equal, the result is false. If the second range is a subset of the
4717 first, and the ranges begin at the same place, we go from just after
4718 the end of the second range to the end of the first. If the second
4719 range is not a subset of the first, or if it is a subset and both
4720 ranges end at the same place, the range starts at the start of the
4721 first range and ends just before the second range.
4722 Otherwise, we can't describe this as a single range. */
4723 if (no_overlap)
4724 in_p = 1, low = low0, high = high0;
4725 else if (lowequal && highequal)
4726 in_p = 0, low = high = 0;
4727 else if (subset && lowequal)
4729 low = range_successor (high1);
4730 high = high0;
4731 in_p = 1;
4732 if (low == 0)
4734 /* We are in the weird situation where high0 > high1 but
4735 high1 has no successor. Punt. */
4736 return 0;
4739 else if (! subset || highequal)
4741 low = low0;
4742 high = range_predecessor (low1);
4743 in_p = 1;
4744 if (high == 0)
4746 /* low0 < low1 but low1 has no predecessor. Punt. */
4747 return 0;
4750 else
4751 return 0;
4754 else if (! in0_p && in1_p)
4756 /* If they don't overlap, the result is the second range. If the second
4757 is a subset of the first, the result is false. Otherwise,
4758 the range starts just after the first range and ends at the
4759 end of the second. */
4760 if (no_overlap)
4761 in_p = 1, low = low1, high = high1;
4762 else if (subset || highequal)
4763 in_p = 0, low = high = 0;
4764 else
4766 low = range_successor (high0);
4767 high = high1;
4768 in_p = 1;
4769 if (low == 0)
4771 /* high1 > high0 but high0 has no successor. Punt. */
4772 return 0;
4777 else
4779 /* The case where we are excluding both ranges. Here the complex case
4780 is if they don't overlap. In that case, the only time we have a
4781 range is if they are adjacent. If the second is a subset of the
4782 first, the result is the first. Otherwise, the range to exclude
4783 starts at the beginning of the first range and ends at the end of the
4784 second. */
4785 if (no_overlap)
4787 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4788 range_successor (high0),
4789 1, low1, 0)))
4790 in_p = 0, low = low0, high = high1;
4791 else
4793 /* Canonicalize - [min, x] into - [-, x]. */
4794 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4795 switch (TREE_CODE (TREE_TYPE (low0)))
4797 case ENUMERAL_TYPE:
4798 if (TYPE_PRECISION (TREE_TYPE (low0))
4799 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4800 break;
4801 /* FALLTHROUGH */
4802 case INTEGER_TYPE:
4803 if (tree_int_cst_equal (low0,
4804 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4805 low0 = 0;
4806 break;
4807 case POINTER_TYPE:
4808 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4809 && integer_zerop (low0))
4810 low0 = 0;
4811 break;
4812 default:
4813 break;
4816 /* Canonicalize - [x, max] into - [x, -]. */
4817 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4818 switch (TREE_CODE (TREE_TYPE (high1)))
4820 case ENUMERAL_TYPE:
4821 if (TYPE_PRECISION (TREE_TYPE (high1))
4822 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4823 break;
4824 /* FALLTHROUGH */
4825 case INTEGER_TYPE:
4826 if (tree_int_cst_equal (high1,
4827 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4828 high1 = 0;
4829 break;
4830 case POINTER_TYPE:
4831 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4832 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4833 high1, 1,
4834 build_int_cst (TREE_TYPE (high1), 1),
4835 1)))
4836 high1 = 0;
4837 break;
4838 default:
4839 break;
4842 /* The ranges might be also adjacent between the maximum and
4843 minimum values of the given type. For
4844 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4845 return + [x + 1, y - 1]. */
4846 if (low0 == 0 && high1 == 0)
4848 low = range_successor (high0);
4849 high = range_predecessor (low1);
4850 if (low == 0 || high == 0)
4851 return 0;
4853 in_p = 1;
4855 else
4856 return 0;
4859 else if (subset)
4860 in_p = 0, low = low0, high = high0;
4861 else
4862 in_p = 0, low = low0, high = high1;
4865 *pin_p = in_p, *plow = low, *phigh = high;
4866 return 1;
4870 /* Subroutine of fold, looking inside expressions of the form
4871 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4872 of the COND_EXPR. This function is being used also to optimize
4873 A op B ? C : A, by reversing the comparison first.
4875 Return a folded expression whose code is not a COND_EXPR
4876 anymore, or NULL_TREE if no folding opportunity is found. */
4878 static tree
4879 fold_cond_expr_with_comparison (location_t loc, tree type,
4880 tree arg0, tree arg1, tree arg2)
4882 enum tree_code comp_code = TREE_CODE (arg0);
4883 tree arg00 = TREE_OPERAND (arg0, 0);
4884 tree arg01 = TREE_OPERAND (arg0, 1);
4885 tree arg1_type = TREE_TYPE (arg1);
4886 tree tem;
4888 STRIP_NOPS (arg1);
4889 STRIP_NOPS (arg2);
4891 /* If we have A op 0 ? A : -A, consider applying the following
4892 transformations:
4894 A == 0? A : -A same as -A
4895 A != 0? A : -A same as A
4896 A >= 0? A : -A same as abs (A)
4897 A > 0? A : -A same as abs (A)
4898 A <= 0? A : -A same as -abs (A)
4899 A < 0? A : -A same as -abs (A)
4901 None of these transformations work for modes with signed
4902 zeros. If A is +/-0, the first two transformations will
4903 change the sign of the result (from +0 to -0, or vice
4904 versa). The last four will fix the sign of the result,
4905 even though the original expressions could be positive or
4906 negative, depending on the sign of A.
4908 Note that all these transformations are correct if A is
4909 NaN, since the two alternatives (A and -A) are also NaNs. */
4910 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4911 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4912 ? real_zerop (arg01)
4913 : integer_zerop (arg01))
4914 && ((TREE_CODE (arg2) == NEGATE_EXPR
4915 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4916 /* In the case that A is of the form X-Y, '-A' (arg2) may
4917 have already been folded to Y-X, check for that. */
4918 || (TREE_CODE (arg1) == MINUS_EXPR
4919 && TREE_CODE (arg2) == MINUS_EXPR
4920 && operand_equal_p (TREE_OPERAND (arg1, 0),
4921 TREE_OPERAND (arg2, 1), 0)
4922 && operand_equal_p (TREE_OPERAND (arg1, 1),
4923 TREE_OPERAND (arg2, 0), 0))))
4924 switch (comp_code)
4926 case EQ_EXPR:
4927 case UNEQ_EXPR:
4928 tem = fold_convert_loc (loc, arg1_type, arg1);
4929 return pedantic_non_lvalue_loc (loc,
4930 fold_convert_loc (loc, type,
4931 negate_expr (tem)));
4932 case NE_EXPR:
4933 case LTGT_EXPR:
4934 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4935 case UNGE_EXPR:
4936 case UNGT_EXPR:
4937 if (flag_trapping_math)
4938 break;
4939 /* Fall through. */
4940 case GE_EXPR:
4941 case GT_EXPR:
4942 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4943 arg1 = fold_convert_loc (loc, signed_type_for
4944 (TREE_TYPE (arg1)), arg1);
4945 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4946 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4947 case UNLE_EXPR:
4948 case UNLT_EXPR:
4949 if (flag_trapping_math)
4950 break;
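/* Fall through. */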
4951 case LE_EXPR:
4952 case LT_EXPR:
4953 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4954 arg1 = fold_convert_loc (loc, signed_type_for
4955 (TREE_TYPE (arg1)), arg1);
4956 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4957 return negate_expr (fold_convert_loc (loc, type, tem));
4958 default:
4959 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4960 break;
4963 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4964 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4965 both transformations are correct when A is NaN: A != 0
4966 is then true, and A == 0 is false. */
4968 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4969 && integer_zerop (arg01) && integer_zerop (arg2))
4971 if (comp_code == NE_EXPR)
4972 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4973 else if (comp_code == EQ_EXPR)
4974 return build_zero_cst (type);
4977 /* Try some transformations of A op B ? A : B.
4979 A == B? A : B same as B
4980 A != B? A : B same as A
4981 A >= B? A : B same as max (A, B)
4982 A > B? A : B same as max (B, A)
4983 A <= B? A : B same as min (A, B)
4984 A < B? A : B same as min (B, A)
4986 As above, these transformations don't work in the presence
4987 of signed zeros. For example, if A and B are zeros of
4988 opposite sign, the first two transformations will change
4989 the sign of the result. In the last four, the original
4990 expressions give different results for (A=+0, B=-0) and
4991 (A=-0, B=+0), but the transformed expressions do not.
4993 The first two transformations are correct if either A or B
4994 is a NaN. In the first transformation, the condition will
4995 be false, and B will indeed be chosen. In the case of the
4996 second transformation, the condition A != B will be true,
4997 and A will be chosen.
4999 The conversions to max() and min() are not correct if B is
5000 a number and A is not. The conditions in the original
5001 expressions will be false, so all four give B. The min()
5002 and max() versions would give a NaN instead. */
5003 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5004 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5005 /* Avoid these transformations if the COND_EXPR may be used
5006 as an lvalue in the C++ front-end. PR c++/19199. */
5007 && (in_gimple_form
5008 || VECTOR_TYPE_P (type)
5009 || (! lang_GNU_CXX ()
5010 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5011 || ! maybe_lvalue_p (arg1)
5012 || ! maybe_lvalue_p (arg2)))
5014 tree comp_op0 = arg00;
5015 tree comp_op1 = arg01;
5016 tree comp_type = TREE_TYPE (comp_op0);
5018 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5019 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5021 comp_type = type;
5022 comp_op0 = arg1;
5023 comp_op1 = arg2;
5026 switch (comp_code)
5028 case EQ_EXPR:
5029 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5030 case NE_EXPR:
5031 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5032 case LE_EXPR:
5033 case LT_EXPR:
5034 case UNLE_EXPR:
5035 case UNLT_EXPR:
5036 /* In C++ a ?: expression can be an lvalue, so put the
5037 operand which will be used if they are equal first
5038 so that we can convert this back to the
5039 corresponding COND_EXPR. */
5040 if (!HONOR_NANS (arg1))
5042 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5043 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5044 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5045 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5046 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5047 comp_op1, comp_op0);
5048 return pedantic_non_lvalue_loc (loc,
5049 fold_convert_loc (loc, type, tem));
5051 break;
5052 case GE_EXPR:
5053 case GT_EXPR:
5054 case UNGE_EXPR:
5055 case UNGT_EXPR:
5056 if (!HONOR_NANS (arg1))
5058 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5059 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5060 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5061 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5062 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5063 comp_op1, comp_op0);
5064 return pedantic_non_lvalue_loc (loc,
5065 fold_convert_loc (loc, type, tem));
5067 break;
5068 case UNEQ_EXPR:
5069 if (!HONOR_NANS (arg1))
5070 return pedantic_non_lvalue_loc (loc,
5071 fold_convert_loc (loc, type, arg2));
5072 break;
5073 case LTGT_EXPR:
5074 if (!HONOR_NANS (arg1))
5075 return pedantic_non_lvalue_loc (loc,
5076 fold_convert_loc (loc, type, arg1));
5077 break;
5078 default:
5079 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5080 break;
5084 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5085 we might still be able to simplify this. For example,
5086 if C1 is one less or one more than C2, this might have started
5087 out as a MIN or MAX and been transformed by this function.
5088 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5090 if (INTEGRAL_TYPE_P (type)
5091 && TREE_CODE (arg01) == INTEGER_CST
5092 && TREE_CODE (arg2) == INTEGER_CST)
5093 switch (comp_code)
5095 case EQ_EXPR:
5096 if (TREE_CODE (arg1) == INTEGER_CST)
5097 break;
5098 /* We can replace A with C1 in this case. */
5099 arg1 = fold_convert_loc (loc, type, arg01);
5100 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5102 case LT_EXPR:
5103 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5104 MIN_EXPR, to preserve the signedness of the comparison. */
5105 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5106 OEP_ONLY_CONST)
5107 && operand_equal_p (arg01,
5108 const_binop (PLUS_EXPR, arg2,
5109 build_int_cst (type, 1)),
5110 OEP_ONLY_CONST))
5112 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5113 fold_convert_loc (loc, TREE_TYPE (arg00),
5114 arg2));
5115 return pedantic_non_lvalue_loc (loc,
5116 fold_convert_loc (loc, type, tem));
5118 break;
5120 case LE_EXPR:
5121 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5122 as above. */
5123 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5124 OEP_ONLY_CONST)
5125 && operand_equal_p (arg01,
5126 const_binop (MINUS_EXPR, arg2,
5127 build_int_cst (type, 1)),
5128 OEP_ONLY_CONST))
5130 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5131 fold_convert_loc (loc, TREE_TYPE (arg00),
5132 arg2));
5133 return pedantic_non_lvalue_loc (loc,
5134 fold_convert_loc (loc, type, tem));
5136 break;
5138 case GT_EXPR:
5139 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5140 MAX_EXPR, to preserve the signedness of the comparison. */
5141 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5142 OEP_ONLY_CONST)
5143 && operand_equal_p (arg01,
5144 const_binop (MINUS_EXPR, arg2,
5145 build_int_cst (type, 1)),
5146 OEP_ONLY_CONST))
5148 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5149 fold_convert_loc (loc, TREE_TYPE (arg00),
5150 arg2));
5151 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5153 break;
5155 case GE_EXPR:
5156 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5157 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5158 OEP_ONLY_CONST)
5159 && operand_equal_p (arg01,
5160 const_binop (PLUS_EXPR, arg2,
5161 build_int_cst (type, 1)),
5162 OEP_ONLY_CONST))
5164 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5165 fold_convert_loc (loc, TREE_TYPE (arg00),
5166 arg2));
5167 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5169 break;
5170 case NE_EXPR:
5171 break;
5172 default:
5173 gcc_unreachable ();
5176 return NULL_TREE;
5181 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5182 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5183 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5184 false) >= 2)
5185 #endif
5187 /* EXP is some logical combination of boolean tests. See if we can
5188 merge it into some range test. Return the new tree if so. */
5190 static tree
5191 fold_range_test (location_t loc, enum tree_code code, tree type,
5192 tree op0, tree op1)
5194 int or_op = (code == TRUTH_ORIF_EXPR
5195 || code == TRUTH_OR_EXPR);
5196 int in0_p, in1_p, in_p;
5197 tree low0, low1, low, high0, high1, high;
5198 bool strict_overflow_p = false;
5199 tree tem, lhs, rhs;
5200 const char * const warnmsg = G_("assuming signed overflow does not occur "
5201 "when simplifying range test");
5203 if (!INTEGRAL_TYPE_P (type))
5204 return 0;
5206 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5207 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5209 /* If this is an OR operation, invert both sides; we will invert
5210 again at the end. */
5211 if (or_op)
5212 in0_p = ! in0_p, in1_p = ! in1_p;
5214 /* If both expressions are the same, if we can merge the ranges, and we
5215 can build the range test, return it or it inverted. If one of the
5216 ranges is always true or always false, consider it to be the same
5217 expression as the other. */
5218 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5219 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5220 in1_p, low1, high1)
5221 && 0 != (tem = (build_range_check (loc, type,
5222 lhs != 0 ? lhs
5223 : rhs != 0 ? rhs : integer_zero_node,
5224 in_p, low, high))))
5226 if (strict_overflow_p)
5227 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5228 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5231 /* On machines where the branch cost is expensive, if this is a
5232 short-circuited branch and the underlying object on both sides
5233 is the same, make a non-short-circuit operation. */
5234 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5235 && lhs != 0 && rhs != 0
5236 && (code == TRUTH_ANDIF_EXPR
5237 || code == TRUTH_ORIF_EXPR)
5238 && operand_equal_p (lhs, rhs, 0))
5240 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5241 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5242 which cases we can't do this. */
5243 if (simple_operand_p (lhs))
5244 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5245 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5246 type, op0, op1);
5248 else if (!lang_hooks.decls.global_bindings_p ()
5249 && !CONTAINS_PLACEHOLDER_P (lhs))
5251 tree common = save_expr (lhs);
5253 if (0 != (lhs = build_range_check (loc, type, common,
5254 or_op ? ! in0_p : in0_p,
5255 low0, high0))
5256 && (0 != (rhs = build_range_check (loc, type, common,
5257 or_op ? ! in1_p : in1_p,
5258 low1, high1))))
5260 if (strict_overflow_p)
5261 fold_overflow_warning (warnmsg,
5262 WARN_STRICT_OVERFLOW_COMPARISON);
5263 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5264 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5265 type, lhs, rhs);
5270 return 0;
5273 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5274 bit value. Arrange things so the extra bits will be set to zero if and
5275 only if C is sign-extended to its full width. If MASK is nonzero,
5276 it is an INTEGER_CST that should be AND'ed with the extra bits. */
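/* Concretely (modesize 8, P == 4, values hypothetical):  for
   C == 0xfa, the sign-extended form of the 4-bit value 0xa, the sign
   bit becomes 0xf0 after the shift pair and the XOR gives 0x0a -- the
   extra bits end up zero.  For the non-sign-extended C == 0x0a the
   XOR gives 0xfa, whose extra bits are nonzero.  */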
5278 static tree
5279 unextend (tree c, int p, int unsignedp, tree mask)
5281 tree type = TREE_TYPE (c);
5282 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5283 tree temp;
5285 if (p == modesize || unsignedp)
5286 return c;
5288 /* We work by getting just the sign bit into the low-order bit, then
5289 into the high-order bit, then sign-extend. We then XOR that value
5290 with C. */
5291 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5293 /* We must use a signed type in order to get an arithmetic right shift.
5294 However, we must also avoid introducing accidental overflows, so that
5295 a subsequent call to integer_zerop will work. Hence we must
5296 do the type conversion here. At this point, the constant is either
5297 zero or one, and the conversion to a signed type can never overflow.
5298 We could get an overflow if this conversion is done anywhere else. */
5299 if (TYPE_UNSIGNED (type))
5300 temp = fold_convert (signed_type_for (type), temp);
5302 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5303 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5304 if (mask != 0)
5305 temp = const_binop (BIT_AND_EXPR, temp,
5306 fold_convert (TREE_TYPE (c), mask));
5307 /* If necessary, convert the type back to match the type of C. */
5308 if (TYPE_UNSIGNED (type))
5309 temp = fold_convert (type, temp);
5311 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5314 /* For an expression that has the form
5315 (A && B) || ~B
5316 or
5317 (A || B) && ~B,
5318 we can drop one of the inner expressions and simplify to
5319 A || ~B
5320 or
5321 A && ~B
5322 LOC is the location of the resulting expression. OP is the inner
5323 logical operation; the left-hand side in the examples above, while CMPOP
5324 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5325 removing a condition that guards another, as in
5326 (A != NULL && A->...) || A == NULL
5327 which we must not transform. If RHS_ONLY is true, only eliminate the
5328 right-most operand of the inner logical operation. */
5330 static tree
5331 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5332 bool rhs_only)
5334 tree type = TREE_TYPE (cmpop);
5335 enum tree_code code = TREE_CODE (cmpop);
5336 enum tree_code truthop_code = TREE_CODE (op);
5337 tree lhs = TREE_OPERAND (op, 0);
5338 tree rhs = TREE_OPERAND (op, 1);
5339 tree orig_lhs = lhs, orig_rhs = rhs;
5340 enum tree_code rhs_code = TREE_CODE (rhs);
5341 enum tree_code lhs_code = TREE_CODE (lhs);
5342 enum tree_code inv_code;
5344 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5345 return NULL_TREE;
5347 if (TREE_CODE_CLASS (code) != tcc_comparison)
5348 return NULL_TREE;
5350 if (rhs_code == truthop_code)
5352 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5353 if (newrhs != NULL_TREE)
5355 rhs = newrhs;
5356 rhs_code = TREE_CODE (rhs);
5359 if (lhs_code == truthop_code && !rhs_only)
5361 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5362 if (newlhs != NULL_TREE)
5364 lhs = newlhs;
5365 lhs_code = TREE_CODE (lhs);
5369 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5370 if (inv_code == rhs_code
5371 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5372 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5373 return lhs;
5374 if (!rhs_only && inv_code == lhs_code
5375 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5376 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5377 return rhs;
5378 if (rhs != orig_rhs || lhs != orig_lhs)
5379 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5380 lhs, rhs);
5381 return NULL_TREE;
5384 /* Find ways of folding logical expressions of LHS and RHS:
5385 Try to merge two comparisons to the same innermost item.
5386 Look for range tests like "ch >= '0' && ch <= '9'".
5387 Look for combinations of simple terms on machines with expensive branches
5388 and evaluate the RHS unconditionally.
5390 For example, if we have p->a == 2 && p->b == 4 and we can make an
5391 object large enough to span both A and B, we can do this with a comparison
5392 against the object ANDed with the a mask.
5394 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5395 operations to do this with one comparison.
5397 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5398 function and the one above.
5400 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5401 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5403 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5404 two operands.
5406 We return the simplified tree or 0 if no optimization is possible. */
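/* Sketch of the payoff (field layout hypothetical):  with a and b
   adjacent bitfields of one word,  p->a == 2 && p->b == 4  becomes
   (word & mask_ab) == ((2 << a_pos) | (4 << b_pos)):  one load, one
   AND and one comparison instead of two of each plus a branch.  */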
5408 static tree
5409 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5410 tree lhs, tree rhs)
5412 /* If this is the "or" of two comparisons, we can do something if
5413 the comparisons are NE_EXPR. If this is the "and", we can do something
5414 if the comparisons are EQ_EXPR. I.e.,
5415 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5417 WANTED_CODE is this operation code. For single bit fields, we can
5418 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5419 comparison for one-bit fields. */
5421 enum tree_code wanted_code;
5422 enum tree_code lcode, rcode;
5423 tree ll_arg, lr_arg, rl_arg, rr_arg;
5424 tree ll_inner, lr_inner, rl_inner, rr_inner;
5425 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5426 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5427 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5428 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5429 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5430 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5431 machine_mode lnmode, rnmode;
5432 tree ll_mask, lr_mask, rl_mask, rr_mask;
5433 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5434 tree l_const, r_const;
5435 tree lntype, rntype, result;
5436 HOST_WIDE_INT first_bit, end_bit;
5437 int volatilep;
5439 /* Start by getting the comparison codes. Fail if anything is volatile.
5440 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5441 it were surrounded with a NE_EXPR. */
5443 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5444 return 0;
5446 lcode = TREE_CODE (lhs);
5447 rcode = TREE_CODE (rhs);
5449 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5451 lhs = build2 (NE_EXPR, truth_type, lhs,
5452 build_int_cst (TREE_TYPE (lhs), 0));
5453 lcode = NE_EXPR;
5456 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5458 rhs = build2 (NE_EXPR, truth_type, rhs,
5459 build_int_cst (TREE_TYPE (rhs), 0));
5460 rcode = NE_EXPR;
5463 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5464 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5465 return 0;
5467 ll_arg = TREE_OPERAND (lhs, 0);
5468 lr_arg = TREE_OPERAND (lhs, 1);
5469 rl_arg = TREE_OPERAND (rhs, 0);
5470 rr_arg = TREE_OPERAND (rhs, 1);
5472 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5473 if (simple_operand_p (ll_arg)
5474 && simple_operand_p (lr_arg))
5476 if (operand_equal_p (ll_arg, rl_arg, 0)
5477 && operand_equal_p (lr_arg, rr_arg, 0))
5479 result = combine_comparisons (loc, code, lcode, rcode,
5480 truth_type, ll_arg, lr_arg);
5481 if (result)
5482 return result;
5484 else if (operand_equal_p (ll_arg, rr_arg, 0)
5485 && operand_equal_p (lr_arg, rl_arg, 0))
5487 result = combine_comparisons (loc, code, lcode,
5488 swap_tree_comparison (rcode),
5489 truth_type, ll_arg, lr_arg);
5490 if (result)
5491 return result;
5495 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5496 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5498 /* If the RHS can be evaluated unconditionally and its operands are
5499 simple, it wins to evaluate the RHS unconditionally on machines
5500 with expensive branches. In this case, this isn't a comparison
5501 that can be merged. */
5503 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5504 false) >= 2
5505 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5506 && simple_operand_p (rl_arg)
5507 && simple_operand_p (rr_arg))
5509 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5510 if (code == TRUTH_OR_EXPR
5511 && lcode == NE_EXPR && integer_zerop (lr_arg)
5512 && rcode == NE_EXPR && integer_zerop (rr_arg)
5513 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5514 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5515 return build2_loc (loc, NE_EXPR, truth_type,
5516 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5517 ll_arg, rl_arg),
5518 build_int_cst (TREE_TYPE (ll_arg), 0));
5520 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5521 if (code == TRUTH_AND_EXPR
5522 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5523 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5524 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5525 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5526 return build2_loc (loc, EQ_EXPR, truth_type,
5527 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5528 ll_arg, rl_arg),
5529 build_int_cst (TREE_TYPE (ll_arg), 0));
5532 /* See if the comparisons can be merged. Then get all the parameters for
5533 each side. */
5535 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5536 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5537 return 0;
5539 volatilep = 0;
5540 ll_inner = decode_field_reference (loc, ll_arg,
5541 &ll_bitsize, &ll_bitpos, &ll_mode,
5542 &ll_unsignedp, &volatilep, &ll_mask,
5543 &ll_and_mask);
5544 lr_inner = decode_field_reference (loc, lr_arg,
5545 &lr_bitsize, &lr_bitpos, &lr_mode,
5546 &lr_unsignedp, &volatilep, &lr_mask,
5547 &lr_and_mask);
5548 rl_inner = decode_field_reference (loc, rl_arg,
5549 &rl_bitsize, &rl_bitpos, &rl_mode,
5550 &rl_unsignedp, &volatilep, &rl_mask,
5551 &rl_and_mask);
5552 rr_inner = decode_field_reference (loc, rr_arg,
5553 &rr_bitsize, &rr_bitpos, &rr_mode,
5554 &rr_unsignedp, &volatilep, &rr_mask,
5555 &rr_and_mask);
5557 /* The inner operation on the lhs of each comparison must be the
5558 same if we are to be able to do anything. Then see if we have
5559 constants. If not, the same must be true for
5560 the rhs's. */
5561 if (volatilep || ll_inner == 0 || rl_inner == 0
5562 || ! operand_equal_p (ll_inner, rl_inner, 0))
5563 return 0;
5565 if (TREE_CODE (lr_arg) == INTEGER_CST
5566 && TREE_CODE (rr_arg) == INTEGER_CST)
5567 l_const = lr_arg, r_const = rr_arg;
5568 else if (lr_inner == 0 || rr_inner == 0
5569 || ! operand_equal_p (lr_inner, rr_inner, 0))
5570 return 0;
5571 else
5572 l_const = r_const = 0;
5574 /* If either comparison code is not correct for our logical operation,
5575 fail. However, we can convert a one-bit comparison against zero into
5576 the opposite comparison against that bit being set in the field. */
5578 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5579 if (lcode != wanted_code)
5581 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5583 /* Make the left operand unsigned, since we are only interested
5584 in the value of one bit. Otherwise we are doing the wrong
5585 thing below. */
5586 ll_unsignedp = 1;
5587 l_const = ll_mask;
5589 else
5590 return 0;
5593 /* This is analogous to the code for l_const above. */
5594 if (rcode != wanted_code)
5596 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5598 rl_unsignedp = 1;
5599 r_const = rl_mask;
5601 else
5602 return 0;
5605 /* See if we can find a mode that contains both fields being compared on
5606 the left. If we can't, fail. Otherwise, update all constants and masks
5607 to be relative to a field of that size. */
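/* As an illustration: two bit-fields at bit positions 2 and 9 of the
   same 32-bit word give first_bit = 2, and get_best_mode may return
   SImode; lnbitpos is then rounded down to a boundary of that mode and
   every mask and constant is shifted to the position it occupies
   within the wider field.  */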
5608 first_bit = MIN (ll_bitpos, rl_bitpos);
5609 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5610 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5611 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5612 volatilep);
5613 if (lnmode == VOIDmode)
5614 return 0;
5616 lnbitsize = GET_MODE_BITSIZE (lnmode);
5617 lnbitpos = first_bit & ~ (lnbitsize - 1);
5618 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5619 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5621 if (BYTES_BIG_ENDIAN)
5623 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5624 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5627 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5628 size_int (xll_bitpos));
5629 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5630 size_int (xrl_bitpos));
5632 if (l_const)
5634 l_const = fold_convert_loc (loc, lntype, l_const);
5635 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5636 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5637 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5638 fold_build1_loc (loc, BIT_NOT_EXPR,
5639 lntype, ll_mask))))
5641 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5643 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5646 if (r_const)
5648 r_const = fold_convert_loc (loc, lntype, r_const);
5649 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5650 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5651 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5652 fold_build1_loc (loc, BIT_NOT_EXPR,
5653 lntype, rl_mask))))
5655 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5657 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5661 /* If the right sides are not constants, do the same for them. Also,
5662 disallow this optimization if a size or signedness mismatch occurs
5663 between the left and right sides. */
5664 if (l_const == 0)
5666 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5667 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5668 /* Make sure the two fields on the right
5669 correspond to the left without being swapped. */
5670 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5671 return 0;
5673 first_bit = MIN (lr_bitpos, rr_bitpos);
5674 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5675 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5676 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5677 volatilep);
5678 if (rnmode == VOIDmode)
5679 return 0;
5681 rnbitsize = GET_MODE_BITSIZE (rnmode);
5682 rnbitpos = first_bit & ~ (rnbitsize - 1);
5683 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5684 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5686 if (BYTES_BIG_ENDIAN)
5688 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5689 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5692 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5693 rntype, lr_mask),
5694 size_int (xlr_bitpos));
5695 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5696 rntype, rr_mask),
5697 size_int (xrr_bitpos));
5699 /* Make a mask that corresponds to both fields being compared.
5700 Do this for both items being compared. If the operands are the
5701 same size and the bits being compared are in the same position
5702 then we can do this by masking both and comparing the masked
5703 results. */
5704 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5705 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5706 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5708 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5709 ll_unsignedp || rl_unsignedp);
5710 if (! all_ones_mask_p (ll_mask, lnbitsize))
5711 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5713 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5714 lr_unsignedp || rr_unsignedp);
5715 if (! all_ones_mask_p (lr_mask, rnbitsize))
5716 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5718 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5721 /* There is still another way we can do something: If both pairs of
5722 fields being compared are adjacent, we may be able to make a wider
5723 field containing them both.
5725 Note that we still must mask the lhs/rhs expressions. Furthermore,
5726 the mask must be shifted to account for the shift done by
5727 make_bit_field_ref. */
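/* For instance, if the lhs fields sit at bits 0-7 and 8-15 of the
   same word (and the rhs fields likewise), each side can be fetched
   with one 16-bit reference instead of two 8-bit ones, with the masks
   shifted to match the combined layout.  */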
5728 if ((ll_bitsize + ll_bitpos == rl_bitpos
5729 && lr_bitsize + lr_bitpos == rr_bitpos)
5730 || (ll_bitpos == rl_bitpos + rl_bitsize
5731 && lr_bitpos == rr_bitpos + rr_bitsize))
5733 tree type;
5735 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5736 ll_bitsize + rl_bitsize,
5737 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5738 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5739 lr_bitsize + rr_bitsize,
5740 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5742 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5743 size_int (MIN (xll_bitpos, xrl_bitpos)));
5744 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5745 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5747 /* Convert to the smaller type before masking out unwanted bits. */
5748 type = lntype;
5749 if (lntype != rntype)
5751 if (lnbitsize > rnbitsize)
5753 lhs = fold_convert_loc (loc, rntype, lhs);
5754 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5755 type = rntype;
5757 else if (lnbitsize < rnbitsize)
5759 rhs = fold_convert_loc (loc, lntype, rhs);
5760 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5761 type = lntype;
5765 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5766 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5768 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5769 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5771 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5774 return 0;
5777 /* Handle the case of comparisons with constants. If there is something in
5778 common between the masks, those bits of the constants must be the same.
5779 If not, the condition is always false. Test for this to avoid generating
5780 incorrect code below. */
5781 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5782 if (! integer_zerop (result)
5783 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5784 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5786 if (wanted_code == NE_EXPR)
5788 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5789 return constant_boolean_node (true, truth_type);
5791 else
5793 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5794 return constant_boolean_node (false, truth_type);
5798 /* Construct the expression we will return. First get the component
5799 reference we will make. Unless the mask is all ones the width of
5800 that field, perform the mask operation. Then compare with the
5801 merged constant. */
5802 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5803 ll_unsignedp || rl_unsignedp);
5805 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5806 if (! all_ones_mask_p (ll_mask, lnbitsize))
5807 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5809 return build2_loc (loc, wanted_code, truth_type, result,
5810 const_binop (BIT_IOR_EXPR, l_const, r_const));
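/* Rough sketch of the constant case above: for a struct with two
   4-bit fields a and b packed into one byte, "x.a == 1 && x.b == 2"
   can become a single compare of the containing byte against 0x21 on
   a little-endian layout; the exact mask and constant depend on
   BYTES_BIG_ENDIAN and the field positions.  */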
5813 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5814 constant. */
5816 static tree
5817 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5818 tree op0, tree op1)
5820 tree arg0 = op0;
5821 enum tree_code op_code;
5822 tree comp_const;
5823 tree minmax_const;
5824 int consts_equal, consts_lt;
5825 tree inner;
5827 STRIP_SIGN_NOPS (arg0);
5829 op_code = TREE_CODE (arg0);
5830 minmax_const = TREE_OPERAND (arg0, 1);
5831 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5832 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5833 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5834 inner = TREE_OPERAND (arg0, 0);
5836 /* If something does not permit us to optimize, return NULL_TREE. */
5837 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5838 || TREE_CODE (comp_const) != INTEGER_CST
5839 || TREE_OVERFLOW (comp_const)
5840 || TREE_CODE (minmax_const) != INTEGER_CST
5841 || TREE_OVERFLOW (minmax_const))
5842 return NULL_TREE;
5844 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5845 and GT_EXPR, doing the rest with recursive calls using logical
5846 simplifications. */
5847 switch (code)
5849 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5851 tree tem
5852 = optimize_minmax_comparison (loc,
5853 invert_tree_comparison (code, false),
5854 type, op0, op1);
5855 if (tem)
5856 return invert_truthvalue_loc (loc, tem);
5857 return NULL_TREE;
5860 case GE_EXPR:
5861 return
5862 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5863 optimize_minmax_comparison
5864 (loc, EQ_EXPR, type, arg0, comp_const),
5865 optimize_minmax_comparison
5866 (loc, GT_EXPR, type, arg0, comp_const));
5868 case EQ_EXPR:
5869 if (op_code == MAX_EXPR && consts_equal)
5870 /* MAX (X, 0) == 0 -> X <= 0 */
5871 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5873 else if (op_code == MAX_EXPR && consts_lt)
5874 /* MAX (X, 0) == 5 -> X == 5 */
5875 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5877 else if (op_code == MAX_EXPR)
5878 /* MAX (X, 0) == -1 -> false */
5879 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5881 else if (consts_equal)
5882 /* MIN (X, 0) == 0 -> X >= 0 */
5883 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5885 else if (consts_lt)
5886 /* MIN (X, 0) == 5 -> false */
5887 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5889 else
5890 /* MIN (X, 0) == -1 -> X == -1 */
5891 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5893 case GT_EXPR:
5894 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5895 /* MAX (X, 0) > 0 -> X > 0
5896 MAX (X, 0) > 5 -> X > 5 */
5897 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5899 else if (op_code == MAX_EXPR)
5900 /* MAX (X, 0) > -1 -> true */
5901 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5903 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5904 /* MIN (X, 0) > 0 -> false
5905 MIN (X, 0) > 5 -> false */
5906 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5908 else
5909 /* MIN (X, 0) > -1 -> X > -1 */
5910 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5912 default:
5913 return NULL_TREE;
5917 /* T is an integer expression that is being multiplied, divided, or taken a
5918 modulus (CODE says which and what kind of divide or modulus) by a
5919 constant C. See if we can eliminate that operation by folding it with
5920 other operations already in T. WIDE_TYPE, if non-null, is a type that
5921 should be used for the computation if wider than our type.
5923 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5924 (X * 2) + (Y * 4). We must, however, be assured that either the original
5925 expression would not overflow or that overflow is undefined for the type
5926 in the language in question.
5928 If we return a non-null expression, it is an equivalent form of the
5929 original computation, but need not be in the original type.
5931 We set *STRICT_OVERFLOW_P to true if the return value depends on
5932 signed overflow being undefined. Otherwise we do not change
5933 *STRICT_OVERFLOW_P. */
5935 static tree
5936 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5937 bool *strict_overflow_p)
5939 /* To avoid exponential search depth, refuse to allow recursion past
5940 three levels. Beyond that (1) it's highly unlikely that we'll find
5941 something interesting and (2) we've probably processed it before
5942 when we built the inner expression. */
5944 static int depth;
5945 tree ret;
5947 if (depth > 3)
5948 return NULL;
5950 depth++;
5951 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5952 depth--;
5954 return ret;
5957 static tree
5958 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5959 bool *strict_overflow_p)
5961 tree type = TREE_TYPE (t);
5962 enum tree_code tcode = TREE_CODE (t);
5963 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5964 > GET_MODE_SIZE (TYPE_MODE (type)))
5965 ? wide_type : type);
5966 tree t1, t2;
5967 int same_p = tcode == code;
5968 tree op0 = NULL_TREE, op1 = NULL_TREE;
5969 bool sub_strict_overflow_p;
5971 /* Don't deal with constants of zero here; they confuse the code below. */
5972 if (integer_zerop (c))
5973 return NULL_TREE;
5975 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5976 op0 = TREE_OPERAND (t, 0);
5978 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5979 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5981 /* Note that we need not handle conditional operations here since fold
5982 already handles those cases. So just do arithmetic here. */
5983 switch (tcode)
5985 case INTEGER_CST:
5986 /* For a constant, we can always simplify if we are a multiply
5987 or (for divide and modulus) if it is a multiple of our constant. */
5988 if (code == MULT_EXPR
5989 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5990 return const_binop (code, fold_convert (ctype, t),
5991 fold_convert (ctype, c));
5992 break;
5994 CASE_CONVERT: case NON_LVALUE_EXPR:
5995 /* If op0 is an expression ... */
5996 if ((COMPARISON_CLASS_P (op0)
5997 || UNARY_CLASS_P (op0)
5998 || BINARY_CLASS_P (op0)
5999 || VL_EXP_CLASS_P (op0)
6000 || EXPRESSION_CLASS_P (op0))
6001 /* ... and has wrapping overflow, and its type is smaller
6002 than ctype, then we cannot pass through as widening. */
6003 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6004 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6005 && (TYPE_PRECISION (ctype)
6006 > TYPE_PRECISION (TREE_TYPE (op0))))
6007 /* ... or this is a truncation (t is narrower than op0),
6008 then we cannot pass through this narrowing. */
6009 || (TYPE_PRECISION (type)
6010 < TYPE_PRECISION (TREE_TYPE (op0)))
6011 /* ... or signedness changes for division or modulus,
6012 then we cannot pass through this conversion. */
6013 || (code != MULT_EXPR
6014 && (TYPE_UNSIGNED (ctype)
6015 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6016 /* ... or has undefined overflow while the type converted
6017 to has not, then we cannot do the operation in the inner
6018 type as that would introduce undefined overflow. */
6019 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6020 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6021 && !TYPE_OVERFLOW_UNDEFINED (type))))
6022 break;
6024 /* Pass the constant down and see if we can make a simplification. If
6025 we can, replace this expression with the inner simplification for
6026 possible later conversion to our or some other type. */
6027 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6028 && TREE_CODE (t2) == INTEGER_CST
6029 && !TREE_OVERFLOW (t2)
6030 && (0 != (t1 = extract_muldiv (op0, t2, code,
6031 code == MULT_EXPR
6032 ? ctype : NULL_TREE,
6033 strict_overflow_p))))
6034 return t1;
6035 break;
6037 case ABS_EXPR:
6038 /* If widening the type changes it from signed to unsigned, then we
6039 must avoid building ABS_EXPR itself as unsigned. */
6040 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6042 tree cstype = (*signed_type_for) (ctype);
6043 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6044 != 0)
6046 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6047 return fold_convert (ctype, t1);
6049 break;
6051 /* If the constant is negative, we cannot simplify this. */
6052 if (tree_int_cst_sgn (c) == -1)
6053 break;
6054 /* FALLTHROUGH */
6055 case NEGATE_EXPR:
6056 /* For division and modulus, type can't be unsigned, as e.g.
6057 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6058 For signed types, even with wrapping overflow, this is fine. */
6059 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6060 break;
6061 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6062 != 0)
6063 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6064 break;
6066 case MIN_EXPR: case MAX_EXPR:
6067 /* If widening the type changes the signedness, then we can't perform
6068 this optimization as that changes the result. */
6069 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6070 break;
6072 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6073 sub_strict_overflow_p = false;
6074 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6075 &sub_strict_overflow_p)) != 0
6076 && (t2 = extract_muldiv (op1, c, code, wide_type,
6077 &sub_strict_overflow_p)) != 0)
6079 if (tree_int_cst_sgn (c) < 0)
6080 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6081 if (sub_strict_overflow_p)
6082 *strict_overflow_p = true;
6083 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6084 fold_convert (ctype, t2));
6086 break;
6088 case LSHIFT_EXPR: case RSHIFT_EXPR:
6089 /* If the second operand is constant, this is a multiplication
6090 or floor division by a power of two, so we can treat it that
6091 way unless the multiplier or divisor overflows. Signed
6092 left-shift overflow is implementation-defined rather than
6093 undefined in C90, so do not convert signed left shift into
6094 multiplication. */
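/* E.g. an unsigned "X >> 2" is rewritten here as "X / 4"
   (FLOOR_DIV_EXPR) and an unsigned "X << 3" as "X * 8" before
   recursing, so the shift participates in the same mul/div folding as
   an ordinary multiplication or division.  */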
6095 if (TREE_CODE (op1) == INTEGER_CST
6096 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6097 /* const_binop may not detect overflow correctly,
6098 so check for it explicitly here. */
6099 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6100 && 0 != (t1 = fold_convert (ctype,
6101 const_binop (LSHIFT_EXPR,
6102 size_one_node,
6103 op1)))
6104 && !TREE_OVERFLOW (t1))
6105 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6106 ? MULT_EXPR : FLOOR_DIV_EXPR,
6107 ctype,
6108 fold_convert (ctype, op0),
6109 t1),
6110 c, code, wide_type, strict_overflow_p);
6111 break;
6113 case PLUS_EXPR: case MINUS_EXPR:
6114 /* See if we can eliminate the operation on both sides. If we can, we
6115 can return a new PLUS or MINUS. If we can't, the only remaining
6116 cases where we can do anything are if the second operand is a
6117 constant. */
6118 sub_strict_overflow_p = false;
6119 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6120 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6121 if (t1 != 0 && t2 != 0
6122 && (code == MULT_EXPR
6123 /* If not multiplication, we can only do this if both operands
6124 are divisible by c. */
6125 || (multiple_of_p (ctype, op0, c)
6126 && multiple_of_p (ctype, op1, c))))
6128 if (sub_strict_overflow_p)
6129 *strict_overflow_p = true;
6130 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6131 fold_convert (ctype, t2));
6134 /* If this was a subtraction, negate OP1 and set it to be an addition.
6135 This simplifies the logic below. */
6136 if (tcode == MINUS_EXPR)
6138 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6139 /* If OP1 was not easily negatable, the constant may be OP0. */
6140 if (TREE_CODE (op0) == INTEGER_CST)
6142 tree tem = op0;
6143 op0 = op1;
6144 op1 = tem;
6145 tem = t1;
6146 t1 = t2;
6147 t2 = tem;
6151 if (TREE_CODE (op1) != INTEGER_CST)
6152 break;
6154 /* If either OP1 or C is negative, this optimization is not safe for
6155 some of the division and remainder types while for others we need
6156 to change the code. */
6157 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6159 if (code == CEIL_DIV_EXPR)
6160 code = FLOOR_DIV_EXPR;
6161 else if (code == FLOOR_DIV_EXPR)
6162 code = CEIL_DIV_EXPR;
6163 else if (code != MULT_EXPR
6164 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6165 break;
6168 /* If it's a multiply or a division/modulus operation of a multiple
6169 of our constant, do the operation and verify it doesn't overflow. */
6170 if (code == MULT_EXPR
6171 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6173 op1 = const_binop (code, fold_convert (ctype, op1),
6174 fold_convert (ctype, c));
6175 /* We allow the constant to overflow with wrapping semantics. */
6176 if (op1 == 0
6177 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6178 break;
6180 else
6181 break;
6183 /* If we have an unsigned type, we cannot widen the operation since it
6184 will change the result if the original computation overflowed. */
6185 if (TYPE_UNSIGNED (ctype) && ctype != type)
6186 break;
6188 /* If we were able to eliminate our operation from the first side,
6189 apply our operation to the second side and reform the PLUS. */
6190 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6191 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6193 /* The last case is if we are a multiply. In that case, we can
6194 apply the distributive law to commute the multiply and addition
6195 if the multiplication of the constants doesn't overflow
6196 and overflow is defined. With undefined overflow
6197 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
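/* For instance, multiplying "X + 4" by 3 with wrapping overflow
   (e.g. under -fwrapv) commutes to "X * 3 + 12" here, since with
   wrapping semantics both forms are equivalent even when X * 3
   overflows.  */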
6198 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6199 return fold_build2 (tcode, ctype,
6200 fold_build2 (code, ctype,
6201 fold_convert (ctype, op0),
6202 fold_convert (ctype, c)),
6203 op1);
6205 break;
6207 case MULT_EXPR:
6208 /* We have a special case here if we are doing something like
6209 (C * 8) % 4 since we know that's zero. */
6210 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6211 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6212 /* If the multiplication can overflow we cannot optimize this. */
6213 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6214 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6215 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6217 *strict_overflow_p = true;
6218 return omit_one_operand (type, integer_zero_node, op0);
6221 /* ... fall through ... */
6223 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6224 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6225 /* If we can extract our operation from the LHS, do so and return a
6226 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6227 do something only if the second operand is a constant. */
6228 if (same_p
6229 && (t1 = extract_muldiv (op0, c, code, wide_type,
6230 strict_overflow_p)) != 0)
6231 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6232 fold_convert (ctype, op1));
6233 else if (tcode == MULT_EXPR && code == MULT_EXPR
6234 && (t1 = extract_muldiv (op1, c, code, wide_type,
6235 strict_overflow_p)) != 0)
6236 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6237 fold_convert (ctype, t1));
6238 else if (TREE_CODE (op1) != INTEGER_CST)
6239 return 0;
6241 /* If these are the same operation types, we can associate them
6242 assuming no overflow. */
6243 if (tcode == code)
6245 bool overflow_p = false;
6246 bool overflow_mul_p;
6247 signop sign = TYPE_SIGN (ctype);
6248 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6249 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6250 if (overflow_mul_p
6251 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6252 overflow_p = true;
6253 if (!overflow_p)
6254 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6255 wide_int_to_tree (ctype, mul));
6258 /* If these operations "cancel" each other, we have the main
6259 optimizations of this pass, which occur when either constant is a
6260 multiple of the other, in which case we replace this with an
6261 operation of either CODE or TCODE.
6263 If we have an unsigned type, we cannot do this since it will change
6264 the result if the original computation overflowed. */
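/* Typical instances, for signed types where overflow is undefined:
   "(X * 6) / 3" cancels to "X * 2" because op1 = 6 is a multiple of
   c = 3, and "(X * 2) / 6" becomes "X / 3" because c is a multiple of
   op1; both paths set *STRICT_OVERFLOW_P.  */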
6265 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6266 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6267 || (tcode == MULT_EXPR
6268 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6269 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6270 && code != MULT_EXPR)))
6272 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6274 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6275 *strict_overflow_p = true;
6276 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6277 fold_convert (ctype,
6278 const_binop (TRUNC_DIV_EXPR,
6279 op1, c)));
6281 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6283 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6284 *strict_overflow_p = true;
6285 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6286 fold_convert (ctype,
6287 const_binop (TRUNC_DIV_EXPR,
6288 c, op1)));
6291 break;
6293 default:
6294 break;
6297 return 0;
6300 /* Return a node which has the indicated constant VALUE (either 0 or
6301 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6302 and is of the indicated TYPE. */
6304 tree
6305 constant_boolean_node (bool value, tree type)
6307 if (type == integer_type_node)
6308 return value ? integer_one_node : integer_zero_node;
6309 else if (type == boolean_type_node)
6310 return value ? boolean_true_node : boolean_false_node;
6311 else if (TREE_CODE (type) == VECTOR_TYPE)
6312 return build_vector_from_val (type,
6313 build_int_cst (TREE_TYPE (type),
6314 value ? -1 : 0));
6315 else
6316 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6320 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6321 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6322 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6323 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6324 COND is the first argument to CODE; otherwise (as in the example
6325 given here), it is the second argument. TYPE is the type of the
6326 original expression. Return NULL_TREE if no simplification is
6327 possible. */
6329 static tree
6330 fold_binary_op_with_conditional_arg (location_t loc,
6331 enum tree_code code,
6332 tree type, tree op0, tree op1,
6333 tree cond, tree arg, int cond_first_p)
6335 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6336 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6337 tree test, true_value, false_value;
6338 tree lhs = NULL_TREE;
6339 tree rhs = NULL_TREE;
6340 enum tree_code cond_code = COND_EXPR;
6342 if (TREE_CODE (cond) == COND_EXPR
6343 || TREE_CODE (cond) == VEC_COND_EXPR)
6345 test = TREE_OPERAND (cond, 0);
6346 true_value = TREE_OPERAND (cond, 1);
6347 false_value = TREE_OPERAND (cond, 2);
6348 /* If this operand throws an exception, then it does not make
6349 sense to try to perform a logical or arithmetic operation
6350 involving it. */
6351 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6352 lhs = true_value;
6353 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6354 rhs = false_value;
6356 else
6358 tree testtype = TREE_TYPE (cond);
6359 test = cond;
6360 true_value = constant_boolean_node (true, testtype);
6361 false_value = constant_boolean_node (false, testtype);
6364 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6365 cond_code = VEC_COND_EXPR;
6367 /* This transformation is only worthwhile if we don't have to wrap ARG
6368 in a SAVE_EXPR and the operation can be simplified without recursing
6369 on at least one of the branches once it's pushed inside the COND_EXPR. */
6370 if (!TREE_CONSTANT (arg)
6371 && (TREE_SIDE_EFFECTS (arg)
6372 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6373 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6374 return NULL_TREE;
6376 arg = fold_convert_loc (loc, arg_type, arg);
6377 if (lhs == 0)
6379 true_value = fold_convert_loc (loc, cond_type, true_value);
6380 if (cond_first_p)
6381 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6382 else
6383 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6385 if (rhs == 0)
6387 false_value = fold_convert_loc (loc, cond_type, false_value);
6388 if (cond_first_p)
6389 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6390 else
6391 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6394 /* Check that we have simplified at least one of the branches. */
6395 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6396 return NULL_TREE;
6398 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6402 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6404 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6405 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6406 ADDEND is the same as X.
6408 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6409 and finite. The problematic cases are when X is zero, and its mode
6410 has signed zeros. In the case of rounding towards -infinity,
6411 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6412 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6414 bool
6415 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6417 if (!real_zerop (addend))
6418 return false;
6420 /* Don't allow the fold with -fsignaling-nans. */
6421 if (HONOR_SNANS (element_mode (type)))
6422 return false;
6424 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6425 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6426 return true;
6428 /* In a vector or complex, we would need to check the sign of all zeros. */
6429 if (TREE_CODE (addend) != REAL_CST)
6430 return false;
6432 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6433 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6434 negate = !negate;
6436 /* The mode has signed zeros, and we have to honor their sign.
6437 In this situation, there is only one case we can return true for.
6438 X - 0 is the same as X unless rounding towards -infinity is
6439 supported. */
6440 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6443 /* Subroutine of fold() that checks comparisons of built-in math
6444 functions against real constants.
6446 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6447 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6448 is the type of the result and ARG0 and ARG1 are the operands of the
6449 comparison. ARG1 must be a TREE_REAL_CST.
6451 The function returns the constant folded tree if a simplification
6452 can be made, and NULL_TREE otherwise. */
6454 static tree
6455 fold_mathfn_compare (location_t loc,
6456 enum built_in_function fcode, enum tree_code code,
6457 tree type, tree arg0, tree arg1)
6459 REAL_VALUE_TYPE c;
6461 if (BUILTIN_SQRT_P (fcode))
6463 tree arg = CALL_EXPR_ARG (arg0, 0);
6464 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6466 c = TREE_REAL_CST (arg1);
6467 if (REAL_VALUE_NEGATIVE (c))
6469 /* sqrt(x) < y is always false, if y is negative. */
6470 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6471 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6473 /* sqrt(x) > y is always true, if y is negative and we
6474 don't care about NaNs, i.e. negative values of x. */
6475 if (code == NE_EXPR || !HONOR_NANS (mode))
6476 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6478 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6479 return fold_build2_loc (loc, GE_EXPR, type, arg,
6480 build_real (TREE_TYPE (arg), dconst0));
6482 else if (code == GT_EXPR || code == GE_EXPR)
6484 REAL_VALUE_TYPE c2;
6486 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6487 real_convert (&c2, mode, &c2);
6489 if (REAL_VALUE_ISINF (c2))
6491 /* sqrt(x) > y is x == +Inf, when y is very large. */
6492 if (HONOR_INFINITIES (mode))
6493 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6494 build_real (TREE_TYPE (arg), c2));
6496 /* sqrt(x) > y is always false, when y is very large
6497 and we don't care about infinities. */
6498 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6501 /* sqrt(x) > c is the same as x > c*c. */
6502 return fold_build2_loc (loc, code, type, arg,
6503 build_real (TREE_TYPE (arg), c2));
6505 else if (code == LT_EXPR || code == LE_EXPR)
6507 REAL_VALUE_TYPE c2;
6509 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6510 real_convert (&c2, mode, &c2);
6512 if (REAL_VALUE_ISINF (c2))
6514 /* sqrt(x) < y is always true, when y is a very large
6515 value and we don't care about NaNs or Infinities. */
6516 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6517 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6519 /* sqrt(x) < y is x != +Inf when y is very large and we
6520 don't care about NaNs. */
6521 if (! HONOR_NANS (mode))
6522 return fold_build2_loc (loc, NE_EXPR, type, arg,
6523 build_real (TREE_TYPE (arg), c2));
6525 /* sqrt(x) < y is x >= 0 when y is very large and we
6526 don't care about Infinities. */
6527 if (! HONOR_INFINITIES (mode))
6528 return fold_build2_loc (loc, GE_EXPR, type, arg,
6529 build_real (TREE_TYPE (arg), dconst0));
6531 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6532 arg = save_expr (arg);
6533 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6534 fold_build2_loc (loc, GE_EXPR, type, arg,
6535 build_real (TREE_TYPE (arg),
6536 dconst0)),
6537 fold_build2_loc (loc, NE_EXPR, type, arg,
6538 build_real (TREE_TYPE (arg),
6539 c2)));
6542 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6543 if (! HONOR_NANS (mode))
6544 return fold_build2_loc (loc, code, type, arg,
6545 build_real (TREE_TYPE (arg), c2));
6547 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6548 arg = save_expr (arg);
6549 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6550 fold_build2_loc (loc, GE_EXPR, type, arg,
6551 build_real (TREE_TYPE (arg),
6552 dconst0)),
6553 fold_build2_loc (loc, code, type, arg,
6554 build_real (TREE_TYPE (arg),
6555 c2)));
6559 return NULL_TREE;
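/* Concrete instances of the above: "sqrt (x) > 2.0" folds to
   "x > 4.0", while "sqrt (x) < 2.0" folds to "x >= 0.0 && x < 4.0",
   or to plain "x < 4.0" when NaNs need not be honored.  */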
6562 /* Subroutine of fold() that optimizes comparisons against Infinities,
6563 either +Inf or -Inf.
6565 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6566 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6567 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6569 The function returns the constant folded tree if a simplification
6570 can be made, and NULL_TREE otherwise. */
6572 static tree
6573 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6574 tree arg0, tree arg1)
6576 machine_mode mode;
6577 REAL_VALUE_TYPE max;
6578 tree temp;
6579 bool neg;
6581 mode = TYPE_MODE (TREE_TYPE (arg0));
6583 /* For negative infinity swap the sense of the comparison. */
6584 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6585 if (neg)
6586 code = swap_tree_comparison (code);
6588 switch (code)
6590 case GT_EXPR:
6591 /* x > +Inf is always false, if we ignore sNaNs. */
6592 if (HONOR_SNANS (mode))
6593 return NULL_TREE;
6594 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6596 case LE_EXPR:
6597 /* x <= +Inf is always true, if we don't care about NaNs. */
6598 if (! HONOR_NANS (mode))
6599 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6601 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6602 arg0 = save_expr (arg0);
6603 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6605 case EQ_EXPR:
6606 case GE_EXPR:
6607 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6608 real_maxval (&max, neg, mode);
6609 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6610 arg0, build_real (TREE_TYPE (arg0), max));
6612 case LT_EXPR:
6613 /* x < +Inf is always equal to x <= DBL_MAX. */
6614 real_maxval (&max, neg, mode);
6615 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6616 arg0, build_real (TREE_TYPE (arg0), max));
6618 case NE_EXPR:
6619 /* x != +Inf is always equal to !(x > DBL_MAX). */
6620 real_maxval (&max, neg, mode);
6621 if (! HONOR_NANS (mode))
6622 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6623 arg0, build_real (TREE_TYPE (arg0), max));
6625 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6626 arg0, build_real (TREE_TYPE (arg0), max));
6627 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6629 default:
6630 break;
6633 return NULL_TREE;
6636 /* Subroutine of fold() that optimizes comparisons of a division by
6637 a nonzero integer constant against an integer constant, i.e.
6638 X/C1 op C2.
6640 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6641 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6642 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6644 The function returns the constant folded tree if a simplification
6645 can be made, and NULL_TREE otherwise. */
6647 static tree
6648 fold_div_compare (location_t loc,
6649 enum tree_code code, tree type, tree arg0, tree arg1)
6651 tree prod, tmp, hi, lo;
6652 tree arg00 = TREE_OPERAND (arg0, 0);
6653 tree arg01 = TREE_OPERAND (arg0, 1);
6654 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6655 bool neg_overflow = false;
6656 bool overflow;
6658 /* We have to do this the hard way to detect unsigned overflow.
6659 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6660 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6661 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6662 neg_overflow = false;
6664 if (sign == UNSIGNED)
6666 tmp = int_const_binop (MINUS_EXPR, arg01,
6667 build_int_cst (TREE_TYPE (arg01), 1));
6668 lo = prod;
6670 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6671 val = wi::add (prod, tmp, sign, &overflow);
6672 hi = force_fit_type (TREE_TYPE (arg00), val,
6673 -1, overflow | TREE_OVERFLOW (prod));
6675 else if (tree_int_cst_sgn (arg01) >= 0)
6677 tmp = int_const_binop (MINUS_EXPR, arg01,
6678 build_int_cst (TREE_TYPE (arg01), 1));
6679 switch (tree_int_cst_sgn (arg1))
6681 case -1:
6682 neg_overflow = true;
6683 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6684 hi = prod;
6685 break;
6687 case 0:
6688 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6689 hi = tmp;
6690 break;
6692 case 1:
6693 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6694 lo = prod;
6695 break;
6697 default:
6698 gcc_unreachable ();
6701 else
6703 /* A negative divisor reverses the relational operators. */
6704 code = swap_tree_comparison (code);
6706 tmp = int_const_binop (PLUS_EXPR, arg01,
6707 build_int_cst (TREE_TYPE (arg01), 1));
6708 switch (tree_int_cst_sgn (arg1))
6710 case -1:
6711 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6712 lo = prod;
6713 break;
6715 case 0:
6716 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6717 lo = tmp;
6718 break;
6720 case 1:
6721 neg_overflow = true;
6722 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6723 hi = prod;
6724 break;
6726 default:
6727 gcc_unreachable ();
6731 switch (code)
6733 case EQ_EXPR:
6734 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6735 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6736 if (TREE_OVERFLOW (hi))
6737 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6738 if (TREE_OVERFLOW (lo))
6739 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6740 return build_range_check (loc, type, arg00, 1, lo, hi);
6742 case NE_EXPR:
6743 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6744 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6745 if (TREE_OVERFLOW (hi))
6746 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6747 if (TREE_OVERFLOW (lo))
6748 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6749 return build_range_check (loc, type, arg00, 0, lo, hi);
6751 case LT_EXPR:
6752 if (TREE_OVERFLOW (lo))
6754 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6755 return omit_one_operand_loc (loc, type, tmp, arg00);
6757 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6759 case LE_EXPR:
6760 if (TREE_OVERFLOW (hi))
6762 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6763 return omit_one_operand_loc (loc, type, tmp, arg00);
6765 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6767 case GT_EXPR:
6768 if (TREE_OVERFLOW (hi))
6770 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6771 return omit_one_operand_loc (loc, type, tmp, arg00);
6773 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6775 case GE_EXPR:
6776 if (TREE_OVERFLOW (lo))
6778 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6779 return omit_one_operand_loc (loc, type, tmp, arg00);
6781 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6783 default:
6784 break;
6787 return NULL_TREE;
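/* Worked example: for signed x, "x / 3 == 2" yields prod = 6 and
   tmp = 2, hence lo = 6 and hi = 8, and the result is the range check
   "x >= 6 && x <= 8" built by build_range_check; the overflow checks
   above handle constants near the ends of the type.  */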
6791 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6792 equality/inequality test, then return a simplified form of the test
6793 using a sign test. Otherwise return NULL. TYPE is the desired
6794 result type. */
6796 static tree
6797 fold_single_bit_test_into_sign_test (location_t loc,
6798 enum tree_code code, tree arg0, tree arg1,
6799 tree result_type)
6801 /* If this is testing a single bit, we can optimize the test. */
6802 if ((code == NE_EXPR || code == EQ_EXPR)
6803 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6804 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6806 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6807 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6808 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6810 if (arg00 != NULL_TREE
6811 /* This is only a win if casting to a signed type is cheap,
6812 i.e. when arg00's type is not a partial mode. */
6813 && TYPE_PRECISION (TREE_TYPE (arg00))
6814 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6816 tree stype = signed_type_for (TREE_TYPE (arg00));
6817 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6818 result_type,
6819 fold_convert_loc (loc, stype, arg00),
6820 build_int_cst (stype, 0));
6824 return NULL_TREE;
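/* E.g. for a 32-bit int A, "(A & 0x80000000) != 0" tests exactly the
   sign bit and becomes "(int) A < 0", while the == 0 form becomes
   "(int) A >= 0".  */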
6827 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6828 equality/inequality test, then return a simplified form of
6829 the test using shifts and logical operations. Otherwise return
6830 NULL. TYPE is the desired result type. */
6832 tree
6833 fold_single_bit_test (location_t loc, enum tree_code code,
6834 tree arg0, tree arg1, tree result_type)
6836 /* If this is testing a single bit, we can optimize the test. */
6837 if ((code == NE_EXPR || code == EQ_EXPR)
6838 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6839 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6841 tree inner = TREE_OPERAND (arg0, 0);
6842 tree type = TREE_TYPE (arg0);
6843 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6844 machine_mode operand_mode = TYPE_MODE (type);
6845 int ops_unsigned;
6846 tree signed_type, unsigned_type, intermediate_type;
6847 tree tem, one;
6849 /* First, see if we can fold the single bit test into a sign-bit
6850 test. */
6851 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6852 result_type);
6853 if (tem)
6854 return tem;
6856 /* Otherwise we have (A & C) != 0 where C is a single bit,
6857 convert that into ((A >> C2) & 1), where C2 = log2(C).
6858 Similarly for (A & C) == 0. */
6860 /* If INNER is a right shift of a constant and it plus BITNUM does
6861 not overflow, adjust BITNUM and INNER. */
6862 if (TREE_CODE (inner) == RSHIFT_EXPR
6863 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6864 && bitnum < TYPE_PRECISION (type)
6865 && wi::ltu_p (TREE_OPERAND (inner, 1),
6866 TYPE_PRECISION (type) - bitnum))
6868 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6869 inner = TREE_OPERAND (inner, 0);
6872 /* If we are going to be able to omit the AND below, we must do our
6873 operations as unsigned. If we must use the AND, we have a choice.
6874 Normally unsigned is faster, but for some machines signed is. */
6875 #ifdef LOAD_EXTEND_OP
6876 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6877 && !flag_syntax_only) ? 0 : 1;
6878 #else
6879 ops_unsigned = 1;
6880 #endif
6882 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6883 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6884 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6885 inner = fold_convert_loc (loc, intermediate_type, inner);
6887 if (bitnum != 0)
6888 inner = build2 (RSHIFT_EXPR, intermediate_type,
6889 inner, size_int (bitnum));
6891 one = build_int_cst (intermediate_type, 1);
6893 if (code == EQ_EXPR)
6894 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6896 /* Put the AND last so it can combine with more things. */
6897 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6899 /* Make sure to return the proper type. */
6900 inner = fold_convert_loc (loc, result_type, inner);
6902 return inner;
6904 return NULL_TREE;
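/* E.g. "(A & 8) != 0" becomes "(A >> 3) & 1", and "(A & 8) == 0"
   becomes "((A >> 3) ^ 1) & 1", with the shift performed in an
   unsigned (or, on some targets, signed) intermediate type as chosen
   above.  */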
6907 /* Check whether we are allowed to reorder operands arg0 and arg1,
6908 such that the evaluation of arg1 occurs before arg0. */
6910 static bool
6911 reorder_operands_p (const_tree arg0, const_tree arg1)
6913 if (! flag_evaluation_order)
6914 return true;
6915 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6916 return true;
6917 return ! TREE_SIDE_EFFECTS (arg0)
6918 && ! TREE_SIDE_EFFECTS (arg1);
6921 /* Test whether it is preferable to swap two operands, ARG0 and
6922 ARG1, for example because ARG0 is an integer constant and ARG1
6923 isn't. If REORDER is true, only recommend swapping if we can
6924 evaluate the operands in reverse order. */
6926 bool
6927 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6929 if (CONSTANT_CLASS_P (arg1))
6930 return 0;
6931 if (CONSTANT_CLASS_P (arg0))
6932 return 1;
6934 STRIP_NOPS (arg0);
6935 STRIP_NOPS (arg1);
6937 if (TREE_CONSTANT (arg1))
6938 return 0;
6939 if (TREE_CONSTANT (arg0))
6940 return 1;
6942 if (reorder && flag_evaluation_order
6943 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6944 return 0;
6946 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6947 for commutative and comparison operators. Ensuring a canonical
6948 form allows the optimizers to find additional redundancies without
6949 having to explicitly check for both orderings. */
6950 if (TREE_CODE (arg0) == SSA_NAME
6951 && TREE_CODE (arg1) == SSA_NAME
6952 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6953 return 1;
6955 /* Put SSA_NAMEs last. */
6956 if (TREE_CODE (arg1) == SSA_NAME)
6957 return 0;
6958 if (TREE_CODE (arg0) == SSA_NAME)
6959 return 1;
6961 /* Put variables last. */
6962 if (DECL_P (arg1))
6963 return 0;
6964 if (DECL_P (arg0))
6965 return 1;
6967 return 0;
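/* Callers such as fold_binary use this to canonicalize commutative
   operands, e.g. rewriting "5 + x" as "x + 5" and ordering SSA names
   by version, so later pattern matching need only consider one
   operand order.  */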
6970 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6971 ARG0 is extended to a wider type. */
6973 static tree
6974 fold_widened_comparison (location_t loc, enum tree_code code,
6975 tree type, tree arg0, tree arg1)
6977 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6978 tree arg1_unw;
6979 tree shorter_type, outer_type;
6980 tree min, max;
6981 bool above, below;
6983 if (arg0_unw == arg0)
6984 return NULL_TREE;
6985 shorter_type = TREE_TYPE (arg0_unw);
6987 #ifdef HAVE_canonicalize_funcptr_for_compare
6988 /* Disable this optimization if we're casting a function pointer
6989 type on targets that require function pointer canonicalization. */
6990 if (HAVE_canonicalize_funcptr_for_compare
6991 && TREE_CODE (shorter_type) == POINTER_TYPE
6992 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6993 return NULL_TREE;
6994 #endif
6996 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6997 return NULL_TREE;
6999 arg1_unw = get_unwidened (arg1, NULL_TREE);
7001 /* If possible, express the comparison in the shorter mode. */
7002 if ((code == EQ_EXPR || code == NE_EXPR
7003 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7004 && (TREE_TYPE (arg1_unw) == shorter_type
7005 || ((TYPE_PRECISION (shorter_type)
7006 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7007 && (TYPE_UNSIGNED (shorter_type)
7008 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7009 || (TREE_CODE (arg1_unw) == INTEGER_CST
7010 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7011 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7012 && int_fits_type_p (arg1_unw, shorter_type))))
7013 return fold_build2_loc (loc, code, type, arg0_unw,
7014 fold_convert_loc (loc, shorter_type, arg1_unw));
7016 if (TREE_CODE (arg1_unw) != INTEGER_CST
7017 || TREE_CODE (shorter_type) != INTEGER_TYPE
7018 || !int_fits_type_p (arg1_unw, shorter_type))
7019 return NULL_TREE;
7021 /* If we are comparing with an integer that does not fit into the range
7022 of the shorter type, the result is known. */
7023 outer_type = TREE_TYPE (arg1_unw);
7024 min = lower_bound_in_type (outer_type, shorter_type);
7025 max = upper_bound_in_type (outer_type, shorter_type);
7027 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7028 max, arg1_unw));
7029 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7030 arg1_unw, min));
7032 switch (code)
7034 case EQ_EXPR:
7035 if (above || below)
7036 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7037 break;
7039 case NE_EXPR:
7040 if (above || below)
7041 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7042 break;
7044 case LT_EXPR:
7045 case LE_EXPR:
7046 if (above)
7047 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7048 else if (below)
7049 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7051 case GT_EXPR:
7052 case GE_EXPR:
7053 if (above)
7054 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7055 else if (below)
7056 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7058 default:
7059 break;
7062 return NULL_TREE;
7065 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7066 ARG0 just the signedness is changed. */
7068 static tree
7069 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7070 tree arg0, tree arg1)
7072 tree arg0_inner;
7073 tree inner_type, outer_type;
7075 if (!CONVERT_EXPR_P (arg0))
7076 return NULL_TREE;
7078 outer_type = TREE_TYPE (arg0);
7079 arg0_inner = TREE_OPERAND (arg0, 0);
7080 inner_type = TREE_TYPE (arg0_inner);
7082 #ifdef HAVE_canonicalize_funcptr_for_compare
7083 /* Disable this optimization if we're casting a function pointer
7084 type on targets that require function pointer canonicalization. */
7085 if (HAVE_canonicalize_funcptr_for_compare
7086 && TREE_CODE (inner_type) == POINTER_TYPE
7087 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7088 return NULL_TREE;
7089 #endif
7091 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7092 return NULL_TREE;
7094 if (TREE_CODE (arg1) != INTEGER_CST
7095 && !(CONVERT_EXPR_P (arg1)
7096 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7097 return NULL_TREE;
7099 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7100 && code != NE_EXPR
7101 && code != EQ_EXPR)
7102 return NULL_TREE;
7104 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7105 return NULL_TREE;
7107 if (TREE_CODE (arg1) == INTEGER_CST)
7108 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7109 TREE_OVERFLOW (arg1));
7110 else
7111 arg1 = fold_convert_loc (loc, inner_type, arg1);
7113 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7117 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7118 means A >= Y && A != MAX, but in this case we know that
7119 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7121 static tree
7122 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7124 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7126 if (TREE_CODE (bound) == LT_EXPR)
7127 a = TREE_OPERAND (bound, 0);
7128 else if (TREE_CODE (bound) == GT_EXPR)
7129 a = TREE_OPERAND (bound, 1);
7130 else
7131 return NULL_TREE;
7133 typea = TREE_TYPE (a);
7134 if (!INTEGRAL_TYPE_P (typea)
7135 && !POINTER_TYPE_P (typea))
7136 return NULL_TREE;
7138 if (TREE_CODE (ineq) == LT_EXPR)
7140 a1 = TREE_OPERAND (ineq, 1);
7141 y = TREE_OPERAND (ineq, 0);
7143 else if (TREE_CODE (ineq) == GT_EXPR)
7145 a1 = TREE_OPERAND (ineq, 0);
7146 y = TREE_OPERAND (ineq, 1);
7148 else
7149 return NULL_TREE;
7151 if (TREE_TYPE (a1) != typea)
7152 return NULL_TREE;
7154 if (POINTER_TYPE_P (typea))
7156 /* Convert the pointer types into integers before taking the difference. */
7157 tree ta = fold_convert_loc (loc, ssizetype, a);
7158 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7159 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7161 else
7162 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7164 if (!diff || !integer_onep (diff))
7165 return NULL_TREE;
7167 return fold_build2_loc (loc, GE_EXPR, type, a, y);
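/* Worked instance: with BOUND = "a < n" and INEQ = "a + 1 > y", the
   difference a1 - a is 1, so the function returns "a >= y"; the
   caller then combines it with BOUND to get "a < n && a >= y".  */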
7170 /* Fold a sum or difference of at least one multiplication.
7171 Returns the folded tree or NULL if no simplification could be made. */
7173 static tree
7174 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7175 tree arg0, tree arg1)
7177 tree arg00, arg01, arg10, arg11;
7178 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7180 /* (A * C) +- (B * C) -> (A+-B) * C.
7181 (A * C) +- A -> A * (C+-1).
7182 We are most concerned about the case where C is a constant,
7183 but other combinations show up during loop reduction. Since
7184 it is not difficult, try all four possibilities. */
7186 if (TREE_CODE (arg0) == MULT_EXPR)
7188 arg00 = TREE_OPERAND (arg0, 0);
7189 arg01 = TREE_OPERAND (arg0, 1);
7191 else if (TREE_CODE (arg0) == INTEGER_CST)
7193 arg00 = build_one_cst (type);
7194 arg01 = arg0;
7196 else
7198 /* We cannot generate constant 1 for fract. */
7199 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7200 return NULL_TREE;
7201 arg00 = arg0;
7202 arg01 = build_one_cst (type);
7204 if (TREE_CODE (arg1) == MULT_EXPR)
7206 arg10 = TREE_OPERAND (arg1, 0);
7207 arg11 = TREE_OPERAND (arg1, 1);
7209 else if (TREE_CODE (arg1) == INTEGER_CST)
7211 arg10 = build_one_cst (type);
7212 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7213 the purpose of this canonicalization. */
7214 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7215 && negate_expr_p (arg1)
7216 && code == PLUS_EXPR)
7218 arg11 = negate_expr (arg1);
7219 code = MINUS_EXPR;
7221 else
7222 arg11 = arg1;
7224 else
7226 /* We cannot generate constant 1 for fract. */
7227 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7228 return NULL_TREE;
7229 arg10 = arg1;
7230 arg11 = build_one_cst (type);
7232 same = NULL_TREE;
7234 if (operand_equal_p (arg01, arg11, 0))
7235 same = arg01, alt0 = arg00, alt1 = arg10;
7236 else if (operand_equal_p (arg00, arg10, 0))
7237 same = arg00, alt0 = arg01, alt1 = arg11;
7238 else if (operand_equal_p (arg00, arg11, 0))
7239 same = arg00, alt0 = arg01, alt1 = arg10;
7240 else if (operand_equal_p (arg01, arg10, 0))
7241 same = arg01, alt0 = arg00, alt1 = arg11;
7243 /* No identical multiplicands; see if we can find a common
7244 power-of-two factor in non-power-of-two multiplies. This
7245 can help in multi-dimensional array access. */
7246 else if (tree_fits_shwi_p (arg01)
7247 && tree_fits_shwi_p (arg11))
7249 HOST_WIDE_INT int01, int11, tmp;
7250 bool swap = false;
7251 tree maybe_same;
7252 int01 = tree_to_shwi (arg01);
7253 int11 = tree_to_shwi (arg11);
7255 /* Move min of absolute values to int11. */
7256 if (absu_hwi (int01) < absu_hwi (int11))
7258 tmp = int01, int01 = int11, int11 = tmp;
7259 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7260 maybe_same = arg01;
7261 swap = true;
7263 else
7264 maybe_same = arg11;
7266 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7267 /* The remainder should not be a constant, otherwise we
7268 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7269 increase the number of multiplications necessary. */
7270 && TREE_CODE (arg10) != INTEGER_CST)
7272 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7273 build_int_cst (TREE_TYPE (arg00),
7274 int01 / int11));
7275 alt1 = arg10;
7276 same = maybe_same;
7277 if (swap)
7278 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7282 if (same)
7283 return fold_build2_loc (loc, MULT_EXPR, type,
7284 fold_build2_loc (loc, code, type,
7285 fold_convert_loc (loc, type, alt0),
7286 fold_convert_loc (loc, type, alt1)),
7287 fold_convert_loc (loc, type, same));
7289 return NULL_TREE;
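/* Editor's example (not part of fold-const.c): the shapes handled above,
   written as plain C.  With wrapping unsigned arithmetic both forms
   compute the same value, which is what the fold exploits; the common
   power-of-two case turns e.g. a * 12 + b * 4 into (a * 3 + b) * 4.
   The helper names are hypothetical.  */

static unsigned
plusminus_mult_before (unsigned a, unsigned b, unsigned c)
{
  return a * c + b * c;		/* (A * C) + (B * C) */
}

static unsigned
plusminus_mult_after (unsigned a, unsigned b, unsigned c)
{
  return (a + b) * c;		/* folded: (A + B) * C */
}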
7292 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7293 specified by EXPR into the buffer PTR of length LEN bytes.
7294 Return the number of bytes placed in the buffer, or zero
7295 upon failure. */
7297 static int
7298 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7300 tree type = TREE_TYPE (expr);
7301 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7302 int byte, offset, word, words;
7303 unsigned char value;
7305 if ((off == -1 && total_bytes > len)
7306 || off >= total_bytes)
7307 return 0;
7308 if (off == -1)
7309 off = 0;
7310 words = total_bytes / UNITS_PER_WORD;
7312 for (byte = 0; byte < total_bytes; byte++)
7314 int bitpos = byte * BITS_PER_UNIT;
7315 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7316 number of bytes. */
7317 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7319 if (total_bytes > UNITS_PER_WORD)
7321 word = byte / UNITS_PER_WORD;
7322 if (WORDS_BIG_ENDIAN)
7323 word = (words - 1) - word;
7324 offset = word * UNITS_PER_WORD;
7325 if (BYTES_BIG_ENDIAN)
7326 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7327 else
7328 offset += byte % UNITS_PER_WORD;
7330 else
7331 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7332 if (offset >= off
7333 && offset - off < len)
7334 ptr[offset - off] = value;
7336 return MIN (len, total_bytes - off);
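/* Editor's sketch (not part of fold-const.c): the byte-extraction loop
   above, specialized to a little-endian target with 8-bit bytes so the
   word/byte permutation disappears.  wi::extract_uhwi (x, 8 * i, 8) is
   modeled by a plain shift; the function name is hypothetical.  */

static void
encode_int_le (unsigned long long v, unsigned char *ptr, int total_bytes)
{
  for (int byte = 0; byte < total_bytes; byte++)
    ptr[byte] = (unsigned char) (v >> (byte * 8));	/* 8 bits at a time */
}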
7340 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7341 specified by EXPR into the buffer PTR of length LEN bytes.
7342 Return the number of bytes placed in the buffer, or zero
7343 upon failure. */
7345 static int
7346 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7348 tree type = TREE_TYPE (expr);
7349 machine_mode mode = TYPE_MODE (type);
7350 int total_bytes = GET_MODE_SIZE (mode);
7351 FIXED_VALUE_TYPE value;
7352 tree i_value, i_type;
7354 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7355 return 0;
7357 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7359 if (NULL_TREE == i_type
7360 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7361 return 0;
7363 value = TREE_FIXED_CST (expr);
7364 i_value = double_int_to_tree (i_type, value.data);
7366 return native_encode_int (i_value, ptr, len, off);
7370 /* Subroutine of native_encode_expr. Encode the REAL_CST
7371 specified by EXPR into the buffer PTR of length LEN bytes.
7372 Return the number of bytes placed in the buffer, or zero
7373 upon failure. */
7375 static int
7376 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7378 tree type = TREE_TYPE (expr);
7379 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7380 int byte, offset, word, words, bitpos;
7381 unsigned char value;
7383 /* There are always 32 bits in each long, no matter the size of
7384 the host's long. We handle floating point representations with
7385 up to 192 bits. */
7386 long tmp[6];
7388 if ((off == -1 && total_bytes > len)
7389 || off >= total_bytes)
7390 return 0;
7391 if (off == -1)
7392 off = 0;
7393 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7395 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7397 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7398 bitpos += BITS_PER_UNIT)
7400 byte = (bitpos / BITS_PER_UNIT) & 3;
7401 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7403 if (UNITS_PER_WORD < 4)
7405 word = byte / UNITS_PER_WORD;
7406 if (WORDS_BIG_ENDIAN)
7407 word = (words - 1) - word;
7408 offset = word * UNITS_PER_WORD;
7409 if (BYTES_BIG_ENDIAN)
7410 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7411 else
7412 offset += byte % UNITS_PER_WORD;
7414 else
7415 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7416 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7417 if (offset >= off
7418 && offset - off < len)
7419 ptr[offset - off] = value;
7421 return MIN (len, total_bytes - off);
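/* Editor's sketch (not part of fold-const.c): real_to_target fills each
   long with exactly 32 significant bits, so byte I of the image lives in
   tmp[I / 4] at bit offset (I % 4) * 8.  The helper below models the
   VALUE computation above for a little-endian, 8-bit-byte target; its
   name is hypothetical.  */

static unsigned char
real_image_byte (const long *tmp, int i)
{
  return (unsigned char) (tmp[i / 4] >> ((i % 4) * 8));
}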
7424 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7425 specified by EXPR into the buffer PTR of length LEN bytes.
7426 Return the number of bytes placed in the buffer, or zero
7427 upon failure. */
7429 static int
7430 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7432 int rsize, isize;
7433 tree part;
7435 part = TREE_REALPART (expr);
7436 rsize = native_encode_expr (part, ptr, len, off);
7437 if (off == -1
7438 && rsize == 0)
7439 return 0;
7440 part = TREE_IMAGPART (expr);
7441 if (off != -1)
7442 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7443 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7444 if (off == -1
7445 && isize != rsize)
7446 return 0;
7447 return rsize + isize;
7451 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7452 specified by EXPR into the buffer PTR of length LEN bytes.
7453 Return the number of bytes placed in the buffer, or zero
7454 upon failure. */
7456 static int
7457 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7459 unsigned i, count;
7460 int size, offset;
7461 tree itype, elem;
7463 offset = 0;
7464 count = VECTOR_CST_NELTS (expr);
7465 itype = TREE_TYPE (TREE_TYPE (expr));
7466 size = GET_MODE_SIZE (TYPE_MODE (itype));
7467 for (i = 0; i < count; i++)
7469 if (off >= size)
7471 off -= size;
7472 continue;
7474 elem = VECTOR_CST_ELT (expr, i);
7475 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7476 if ((off == -1 && res != size)
7477 || res == 0)
7478 return 0;
7479 offset += res;
7480 if (offset >= len)
7481 return offset;
7482 if (off != -1)
7483 off = 0;
7485 return offset;
7489 /* Subroutine of native_encode_expr. Encode the STRING_CST
7490 specified by EXPR into the buffer PTR of length LEN bytes.
7491 Return the number of bytes placed in the buffer, or zero
7492 upon failure. */
7494 static int
7495 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7497 tree type = TREE_TYPE (expr);
7498 HOST_WIDE_INT total_bytes;
7500 if (TREE_CODE (type) != ARRAY_TYPE
7501 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7502 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7503 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7504 return 0;
7505 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7506 if ((off == -1 && total_bytes > len)
7507 || off >= total_bytes)
7508 return 0;
7509 if (off == -1)
7510 off = 0;
7511 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7513 int written = 0;
7514 if (off < TREE_STRING_LENGTH (expr))
7516 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7517 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7519 memset (ptr + written, 0,
7520 MIN (total_bytes - written, len - written));
7522 else
7523 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7524 return MIN (total_bytes - off, len);
7528 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7529 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7530 buffer PTR of length LEN bytes. If OFF is not -1 then start
7531 the encoding at byte offset OFF and encode at most LEN bytes.
7532 Return the number of bytes placed in the buffer, or zero upon failure. */
7534 int
7535 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7537 switch (TREE_CODE (expr))
7539 case INTEGER_CST:
7540 return native_encode_int (expr, ptr, len, off);
7542 case REAL_CST:
7543 return native_encode_real (expr, ptr, len, off);
7545 case FIXED_CST:
7546 return native_encode_fixed (expr, ptr, len, off);
7548 case COMPLEX_CST:
7549 return native_encode_complex (expr, ptr, len, off);
7551 case VECTOR_CST:
7552 return native_encode_vector (expr, ptr, len, off);
7554 case STRING_CST:
7555 return native_encode_string (expr, ptr, len, off);
7557 default:
7558 return 0;
7563 /* Subroutine of native_interpret_expr. Interpret the contents of
7564 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7565 If the buffer cannot be interpreted, return NULL_TREE. */
7567 static tree
7568 native_interpret_int (tree type, const unsigned char *ptr, int len)
7570 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7572 if (total_bytes > len
7573 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7574 return NULL_TREE;
7576 wide_int result = wi::from_buffer (ptr, total_bytes);
7578 return wide_int_to_tree (type, result);
7582 /* Subroutine of native_interpret_expr. Interpret the contents of
7583 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7584 If the buffer cannot be interpreted, return NULL_TREE. */
7586 static tree
7587 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7589 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7590 double_int result;
7591 FIXED_VALUE_TYPE fixed_value;
7593 if (total_bytes > len
7594 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7595 return NULL_TREE;
7597 result = double_int::from_buffer (ptr, total_bytes);
7598 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7600 return build_fixed (type, fixed_value);
7604 /* Subroutine of native_interpret_expr. Interpret the contents of
7605 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7606 If the buffer cannot be interpreted, return NULL_TREE. */
7608 static tree
7609 native_interpret_real (tree type, const unsigned char *ptr, int len)
7611 machine_mode mode = TYPE_MODE (type);
7612 int total_bytes = GET_MODE_SIZE (mode);
7613 int byte, offset, word, words, bitpos;
7614 unsigned char value;
7615 /* There are always 32 bits in each long, no matter the size of
7616 the host's long. We handle floating point representations with
7617 up to 192 bits. */
7618 REAL_VALUE_TYPE r;
7619 long tmp[6];
7621 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7622 if (total_bytes > len || total_bytes > 24)
7623 return NULL_TREE;
7624 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7626 memset (tmp, 0, sizeof (tmp));
7627 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7628 bitpos += BITS_PER_UNIT)
7630 byte = (bitpos / BITS_PER_UNIT) & 3;
7631 if (UNITS_PER_WORD < 4)
7633 word = byte / UNITS_PER_WORD;
7634 if (WORDS_BIG_ENDIAN)
7635 word = (words - 1) - word;
7636 offset = word * UNITS_PER_WORD;
7637 if (BYTES_BIG_ENDIAN)
7638 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7639 else
7640 offset += byte % UNITS_PER_WORD;
7642 else
7643 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7644 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7646 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7649 real_from_target (&r, tmp, mode);
7650 return build_real (type, r);
7654 /* Subroutine of native_interpret_expr. Interpret the contents of
7655 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7656 If the buffer cannot be interpreted, return NULL_TREE. */
7658 static tree
7659 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7661 tree etype, rpart, ipart;
7662 int size;
7664 etype = TREE_TYPE (type);
7665 size = GET_MODE_SIZE (TYPE_MODE (etype));
7666 if (size * 2 > len)
7667 return NULL_TREE;
7668 rpart = native_interpret_expr (etype, ptr, size);
7669 if (!rpart)
7670 return NULL_TREE;
7671 ipart = native_interpret_expr (etype, ptr+size, size);
7672 if (!ipart)
7673 return NULL_TREE;
7674 return build_complex (type, rpart, ipart);
7678 /* Subroutine of native_interpret_expr. Interpret the contents of
7679 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7680 If the buffer cannot be interpreted, return NULL_TREE. */
7682 static tree
7683 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7685 tree etype, elem;
7686 int i, size, count;
7687 tree *elements;
7689 etype = TREE_TYPE (type);
7690 size = GET_MODE_SIZE (TYPE_MODE (etype));
7691 count = TYPE_VECTOR_SUBPARTS (type);
7692 if (size * count > len)
7693 return NULL_TREE;
7695 elements = XALLOCAVEC (tree, count);
7696 for (i = count - 1; i >= 0; i--)
7698 elem = native_interpret_expr (etype, ptr+(i*size), size);
7699 if (!elem)
7700 return NULL_TREE;
7701 elements[i] = elem;
7703 return build_vector (type, elements);
7707 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7708 the buffer PTR of length LEN as a constant of type TYPE. For
7709 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7710 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7711 return NULL_TREE. */
7713 tree
7714 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7716 switch (TREE_CODE (type))
7718 case INTEGER_TYPE:
7719 case ENUMERAL_TYPE:
7720 case BOOLEAN_TYPE:
7721 case POINTER_TYPE:
7722 case REFERENCE_TYPE:
7723 return native_interpret_int (type, ptr, len);
7725 case REAL_TYPE:
7726 return native_interpret_real (type, ptr, len);
7728 case FIXED_POINT_TYPE:
7729 return native_interpret_fixed (type, ptr, len);
7731 case COMPLEX_TYPE:
7732 return native_interpret_complex (type, ptr, len);
7734 case VECTOR_TYPE:
7735 return native_interpret_vector (type, ptr, len);
7737 default:
7738 return NULL_TREE;
7742 /* Returns true if we can interpret the contents of a native encoding
7743 as TYPE. */
7745 static bool
7746 can_native_interpret_type_p (tree type)
7748 switch (TREE_CODE (type))
7750 case INTEGER_TYPE:
7751 case ENUMERAL_TYPE:
7752 case BOOLEAN_TYPE:
7753 case POINTER_TYPE:
7754 case REFERENCE_TYPE:
7755 case FIXED_POINT_TYPE:
7756 case REAL_TYPE:
7757 case COMPLEX_TYPE:
7758 case VECTOR_TYPE:
7759 return true;
7760 default:
7761 return false;
7765 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7766 TYPE at compile-time. If we're unable to perform the conversion
7767 return NULL_TREE. */
7769 static tree
7770 fold_view_convert_expr (tree type, tree expr)
7772 /* We support up to 512-bit values (for V8DFmode). */
7773 unsigned char buffer[64];
7774 int len;
7776 /* Check that the host and target are sane. */
7777 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7778 return NULL_TREE;
7780 len = native_encode_expr (expr, buffer, sizeof (buffer));
7781 if (len == 0)
7782 return NULL_TREE;
7784 return native_interpret_expr (type, buffer, len);
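/* Editor's example (not part of fold-const.c): at run time a
   VIEW_CONVERT_EXPR is a reinterpretation of the same bytes; the
   encode/interpret pair above computes the same result at compile time.
   Assumes float and unsigned int are both 32 bits; the function name is
   hypothetical.  */

static unsigned int
view_convert_float (float f)
{
  unsigned int u;
  __builtin_memcpy (&u, &f, sizeof (u));	/* same bytes, new type */
  return u;
}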
7787 /* Build an expression for the address of T. Folds away INDIRECT_REF
7788 to avoid confusing the gimplify process. */
7790 tree
7791 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7793 /* The size of the object is not relevant when talking about its address. */
7794 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7795 t = TREE_OPERAND (t, 0);
7797 if (TREE_CODE (t) == INDIRECT_REF)
7799 t = TREE_OPERAND (t, 0);
7801 if (TREE_TYPE (t) != ptrtype)
7802 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7804 else if (TREE_CODE (t) == MEM_REF
7805 && integer_zerop (TREE_OPERAND (t, 1)))
7806 return TREE_OPERAND (t, 0);
7807 else if (TREE_CODE (t) == MEM_REF
7808 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7809 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7810 TREE_OPERAND (t, 0),
7811 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7812 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7814 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7816 if (TREE_TYPE (t) != ptrtype)
7817 t = fold_convert_loc (loc, ptrtype, t);
7819 else
7820 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7822 return t;
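/* Editor's example (not part of fold-const.c): the INDIRECT_REF case
   above corresponds to the source-level identity &*p == p for any valid
   pointer, with a NOP_EXPR cast added when the pointer types differ; the
   function name is hypothetical.  */

static int *
addr_of_deref (int *p)
{
  return &*p;		/* folds to plain P */
}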
7825 /* Build an expression for the address of T. */
7827 tree
7828 build_fold_addr_expr_loc (location_t loc, tree t)
7830 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7832 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7835 /* Fold a unary expression of code CODE and type TYPE with operand
7836 OP0. Return the folded expression if folding is successful.
7837 Otherwise, return NULL_TREE. */
7839 tree
7840 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7842 tree tem;
7843 tree arg0;
7844 enum tree_code_class kind = TREE_CODE_CLASS (code);
7846 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7847 && TREE_CODE_LENGTH (code) == 1);
7849 arg0 = op0;
7850 if (arg0)
7852 if (CONVERT_EXPR_CODE_P (code)
7853 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7855 /* Don't use STRIP_NOPS, because signedness of argument type
7856 matters. */
7857 STRIP_SIGN_NOPS (arg0);
7859 else
7861 /* Strip any conversions that don't change the mode. This
7862 is safe for every expression, except for a comparison
7863 expression because its signedness is derived from its
7864 operands.
7866 Note that this is done as an internal manipulation within
7867 the constant folder, in order to find the simplest
7868 representation of the arguments so that their form can be
7869 studied. In any cases, the appropriate type conversions
7870 should be put back in the tree that will get out of the
7871 constant folder. */
7872 STRIP_NOPS (arg0);
7875 if (CONSTANT_CLASS_P (arg0))
7877 tree tem = const_unop (code, type, arg0);
7878 if (tem)
7880 if (TREE_TYPE (tem) != type)
7881 tem = fold_convert_loc (loc, type, tem);
7882 return tem;
7887 tem = generic_simplify (loc, code, type, op0);
7888 if (tem)
7889 return tem;
7891 if (TREE_CODE_CLASS (code) == tcc_unary)
7893 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7894 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7895 fold_build1_loc (loc, code, type,
7896 fold_convert_loc (loc, TREE_TYPE (op0),
7897 TREE_OPERAND (arg0, 1))));
7898 else if (TREE_CODE (arg0) == COND_EXPR)
7900 tree arg01 = TREE_OPERAND (arg0, 1);
7901 tree arg02 = TREE_OPERAND (arg0, 2);
7902 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7903 arg01 = fold_build1_loc (loc, code, type,
7904 fold_convert_loc (loc,
7905 TREE_TYPE (op0), arg01));
7906 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7907 arg02 = fold_build1_loc (loc, code, type,
7908 fold_convert_loc (loc,
7909 TREE_TYPE (op0), arg02));
7910 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7911 arg01, arg02);
7913 /* If this was a conversion, and all we did was to move into
7914 inside the COND_EXPR, bring it back out. But leave it if
7915 it is a conversion from integer to integer and the
7916 result precision is no wider than a word since such a
7917 conversion is cheap and may be optimized away by combine,
7918 while it couldn't if it were outside the COND_EXPR. Then return
7919 so we don't get into an infinite recursion loop taking the
7920 conversion out and then back in. */
7922 if ((CONVERT_EXPR_CODE_P (code)
7923 || code == NON_LVALUE_EXPR)
7924 && TREE_CODE (tem) == COND_EXPR
7925 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7926 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7927 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7928 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7929 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7930 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7931 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7932 && (INTEGRAL_TYPE_P
7933 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7934 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7935 || flag_syntax_only))
7936 tem = build1_loc (loc, code, type,
7937 build3 (COND_EXPR,
7938 TREE_TYPE (TREE_OPERAND
7939 (TREE_OPERAND (tem, 1), 0)),
7940 TREE_OPERAND (tem, 0),
7941 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7942 TREE_OPERAND (TREE_OPERAND (tem, 2),
7943 0)));
7944 return tem;
7948 switch (code)
7950 case NON_LVALUE_EXPR:
7951 if (!maybe_lvalue_p (op0))
7952 return fold_convert_loc (loc, type, op0);
7953 return NULL_TREE;
7955 CASE_CONVERT:
7956 case FLOAT_EXPR:
7957 case FIX_TRUNC_EXPR:
7958 if (COMPARISON_CLASS_P (op0))
7960 /* If we have (type) (a CMP b) and type is an integral type, return
7961 new expression involving the new type. Canonicalize
7962 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7963 non-integral type.
7964 Do not fold the result, as that would not simplify further; also,
7965 folding again results in recursion. */
7966 if (TREE_CODE (type) == BOOLEAN_TYPE)
7967 return build2_loc (loc, TREE_CODE (op0), type,
7968 TREE_OPERAND (op0, 0),
7969 TREE_OPERAND (op0, 1));
7970 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7971 && TREE_CODE (type) != VECTOR_TYPE)
7972 return build3_loc (loc, COND_EXPR, type, op0,
7973 constant_boolean_node (true, type),
7974 constant_boolean_node (false, type));
7977 /* Handle (T *)&A.B.C for A being of type T and B and C
7978 living at offset zero. This occurs frequently in
7979 C++ upcasting and then accessing the base. */
7980 if (TREE_CODE (op0) == ADDR_EXPR
7981 && POINTER_TYPE_P (type)
7982 && handled_component_p (TREE_OPERAND (op0, 0)))
7984 HOST_WIDE_INT bitsize, bitpos;
7985 tree offset;
7986 machine_mode mode;
7987 int unsignedp, volatilep;
7988 tree base = TREE_OPERAND (op0, 0);
7989 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7990 &mode, &unsignedp, &volatilep, false);
7991 /* If the reference was to a (constant) zero offset, we can use
7992 the address of the base if it has the same base type
7993 as the result type and the pointer type is unqualified. */
7994 if (! offset && bitpos == 0
7995 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7996 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7997 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7998 return fold_convert_loc (loc, type,
7999 build_fold_addr_expr_loc (loc, base));
8002 if (TREE_CODE (op0) == MODIFY_EXPR
8003 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8004 /* Detect assigning a bitfield. */
8005 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8006 && DECL_BIT_FIELD
8007 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8009 /* Don't leave an assignment inside a conversion
8010 unless assigning a bitfield. */
8011 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8012 /* First do the assignment, then return converted constant. */
8013 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8014 TREE_NO_WARNING (tem) = 1;
8015 TREE_USED (tem) = 1;
8016 return tem;
8019 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8020 constants (if x has signed type, the sign bit cannot be set
8021 in c). This folds extension into the BIT_AND_EXPR.
8022 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8023 very likely don't have maximal range for their precision and this
8024 transformation effectively doesn't preserve non-maximal ranges. */
8025 if (TREE_CODE (type) == INTEGER_TYPE
8026 && TREE_CODE (op0) == BIT_AND_EXPR
8027 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8029 tree and_expr = op0;
8030 tree and0 = TREE_OPERAND (and_expr, 0);
8031 tree and1 = TREE_OPERAND (and_expr, 1);
8032 int change = 0;
8034 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8035 || (TYPE_PRECISION (type)
8036 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8037 change = 1;
8038 else if (TYPE_PRECISION (TREE_TYPE (and1))
8039 <= HOST_BITS_PER_WIDE_INT
8040 && tree_fits_uhwi_p (and1))
8042 unsigned HOST_WIDE_INT cst;
8044 cst = tree_to_uhwi (and1);
8045 cst &= HOST_WIDE_INT_M1U
8046 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8047 change = (cst == 0);
8048 #ifdef LOAD_EXTEND_OP
8049 if (change
8050 && !flag_syntax_only
8051 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8052 == ZERO_EXTEND))
8054 tree uns = unsigned_type_for (TREE_TYPE (and0));
8055 and0 = fold_convert_loc (loc, uns, and0);
8056 and1 = fold_convert_loc (loc, uns, and1);
8058 #endif
8060 if (change)
8062 tem = force_fit_type (type, wi::to_widest (and1), 0,
8063 TREE_OVERFLOW (and1));
8064 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8065 fold_convert_loc (loc, type, and0), tem);
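/* Editor's illustration (not part of fold-const.c): at the C level this
   fold says that, for an unsigned narrow value, masking then widening
   equals widening then masking, e.g.

     (unsigned long) (x & 0xffu) == ((unsigned long) x) & 0xfful

   so the extension can be folded into the BIT_AND_EXPR.  */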
8069 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8070 when one of the new casts will fold away. Conservatively we assume
8071 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8072 if (POINTER_TYPE_P (type)
8073 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8074 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8075 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8076 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8077 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8079 tree arg00 = TREE_OPERAND (arg0, 0);
8080 tree arg01 = TREE_OPERAND (arg0, 1);
8082 return fold_build_pointer_plus_loc
8083 (loc, fold_convert_loc (loc, type, arg00), arg01);
8086 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8087 of the same precision, and X is an integer type not narrower than
8088 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8089 if (INTEGRAL_TYPE_P (type)
8090 && TREE_CODE (op0) == BIT_NOT_EXPR
8091 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8092 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8093 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8095 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8096 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8097 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8098 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8099 fold_convert_loc (loc, type, tem));
8102 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8103 type of X and Y (integer types only). */
8104 if (INTEGRAL_TYPE_P (type)
8105 && TREE_CODE (op0) == MULT_EXPR
8106 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8107 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8109 /* Be careful not to introduce new overflows. */
8110 tree mult_type;
8111 if (TYPE_OVERFLOW_WRAPS (type))
8112 mult_type = type;
8113 else
8114 mult_type = unsigned_type_for (type);
8116 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8118 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8119 fold_convert_loc (loc, mult_type,
8120 TREE_OPERAND (op0, 0)),
8121 fold_convert_loc (loc, mult_type,
8122 TREE_OPERAND (op0, 1)));
8123 return fold_convert_loc (loc, type, tem);
8127 return NULL_TREE;
8129 case VIEW_CONVERT_EXPR:
8130 if (TREE_CODE (op0) == MEM_REF)
8131 return fold_build2_loc (loc, MEM_REF, type,
8132 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8134 return NULL_TREE;
8136 case NEGATE_EXPR:
8137 tem = fold_negate_expr (loc, arg0);
8138 if (tem)
8139 return fold_convert_loc (loc, type, tem);
8140 return NULL_TREE;
8142 case ABS_EXPR:
8143 /* Convert fabs((double)float) into (double)fabsf(float). */
8144 if (TREE_CODE (arg0) == NOP_EXPR
8145 && TREE_CODE (type) == REAL_TYPE)
8147 tree targ0 = strip_float_extensions (arg0);
8148 if (targ0 != arg0)
8149 return fold_convert_loc (loc, type,
8150 fold_build1_loc (loc, ABS_EXPR,
8151 TREE_TYPE (targ0),
8152 targ0));
8154 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8155 else if (TREE_CODE (arg0) == ABS_EXPR)
8156 return arg0;
8158 /* Strip sign ops from argument. */
8159 if (TREE_CODE (type) == REAL_TYPE)
8161 tem = fold_strip_sign_ops (arg0);
8162 if (tem)
8163 return fold_build1_loc (loc, ABS_EXPR, type,
8164 fold_convert_loc (loc, type, tem));
8166 return NULL_TREE;
8168 case CONJ_EXPR:
8169 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8170 return fold_convert_loc (loc, type, arg0);
8171 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8173 tree itype = TREE_TYPE (type);
8174 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8175 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8176 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8177 negate_expr (ipart));
8179 if (TREE_CODE (arg0) == CONJ_EXPR)
8180 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8181 return NULL_TREE;
8183 case BIT_NOT_EXPR:
8184 /* Convert ~ (-A) to A - 1. */
8185 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8186 return fold_build2_loc (loc, MINUS_EXPR, type,
8187 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8188 build_int_cst (type, 1));
8189 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8190 else if (INTEGRAL_TYPE_P (type)
8191 && ((TREE_CODE (arg0) == MINUS_EXPR
8192 && integer_onep (TREE_OPERAND (arg0, 1)))
8193 || (TREE_CODE (arg0) == PLUS_EXPR
8194 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8196 /* Perform the negation in ARG0's type and only then convert
8197 to TYPE as to avoid introducing undefined behavior. */
8198 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8199 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8200 TREE_OPERAND (arg0, 0));
8201 return fold_convert_loc (loc, type, t);
8203 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8204 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8205 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8206 fold_convert_loc (loc, type,
8207 TREE_OPERAND (arg0, 0)))))
8208 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8209 fold_convert_loc (loc, type,
8210 TREE_OPERAND (arg0, 1)));
8211 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8212 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8213 fold_convert_loc (loc, type,
8214 TREE_OPERAND (arg0, 1)))))
8215 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8216 fold_convert_loc (loc, type,
8217 TREE_OPERAND (arg0, 0)), tem);
8219 return NULL_TREE;
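/* Editor's illustration (not part of fold-const.c): these BIT_NOT_EXPR
   folds follow from the modular identity ~X == -X - 1; for any wrapping
   integer a,

     ~(-a) == a - 1	and	~(a - 1) == -a.  */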
8221 case TRUTH_NOT_EXPR:
8222 /* Note that the operand of this must be an int
8223 and its values must be 0 or 1.
8224 ("true" is a fixed value perhaps depending on the language,
8225 but we don't handle values other than 1 correctly yet.) */
8226 tem = fold_truth_not_expr (loc, arg0);
8227 if (!tem)
8228 return NULL_TREE;
8229 return fold_convert_loc (loc, type, tem);
8231 case REALPART_EXPR:
8232 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8233 return fold_convert_loc (loc, type, arg0);
8234 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8236 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8237 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8238 fold_build1_loc (loc, REALPART_EXPR, itype,
8239 TREE_OPERAND (arg0, 0)),
8240 fold_build1_loc (loc, REALPART_EXPR, itype,
8241 TREE_OPERAND (arg0, 1)));
8242 return fold_convert_loc (loc, type, tem);
8244 if (TREE_CODE (arg0) == CONJ_EXPR)
8246 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8247 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8248 TREE_OPERAND (arg0, 0));
8249 return fold_convert_loc (loc, type, tem);
8251 if (TREE_CODE (arg0) == CALL_EXPR)
8253 tree fn = get_callee_fndecl (arg0);
8254 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8255 switch (DECL_FUNCTION_CODE (fn))
8257 CASE_FLT_FN (BUILT_IN_CEXPI):
8258 fn = mathfn_built_in (type, BUILT_IN_COS);
8259 if (fn)
8260 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8261 break;
8263 default:
8264 break;
8267 return NULL_TREE;
8269 case IMAGPART_EXPR:
8270 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8271 return build_zero_cst (type);
8272 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8274 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8275 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8276 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8277 TREE_OPERAND (arg0, 0)),
8278 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8279 TREE_OPERAND (arg0, 1)));
8280 return fold_convert_loc (loc, type, tem);
8282 if (TREE_CODE (arg0) == CONJ_EXPR)
8284 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8285 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8286 return fold_convert_loc (loc, type, negate_expr (tem));
8288 if (TREE_CODE (arg0) == CALL_EXPR)
8290 tree fn = get_callee_fndecl (arg0);
8291 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8292 switch (DECL_FUNCTION_CODE (fn))
8294 CASE_FLT_FN (BUILT_IN_CEXPI):
8295 fn = mathfn_built_in (type, BUILT_IN_SIN);
8296 if (fn)
8297 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8298 break;
8300 default:
8301 break;
8304 return NULL_TREE;
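/* Editor's illustration (not part of fold-const.c): the CEXPI cases in
   REALPART_EXPR and IMAGPART_EXPR rest on Euler's formula; in C99 terms,
   up to rounding,

     creal (cexp (I * x)) == cos (x)
     cimag (cexp (I * x)) == sin (x)

   which is why the complex builtin call narrows to cos or sin.  */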
8306 case INDIRECT_REF:
8307 /* Fold *&X to X if X is an lvalue. */
8308 if (TREE_CODE (op0) == ADDR_EXPR)
8310 tree op00 = TREE_OPERAND (op0, 0);
8311 if ((TREE_CODE (op00) == VAR_DECL
8312 || TREE_CODE (op00) == PARM_DECL
8313 || TREE_CODE (op00) == RESULT_DECL)
8314 && !TREE_READONLY (op00))
8315 return op00;
8317 return NULL_TREE;
8319 default:
8320 return NULL_TREE;
8321 } /* switch (code) */
8325 /* If the operation was a conversion do _not_ mark a resulting constant
8326 with TREE_OVERFLOW if the original constant was not. These conversions
8327 have implementation defined behavior and retaining the TREE_OVERFLOW
8328 flag here would confuse later passes such as VRP. */
8329 tree
8330 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8331 tree type, tree op0)
8333 tree res = fold_unary_loc (loc, code, type, op0);
8334 if (res
8335 && TREE_CODE (res) == INTEGER_CST
8336 && TREE_CODE (op0) == INTEGER_CST
8337 && CONVERT_EXPR_CODE_P (code))
8338 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8340 return res;
8343 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8344 operands OP0 and OP1. LOC is the location of the resulting expression.
8345 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8346 Return the folded expression if folding is successful. Otherwise,
8347 return NULL_TREE. */
8348 static tree
8349 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8350 tree arg0, tree arg1, tree op0, tree op1)
8352 tree tem;
8354 /* We only do these simplifications if we are optimizing. */
8355 if (!optimize)
8356 return NULL_TREE;
8358 /* Check for things like (A || B) && (A || C). We can convert this
8359 to A || (B && C). Note that either operator can be any of the four
8360 truth and/or operations and the transformation will still be
8361 valid. Also note that we only care about order for the
8362 ANDIF and ORIF operators. If B contains side effects, this
8363 might change the truth-value of A. */
8364 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8365 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8366 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8367 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8368 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8369 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8371 tree a00 = TREE_OPERAND (arg0, 0);
8372 tree a01 = TREE_OPERAND (arg0, 1);
8373 tree a10 = TREE_OPERAND (arg1, 0);
8374 tree a11 = TREE_OPERAND (arg1, 1);
8375 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8376 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8377 && (code == TRUTH_AND_EXPR
8378 || code == TRUTH_OR_EXPR));
8380 if (operand_equal_p (a00, a10, 0))
8381 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8382 fold_build2_loc (loc, code, type, a01, a11));
8383 else if (commutative && operand_equal_p (a00, a11, 0))
8384 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8385 fold_build2_loc (loc, code, type, a01, a10));
8386 else if (commutative && operand_equal_p (a01, a10, 0))
8387 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8388 fold_build2_loc (loc, code, type, a00, a11));
8390 /* This case is tricky because we must either have commutative
8391 operators or else A10 must not have side-effects. */
8393 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8394 && operand_equal_p (a01, a11, 0))
8395 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8396 fold_build2_loc (loc, code, type, a00, a10),
8397 a01);
8400 /* See if we can build a range comparison. */
8401 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8402 return tem;
8404 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8405 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8407 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8408 if (tem)
8409 return fold_build2_loc (loc, code, type, tem, arg1);
8412 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8413 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8415 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8416 if (tem)
8417 return fold_build2_loc (loc, code, type, arg0, tem);
8420 /* Check for the possibility of merging component references. If our
8421 lhs is another similar operation, try to merge its rhs with our
8422 rhs. Then try to merge our lhs and rhs. */
8423 if (TREE_CODE (arg0) == code
8424 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8425 TREE_OPERAND (arg0, 1), arg1)))
8426 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8428 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8429 return tem;
8431 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8432 && (code == TRUTH_AND_EXPR
8433 || code == TRUTH_ANDIF_EXPR
8434 || code == TRUTH_OR_EXPR
8435 || code == TRUTH_ORIF_EXPR))
8437 enum tree_code ncode, icode;
8439 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8440 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8441 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8443 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8444 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8445 We don't want to pack more than two leaves into a non-IF AND/OR
8446 expression.
8447 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8448 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8449 If the inner right-hand side of the left-hand operand has
8450 side-effects, or isn't simple, then we can't add to it,
8451 as otherwise we might destroy the if-sequence. */
8452 if (TREE_CODE (arg0) == icode
8453 && simple_operand_p_2 (arg1)
8454 /* Needed for sequence points to handle trappings, and
8455 side-effects. */
8456 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8458 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8459 arg1);
8460 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8461 tem);
8463 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8464 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8465 else if (TREE_CODE (arg1) == icode
8466 && simple_operand_p_2 (arg0)
8467 /* Needed for sequence points to handle trappings, and
8468 side-effects. */
8469 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8471 tem = fold_build2_loc (loc, ncode, type,
8472 arg0, TREE_OPERAND (arg1, 0));
8473 return fold_build2_loc (loc, icode, type, tem,
8474 TREE_OPERAND (arg1, 1));
8476 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8477 into (A OR B).
8478 For sequence point consistency, we need to check for trapping
8479 and side-effects. */
8480 else if (code == icode && simple_operand_p_2 (arg0)
8481 && simple_operand_p_2 (arg1))
8482 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8485 return NULL_TREE;
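/* Editor's example (not part of fold-const.c): an exhaustive check of
   the first transformation above, (A || B) && (A || C) == A || (B && C),
   valid for side-effect-free operands; the function name is
   hypothetical.  */

static int
check_truth_distribution (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      for (int c = 0; c <= 1; c++)
	if (((a || b) && (a || c)) != (a || (b && c)))
	  return 0;
  return 1;	/* all eight assignments agree */
}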
8488 /* Fold a binary expression of code CODE and type TYPE with operands
8489 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8490 Return the folded expression if folding is successful. Otherwise,
8491 return NULL_TREE. */
8493 static tree
8494 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8496 enum tree_code compl_code;
8498 if (code == MIN_EXPR)
8499 compl_code = MAX_EXPR;
8500 else if (code == MAX_EXPR)
8501 compl_code = MIN_EXPR;
8502 else
8503 gcc_unreachable ();
8505 /* MIN (MAX (a, b), b) == b. */
8506 if (TREE_CODE (op0) == compl_code
8507 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8508 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8510 /* MIN (MAX (b, a), b) == b. */
8511 if (TREE_CODE (op0) == compl_code
8512 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8513 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8514 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8516 /* MIN (a, MAX (a, b)) == a. */
8517 if (TREE_CODE (op1) == compl_code
8518 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8519 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8520 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8522 /* MIN (a, MAX (b, a)) == a. */
8523 if (TREE_CODE (op1) == compl_code
8524 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8525 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8526 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8528 return NULL_TREE;
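/* Editor's example (not part of fold-const.c): the absorption laws above
   in plain C; the helper names are hypothetical.  */

static int min_i (int a, int b) { return a < b ? a : b; }
static int max_i (int a, int b) { return a > b ? a : b; }

static int
check_minmax_absorption (int a, int b)
{
  return min_i (max_i (a, b), b) == b		/* MIN (MAX (a, b), b) == b */
	 && min_i (a, max_i (a, b)) == a;	/* MIN (a, MAX (a, b)) == a */
}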
8531 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8532 by changing CODE to reduce the magnitude of constants involved in
8533 ARG0 of the comparison.
8534 Returns a canonicalized comparison tree if a simplification was
8535 possible, otherwise returns NULL_TREE.
8536 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8537 valid if signed overflow is undefined. */
8539 static tree
8540 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8541 tree arg0, tree arg1,
8542 bool *strict_overflow_p)
8544 enum tree_code code0 = TREE_CODE (arg0);
8545 tree t, cst0 = NULL_TREE;
8546 int sgn0;
8547 bool swap = false;
8549 /* Match A +- CST code arg1 and CST code arg1. We can change the
8550 first form only if overflow is undefined. */
8551 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8552 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8553 /* In principle pointers also have undefined overflow behavior,
8554 but that causes problems elsewhere. */
8555 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8556 && (code0 == MINUS_EXPR
8557 || code0 == PLUS_EXPR)
8558 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8559 || code0 == INTEGER_CST))
8560 return NULL_TREE;
8562 /* Identify the constant in arg0 and its sign. */
8563 if (code0 == INTEGER_CST)
8564 cst0 = arg0;
8565 else
8566 cst0 = TREE_OPERAND (arg0, 1);
8567 sgn0 = tree_int_cst_sgn (cst0);
8569 /* Overflowed constants and zero will cause problems. */
8570 if (integer_zerop (cst0)
8571 || TREE_OVERFLOW (cst0))
8572 return NULL_TREE;
8574 /* See if we can reduce the magnitude of the constant in
8575 arg0 by changing the comparison code. */
8576 if (code0 == INTEGER_CST)
8578 /* CST <= arg1 -> CST-1 < arg1. */
8579 if (code == LE_EXPR && sgn0 == 1)
8580 code = LT_EXPR;
8581 /* -CST < arg1 -> -CST-1 <= arg1. */
8582 else if (code == LT_EXPR && sgn0 == -1)
8583 code = LE_EXPR;
8584 /* CST > arg1 -> CST-1 >= arg1. */
8585 else if (code == GT_EXPR && sgn0 == 1)
8586 code = GE_EXPR;
8587 /* -CST >= arg1 -> -CST-1 > arg1. */
8588 else if (code == GE_EXPR && sgn0 == -1)
8589 code = GT_EXPR;
8590 else
8591 return NULL_TREE;
8592 /* arg1 code' CST' might be more canonical. */
8593 swap = true;
8595 else
8597 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8598 if (code == LT_EXPR
8599 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8600 code = LE_EXPR;
8601 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8602 else if (code == GT_EXPR
8603 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8604 code = GE_EXPR;
8605 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8606 else if (code == LE_EXPR
8607 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8608 code = LT_EXPR;
8609 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8610 else if (code == GE_EXPR
8611 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8612 code = GT_EXPR;
8613 else
8614 return NULL_TREE;
8615 *strict_overflow_p = true;
8618 /* Now build the constant reduced in magnitude. But not if that
8619 would produce one outside of its type's range. */
8620 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8621 && ((sgn0 == 1
8622 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8623 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8624 || (sgn0 == -1
8625 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8626 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8627 /* We cannot swap the comparison here as that would cause us to
8628 endlessly recurse. */
8629 return NULL_TREE;
8631 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8632 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8633 if (code0 != INTEGER_CST)
8634 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8635 t = fold_convert (TREE_TYPE (arg1), t);
8637 /* If swapping might yield a more canonical form, do so. */
8638 if (swap)
8639 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8640 else
8641 return fold_build2_loc (loc, code, type, t, arg1);
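/* Editor's example (not part of fold-const.c): two of the
   magnitude-reducing rewrites above as integer identities, valid here
   because the constant 10 is nowhere near the type's extremes; the
   function name is hypothetical.  */

static int
check_reduce_magnitude (int x)
{
  return ((10 <= x) == (9 < x))		/* CST <= arg1  ->  CST-1 < arg1 */
	 && ((10 > x) == (9 >= x));	/* CST > arg1   ->  CST-1 >= arg1 */
}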
8644 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8645 overflow further. Try to decrease the magnitude of constants involved
8646 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8647 and put sole constants at the second argument position.
8648 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8650 static tree
8651 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8652 tree arg0, tree arg1)
8654 tree t;
8655 bool strict_overflow_p;
8656 const char * const warnmsg = G_("assuming signed overflow does not occur "
8657 "when reducing constant in comparison");
8659 /* Try canonicalization by simplifying arg0. */
8660 strict_overflow_p = false;
8661 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8662 &strict_overflow_p);
8663 if (t)
8665 if (strict_overflow_p)
8666 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8667 return t;
8670 /* Try canonicalization by simplifying arg1 using the swapped
8671 comparison. */
8672 code = swap_tree_comparison (code);
8673 strict_overflow_p = false;
8674 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8675 &strict_overflow_p);
8676 if (t && strict_overflow_p)
8677 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8678 return t;
8681 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8682 space. This is used to avoid issuing overflow warnings for
8683 expressions like &p->x which cannot wrap. */
8685 static bool
8686 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8688 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8689 return true;
8691 if (bitpos < 0)
8692 return true;
8694 wide_int wi_offset;
8695 int precision = TYPE_PRECISION (TREE_TYPE (base));
8696 if (offset == NULL_TREE)
8697 wi_offset = wi::zero (precision);
8698 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8699 return true;
8700 else
8701 wi_offset = offset;
8703 bool overflow;
8704 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8705 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8706 if (overflow)
8707 return true;
8709 if (!wi::fits_uhwi_p (total))
8710 return true;
8712 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8713 if (size <= 0)
8714 return true;
8716 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8717 array. */
8718 if (TREE_CODE (base) == ADDR_EXPR)
8720 HOST_WIDE_INT base_size;
8722 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8723 if (base_size > 0 && size < base_size)
8724 size = base_size;
8727 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8730 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8731 kind INTEGER_CST. This makes sure to properly sign-extend the
8732 constant. */
8734 static HOST_WIDE_INT
8735 size_low_cst (const_tree t)
8737 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8738 int prec = TYPE_PRECISION (TREE_TYPE (t));
8739 if (prec < HOST_BITS_PER_WIDE_INT)
8740 return sext_hwi (w, prec);
8741 return w;
8744 /* Subroutine of fold_binary. This routine performs all of the
8745 transformations that are common to the equality/inequality
8746 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8747 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8748 fold_binary should call fold_binary. Fold a comparison with
8749 tree code CODE and type TYPE with operands OP0 and OP1. Return
8750 the folded comparison or NULL_TREE. */
8752 static tree
8753 fold_comparison (location_t loc, enum tree_code code, tree type,
8754 tree op0, tree op1)
8756 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8757 tree arg0, arg1, tem;
8759 arg0 = op0;
8760 arg1 = op1;
8762 STRIP_SIGN_NOPS (arg0);
8763 STRIP_SIGN_NOPS (arg1);
8765 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8766 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8767 && (equality_code
8768 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8769 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8770 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8771 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8772 && TREE_CODE (arg1) == INTEGER_CST
8773 && !TREE_OVERFLOW (arg1))
8775 const enum tree_code
8776 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8777 tree const1 = TREE_OPERAND (arg0, 1);
8778 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8779 tree variable = TREE_OPERAND (arg0, 0);
8780 tree new_const = int_const_binop (reverse_op, const2, const1);
8782 /* If the constant operation overflowed this can be
8783 simplified as a comparison against INT_MAX/INT_MIN. */
8784 if (TREE_OVERFLOW (new_const)
8785 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8787 int const1_sgn = tree_int_cst_sgn (const1);
8788 enum tree_code code2 = code;
8790 /* Get the sign of the constant on the lhs if the
8791 operation were VARIABLE + CONST1. */
8792 if (TREE_CODE (arg0) == MINUS_EXPR)
8793 const1_sgn = -const1_sgn;
8795 /* The sign of the constant determines if we overflowed
8796 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8797 Canonicalize to the INT_MIN overflow by swapping the comparison
8798 if necessary. */
8799 if (const1_sgn == -1)
8800 code2 = swap_tree_comparison (code);
8802 /* We now can look at the canonicalized case
8803 VARIABLE + 1 CODE2 INT_MIN
8804 and decide on the result. */
8805 switch (code2)
8807 case EQ_EXPR:
8808 case LT_EXPR:
8809 case LE_EXPR:
8810 return
8811 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8813 case NE_EXPR:
8814 case GE_EXPR:
8815 case GT_EXPR:
8816 return
8817 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8819 default:
8820 gcc_unreachable ();
8823 else
8825 if (!equality_code)
8826 fold_overflow_warning ("assuming signed overflow does not occur "
8827 "when changing X +- C1 cmp C2 to "
8828 "X cmp C2 -+ C1",
8829 WARN_STRICT_OVERFLOW_COMPARISON);
8830 return fold_build2_loc (loc, code, type, variable, new_const);
8834 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8835 if (TREE_CODE (arg0) == MINUS_EXPR
8836 && equality_code
8837 && integer_zerop (arg1))
8839 /* ??? The transformation is valid for the other operators if overflow
8840 is undefined for the type, but performing it here badly interacts
8841 with the transformation in fold_cond_expr_with_comparison which
8842 attempts to synthesize ABS_EXPR. */
8843 if (!equality_code)
8844 fold_overflow_warning ("assuming signed overflow does not occur "
8845 "when changing X - Y cmp 0 to X cmp Y",
8846 WARN_STRICT_OVERFLOW_COMPARISON);
8847 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8848 TREE_OPERAND (arg0, 1));
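/* Editor's illustration (not part of fold-const.c): the X +- C1 CMP C2
   rewrite earlier in this function in concrete form: assuming x + 10
   cannot overflow (e.g. signed overflow is undefined), x + 10 < 20 folds
   to x < 10, with the strict-overflow warning issued for the
   non-equality codes.  */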
8851 /* For comparisons of pointers we can decompose it to a compile time
8852 comparison of the base objects and the offsets into the object.
8853 This requires at least one operand being an ADDR_EXPR or a
8854 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8855 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8856 && (TREE_CODE (arg0) == ADDR_EXPR
8857 || TREE_CODE (arg1) == ADDR_EXPR
8858 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8859 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8861 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8862 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8863 machine_mode mode;
8864 int volatilep, unsignedp;
8865 bool indirect_base0 = false, indirect_base1 = false;
8867 /* Get base and offset for the access. Strip ADDR_EXPR for
8868 get_inner_reference, but put it back by stripping INDIRECT_REF
8869 off the base object if possible. indirect_baseN will be true
8870 if baseN is not an address but refers to the object itself. */
8871 base0 = arg0;
8872 if (TREE_CODE (arg0) == ADDR_EXPR)
8874 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8875 &bitsize, &bitpos0, &offset0, &mode,
8876 &unsignedp, &volatilep, false);
8877 if (TREE_CODE (base0) == INDIRECT_REF)
8878 base0 = TREE_OPERAND (base0, 0);
8879 else
8880 indirect_base0 = true;
8882 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8884 base0 = TREE_OPERAND (arg0, 0);
8885 STRIP_SIGN_NOPS (base0);
8886 if (TREE_CODE (base0) == ADDR_EXPR)
8888 base0 = TREE_OPERAND (base0, 0);
8889 indirect_base0 = true;
8891 offset0 = TREE_OPERAND (arg0, 1);
8892 if (tree_fits_shwi_p (offset0))
8894 HOST_WIDE_INT off = size_low_cst (offset0);
8895 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8896 * BITS_PER_UNIT)
8897 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8899 bitpos0 = off * BITS_PER_UNIT;
8900 offset0 = NULL_TREE;
8905 base1 = arg1;
8906 if (TREE_CODE (arg1) == ADDR_EXPR)
8908 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8909 &bitsize, &bitpos1, &offset1, &mode,
8910 &unsignedp, &volatilep, false);
8911 if (TREE_CODE (base1) == INDIRECT_REF)
8912 base1 = TREE_OPERAND (base1, 0);
8913 else
8914 indirect_base1 = true;
8916 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8918 base1 = TREE_OPERAND (arg1, 0);
8919 STRIP_SIGN_NOPS (base1);
8920 if (TREE_CODE (base1) == ADDR_EXPR)
8922 base1 = TREE_OPERAND (base1, 0);
8923 indirect_base1 = true;
8925 offset1 = TREE_OPERAND (arg1, 1);
8926 if (tree_fits_shwi_p (offset1))
8928 HOST_WIDE_INT off = size_low_cst (offset1);
8929 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8930 * BITS_PER_UNIT)
8931 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8933 bitpos1 = off * BITS_PER_UNIT;
8934 offset1 = NULL_TREE;
8939 /* A local variable can never be pointed to by
8940 the default SSA name of an incoming parameter. */
8941 if ((TREE_CODE (arg0) == ADDR_EXPR
8942 && indirect_base0
8943 && TREE_CODE (base0) == VAR_DECL
8944 && auto_var_in_fn_p (base0, current_function_decl)
8945 && !indirect_base1
8946 && TREE_CODE (base1) == SSA_NAME
8947 && SSA_NAME_IS_DEFAULT_DEF (base1)
8948 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8949 || (TREE_CODE (arg1) == ADDR_EXPR
8950 && indirect_base1
8951 && TREE_CODE (base1) == VAR_DECL
8952 && auto_var_in_fn_p (base1, current_function_decl)
8953 && !indirect_base0
8954 && TREE_CODE (base0) == SSA_NAME
8955 && SSA_NAME_IS_DEFAULT_DEF (base0)
8956 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8958 if (code == NE_EXPR)
8959 return constant_boolean_node (1, type);
8960 else if (code == EQ_EXPR)
8961 return constant_boolean_node (0, type);
8963 /* If we have equivalent bases we might be able to simplify. */
8964 else if (indirect_base0 == indirect_base1
8965 && operand_equal_p (base0, base1, 0))
8967 /* We can fold this expression to a constant if the non-constant
8968 offset parts are equal. */
8969 if ((offset0 == offset1
8970 || (offset0 && offset1
8971 && operand_equal_p (offset0, offset1, 0)))
8972 && (code == EQ_EXPR
8973 || code == NE_EXPR
8974 || (indirect_base0 && DECL_P (base0))
8975 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8978 if (!equality_code
8979 && bitpos0 != bitpos1
8980 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8981 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8982 fold_overflow_warning (("assuming pointer wraparound does not "
8983 "occur when comparing P +- C1 with "
8984 "P +- C2"),
8985 WARN_STRICT_OVERFLOW_CONDITIONAL);
8987 switch (code)
8989 case EQ_EXPR:
8990 return constant_boolean_node (bitpos0 == bitpos1, type);
8991 case NE_EXPR:
8992 return constant_boolean_node (bitpos0 != bitpos1, type);
8993 case LT_EXPR:
8994 return constant_boolean_node (bitpos0 < bitpos1, type);
8995 case LE_EXPR:
8996 return constant_boolean_node (bitpos0 <= bitpos1, type);
8997 case GE_EXPR:
8998 return constant_boolean_node (bitpos0 >= bitpos1, type);
8999 case GT_EXPR:
9000 return constant_boolean_node (bitpos0 > bitpos1, type);
9001 default:;
9004 /* We can simplify the comparison to a comparison of the variable
9005 offset parts if the constant offset parts are equal.
9006 Be careful to use signed sizetype here because otherwise we
9007 mess with array offsets in the wrong way. This is possible
 9008	     because pointer arithmetic is restricted to remain within an
9009 object and overflow on pointer differences is undefined as of
9010 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
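	 /* Editor's sketch (an addition, not in the original source): with

	      int g (int *p, long i, long j)
	      {
	        return &p[i] < &p[j];
	      }

	    the bases compare equal and the bit positions match, so the
	    branch below reduces the comparison to the converted variable
	    offsets, essentially (ssizetype) (i * 4) < (ssizetype) (j * 4).  */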
9011 else if (bitpos0 == bitpos1
9012 && (equality_code
9013 || (indirect_base0 && DECL_P (base0))
9014 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9016 /* By converting to signed sizetype we cover middle-end pointer
9017 arithmetic which operates on unsigned pointer types of size
9018 type size and ARRAY_REF offsets which are properly sign or
9019 zero extended from their type in case it is narrower than
9020 sizetype. */
9021 if (offset0 == NULL_TREE)
9022 offset0 = build_int_cst (ssizetype, 0);
9023 else
9024 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9025 if (offset1 == NULL_TREE)
9026 offset1 = build_int_cst (ssizetype, 0);
9027 else
9028 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9030 if (!equality_code
9031 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9032 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9033 fold_overflow_warning (("assuming pointer wraparound does not "
9034 "occur when comparing P +- C1 with "
9035 "P +- C2"),
9036 WARN_STRICT_OVERFLOW_COMPARISON);
9038 return fold_build2_loc (loc, code, type, offset0, offset1);
9041 /* For non-equal bases we can simplify if they are addresses
 9042	 of declarations with different addresses.  */
9043 else if (indirect_base0 && indirect_base1
9044 /* We know that !operand_equal_p (base0, base1, 0)
9045 because the if condition was false. But make
 9046	 sure the two decls are not the same.  */
9047 && base0 != base1
9048 && TREE_CODE (arg0) == ADDR_EXPR
9049 && TREE_CODE (arg1) == ADDR_EXPR
9050 && DECL_P (base0)
9051 && DECL_P (base1)
9052 /* Watch for aliases. */
9053 && (!decl_in_symtab_p (base0)
9054 || !decl_in_symtab_p (base1)
9055 || !symtab_node::get_create (base0)->equal_address_to
9056 (symtab_node::get_create (base1))))
9058 if (code == EQ_EXPR)
9059 return omit_two_operands_loc (loc, type, boolean_false_node,
9060 arg0, arg1);
9061 else if (code == NE_EXPR)
9062 return omit_two_operands_loc (loc, type, boolean_true_node,
9063 arg0, arg1);
9065 /* For equal offsets we can simplify to a comparison of the
9066 base addresses. */
9067 else if (bitpos0 == bitpos1
9068 && (indirect_base0
9069 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9070 && (indirect_base1
9071 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9072 && ((offset0 == offset1)
9073 || (offset0 && offset1
9074 && operand_equal_p (offset0, offset1, 0))))
9076 if (indirect_base0)
9077 base0 = build_fold_addr_expr_loc (loc, base0);
9078 if (indirect_base1)
9079 base1 = build_fold_addr_expr_loc (loc, base1);
9080 return fold_build2_loc (loc, code, type, base0, base1);
9084 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9085 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9086 the resulting offset is smaller in absolute value than the
9087 original one and has the same sign. */
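   /* Editor's sketch (an addition, not in the original source): with
      signed X and Y,

	x + 2 < y + 5

      can become x < y + 3: the combined constant 3 is smaller in
      absolute value than 5 and has the same sign, so the rewrite
      cannot introduce an overflow that was not already present.  */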
9088 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9089 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9090 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9091 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9092 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9093 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9094 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9095 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9097 tree const1 = TREE_OPERAND (arg0, 1);
9098 tree const2 = TREE_OPERAND (arg1, 1);
9099 tree variable1 = TREE_OPERAND (arg0, 0);
9100 tree variable2 = TREE_OPERAND (arg1, 0);
9101 tree cst;
9102 const char * const warnmsg = G_("assuming signed overflow does not "
9103 "occur when combining constants around "
9104 "a comparison");
9106 /* Put the constant on the side where it doesn't overflow and is
 9107	 of lower absolute value and of the same sign as before.  */
9108 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9109 ? MINUS_EXPR : PLUS_EXPR,
9110 const2, const1);
9111 if (!TREE_OVERFLOW (cst)
9112 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9113 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9115 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9116 return fold_build2_loc (loc, code, type,
9117 variable1,
9118 fold_build2_loc (loc, TREE_CODE (arg1),
9119 TREE_TYPE (arg1),
9120 variable2, cst));
9123 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9124 ? MINUS_EXPR : PLUS_EXPR,
9125 const1, const2);
9126 if (!TREE_OVERFLOW (cst)
9127 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9128 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9130 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9131 return fold_build2_loc (loc, code, type,
9132 fold_build2_loc (loc, TREE_CODE (arg0),
9133 TREE_TYPE (arg0),
9134 variable1, cst),
9135 variable2);
9139 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9140 signed arithmetic case. That form is created by the compiler
9141 often enough for folding it to be of value. One example is in
9142 computing loop trip counts after Operator Strength Reduction. */
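   /* Editor's sketch (an addition, not in the original source): with
      signed X,

	x * 4 > 0    becomes  x > 0
	x * -4 > 0   becomes  x < 0

      i.e. the multiplication is dropped, and the comparison is
      swapped when the constant is negative, relying on signed
      overflow being undefined.  */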
9143 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9144 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9145 && TREE_CODE (arg0) == MULT_EXPR
9146 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9147 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9148 && integer_zerop (arg1))
9150 tree const1 = TREE_OPERAND (arg0, 1);
9151 tree const2 = arg1; /* zero */
9152 tree variable1 = TREE_OPERAND (arg0, 0);
9153 enum tree_code cmp_code = code;
9155 /* Handle unfolded multiplication by zero. */
9156 if (integer_zerop (const1))
9157 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9159 fold_overflow_warning (("assuming signed overflow does not occur when "
9160 "eliminating multiplication in comparison "
9161 "with zero"),
9162 WARN_STRICT_OVERFLOW_COMPARISON);
9164 /* If const1 is negative we swap the sense of the comparison. */
9165 if (tree_int_cst_sgn (const1) < 0)
9166 cmp_code = swap_tree_comparison (cmp_code);
9168 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9171 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9172 if (tem)
9173 return tem;
9175 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9177 tree targ0 = strip_float_extensions (arg0);
9178 tree targ1 = strip_float_extensions (arg1);
9179 tree newtype = TREE_TYPE (targ0);
9181 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9182 newtype = TREE_TYPE (targ1);
9184 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9185 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9186 return fold_build2_loc (loc, code, type,
9187 fold_convert_loc (loc, newtype, targ0),
9188 fold_convert_loc (loc, newtype, targ1));
9190 /* (-a) CMP (-b) -> b CMP a */
9191 if (TREE_CODE (arg0) == NEGATE_EXPR
9192 && TREE_CODE (arg1) == NEGATE_EXPR)
9193 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9194 TREE_OPERAND (arg0, 0));
9196 if (TREE_CODE (arg1) == REAL_CST)
9198 REAL_VALUE_TYPE cst;
9199 cst = TREE_REAL_CST (arg1);
9201 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9202 if (TREE_CODE (arg0) == NEGATE_EXPR)
9203 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9204 TREE_OPERAND (arg0, 0),
9205 build_real (TREE_TYPE (arg1),
9206 real_value_negate (&cst)));
9208 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9209 /* a CMP (-0) -> a CMP 0 */
9210 if (REAL_VALUE_MINUS_ZERO (cst))
9211 return fold_build2_loc (loc, code, type, arg0,
9212 build_real (TREE_TYPE (arg1), dconst0));
9214 /* x != NaN is always true, other ops are always false. */
9215 if (REAL_VALUE_ISNAN (cst)
9216 && ! HONOR_SNANS (arg1))
9218 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9219 return omit_one_operand_loc (loc, type, tem, arg0);
9222 /* Fold comparisons against infinity. */
9223 if (REAL_VALUE_ISINF (cst)
9224 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9226 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9227 if (tem != NULL_TREE)
9228 return tem;
9232 /* If this is a comparison of a real constant with a PLUS_EXPR
9233 or a MINUS_EXPR of a real constant, we can convert it into a
9234 comparison with a revised real constant as long as no overflow
9235 occurs when unsafe_math_optimizations are enabled. */
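	 /* Editor's sketch (an addition, not in the original source):
	    under -funsafe-math-optimizations,

	      x + 2.0 < 10.0   becomes   x < 8.0

	    where 8.0 comes from const_binop (MINUS_EXPR, 10.0, 2.0); the
	    transformation is refused if computing the revised constant
	    overflows.  */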
9236 if (flag_unsafe_math_optimizations
9237 && TREE_CODE (arg1) == REAL_CST
9238 && (TREE_CODE (arg0) == PLUS_EXPR
9239 || TREE_CODE (arg0) == MINUS_EXPR)
9240 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9241 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9242 ? MINUS_EXPR : PLUS_EXPR,
9243 arg1, TREE_OPERAND (arg0, 1)))
9244 && !TREE_OVERFLOW (tem))
9245 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9247 /* Likewise, we can simplify a comparison of a real constant with
9248 a MINUS_EXPR whose first operand is also a real constant, i.e.
9249 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9250 floating-point types only if -fassociative-math is set. */
9251 if (flag_associative_math
9252 && TREE_CODE (arg1) == REAL_CST
9253 && TREE_CODE (arg0) == MINUS_EXPR
9254 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9255 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9256 arg1))
9257 && !TREE_OVERFLOW (tem))
9258 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9259 TREE_OPERAND (arg0, 1), tem);
9261 /* Fold comparisons against built-in math functions. */
9262 if (TREE_CODE (arg1) == REAL_CST
9263 && flag_unsafe_math_optimizations
9264 && ! flag_errno_math)
9266 enum built_in_function fcode = builtin_mathfn_code (arg0);
9268 if (fcode != END_BUILTINS)
9270 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9271 if (tem != NULL_TREE)
9272 return tem;
9277 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9278 && CONVERT_EXPR_P (arg0))
9280 /* If we are widening one operand of an integer comparison,
9281 see if the other operand is similarly being widened. Perhaps we
9282 can do the comparison in the narrower type. */
9283 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9284 if (tem)
9285 return tem;
9287 /* Or if we are changing signedness. */
9288 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9289 if (tem)
9290 return tem;
9293 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9294 constant, we can simplify it. */
9295 if (TREE_CODE (arg1) == INTEGER_CST
9296 && (TREE_CODE (arg0) == MIN_EXPR
9297 || TREE_CODE (arg0) == MAX_EXPR)
9298 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9300 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9301 if (tem)
9302 return tem;
9305 /* Simplify comparison of something with itself. (For IEEE
9306 floating-point, we can only do some of these simplifications.) */
9307 if (operand_equal_p (arg0, arg1, 0))
9309 switch (code)
9311 case EQ_EXPR:
9312 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9313 || ! HONOR_NANS (arg0))
9314 return constant_boolean_node (1, type);
9315 break;
9317 case GE_EXPR:
9318 case LE_EXPR:
9319 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9320 || ! HONOR_NANS (arg0))
9321 return constant_boolean_node (1, type);
9322 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9324 case NE_EXPR:
 9325	 /* For NE, we can only do this simplification if the type is integer
 9326	    or we don't honor IEEE floating-point NaNs.  */
9327 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9328 && HONOR_NANS (arg0))
9329 break;
9330 /* ... fall through ... */
9331 case GT_EXPR:
9332 case LT_EXPR:
9333 return constant_boolean_node (0, type);
9334 default:
9335 gcc_unreachable ();
9339 /* If we are comparing an expression that just has comparisons
9340 of two integer values, arithmetic expressions of those comparisons,
9341 and constants, we can simplify it. There are only three cases
9342 to check: the two values can either be equal, the first can be
9343 greater, or the second can be greater. Fold the expression for
9344 those three values. Since each value must be 0 or 1, we have
9345 eight possibilities, each of which corresponds to the constant 0
9346 or 1 or one of the six possible comparisons.
9348 This handles common cases like (a > b) == 0 but also handles
9349 expressions like ((x > y) - (y > x)) > 0, which supposedly
9350 occur in macroized code. */
9352 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9354 tree cval1 = 0, cval2 = 0;
9355 int save_p = 0;
9357 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9358 /* Don't handle degenerate cases here; they should already
9359 have been handled anyway. */
9360 && cval1 != 0 && cval2 != 0
9361 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9362 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9363 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9364 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9365 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9366 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9367 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9369 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9370 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9372 /* We can't just pass T to eval_subst in case cval1 or cval2
9373 was the same as ARG1. */
9375 tree high_result
9376 = fold_build2_loc (loc, code, type,
9377 eval_subst (loc, arg0, cval1, maxval,
9378 cval2, minval),
9379 arg1);
9380 tree equal_result
9381 = fold_build2_loc (loc, code, type,
9382 eval_subst (loc, arg0, cval1, maxval,
9383 cval2, maxval),
9384 arg1);
9385 tree low_result
9386 = fold_build2_loc (loc, code, type,
9387 eval_subst (loc, arg0, cval1, minval,
9388 cval2, maxval),
9389 arg1);
9391 /* All three of these results should be 0 or 1. Confirm they are.
9392 Then use those values to select the proper code to use. */
9394 if (TREE_CODE (high_result) == INTEGER_CST
9395 && TREE_CODE (equal_result) == INTEGER_CST
9396 && TREE_CODE (low_result) == INTEGER_CST)
9398 /* Make a 3-bit mask with the high-order bit being the
9399 value for `>', the next for '=', and the low for '<'. */
9400 switch ((integer_onep (high_result) * 4)
9401 + (integer_onep (equal_result) * 2)
9402 + integer_onep (low_result))
9404 case 0:
9405 /* Always false. */
9406 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9407 case 1:
9408 code = LT_EXPR;
9409 break;
9410 case 2:
9411 code = EQ_EXPR;
9412 break;
9413 case 3:
9414 code = LE_EXPR;
9415 break;
9416 case 4:
9417 code = GT_EXPR;
9418 break;
9419 case 5:
9420 code = NE_EXPR;
9421 break;
9422 case 6:
9423 code = GE_EXPR;
9424 break;
9425 case 7:
9426 /* Always true. */
9427 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9430 if (save_p)
9432 tem = save_expr (build2 (code, type, cval1, cval2));
9433 SET_EXPR_LOCATION (tem, loc);
9434 return tem;
9436 return fold_build2_loc (loc, code, type, cval1, cval2);
9441 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9442 into a single range test. */
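   /* Editor's sketch (an addition, not in the original source): for
      unsigned X,

	x / 4 == 2

      holds exactly for 8 <= x <= 11, so fold_div_compare can express
      it as a single range test along the lines of x - 8 <= 3.  */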
9443 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9444 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9445 && TREE_CODE (arg1) == INTEGER_CST
9446 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9447 && !integer_zerop (TREE_OPERAND (arg0, 1))
9448 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9449 && !TREE_OVERFLOW (arg1))
9451 tem = fold_div_compare (loc, code, type, arg0, arg1);
9452 if (tem != NULL_TREE)
9453 return tem;
9456 /* Fold ~X op ~Y as Y op X. */
9457 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9458 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9460 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9461 return fold_build2_loc (loc, code, type,
9462 fold_convert_loc (loc, cmp_type,
9463 TREE_OPERAND (arg1, 0)),
9464 TREE_OPERAND (arg0, 0));
9467 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9468 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9469 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9471 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9472 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9473 TREE_OPERAND (arg0, 0),
9474 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9475 fold_convert_loc (loc, cmp_type, arg1)));
9478 return NULL_TREE;
9482 /* Subroutine of fold_binary. Optimize complex multiplications of the
9483 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9484 argument EXPR represents the expression "z" of type TYPE. */
9486 static tree
9487 fold_mult_zconjz (location_t loc, tree type, tree expr)
9489 tree itype = TREE_TYPE (type);
9490 tree rpart, ipart, tem;
9492 if (TREE_CODE (expr) == COMPLEX_EXPR)
9494 rpart = TREE_OPERAND (expr, 0);
9495 ipart = TREE_OPERAND (expr, 1);
9497 else if (TREE_CODE (expr) == COMPLEX_CST)
9499 rpart = TREE_REALPART (expr);
9500 ipart = TREE_IMAGPART (expr);
9502 else
9504 expr = save_expr (expr);
9505 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9506 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9509 rpart = save_expr (rpart);
9510 ipart = save_expr (ipart);
9511 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9512 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9513 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9514 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9515 build_zero_cst (itype));
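
/* Editor's sketch (an addition, not in the original source): for
   _Complex int z with z = a + b*i, the function above yields

     z * ~z  ==>  COMPLEX_EXPR <a*a + b*b, 0>

   with A and B wrapped in save_expr so each part is evaluated only
   once.  */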
9519 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9520 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9521 guarantees that P and N have the same least significant log2(M) bits.
9522 N is not otherwise constrained. In particular, N is not normalized to
9523 0 <= N < M as is common. In general, the precise value of P is unknown.
9524 M is chosen as large as possible such that constant N can be determined.
9526 Returns M and sets *RESIDUE to N.
9528 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9529 account. This is not always possible due to PR 35705.
9532 static unsigned HOST_WIDE_INT
9533 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9534 bool allow_func_align)
9536 enum tree_code code;
9538 *residue = 0;
9540 code = TREE_CODE (expr);
9541 if (code == ADDR_EXPR)
9543 unsigned int bitalign;
9544 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9545 *residue /= BITS_PER_UNIT;
9546 return bitalign / BITS_PER_UNIT;
9548 else if (code == POINTER_PLUS_EXPR)
9550 tree op0, op1;
9551 unsigned HOST_WIDE_INT modulus;
9552 enum tree_code inner_code;
9554 op0 = TREE_OPERAND (expr, 0);
9555 STRIP_NOPS (op0);
9556 modulus = get_pointer_modulus_and_residue (op0, residue,
9557 allow_func_align);
9559 op1 = TREE_OPERAND (expr, 1);
9560 STRIP_NOPS (op1);
9561 inner_code = TREE_CODE (op1);
9562 if (inner_code == INTEGER_CST)
9564 *residue += TREE_INT_CST_LOW (op1);
9565 return modulus;
9567 else if (inner_code == MULT_EXPR)
9569 op1 = TREE_OPERAND (op1, 1);
9570 if (TREE_CODE (op1) == INTEGER_CST)
9572 unsigned HOST_WIDE_INT align;
9574 /* Compute the greatest power-of-2 divisor of op1. */
9575 align = TREE_INT_CST_LOW (op1);
9576 align &= -align;
9578 /* If align is non-zero and less than *modulus, replace
 9579	     *modulus with align.  If align is 0, then either op1 is 0
9580 or the greatest power-of-2 divisor of op1 doesn't fit in an
9581 unsigned HOST_WIDE_INT. In either case, no additional
9582 constraint is imposed. */
9583 if (align)
9584 modulus = MIN (modulus, align);
9586 return modulus;
9591 /* If we get here, we were unable to determine anything useful about the
9592 expression. */
9593 return 1;
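
/* Editor's worked example (an addition, not in the original source):
   for

     char a[32] __attribute__ ((aligned (16)));

   the address (&a p+ i * 12) p+ 5 recurses as follows: &a gives
   modulus 16, the MULT_EXPR operand lowers it to min (16, 4) = 4
   (4 being the largest power of two dividing 12), and the
   INTEGER_CST adds 5 to the residue.  Result: M = 4, N = 5, and
   indeed the address is always congruent to 5 modulo 4.  */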
9596 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9597 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9599 static bool
9600 vec_cst_ctor_to_array (tree arg, tree *elts)
9602 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9604 if (TREE_CODE (arg) == VECTOR_CST)
9606 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9607 elts[i] = VECTOR_CST_ELT (arg, i);
9609 else if (TREE_CODE (arg) == CONSTRUCTOR)
9611 constructor_elt *elt;
9613 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9614 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9615 return false;
9616 else
9617 elts[i] = elt->value;
9619 else
9620 return false;
9621 for (; i < nelts; i++)
9622 elts[i]
9623 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9624 return true;
 9627	/* Attempt to fold a vector permutation of the ARG0 and ARG1 vectors using the SEL
9628 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9629 NULL_TREE otherwise. */
9631 static tree
9632 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9634 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9635 tree *elts;
9636 bool need_ctor = false;
9638 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9639 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9640 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9641 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9642 return NULL_TREE;
9644 elts = XALLOCAVEC (tree, nelts * 3);
9645 if (!vec_cst_ctor_to_array (arg0, elts)
9646 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9647 return NULL_TREE;
9649 for (i = 0; i < nelts; i++)
9651 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9652 need_ctor = true;
9653 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9656 if (need_ctor)
9658 vec<constructor_elt, va_gc> *v;
9659 vec_alloc (v, nelts);
9660 for (i = 0; i < nelts; i++)
9661 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9662 return build_constructor (type, v);
9664 else
9665 return build_vector (type, &elts[2 * nelts]);
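
/* Editor's sketch (an addition, not in the original source): with

     v4si a = { 10, 11, 12, 13 }, b = { 20, 21, 22, 23 };

   and sel = { 0, 4, 1, 5 }, the selector indexes the 8-element
   concatenation of A and B, so the fold produces the VECTOR_CST
   { 10, 20, 11, 21 }; a CONSTRUCTOR is built instead whenever a
   selected element is not a constant.  */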
 9668	/* Try to fold a pointer difference of type TYPE between two address expressions of
9669 array references AREF0 and AREF1 using location LOC. Return a
9670 simplified expression for the difference or NULL_TREE. */
9672 static tree
9673 fold_addr_of_array_ref_difference (location_t loc, tree type,
9674 tree aref0, tree aref1)
9676 tree base0 = TREE_OPERAND (aref0, 0);
9677 tree base1 = TREE_OPERAND (aref1, 0);
9678 tree base_offset = build_int_cst (type, 0);
9680 /* If the bases are array references as well, recurse. If the bases
9681 are pointer indirections compute the difference of the pointers.
9682 If the bases are equal, we are set. */
9683 if ((TREE_CODE (base0) == ARRAY_REF
9684 && TREE_CODE (base1) == ARRAY_REF
9685 && (base_offset
9686 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9687 || (INDIRECT_REF_P (base0)
9688 && INDIRECT_REF_P (base1)
9689 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9690 TREE_OPERAND (base0, 0),
9691 TREE_OPERAND (base1, 0))))
9692 || operand_equal_p (base0, base1, 0))
9694 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9695 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9696 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9697 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9698 return fold_build2_loc (loc, PLUS_EXPR, type,
9699 base_offset,
9700 fold_build2_loc (loc, MULT_EXPR, type,
9701 diff, esz));
9703 return NULL_TREE;
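
/* Editor's sketch (an addition, not in the original source): for
   int a[8][8], the byte difference

     &a[i][3] - &a[j][5]

   recurses on the bases a[i] and a[j], which contribute
   (i - j) * 32, and the top level adds (3 - 5) * 4, giving
   (i - j) * 32 + (3 - 5) * 4 overall.  */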
9706 /* If the real or vector real constant CST of type TYPE has an exact
9707 inverse, return it, else return NULL. */
9709 tree
9710 exact_inverse (tree type, tree cst)
9712 REAL_VALUE_TYPE r;
9713 tree unit_type, *elts;
9714 machine_mode mode;
9715 unsigned vec_nelts, i;
9717 switch (TREE_CODE (cst))
9719 case REAL_CST:
9720 r = TREE_REAL_CST (cst);
9722 if (exact_real_inverse (TYPE_MODE (type), &r))
9723 return build_real (type, r);
9725 return NULL_TREE;
9727 case VECTOR_CST:
9728 vec_nelts = VECTOR_CST_NELTS (cst);
9729 elts = XALLOCAVEC (tree, vec_nelts);
9730 unit_type = TREE_TYPE (type);
9731 mode = TYPE_MODE (unit_type);
9733 for (i = 0; i < vec_nelts; i++)
9735 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9736 if (!exact_real_inverse (mode, &r))
9737 return NULL_TREE;
9738 elts[i] = build_real (unit_type, r);
9741 return build_vector (type, elts);
9743 default:
9744 return NULL_TREE;
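
/* Editor's sketch (an addition, not in the original source):
   exact_inverse (double_type_node, 4.0) yields 0.25, which is
   exactly representable, letting x / 4.0 become x * 0.25; for 3.0
   the inverse is inexact and NULL_TREE is returned.  For a
   VECTOR_CST every element must have an exact inverse.  */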
9748 /* Mask out the tz least significant bits of X of type TYPE where
9749 tz is the number of trailing zeroes in Y. */
9750 static wide_int
9751 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9753 int tz = wi::ctz (y);
9754 if (tz > 0)
9755 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9756 return x;
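
/* Editor's sketch (an addition, not in the original source): if
   Y = 24 (binary 11000, so tz = 3), the low three bits of X are
   cleared: X = 13 (binary 1101) becomes 8 (binary 1000), matching
   the fact that X & Y can never have those low bits set.  */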
9759 /* Return true when T is an address and is known to be nonzero.
9760 For floating point we further ensure that T is not denormal.
9761 Similar logic is present in nonzero_address in rtlanal.h.
9763 If the return value is based on the assumption that signed overflow
9764 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9765 change *STRICT_OVERFLOW_P. */
9767 static bool
9768 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9770 tree type = TREE_TYPE (t);
9771 enum tree_code code;
9773 /* Doing something useful for floating point would need more work. */
9774 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9775 return false;
9777 code = TREE_CODE (t);
9778 switch (TREE_CODE_CLASS (code))
9780 case tcc_unary:
9781 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9782 strict_overflow_p);
9783 case tcc_binary:
9784 case tcc_comparison:
9785 return tree_binary_nonzero_warnv_p (code, type,
9786 TREE_OPERAND (t, 0),
9787 TREE_OPERAND (t, 1),
9788 strict_overflow_p);
9789 case tcc_constant:
9790 case tcc_declaration:
9791 case tcc_reference:
9792 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9794 default:
9795 break;
9798 switch (code)
9800 case TRUTH_NOT_EXPR:
9801 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9802 strict_overflow_p);
9804 case TRUTH_AND_EXPR:
9805 case TRUTH_OR_EXPR:
9806 case TRUTH_XOR_EXPR:
9807 return tree_binary_nonzero_warnv_p (code, type,
9808 TREE_OPERAND (t, 0),
9809 TREE_OPERAND (t, 1),
9810 strict_overflow_p);
9812 case COND_EXPR:
9813 case CONSTRUCTOR:
9814 case OBJ_TYPE_REF:
9815 case ASSERT_EXPR:
9816 case ADDR_EXPR:
9817 case WITH_SIZE_EXPR:
9818 case SSA_NAME:
9819 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9821 case COMPOUND_EXPR:
9822 case MODIFY_EXPR:
9823 case BIND_EXPR:
9824 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9825 strict_overflow_p);
9827 case SAVE_EXPR:
9828 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9829 strict_overflow_p);
9831 case CALL_EXPR:
9833 tree fndecl = get_callee_fndecl (t);
9834 if (!fndecl) return false;
9835 if (flag_delete_null_pointer_checks && !flag_check_new
9836 && DECL_IS_OPERATOR_NEW (fndecl)
9837 && !TREE_NOTHROW (fndecl))
9838 return true;
9839 if (flag_delete_null_pointer_checks
9840 && lookup_attribute ("returns_nonnull",
9841 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9842 return true;
9843 return alloca_call_p (t);
9846 default:
9847 break;
9849 return false;
9852 /* Return true when T is an address and is known to be nonzero.
9853 Handle warnings about undefined signed overflow. */
9855 static bool
9856 tree_expr_nonzero_p (tree t)
9858 bool ret, strict_overflow_p;
9860 strict_overflow_p = false;
9861 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9862 if (strict_overflow_p)
9863 fold_overflow_warning (("assuming signed overflow does not occur when "
9864 "determining that expression is always "
9865 "non-zero"),
9866 WARN_STRICT_OVERFLOW_MISC);
9867 return ret;
9870 /* Fold a binary expression of code CODE and type TYPE with operands
9871 OP0 and OP1. LOC is the location of the resulting expression.
9872 Return the folded expression if folding is successful. Otherwise,
9873 return NULL_TREE. */
9875 tree
9876 fold_binary_loc (location_t loc,
9877 enum tree_code code, tree type, tree op0, tree op1)
9879 enum tree_code_class kind = TREE_CODE_CLASS (code);
9880 tree arg0, arg1, tem;
9881 tree t1 = NULL_TREE;
9882 bool strict_overflow_p;
9883 unsigned int prec;
9885 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9886 && TREE_CODE_LENGTH (code) == 2
9887 && op0 != NULL_TREE
9888 && op1 != NULL_TREE);
9890 arg0 = op0;
9891 arg1 = op1;
9893 /* Strip any conversions that don't change the mode. This is
9894 safe for every expression, except for a comparison expression
9895 because its signedness is derived from its operands. So, in
9896 the latter case, only strip conversions that don't change the
9897 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9898 preserved.
9900 Note that this is done as an internal manipulation within the
9901 constant folder, in order to find the simplest representation
9902 of the arguments so that their form can be studied. In any
 9903	 case, the appropriate type conversions should be put back in
9904 the tree that will get out of the constant folder. */
9906 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9908 STRIP_SIGN_NOPS (arg0);
9909 STRIP_SIGN_NOPS (arg1);
9911 else
9913 STRIP_NOPS (arg0);
9914 STRIP_NOPS (arg1);
9917 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9918 constant but we can't do arithmetic on them. */
9919 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9921 tem = const_binop (code, type, arg0, arg1);
9922 if (tem != NULL_TREE)
9924 if (TREE_TYPE (tem) != type)
9925 tem = fold_convert_loc (loc, type, tem);
9926 return tem;
9930 /* If this is a commutative operation, and ARG0 is a constant, move it
9931 to ARG1 to reduce the number of tests below. */
9932 if (commutative_tree_code (code)
9933 && tree_swap_operands_p (arg0, arg1, true))
9934 return fold_build2_loc (loc, code, type, op1, op0);
9936 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9937 to ARG1 to reduce the number of tests below. */
9938 if (kind == tcc_comparison
9939 && tree_swap_operands_p (arg0, arg1, true))
9940 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9942 tem = generic_simplify (loc, code, type, op0, op1);
9943 if (tem)
9944 return tem;
9946 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9948 First check for cases where an arithmetic operation is applied to a
9949 compound, conditional, or comparison operation. Push the arithmetic
9950 operation inside the compound or conditional to see if any folding
9951 can then be done. Convert comparison to conditional for this purpose.
 9952	 This also optimizes non-constant cases that used to be done in
9953 expand_expr.
9955 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9956 one of the operands is a comparison and the other is a comparison, a
9957 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9958 code below would make the expression more complex. Change it to a
9959 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9960 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9962 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9963 || code == EQ_EXPR || code == NE_EXPR)
9964 && TREE_CODE (type) != VECTOR_TYPE
9965 && ((truth_value_p (TREE_CODE (arg0))
9966 && (truth_value_p (TREE_CODE (arg1))
9967 || (TREE_CODE (arg1) == BIT_AND_EXPR
9968 && integer_onep (TREE_OPERAND (arg1, 1)))))
9969 || (truth_value_p (TREE_CODE (arg1))
9970 && (truth_value_p (TREE_CODE (arg0))
9971 || (TREE_CODE (arg0) == BIT_AND_EXPR
9972 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9974 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9975 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9976 : TRUTH_XOR_EXPR,
9977 boolean_type_node,
9978 fold_convert_loc (loc, boolean_type_node, arg0),
9979 fold_convert_loc (loc, boolean_type_node, arg1));
9981 if (code == EQ_EXPR)
9982 tem = invert_truthvalue_loc (loc, tem);
9984 return fold_convert_loc (loc, type, tem);
9987 if (TREE_CODE_CLASS (code) == tcc_binary
9988 || TREE_CODE_CLASS (code) == tcc_comparison)
9990 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9992 tem = fold_build2_loc (loc, code, type,
9993 fold_convert_loc (loc, TREE_TYPE (op0),
9994 TREE_OPERAND (arg0, 1)), op1);
9995 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9996 tem);
9998 if (TREE_CODE (arg1) == COMPOUND_EXPR
9999 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10001 tem = fold_build2_loc (loc, code, type, op0,
10002 fold_convert_loc (loc, TREE_TYPE (op1),
10003 TREE_OPERAND (arg1, 1)));
10004 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10005 tem);
10008 if (TREE_CODE (arg0) == COND_EXPR
10009 || TREE_CODE (arg0) == VEC_COND_EXPR
10010 || COMPARISON_CLASS_P (arg0))
10012 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10013 arg0, arg1,
10014 /*cond_first_p=*/1);
10015 if (tem != NULL_TREE)
10016 return tem;
10019 if (TREE_CODE (arg1) == COND_EXPR
10020 || TREE_CODE (arg1) == VEC_COND_EXPR
10021 || COMPARISON_CLASS_P (arg1))
10023 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10024 arg1, arg0,
10025 /*cond_first_p=*/0);
10026 if (tem != NULL_TREE)
10027 return tem;
10031 switch (code)
10033 case MEM_REF:
10034 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10035 if (TREE_CODE (arg0) == ADDR_EXPR
10036 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10038 tree iref = TREE_OPERAND (arg0, 0);
10039 return fold_build2 (MEM_REF, type,
10040 TREE_OPERAND (iref, 0),
10041 int_const_binop (PLUS_EXPR, arg1,
10042 TREE_OPERAND (iref, 1)));
10045 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10046 if (TREE_CODE (arg0) == ADDR_EXPR
10047 && handled_component_p (TREE_OPERAND (arg0, 0)))
10049 tree base;
10050 HOST_WIDE_INT coffset;
10051 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10052 &coffset);
10053 if (!base)
10054 return NULL_TREE;
10055 return fold_build2 (MEM_REF, type,
10056 build_fold_addr_expr (base),
10057 int_const_binop (PLUS_EXPR, arg1,
10058 size_int (coffset)));
10061 return NULL_TREE;
10063 case POINTER_PLUS_EXPR:
10064 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10065 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10066 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10067 return fold_convert_loc (loc, type,
10068 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10069 fold_convert_loc (loc, sizetype,
10070 arg1),
10071 fold_convert_loc (loc, sizetype,
10072 arg0)));
10074 return NULL_TREE;
10076 case PLUS_EXPR:
10077 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10079 /* X + (X / CST) * -CST is X % CST. */
10080 if (TREE_CODE (arg1) == MULT_EXPR
10081 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10082 && operand_equal_p (arg0,
10083 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10085 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10086 tree cst1 = TREE_OPERAND (arg1, 1);
10087 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10088 cst1, cst0);
10089 if (sum && integer_zerop (sum))
10090 return fold_convert_loc (loc, type,
10091 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10092 TREE_TYPE (arg0), arg0,
10093 cst0));
10097 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10098 one. Make sure the type is not saturating and has the signedness of
10099 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10100 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10101 if ((TREE_CODE (arg0) == MULT_EXPR
10102 || TREE_CODE (arg1) == MULT_EXPR)
10103 && !TYPE_SATURATING (type)
10104 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10105 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10106 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10108 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10109 if (tem)
10110 return tem;
10113 if (! FLOAT_TYPE_P (type))
10115 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10116 with a constant, and the two constants have no bits in common,
10117 we should treat this as a BIT_IOR_EXPR since this may produce more
10118 simplifications. */
10119 if (TREE_CODE (arg0) == BIT_AND_EXPR
10120 && TREE_CODE (arg1) == BIT_AND_EXPR
10121 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10122 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10123 && wi::bit_and (TREE_OPERAND (arg0, 1),
10124 TREE_OPERAND (arg1, 1)) == 0)
10126 code = BIT_IOR_EXPR;
10127 goto bit_ior;
10130 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10131 (plus (plus (mult) (mult)) (foo)) so that we can
10132 take advantage of the factoring cases below. */
10133 if (ANY_INTEGRAL_TYPE_P (type)
10134 && TYPE_OVERFLOW_WRAPS (type)
10135 && (((TREE_CODE (arg0) == PLUS_EXPR
10136 || TREE_CODE (arg0) == MINUS_EXPR)
10137 && TREE_CODE (arg1) == MULT_EXPR)
10138 || ((TREE_CODE (arg1) == PLUS_EXPR
10139 || TREE_CODE (arg1) == MINUS_EXPR)
10140 && TREE_CODE (arg0) == MULT_EXPR)))
10142 tree parg0, parg1, parg, marg;
10143 enum tree_code pcode;
10145 if (TREE_CODE (arg1) == MULT_EXPR)
10146 parg = arg0, marg = arg1;
10147 else
10148 parg = arg1, marg = arg0;
10149 pcode = TREE_CODE (parg);
10150 parg0 = TREE_OPERAND (parg, 0);
10151 parg1 = TREE_OPERAND (parg, 1);
10152 STRIP_NOPS (parg0);
10153 STRIP_NOPS (parg1);
10155 if (TREE_CODE (parg0) == MULT_EXPR
10156 && TREE_CODE (parg1) != MULT_EXPR)
10157 return fold_build2_loc (loc, pcode, type,
10158 fold_build2_loc (loc, PLUS_EXPR, type,
10159 fold_convert_loc (loc, type,
10160 parg0),
10161 fold_convert_loc (loc, type,
10162 marg)),
10163 fold_convert_loc (loc, type, parg1));
10164 if (TREE_CODE (parg0) != MULT_EXPR
10165 && TREE_CODE (parg1) == MULT_EXPR)
10166 return
10167 fold_build2_loc (loc, PLUS_EXPR, type,
10168 fold_convert_loc (loc, type, parg0),
10169 fold_build2_loc (loc, pcode, type,
10170 fold_convert_loc (loc, type, marg),
10171 fold_convert_loc (loc, type,
10172 parg1)));
10175 else
10177 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10178 to __complex__ ( x, y ). This is not the same for SNaNs or
10179 if signed zeros are involved. */
10180 if (!HONOR_SNANS (element_mode (arg0))
10181 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10182 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10184 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10185 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10186 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10187 bool arg0rz = false, arg0iz = false;
10188 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10189 || (arg0i && (arg0iz = real_zerop (arg0i))))
10191 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10192 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10193 if (arg0rz && arg1i && real_zerop (arg1i))
10195 tree rp = arg1r ? arg1r
10196 : build1 (REALPART_EXPR, rtype, arg1);
10197 tree ip = arg0i ? arg0i
10198 : build1 (IMAGPART_EXPR, rtype, arg0);
10199 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10201 else if (arg0iz && arg1r && real_zerop (arg1r))
10203 tree rp = arg0r ? arg0r
10204 : build1 (REALPART_EXPR, rtype, arg0);
10205 tree ip = arg1i ? arg1i
10206 : build1 (IMAGPART_EXPR, rtype, arg1);
10207 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10212 if (flag_unsafe_math_optimizations
10213 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10214 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10215 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10216 return tem;
10218 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10219 We associate floats only if the user has specified
10220 -fassociative-math. */
10221 if (flag_associative_math
10222 && TREE_CODE (arg1) == PLUS_EXPR
10223 && TREE_CODE (arg0) != MULT_EXPR)
10225 tree tree10 = TREE_OPERAND (arg1, 0);
10226 tree tree11 = TREE_OPERAND (arg1, 1);
10227 if (TREE_CODE (tree11) == MULT_EXPR
10228 && TREE_CODE (tree10) == MULT_EXPR)
10230 tree tree0;
10231 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10232 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10235	   /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10236 We associate floats only if the user has specified
10237 -fassociative-math. */
10238 if (flag_associative_math
10239 && TREE_CODE (arg0) == PLUS_EXPR
10240 && TREE_CODE (arg1) != MULT_EXPR)
10242 tree tree00 = TREE_OPERAND (arg0, 0);
10243 tree tree01 = TREE_OPERAND (arg0, 1);
10244 if (TREE_CODE (tree01) == MULT_EXPR
10245 && TREE_CODE (tree00) == MULT_EXPR)
10247 tree tree0;
10248 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10249 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10254 bit_rotate:
10255 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10256 is a rotate of A by C1 bits. */
10257 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10258 is a rotate of A by B bits. */
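      /* Editor's sketch (an addition, not in the original source):
	 for a 32-bit unsigned X, both

	   (x << 5) + (x >> 27)
	   (x << n) + (x >> (32 - n))

	 match here and fold to a single LROTATE_EXPR, since the shift
	 counts sum to the precision of X.  */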
10260 enum tree_code code0, code1;
10261 tree rtype;
10262 code0 = TREE_CODE (arg0);
10263 code1 = TREE_CODE (arg1);
10264 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10265 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10266 && operand_equal_p (TREE_OPERAND (arg0, 0),
10267 TREE_OPERAND (arg1, 0), 0)
10268 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10269 TYPE_UNSIGNED (rtype))
10270 /* Only create rotates in complete modes. Other cases are not
10271 expanded properly. */
10272 && (element_precision (rtype)
10273 == element_precision (TYPE_MODE (rtype))))
10275 tree tree01, tree11;
10276 enum tree_code code01, code11;
10278 tree01 = TREE_OPERAND (arg0, 1);
10279 tree11 = TREE_OPERAND (arg1, 1);
10280 STRIP_NOPS (tree01);
10281 STRIP_NOPS (tree11);
10282 code01 = TREE_CODE (tree01);
10283 code11 = TREE_CODE (tree11);
10284 if (code01 == INTEGER_CST
10285 && code11 == INTEGER_CST
10286 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10287 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10289 tem = build2_loc (loc, LROTATE_EXPR,
10290 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10291 TREE_OPERAND (arg0, 0),
10292 code0 == LSHIFT_EXPR
10293 ? TREE_OPERAND (arg0, 1)
10294 : TREE_OPERAND (arg1, 1));
10295 return fold_convert_loc (loc, type, tem);
10297 else if (code11 == MINUS_EXPR)
10299 tree tree110, tree111;
10300 tree110 = TREE_OPERAND (tree11, 0);
10301 tree111 = TREE_OPERAND (tree11, 1);
10302 STRIP_NOPS (tree110);
10303 STRIP_NOPS (tree111);
10304 if (TREE_CODE (tree110) == INTEGER_CST
10305 && 0 == compare_tree_int (tree110,
10306 element_precision
10307 (TREE_TYPE (TREE_OPERAND
10308 (arg0, 0))))
10309 && operand_equal_p (tree01, tree111, 0))
10310 return
10311 fold_convert_loc (loc, type,
10312 build2 ((code0 == LSHIFT_EXPR
10313 ? LROTATE_EXPR
10314 : RROTATE_EXPR),
10315 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10316 TREE_OPERAND (arg0, 0),
10317 TREE_OPERAND (arg0, 1)));
10319 else if (code01 == MINUS_EXPR)
10321 tree tree010, tree011;
10322 tree010 = TREE_OPERAND (tree01, 0);
10323 tree011 = TREE_OPERAND (tree01, 1);
10324 STRIP_NOPS (tree010);
10325 STRIP_NOPS (tree011);
10326 if (TREE_CODE (tree010) == INTEGER_CST
10327 && 0 == compare_tree_int (tree010,
10328 element_precision
10329 (TREE_TYPE (TREE_OPERAND
10330 (arg0, 0))))
10331 && operand_equal_p (tree11, tree011, 0))
10332 return fold_convert_loc
10333 (loc, type,
10334 build2 ((code0 != LSHIFT_EXPR
10335 ? LROTATE_EXPR
10336 : RROTATE_EXPR),
10337 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10338 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
10343 associate:
10344	    /* In most languages, we can't associate operations on floats through
10345 parentheses. Rather than remember where the parentheses were, we
10346 don't associate floats at all, unless the user has specified
10347 -fassociative-math.
10348 And, we need to make sure type is not saturating. */
10350 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10351 && !TYPE_SATURATING (type))
10353 tree var0, con0, lit0, minus_lit0;
10354 tree var1, con1, lit1, minus_lit1;
10355 tree atype = type;
10356 bool ok = true;
10358 /* Split both trees into variables, constants, and literals. Then
10359 associate each group together, the constants with literals,
10360 then the result with variables. This increases the chances of
10361 literals being recombined later and of generating relocatable
10362 expressions for the sum of a constant and literal. */
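	   /* Editor's sketch (an addition, not in the original source):
	      for

		(x + 4) + (y + 8)

	      split_tree yields var0 = x, lit0 = 4, var1 = y, lit1 = 8;
	      the pieces are then reassociated to (x + y) + 12, exposing
	      a single literal for later folding.  */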
10363 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10364 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10365 code == MINUS_EXPR);
10367 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10368 if (code == MINUS_EXPR)
10369 code = PLUS_EXPR;
10371 /* With undefined overflow prefer doing association in a type
10372 which wraps on overflow, if that is one of the operand types. */
10373 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10374 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10376 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10377 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10378 atype = TREE_TYPE (arg0);
10379 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10380 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10381 atype = TREE_TYPE (arg1);
10382 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10385 /* With undefined overflow we can only associate constants with one
10386 variable, and constants whose association doesn't overflow. */
10387 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10388 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10390 if (var0 && var1)
10392 tree tmp0 = var0;
10393 tree tmp1 = var1;
10395 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10396 tmp0 = TREE_OPERAND (tmp0, 0);
10397 if (CONVERT_EXPR_P (tmp0)
10398 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10399 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10400 <= TYPE_PRECISION (atype)))
10401 tmp0 = TREE_OPERAND (tmp0, 0);
10402 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10403 tmp1 = TREE_OPERAND (tmp1, 0);
10404 if (CONVERT_EXPR_P (tmp1)
10405 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10406 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10407 <= TYPE_PRECISION (atype)))
10408 tmp1 = TREE_OPERAND (tmp1, 0);
10409 /* The only case we can still associate with two variables
10410 is if they are the same, modulo negation and bit-pattern
10411 preserving conversions. */
10412 if (!operand_equal_p (tmp0, tmp1, 0))
10413 ok = false;
10417 /* Only do something if we found more than two objects. Otherwise,
10418 nothing has changed and we risk infinite recursion. */
10419 if (ok
10420 && (2 < ((var0 != 0) + (var1 != 0)
10421 + (con0 != 0) + (con1 != 0)
10422 + (lit0 != 0) + (lit1 != 0)
10423 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10425 bool any_overflows = false;
10426 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10427 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10428 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10429 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10430 var0 = associate_trees (loc, var0, var1, code, atype);
10431 con0 = associate_trees (loc, con0, con1, code, atype);
10432 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10433 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10434 code, atype);
10436 /* Preserve the MINUS_EXPR if the negative part of the literal is
10437 greater than the positive part. Otherwise, the multiplicative
10438	       folding code (i.e. extract_muldiv) may be fooled when
10439 unsigned constants are subtracted, like in the following
10440 example: ((X*2 + 4) - 8U)/2. */
10441 if (minus_lit0 && lit0)
10443 if (TREE_CODE (lit0) == INTEGER_CST
10444 && TREE_CODE (minus_lit0) == INTEGER_CST
10445 && tree_int_cst_lt (lit0, minus_lit0))
10447 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10448 MINUS_EXPR, atype);
10449 lit0 = 0;
10451 else
10453 lit0 = associate_trees (loc, lit0, minus_lit0,
10454 MINUS_EXPR, atype);
10455 minus_lit0 = 0;
10459 /* Don't introduce overflows through reassociation. */
10460 if (!any_overflows
10461 && ((lit0 && TREE_OVERFLOW_P (lit0))
10462 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10463 return NULL_TREE;
10465 if (minus_lit0)
10467 if (con0 == 0)
10468 return
10469 fold_convert_loc (loc, type,
10470 associate_trees (loc, var0, minus_lit0,
10471 MINUS_EXPR, atype));
10472 else
10474 con0 = associate_trees (loc, con0, minus_lit0,
10475 MINUS_EXPR, atype);
10476 return
10477 fold_convert_loc (loc, type,
10478 associate_trees (loc, var0, con0,
10479 PLUS_EXPR, atype));
10483 con0 = associate_trees (loc, con0, lit0, code, atype);
10484 return
10485 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10486 code, atype));
10490 return NULL_TREE;
10492 case MINUS_EXPR:
10493 /* Pointer simplifications for subtraction, simple reassociations. */
10494 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10496 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10497 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10498 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10500 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10501 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10502 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10503 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10504 return fold_build2_loc (loc, PLUS_EXPR, type,
10505 fold_build2_loc (loc, MINUS_EXPR, type,
10506 arg00, arg10),
10507 fold_build2_loc (loc, MINUS_EXPR, type,
10508 arg01, arg11));
10510 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10511 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10513 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10514 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10515 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10516 fold_convert_loc (loc, type, arg1));
10517 if (tmp)
10518 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10520 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10521 simplifies. */
10522 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10524 tree arg10 = fold_convert_loc (loc, type,
10525 TREE_OPERAND (arg1, 0));
10526 tree arg11 = fold_convert_loc (loc, type,
10527 TREE_OPERAND (arg1, 1));
10528 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10529 fold_convert_loc (loc, type, arg0),
10530 arg10);
10531 if (tmp)
10532 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10535 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10536 if (TREE_CODE (arg0) == NEGATE_EXPR
10537 && negate_expr_p (arg1)
10538 && reorder_operands_p (arg0, arg1))
10539 return fold_build2_loc (loc, MINUS_EXPR, type,
10540 fold_convert_loc (loc, type,
10541 negate_expr (arg1)),
10542 fold_convert_loc (loc, type,
10543 TREE_OPERAND (arg0, 0)));
10545 /* X - (X / Y) * Y is X % Y. */
10546 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10547 && TREE_CODE (arg1) == MULT_EXPR
10548 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10549 && operand_equal_p (arg0,
10550 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10551 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10552 TREE_OPERAND (arg1, 1), 0))
10553 return
10554 fold_convert_loc (loc, type,
10555 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10556 arg0, TREE_OPERAND (arg1, 1)));
10558 if (! FLOAT_TYPE_P (type))
10560 /* Fold A - (A & B) into ~B & A. */
10561 if (!TREE_SIDE_EFFECTS (arg0)
10562 && TREE_CODE (arg1) == BIT_AND_EXPR)
10564 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10566 tree arg10 = fold_convert_loc (loc, type,
10567 TREE_OPERAND (arg1, 0));
10568 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10569 fold_build1_loc (loc, BIT_NOT_EXPR,
10570 type, arg10),
10571 fold_convert_loc (loc, type, arg0));
10573 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10575 tree arg11 = fold_convert_loc (loc,
10576 type, TREE_OPERAND (arg1, 1));
10577 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10578 fold_build1_loc (loc, BIT_NOT_EXPR,
10579 type, arg11),
10580 fold_convert_loc (loc, type, arg0));
10584 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10585 any power of 2 minus 1. */
10586 if (TREE_CODE (arg0) == BIT_AND_EXPR
10587 && TREE_CODE (arg1) == BIT_AND_EXPR
10588 && operand_equal_p (TREE_OPERAND (arg0, 0),
10589 TREE_OPERAND (arg1, 0), 0))
10591 tree mask0 = TREE_OPERAND (arg0, 1);
10592 tree mask1 = TREE_OPERAND (arg1, 1);
10593 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10595 if (operand_equal_p (tem, mask1, 0))
10597 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10598 TREE_OPERAND (arg0, 0), mask1);
10599 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10604 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10605 __complex__ ( x, -y ). This is not the same for SNaNs or if
10606 signed zeros are involved. */
10607 if (!HONOR_SNANS (element_mode (arg0))
10608 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10609 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10611 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10612 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10613 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10614 bool arg0rz = false, arg0iz = false;
10615 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10616 || (arg0i && (arg0iz = real_zerop (arg0i))))
10618 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10619 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10620 if (arg0rz && arg1i && real_zerop (arg1i))
10622 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10623 arg1r ? arg1r
10624 : build1 (REALPART_EXPR, rtype, arg1));
10625 tree ip = arg0i ? arg0i
10626 : build1 (IMAGPART_EXPR, rtype, arg0);
10627 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10629 else if (arg0iz && arg1r && real_zerop (arg1r))
10631 tree rp = arg0r ? arg0r
10632 : build1 (REALPART_EXPR, rtype, arg0);
10633 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10634 arg1i ? arg1i
10635 : build1 (IMAGPART_EXPR, rtype, arg1));
10636 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10641 /* A - B -> A + (-B) if B is easily negatable. */
10642 if (negate_expr_p (arg1)
10643 && !TYPE_OVERFLOW_SANITIZED (type)
10644 && ((FLOAT_TYPE_P (type)
10645 /* Avoid this transformation if B is a positive REAL_CST. */
10646 && (TREE_CODE (arg1) != REAL_CST
10647 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10648 || INTEGRAL_TYPE_P (type)))
10649 return fold_build2_loc (loc, PLUS_EXPR, type,
10650 fold_convert_loc (loc, type, arg0),
10651 fold_convert_loc (loc, type,
10652 negate_expr (arg1)));
10654 /* Try folding difference of addresses. */
10656 HOST_WIDE_INT diff;
10658 if ((TREE_CODE (arg0) == ADDR_EXPR
10659 || TREE_CODE (arg1) == ADDR_EXPR)
10660 && ptr_difference_const (arg0, arg1, &diff))
10661 return build_int_cst_type (type, diff);
10664 /* Fold &a[i] - &a[j] to i-j. */
10665 if (TREE_CODE (arg0) == ADDR_EXPR
10666 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10667 && TREE_CODE (arg1) == ADDR_EXPR
10668 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10670 tree tem = fold_addr_of_array_ref_difference (loc, type,
10671 TREE_OPERAND (arg0, 0),
10672 TREE_OPERAND (arg1, 0));
10673 if (tem)
10674 return tem;
10677 if (FLOAT_TYPE_P (type)
10678 && flag_unsafe_math_optimizations
10679 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10680 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10681 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10682 return tem;
10684 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10685 one. Make sure the type is not saturating and has the signedness of
10686 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10687 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10688 if ((TREE_CODE (arg0) == MULT_EXPR
10689 || TREE_CODE (arg1) == MULT_EXPR)
10690 && !TYPE_SATURATING (type)
10691 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10692 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10693 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10695 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10696 if (tem)
10697 return tem;
10700 goto associate;
10702 case MULT_EXPR:
10703 /* (-A) * (-B) -> A * B */
10704 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10705 return fold_build2_loc (loc, MULT_EXPR, type,
10706 fold_convert_loc (loc, type,
10707 TREE_OPERAND (arg0, 0)),
10708 fold_convert_loc (loc, type,
10709 negate_expr (arg1)));
10710 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10711 return fold_build2_loc (loc, MULT_EXPR, type,
10712 fold_convert_loc (loc, type,
10713 negate_expr (arg0)),
10714 fold_convert_loc (loc, type,
10715 TREE_OPERAND (arg1, 0)));
10717 if (! FLOAT_TYPE_P (type))
10719 /* Transform x * -C into -x * C if x is easily negatable. */
10720 if (TREE_CODE (arg1) == INTEGER_CST
10721 && tree_int_cst_sgn (arg1) == -1
10722 && negate_expr_p (arg0)
10723 && (tem = negate_expr (arg1)) != arg1
10724 && !TREE_OVERFLOW (tem))
10725 return fold_build2_loc (loc, MULT_EXPR, type,
10726 fold_convert_loc (loc, type,
10727 negate_expr (arg0)),
10728 tem);
10730 /* (a * (1 << b)) is (a << b) */
10731 if (TREE_CODE (arg1) == LSHIFT_EXPR
10732 && integer_onep (TREE_OPERAND (arg1, 0)))
10733 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10734 TREE_OPERAND (arg1, 1));
10735 if (TREE_CODE (arg0) == LSHIFT_EXPR
10736 && integer_onep (TREE_OPERAND (arg0, 0)))
10737 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10738 TREE_OPERAND (arg0, 1));
10740 /* (A + A) * C -> A * 2 * C */
10741 if (TREE_CODE (arg0) == PLUS_EXPR
10742 && TREE_CODE (arg1) == INTEGER_CST
10743 && operand_equal_p (TREE_OPERAND (arg0, 0),
10744 TREE_OPERAND (arg0, 1), 0))
10745 return fold_build2_loc (loc, MULT_EXPR, type,
10746 omit_one_operand_loc (loc, type,
10747 TREE_OPERAND (arg0, 0),
10748 TREE_OPERAND (arg0, 1)),
10749 fold_build2_loc (loc, MULT_EXPR, type,
10750 build_int_cst (type, 2), arg1));
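/* Illustrative sketch, not part of the original source: for an integral
   type, (x + x) * 5 is rebuilt as x * (2 * 5), and the inner constant
   multiplication folds, giving x * 10.  */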
10752 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10753 sign-changing only. */
10754 if (TREE_CODE (arg1) == INTEGER_CST
10755 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10756 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10757 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10759 strict_overflow_p = false;
10760 if (TREE_CODE (arg1) == INTEGER_CST
10761 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10762 &strict_overflow_p)))
10764 if (strict_overflow_p)
10765 fold_overflow_warning (("assuming signed overflow does not "
10766 "occur when simplifying "
10767 "multiplication"),
10768 WARN_STRICT_OVERFLOW_MISC);
10769 return fold_convert_loc (loc, type, tem);
10772 /* Optimize z * conj(z) for integer complex numbers. */
10773 if (TREE_CODE (arg0) == CONJ_EXPR
10774 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10775 return fold_mult_zconjz (loc, type, arg1);
10776 if (TREE_CODE (arg1) == CONJ_EXPR
10777 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10778 return fold_mult_zconjz (loc, type, arg0);
10780 else
10782 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10783 the result for floating-point types due to rounding, so it is applied
10784 only if -fassociative-math was specified. */
10785 if (flag_associative_math
10786 && TREE_CODE (arg0) == RDIV_EXPR
10787 && TREE_CODE (arg1) == REAL_CST
10788 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10790 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10791 arg1);
10792 if (tem)
10793 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10794 TREE_OPERAND (arg0, 1));
10797 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10798 if (operand_equal_p (arg0, arg1, 0))
10800 tree tem = fold_strip_sign_ops (arg0);
10801 if (tem != NULL_TREE)
10803 tem = fold_convert_loc (loc, type, tem);
10804 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10808 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10809 This is not the same for NaNs or if signed zeros are
10810 involved. */
10811 if (!HONOR_NANS (arg0)
10812 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10813 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10814 && TREE_CODE (arg1) == COMPLEX_CST
10815 && real_zerop (TREE_REALPART (arg1)))
10817 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10818 if (real_onep (TREE_IMAGPART (arg1)))
10819 return
10820 fold_build2_loc (loc, COMPLEX_EXPR, type,
10821 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10822 rtype, arg0)),
10823 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10824 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10825 return
10826 fold_build2_loc (loc, COMPLEX_EXPR, type,
10827 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10828 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10829 rtype, arg0)));
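/* Illustrative sketch, not part of the original source: for a _Complex
   double z, z * (0.0 + 1.0i) becomes __complex__ (-__imag__ z,
   __real__ z) and z * (0.0 - 1.0i) becomes __complex__ (__imag__ z,
   -__real__ z); the fold is skipped when NaNs or signed zeros must be
   honored.  */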
10832 /* Optimize z * conj(z) for floating point complex numbers.
10833 Guarded by flag_unsafe_math_optimizations as non-finite
10834 imaginary components don't produce scalar results. */
10835 if (flag_unsafe_math_optimizations
10836 && TREE_CODE (arg0) == CONJ_EXPR
10837 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10838 return fold_mult_zconjz (loc, type, arg1);
10839 if (flag_unsafe_math_optimizations
10840 && TREE_CODE (arg1) == CONJ_EXPR
10841 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10842 return fold_mult_zconjz (loc, type, arg0);
10844 if (flag_unsafe_math_optimizations)
10846 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10847 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10849 /* Optimizations of root(...)*root(...). */
10850 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10852 tree rootfn, arg;
10853 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10854 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10856 /* Optimize sqrt(x)*sqrt(x) as x. */
10857 if (BUILTIN_SQRT_P (fcode0)
10858 && operand_equal_p (arg00, arg10, 0)
10859 && ! HONOR_SNANS (element_mode (type)))
10860 return arg00;
10862 /* Optimize root(x)*root(y) as root(x*y). */
10863 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10864 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10865 return build_call_expr_loc (loc, rootfn, 1, arg);
10868 /* Optimize expN(x)*expN(y) as expN(x+y). */
10869 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10871 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10872 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10873 CALL_EXPR_ARG (arg0, 0),
10874 CALL_EXPR_ARG (arg1, 0));
10875 return build_call_expr_loc (loc, expfn, 1, arg);
10878 /* Optimizations of pow(...)*pow(...). */
10879 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10880 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10881 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10883 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10884 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10885 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10886 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10888 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10889 if (operand_equal_p (arg01, arg11, 0))
10891 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10892 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10893 arg00, arg10);
10894 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10897 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10898 if (operand_equal_p (arg00, arg10, 0))
10900 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10901 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10902 arg01, arg11);
10903 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
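/* Illustrative sketch, not part of the original source: under
   -funsafe-math-optimizations, pow (x, 2.0) * pow (y, 2.0) becomes
   pow (x * y, 2.0), and pow (x, 2.0) * pow (x, 3.0) becomes
   pow (x, 2.0 + 3.0).  */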
10907 /* Optimize tan(x)*cos(x) as sin(x). */
10908 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10909 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10910 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10911 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10912 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10913 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10914 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10915 CALL_EXPR_ARG (arg1, 0), 0))
10917 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10919 if (sinfn != NULL_TREE)
10920 return build_call_expr_loc (loc, sinfn, 1,
10921 CALL_EXPR_ARG (arg0, 0));
10924 /* Optimize x*pow(x,c) as pow(x,c+1). */
10925 if (fcode1 == BUILT_IN_POW
10926 || fcode1 == BUILT_IN_POWF
10927 || fcode1 == BUILT_IN_POWL)
10929 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10930 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10931 if (TREE_CODE (arg11) == REAL_CST
10932 && !TREE_OVERFLOW (arg11)
10933 && operand_equal_p (arg0, arg10, 0))
10935 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10936 REAL_VALUE_TYPE c;
10937 tree arg;
10939 c = TREE_REAL_CST (arg11);
10940 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10941 arg = build_real (type, c);
10942 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10946 /* Optimize pow(x,c)*x as pow(x,c+1). */
10947 if (fcode0 == BUILT_IN_POW
10948 || fcode0 == BUILT_IN_POWF
10949 || fcode0 == BUILT_IN_POWL)
10951 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10952 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10953 if (TREE_CODE (arg01) == REAL_CST
10954 && !TREE_OVERFLOW (arg01)
10955 && operand_equal_p (arg1, arg00, 0))
10957 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10958 REAL_VALUE_TYPE c;
10959 tree arg;
10961 c = TREE_REAL_CST (arg01);
10962 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10963 arg = build_real (type, c);
10964 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10968 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10969 if (!in_gimple_form
10970 && optimize
10971 && operand_equal_p (arg0, arg1, 0))
10973 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10975 if (powfn)
10977 tree arg = build_real (type, dconst2);
10978 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10983 goto associate;
10985 case BIT_IOR_EXPR:
10986 bit_ior:
10987 /* ~X | X is -1. */
10988 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10989 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10991 t1 = build_zero_cst (type);
10992 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10993 return omit_one_operand_loc (loc, type, t1, arg1);
10996 /* X | ~X is -1. */
10997 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10998 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11000 t1 = build_zero_cst (type);
11001 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11002 return omit_one_operand_loc (loc, type, t1, arg0);
11005 /* Canonicalize (X & C1) | C2. */
11006 if (TREE_CODE (arg0) == BIT_AND_EXPR
11007 && TREE_CODE (arg1) == INTEGER_CST
11008 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11010 int width = TYPE_PRECISION (type), w;
11011 wide_int c1 = TREE_OPERAND (arg0, 1);
11012 wide_int c2 = arg1;
11014 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11015 if ((c1 & c2) == c1)
11016 return omit_one_operand_loc (loc, type, arg1,
11017 TREE_OPERAND (arg0, 0));
11019 wide_int msk = wi::mask (width, false,
11020 TYPE_PRECISION (TREE_TYPE (arg1)));
11022 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11023 if (msk.and_not (c1 | c2) == 0)
11024 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11025 TREE_OPERAND (arg0, 0), arg1);
11027 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11028 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11029 mode which allows further optimizations. */
11030 c1 &= msk;
11031 c2 &= msk;
11032 wide_int c3 = c1.and_not (c2);
11033 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11035 wide_int mask = wi::mask (w, false,
11036 TYPE_PRECISION (type));
11037 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11039 c3 = mask;
11040 break;
11044 if (c3 != c1)
11045 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11046 fold_build2_loc (loc, BIT_AND_EXPR, type,
11047 TREE_OPERAND (arg0, 0),
11048 wide_int_to_tree (type,
11049 c3)),
11050 arg1);
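/* Illustrative sketch, not part of the original source: for unsigned
   char x, (x & 0x0f) | 0x0c becomes (x & 0x03) | 0x0c; but for unsigned
   short x, (x & 0x00ff) | 0x0c is left alone, since 0xff is already a
   mode-sized mask that later passes can exploit.  */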
11053 /* (X & ~Y) | (~X & Y) is X ^ Y */
11054 if (TREE_CODE (arg0) == BIT_AND_EXPR
11055 && TREE_CODE (arg1) == BIT_AND_EXPR)
11057 tree a0, a1, l0, l1, n0, n1;
11059 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11060 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11062 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11063 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11065 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11066 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11068 if ((operand_equal_p (n0, a0, 0)
11069 && operand_equal_p (n1, a1, 0))
11070 || (operand_equal_p (n0, a1, 0)
11071 && operand_equal_p (n1, a0, 0)))
11072 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11075 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11076 if (t1 != NULL_TREE)
11077 return t1;
11079 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11081 This results in more efficient code for machines without a NAND
11082 instruction. Combine will canonicalize to the first form
11083 which will allow use of NAND instructions provided by the
11084 backend if they exist. */
11085 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11086 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11088 return
11089 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11090 build2 (BIT_AND_EXPR, type,
11091 fold_convert_loc (loc, type,
11092 TREE_OPERAND (arg0, 0)),
11093 fold_convert_loc (loc, type,
11094 TREE_OPERAND (arg1, 0))));
11097 /* See if this can be simplified into a rotate first. If that
11098 is unsuccessful continue in the association code. */
11099 goto bit_rotate;
11101 case BIT_XOR_EXPR:
11102 /* ~X ^ X is -1. */
11103 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11104 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11106 t1 = build_zero_cst (type);
11107 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11108 return omit_one_operand_loc (loc, type, t1, arg1);
11111 /* X ^ ~X is -1. */
11112 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11113 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11115 t1 = build_zero_cst (type);
11116 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11117 return omit_one_operand_loc (loc, type, t1, arg0);
11120 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11121 with a constant, and the two constants have no bits in common,
11122 we should treat this as a BIT_IOR_EXPR since this may produce more
11123 simplifications. */
11124 if (TREE_CODE (arg0) == BIT_AND_EXPR
11125 && TREE_CODE (arg1) == BIT_AND_EXPR
11126 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11127 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11128 && wi::bit_and (TREE_OPERAND (arg0, 1),
11129 TREE_OPERAND (arg1, 1)) == 0)
11131 code = BIT_IOR_EXPR;
11132 goto bit_ior;
11135 /* (X | Y) ^ X -> Y & ~X. */
11136 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11137 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11139 tree t2 = TREE_OPERAND (arg0, 1);
11140 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11141 arg1);
11142 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11143 fold_convert_loc (loc, type, t2),
11144 fold_convert_loc (loc, type, t1));
11145 return t1;
11148 /* (Y | X) ^ X -> Y & ~X. */
11149 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11150 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11152 tree t2 = TREE_OPERAND (arg0, 0);
11153 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11154 arg1);
11155 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11156 fold_convert_loc (loc, type, t2),
11157 fold_convert_loc (loc, type, t1));
11158 return t1;
11161 /* X ^ (X | Y) -> Y & ~X. */
11162 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11163 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11165 tree t2 = TREE_OPERAND (arg1, 1);
11166 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11167 arg0);
11168 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11169 fold_convert_loc (loc, type, t2),
11170 fold_convert_loc (loc, type, t1));
11171 return t1;
11174 /* X ^ (Y | X) -> Y & ~X. */
11175 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11176 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11178 tree t2 = TREE_OPERAND (arg1, 0);
11179 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11180 arg0);
11181 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11182 fold_convert_loc (loc, type, t2),
11183 fold_convert_loc (loc, type, t1));
11184 return t1;
11187 /* Convert ~X ^ ~Y to X ^ Y. */
11188 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11189 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11190 return fold_build2_loc (loc, code, type,
11191 fold_convert_loc (loc, type,
11192 TREE_OPERAND (arg0, 0)),
11193 fold_convert_loc (loc, type,
11194 TREE_OPERAND (arg1, 0)));
11196 /* Convert ~X ^ C to X ^ ~C. */
11197 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11198 && TREE_CODE (arg1) == INTEGER_CST)
11199 return fold_build2_loc (loc, code, type,
11200 fold_convert_loc (loc, type,
11201 TREE_OPERAND (arg0, 0)),
11202 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11204 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11205 if (TREE_CODE (arg0) == BIT_AND_EXPR
11206 && INTEGRAL_TYPE_P (type)
11207 && integer_onep (TREE_OPERAND (arg0, 1))
11208 && integer_onep (arg1))
11209 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11210 build_zero_cst (TREE_TYPE (arg0)));
11212 /* Fold (X & Y) ^ Y as ~X & Y. */
11213 if (TREE_CODE (arg0) == BIT_AND_EXPR
11214 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11216 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11217 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11218 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11219 fold_convert_loc (loc, type, arg1));
11221 /* Fold (X & Y) ^ X as ~Y & X. */
11222 if (TREE_CODE (arg0) == BIT_AND_EXPR
11223 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11224 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11226 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11227 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11228 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11229 fold_convert_loc (loc, type, arg1));
11231 /* Fold X ^ (X & Y) as X & ~Y. */
11232 if (TREE_CODE (arg1) == BIT_AND_EXPR
11233 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11235 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11236 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11237 fold_convert_loc (loc, type, arg0),
11238 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11240 /* Fold X ^ (Y & X) as ~Y & X. */
11241 if (TREE_CODE (arg1) == BIT_AND_EXPR
11242 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11243 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11245 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11246 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11247 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11248 fold_convert_loc (loc, type, arg0));
11251 /* See if this can be simplified into a rotate first. If that
11252 is unsuccessful continue in the association code. */
11253 goto bit_rotate;
11255 case BIT_AND_EXPR:
11256 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11257 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11258 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11259 || (TREE_CODE (arg0) == EQ_EXPR
11260 && integer_zerop (TREE_OPERAND (arg0, 1))))
11261 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11262 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11264 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11265 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11266 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11267 || (TREE_CODE (arg1) == EQ_EXPR
11268 && integer_zerop (TREE_OPERAND (arg1, 1))))
11269 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11270 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11272 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11273 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11274 && INTEGRAL_TYPE_P (type)
11275 && integer_onep (TREE_OPERAND (arg0, 1))
11276 && integer_onep (arg1))
11278 tree tem2;
11279 tem = TREE_OPERAND (arg0, 0);
11280 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11281 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11282 tem, tem2);
11283 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11284 build_zero_cst (TREE_TYPE (tem)));
11286 /* Fold ~X & 1 as (X & 1) == 0. */
11287 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11288 && INTEGRAL_TYPE_P (type)
11289 && integer_onep (arg1))
11291 tree tem2;
11292 tem = TREE_OPERAND (arg0, 0);
11293 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11294 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11295 tem, tem2);
11296 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11297 build_zero_cst (TREE_TYPE (tem)));
11299 /* Fold !X & 1 as X == 0. */
11300 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11301 && integer_onep (arg1))
11303 tem = TREE_OPERAND (arg0, 0);
11304 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11305 build_zero_cst (TREE_TYPE (tem)));
11308 /* Fold (X ^ Y) & Y as ~X & Y. */
11309 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11310 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11312 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11313 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11314 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11315 fold_convert_loc (loc, type, arg1));
11317 /* Fold (X ^ Y) & X as ~Y & X. */
11318 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11319 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11320 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11322 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11323 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11324 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11325 fold_convert_loc (loc, type, arg1));
11327 /* Fold X & (X ^ Y) as X & ~Y. */
11328 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11329 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11331 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11332 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11333 fold_convert_loc (loc, type, arg0),
11334 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11336 /* Fold X & (Y ^ X) as ~Y & X. */
11337 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11338 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11339 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11341 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11342 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11343 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11344 fold_convert_loc (loc, type, arg0));
11347 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11348 multiple of 1 << CST. */
11349 if (TREE_CODE (arg1) == INTEGER_CST)
11351 wide_int cst1 = arg1;
11352 wide_int ncst1 = -cst1;
11353 if ((cst1 & ncst1) == ncst1
11354 && multiple_of_p (type, arg0,
11355 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11356 return fold_convert_loc (loc, type, arg0);
11359 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11360 bits from CST2. */
11361 if (TREE_CODE (arg1) == INTEGER_CST
11362 && TREE_CODE (arg0) == MULT_EXPR
11363 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11365 wide_int warg1 = arg1;
11366 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11368 if (masked == 0)
11369 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11370 arg0, arg1);
11371 else if (masked != warg1)
11373 /* Avoid the transform if arg1 is a mask of some
11374 mode which allows further optimizations. */
11375 int pop = wi::popcount (warg1);
11376 if (!(pop >= BITS_PER_UNIT
11377 && exact_log2 (pop) != -1
11378 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11379 return fold_build2_loc (loc, code, type, op0,
11380 wide_int_to_tree (type, masked));
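/* Illustrative sketch, not part of the original source: since x * 4
   always has its two low bits clear, (x * 4) & 3 folds to 0 and
   (x * 4) & 0x3f becomes (x * 4) & 0x3c, while a mode-wide mask such
   as 0xff is deliberately left untouched.  */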
11384 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11385 ((A & N) + B) & M -> (A + B) & M
11386 Similarly if (N & M) == 0,
11387 ((A | N) + B) & M -> (A + B) & M
11388 and for - instead of + (or unary - instead of +)
11389 and/or ^ instead of |.
11390 If B is constant and (B & M) == 0, fold into A & M. */
11391 if (TREE_CODE (arg1) == INTEGER_CST)
11393 wide_int cst1 = arg1;
11394 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11395 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11396 && (TREE_CODE (arg0) == PLUS_EXPR
11397 || TREE_CODE (arg0) == MINUS_EXPR
11398 || TREE_CODE (arg0) == NEGATE_EXPR)
11399 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11400 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11402 tree pmop[2];
11403 int which = 0;
11404 wide_int cst0;
11406 /* Now we know that arg0 is (C + D) or (C - D) or
11407 -C and arg1 (M) is == (1LL << cst) - 1.
11408 Store C into PMOP[0] and D into PMOP[1]. */
11409 pmop[0] = TREE_OPERAND (arg0, 0);
11410 pmop[1] = NULL;
11411 if (TREE_CODE (arg0) != NEGATE_EXPR)
11413 pmop[1] = TREE_OPERAND (arg0, 1);
11414 which = 1;
11417 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11418 which = -1;
11420 for (; which >= 0; which--)
11421 switch (TREE_CODE (pmop[which]))
11423 case BIT_AND_EXPR:
11424 case BIT_IOR_EXPR:
11425 case BIT_XOR_EXPR:
11426 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11427 != INTEGER_CST)
11428 break;
11429 cst0 = TREE_OPERAND (pmop[which], 1);
11430 cst0 &= cst1;
11431 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11433 if (cst0 != cst1)
11434 break;
11436 else if (cst0 != 0)
11437 break;
11438 /* If C or D is of the form (A & N) where
11439 (N & M) == M, or of the form (A | N) or
11440 (A ^ N) where (N & M) == 0, replace it with A. */
11441 pmop[which] = TREE_OPERAND (pmop[which], 0);
11442 break;
11443 case INTEGER_CST:
11444 /* If C or D is an N where (N & M) == 0, it can be
11445 omitted (assumed 0). */
11446 if ((TREE_CODE (arg0) == PLUS_EXPR
11447 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11448 && (cst1 & pmop[which]) == 0)
11449 pmop[which] = NULL;
11450 break;
11451 default:
11452 break;
11455 /* Only build anything new if we optimized one or both arguments
11456 above. */
11457 if (pmop[0] != TREE_OPERAND (arg0, 0)
11458 || (TREE_CODE (arg0) != NEGATE_EXPR
11459 && pmop[1] != TREE_OPERAND (arg0, 1)))
11461 tree utype = TREE_TYPE (arg0);
11462 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11464 /* Perform the operations in a type that has defined
11465 overflow behavior. */
11466 utype = unsigned_type_for (TREE_TYPE (arg0));
11467 if (pmop[0] != NULL)
11468 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11469 if (pmop[1] != NULL)
11470 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11473 if (TREE_CODE (arg0) == NEGATE_EXPR)
11474 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11475 else if (TREE_CODE (arg0) == PLUS_EXPR)
11477 if (pmop[0] != NULL && pmop[1] != NULL)
11478 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11479 pmop[0], pmop[1]);
11480 else if (pmop[0] != NULL)
11481 tem = pmop[0];
11482 else if (pmop[1] != NULL)
11483 tem = pmop[1];
11484 else
11485 return build_int_cst (type, 0);
11487 else if (pmop[0] == NULL)
11488 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11489 else
11490 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11491 pmop[0], pmop[1]);
11492 /* TEM is now the new binary +, - or unary - replacement. */
11493 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11494 fold_convert_loc (loc, utype, arg1));
11495 return fold_convert_loc (loc, type, tem);
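/* Illustrative sketch, not part of the original source: with M == 0x0f,
   ((a & 0xff) + b) & 0x0f becomes (a + b) & 0x0f because 0xff covers
   every bit of M, and ((a | 0x30) - b) & 0x0f becomes (a - b) & 0x0f
   because 0x30 and M share no bits.  */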
11500 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11501 if (t1 != NULL_TREE)
11502 return t1;
11503 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11504 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11505 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11507 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11509 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11510 if (mask == -1)
11511 return
11512 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11515 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11517 This results in more efficient code for machines without a NOR
11518 instruction. Combine will canonicalize to the first form
11519 which will allow use of NOR instructions provided by the
11520 backend if they exist. */
11521 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11522 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11524 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11525 build2 (BIT_IOR_EXPR, type,
11526 fold_convert_loc (loc, type,
11527 TREE_OPERAND (arg0, 0)),
11528 fold_convert_loc (loc, type,
11529 TREE_OPERAND (arg1, 0))));
11532 /* If arg0 is derived from the address of an object or function, we may
11533 be able to fold this expression using the object or function's
11534 alignment. */
11535 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11537 unsigned HOST_WIDE_INT modulus, residue;
11538 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11540 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11541 integer_onep (arg1));
11543 /* This works because modulus is a power of 2. If this weren't the
11544 case, we'd have to replace it by its greatest power-of-2
11545 divisor: modulus & -modulus. */
11546 if (low < modulus)
11547 return build_int_cst (type, residue & low);
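/* Illustrative sketch, not part of the original source: if arg0 is &v
   for a variable v known to be 8-byte aligned, the modulus is 8 and the
   residue 0, so ANDing the address with 7 folds to 0.  */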
11550 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11551 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11552 if the new mask might be further optimized. */
11553 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11554 || TREE_CODE (arg0) == RSHIFT_EXPR)
11555 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11556 && TREE_CODE (arg1) == INTEGER_CST
11557 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11558 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11559 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11560 < TYPE_PRECISION (TREE_TYPE (arg0))))
11562 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11563 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11564 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11565 tree shift_type = TREE_TYPE (arg0);
11567 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11568 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11569 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11570 && TYPE_PRECISION (TREE_TYPE (arg0))
11571 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11573 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11574 tree arg00 = TREE_OPERAND (arg0, 0);
11575 /* See if more bits can be proven as zero because of
11576 zero extension. */
11577 if (TREE_CODE (arg00) == NOP_EXPR
11578 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11580 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11581 if (TYPE_PRECISION (inner_type)
11582 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11583 && TYPE_PRECISION (inner_type) < prec)
11585 prec = TYPE_PRECISION (inner_type);
11586 /* See if we can shorten the right shift. */
11587 if (shiftc < prec)
11588 shift_type = inner_type;
11589 /* Otherwise X >> C1 is all zeros, so we'll optimize
11590 it into (X, 0) later on by making sure zerobits
11591 is all ones. */
11594 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11595 if (shiftc < prec)
11597 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11598 zerobits <<= prec - shiftc;
11600 /* For an arithmetic shift, if the sign bit could be set, zerobits
11601 can actually contain sign bits, so no transformation is
11602 possible unless MASK masks them all away. In that
11603 case the shift needs to be converted into a logical shift. */
11604 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11605 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11607 if ((mask & zerobits) == 0)
11608 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11609 else
11610 zerobits = 0;
11614 /* ((X << 16) & 0xff00) is (X, 0). */
11615 if ((mask & zerobits) == mask)
11616 return omit_one_operand_loc (loc, type,
11617 build_int_cst (type, 0), arg0);
11619 newmask = mask | zerobits;
11620 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11622 /* Only do the transformation if NEWMASK is some integer
11623 mode's mask. */
11624 for (prec = BITS_PER_UNIT;
11625 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11626 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11627 break;
11628 if (prec < HOST_BITS_PER_WIDE_INT
11629 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11631 tree newmaskt;
11633 if (shift_type != TREE_TYPE (arg0))
11635 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11636 fold_convert_loc (loc, shift_type,
11637 TREE_OPERAND (arg0, 0)),
11638 TREE_OPERAND (arg0, 1));
11639 tem = fold_convert_loc (loc, type, tem);
11641 else
11642 tem = op0;
11643 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11644 if (!tree_int_cst_equal (newmaskt, arg1))
11645 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11650 goto associate;
11652 case RDIV_EXPR:
11653 /* Don't touch a floating-point divide by zero unless the mode
11654 of the constant can represent infinity. */
11655 if (TREE_CODE (arg1) == REAL_CST
11656 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11657 && real_zerop (arg1))
11658 return NULL_TREE;
11660 /* (-A) / (-B) -> A / B */
11661 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11662 return fold_build2_loc (loc, RDIV_EXPR, type,
11663 TREE_OPERAND (arg0, 0),
11664 negate_expr (arg1));
11665 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11666 return fold_build2_loc (loc, RDIV_EXPR, type,
11667 negate_expr (arg0),
11668 TREE_OPERAND (arg1, 0));
11670 /* Convert A/B/C to A/(B*C). */
11671 if (flag_reciprocal_math
11672 && TREE_CODE (arg0) == RDIV_EXPR)
11673 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11674 fold_build2_loc (loc, MULT_EXPR, type,
11675 TREE_OPERAND (arg0, 1), arg1));
11677 /* Convert A/(B/C) to (A/B)*C. */
11678 if (flag_reciprocal_math
11679 && TREE_CODE (arg1) == RDIV_EXPR)
11680 return fold_build2_loc (loc, MULT_EXPR, type,
11681 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11682 TREE_OPERAND (arg1, 0)),
11683 TREE_OPERAND (arg1, 1));
11685 /* Convert C1/(X*C2) into (C1/C2)/X. */
11686 if (flag_reciprocal_math
11687 && TREE_CODE (arg1) == MULT_EXPR
11688 && TREE_CODE (arg0) == REAL_CST
11689 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11691 tree tem = const_binop (RDIV_EXPR, arg0,
11692 TREE_OPERAND (arg1, 1));
11693 if (tem)
11694 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11695 TREE_OPERAND (arg1, 0));
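/* Illustrative sketch, not part of the original source: under
   -freciprocal-math, 6.0 / (x * 2.0) becomes (6.0 / 2.0) / x, i.e.
   3.0 / x, with const_binop folding the two REAL_CSTs.  */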
11698 if (flag_unsafe_math_optimizations)
11700 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11701 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11703 /* Optimize sin(x)/cos(x) as tan(x). */
11704 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11705 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11706 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11707 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11708 CALL_EXPR_ARG (arg1, 0), 0))
11710 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11712 if (tanfn != NULL_TREE)
11713 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11716 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11717 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11718 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11719 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11720 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11721 CALL_EXPR_ARG (arg1, 0), 0))
11723 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11725 if (tanfn != NULL_TREE)
11727 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11728 CALL_EXPR_ARG (arg0, 0));
11729 return fold_build2_loc (loc, RDIV_EXPR, type,
11730 build_real (type, dconst1), tmp);
11734 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11735 NaNs or Infinities. */
11736 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11737 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11738 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11740 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11741 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11743 if (! HONOR_NANS (arg00)
11744 && ! HONOR_INFINITIES (element_mode (arg00))
11745 && operand_equal_p (arg00, arg01, 0))
11747 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11749 if (cosfn != NULL_TREE)
11750 return build_call_expr_loc (loc, cosfn, 1, arg00);
11754 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11755 NaNs or Infinities. */
11756 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11757 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11758 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11760 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11761 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11763 if (! HONOR_NANS (arg00)
11764 && ! HONOR_INFINITIES (element_mode (arg00))
11765 && operand_equal_p (arg00, arg01, 0))
11767 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11769 if (cosfn != NULL_TREE)
11771 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11772 return fold_build2_loc (loc, RDIV_EXPR, type,
11773 build_real (type, dconst1),
11774 tmp);
11779 /* Optimize pow(x,c)/x as pow(x,c-1). */
11780 if (fcode0 == BUILT_IN_POW
11781 || fcode0 == BUILT_IN_POWF
11782 || fcode0 == BUILT_IN_POWL)
11784 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11785 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11786 if (TREE_CODE (arg01) == REAL_CST
11787 && !TREE_OVERFLOW (arg01)
11788 && operand_equal_p (arg1, arg00, 0))
11790 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11791 REAL_VALUE_TYPE c;
11792 tree arg;
11794 c = TREE_REAL_CST (arg01);
11795 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11796 arg = build_real (type, c);
11797 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11801 /* Optimize a/root(b/c) into a*root(c/b). */
11802 if (BUILTIN_ROOT_P (fcode1))
11804 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11806 if (TREE_CODE (rootarg) == RDIV_EXPR)
11808 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11809 tree b = TREE_OPERAND (rootarg, 0);
11810 tree c = TREE_OPERAND (rootarg, 1);
11812 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11814 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11815 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11819 /* Optimize x/expN(y) into x*expN(-y). */
11820 if (BUILTIN_EXPONENT_P (fcode1))
11822 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11823 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11824 arg1 = build_call_expr_loc (loc,
11825 expfn, 1,
11826 fold_convert_loc (loc, type, arg));
11827 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11830 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11831 if (fcode1 == BUILT_IN_POW
11832 || fcode1 == BUILT_IN_POWF
11833 || fcode1 == BUILT_IN_POWL)
11835 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11836 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11837 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11838 tree neg11 = fold_convert_loc (loc, type,
11839 negate_expr (arg11));
11840 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11841 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11844 return NULL_TREE;
11846 case TRUNC_DIV_EXPR:
11847 /* Optimize (X & (-A)) / A where A is a power of 2,
11848 to X >> log2(A) */
11849 if (TREE_CODE (arg0) == BIT_AND_EXPR
11850 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11851 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11853 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11854 arg1, TREE_OPERAND (arg0, 1));
11855 if (sum && integer_zerop (sum)) {
11856 tree pow2 = build_int_cst (integer_type_node,
11857 wi::exact_log2 (arg1));
11858 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11859 TREE_OPERAND (arg0, 0), pow2);
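/* Illustrative sketch, not part of the original source: for signed int
   x, (x & -8) / 8 becomes x >> 3; the sum check verifies that the AND
   mask is exactly the negation of the divisor.  */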
11863 /* Fall through */
11865 case FLOOR_DIV_EXPR:
11866 /* Simplify A / (B << N) where A and B are positive and B is
11867 a power of 2, to A >> (N + log2(B)). */
11868 strict_overflow_p = false;
11869 if (TREE_CODE (arg1) == LSHIFT_EXPR
11870 && (TYPE_UNSIGNED (type)
11871 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11873 tree sval = TREE_OPERAND (arg1, 0);
11874 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11876 tree sh_cnt = TREE_OPERAND (arg1, 1);
11877 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11878 wi::exact_log2 (sval));
11880 if (strict_overflow_p)
11881 fold_overflow_warning (("assuming signed overflow does not "
11882 "occur when simplifying A / (B << N)"),
11883 WARN_STRICT_OVERFLOW_MISC);
11885 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11886 sh_cnt, pow2);
11887 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11888 fold_convert_loc (loc, type, arg0), sh_cnt);
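/* Illustrative sketch, not part of the original source: for unsigned x,
   x / (2u << n) becomes x >> (n + 1), since log2 (2) == 1.  */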
11892 /* Fall through */
11894 case ROUND_DIV_EXPR:
11895 case CEIL_DIV_EXPR:
11896 case EXACT_DIV_EXPR:
11897 if (integer_zerop (arg1))
11898 return NULL_TREE;
11900 /* Convert -A / -B to A / B when the type is signed and overflow is
11901 undefined. */
11902 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11903 && TREE_CODE (arg0) == NEGATE_EXPR
11904 && negate_expr_p (arg1))
11906 if (INTEGRAL_TYPE_P (type))
11907 fold_overflow_warning (("assuming signed overflow does not occur "
11908 "when distributing negation across "
11909 "division"),
11910 WARN_STRICT_OVERFLOW_MISC);
11911 return fold_build2_loc (loc, code, type,
11912 fold_convert_loc (loc, type,
11913 TREE_OPERAND (arg0, 0)),
11914 fold_convert_loc (loc, type,
11915 negate_expr (arg1)));
11917 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11918 && TREE_CODE (arg1) == NEGATE_EXPR
11919 && negate_expr_p (arg0))
11921 if (INTEGRAL_TYPE_P (type))
11922 fold_overflow_warning (("assuming signed overflow does not occur "
11923 "when distributing negation across "
11924 "division"),
11925 WARN_STRICT_OVERFLOW_MISC);
11926 return fold_build2_loc (loc, code, type,
11927 fold_convert_loc (loc, type,
11928 negate_expr (arg0)),
11929 fold_convert_loc (loc, type,
11930 TREE_OPERAND (arg1, 0)));
11933 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11934 operation, EXACT_DIV_EXPR.
11936 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11937 At one time others generated faster code; it's not clear if they do
11938 after the last round of changes to the DIV code in expmed.c. */
11939 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11940 && multiple_of_p (type, arg0, arg1))
11941 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
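/* Illustrative sketch, not part of the original source: when arg0 is
   known to be a multiple of the divisor, e.g. (x * 4) with CEIL_DIV or
   FLOOR_DIV by 4, the division is rewritten as EXACT_DIV, which expands
   to the cheapest sequence.  */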
11943 strict_overflow_p = false;
11944 if (TREE_CODE (arg1) == INTEGER_CST
11945 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11946 &strict_overflow_p)))
11948 if (strict_overflow_p)
11949 fold_overflow_warning (("assuming signed overflow does not occur "
11950 "when simplifying division"),
11951 WARN_STRICT_OVERFLOW_MISC);
11952 return fold_convert_loc (loc, type, tem);
11955 return NULL_TREE;
11957 case CEIL_MOD_EXPR:
11958 case FLOOR_MOD_EXPR:
11959 case ROUND_MOD_EXPR:
11960 case TRUNC_MOD_EXPR:
11961 /* X % -Y is the same as X % Y. */
11962 if (code == TRUNC_MOD_EXPR
11963 && !TYPE_UNSIGNED (type)
11964 && TREE_CODE (arg1) == NEGATE_EXPR
11965 && !TYPE_OVERFLOW_TRAPS (type))
11966 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11967 fold_convert_loc (loc, type,
11968 TREE_OPERAND (arg1, 0)));
11970 strict_overflow_p = false;
11971 if (TREE_CODE (arg1) == INTEGER_CST
11972 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11973 &strict_overflow_p)))
11975 if (strict_overflow_p)
11976 fold_overflow_warning (("assuming signed overflow does not occur "
11977 "when simplifying modulus"),
11978 WARN_STRICT_OVERFLOW_MISC);
11979 return fold_convert_loc (loc, type, tem);
11982 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11983 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11984 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11985 && (TYPE_UNSIGNED (type)
11986 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11988 tree c = arg1;
11989 /* Also optimize A % (C << N) where C is a power of 2,
11990 to A & ((C << N) - 1). */
11991 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11992 c = TREE_OPERAND (arg1, 0);
11994 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11996 tree mask
11997 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11998 build_int_cst (TREE_TYPE (arg1), 1));
11999 if (strict_overflow_p)
12000 fold_overflow_warning (("assuming signed overflow does not "
12001 "occur when simplifying "
12002 "X % (power of two)"),
12003 WARN_STRICT_OVERFLOW_MISC);
12004 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12005 fold_convert_loc (loc, type, arg0),
12006 fold_convert_loc (loc, type, mask));
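/* Illustrative sketch, not part of the original source: for unsigned x,
   x % 8 becomes x & 7, and x % (4u << n) becomes x & ((4u << n) - 1).  */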
12010 return NULL_TREE;
12012 case LROTATE_EXPR:
12013 case RROTATE_EXPR:
12014 case RSHIFT_EXPR:
12015 case LSHIFT_EXPR:
12016 /* Since a negative shift count is not well-defined,
12017 don't try to compute it in the compiler. */
12018 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12019 return NULL_TREE;
12021 prec = element_precision (type);
12023 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12024 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12025 && tree_to_uhwi (arg1) < prec
12026 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12027 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12029 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12030 + tree_to_uhwi (arg1));
12032 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12033 being well defined. */
12034 if (low >= prec)
12036 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12037 low = low % prec;
12038 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12039 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12040 TREE_OPERAND (arg0, 0));
12041 else
12042 low = prec - 1;
12045 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12046 build_int_cst (TREE_TYPE (arg1), low));
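/* Illustrative sketch, not part of the original source: for 32-bit
   unsigned x, (x << 3) << 5 becomes x << 8; if the combined count
   reaches the precision, unsigned and left shifts fold to 0, while
   rotates reduce the count modulo the precision.  */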
12049 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12050 into x & ((unsigned)-1 >> c) for unsigned types. */
12051 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12052 || (TYPE_UNSIGNED (type)
12053 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12054 && tree_fits_uhwi_p (arg1)
12055 && tree_to_uhwi (arg1) < prec
12056 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12057 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12059 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12060 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12061 tree lshift;
12062 tree arg00;
12064 if (low0 == low1)
12066 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12068 lshift = build_minus_one_cst (type);
12069 lshift = const_binop (code, lshift, arg1);
12071 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
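/* Illustrative sketch, not part of the original source: for 32-bit
   unsigned x, (x >> 4) << 4 becomes x & 0xfffffff0, and (x << 4) >> 4
   becomes x & 0x0fffffff.  */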
12075 /* If we have a rotate of a bit operation with the rotate count and
12076 the second operand of the bit operation both constant,
12077 permute the two operations. */
12078 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12079 && (TREE_CODE (arg0) == BIT_AND_EXPR
12080 || TREE_CODE (arg0) == BIT_IOR_EXPR
12081 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12082 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12083 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12084 fold_build2_loc (loc, code, type,
12085 TREE_OPERAND (arg0, 0), arg1),
12086 fold_build2_loc (loc, code, type,
12087 TREE_OPERAND (arg0, 1), arg1));
12089 /* Two consecutive rotates adding up to some integer
12090 multiple of the precision of the type can be ignored. */
12091 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12092 && TREE_CODE (arg0) == RROTATE_EXPR
12093 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12094 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12095 prec) == 0)
12096 return TREE_OPERAND (arg0, 0);
12098 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12099 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12100 if the latter can be further optimized. */
12101 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12102 && TREE_CODE (arg0) == BIT_AND_EXPR
12103 && TREE_CODE (arg1) == INTEGER_CST
12104 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12106 tree mask = fold_build2_loc (loc, code, type,
12107 fold_convert_loc (loc, type,
12108 TREE_OPERAND (arg0, 1)),
12109 arg1);
12110 tree shift = fold_build2_loc (loc, code, type,
12111 fold_convert_loc (loc, type,
12112 TREE_OPERAND (arg0, 0)),
12113 arg1);
12114 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12115 if (tem)
12116 return tem;
12119 return NULL_TREE;
12121 case MIN_EXPR:
12122 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12123 if (tem)
12124 return tem;
12125 goto associate;
12127 case MAX_EXPR:
12128 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12129 if (tem)
12130 return tem;
12131 goto associate;
12133 case TRUTH_ANDIF_EXPR:
12134 /* Note that the operands of this must be ints
12135 and their values must be 0 or 1.
12136 ("true" is a fixed value perhaps depending on the language.) */
12137 /* If first arg is constant zero, return it. */
12138 if (integer_zerop (arg0))
12139 return fold_convert_loc (loc, type, arg0);
12140 case TRUTH_AND_EXPR:
12141 /* If either arg is constant true, drop it. */
12142 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12143 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12144 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12145 /* Preserve sequence points. */
12146 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12147 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12148 /* If second arg is constant zero, result is zero, but first arg
12149 must be evaluated. */
12150 if (integer_zerop (arg1))
12151 return omit_one_operand_loc (loc, type, arg1, arg0);
12152 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12153 case will be handled here. */
12154 if (integer_zerop (arg0))
12155 return omit_one_operand_loc (loc, type, arg0, arg1);
12157 /* !X && X is always false. */
12158 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12159 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12160 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12161 /* X && !X is always false. */
12162 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12163 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12164 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12166 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12167 means A >= Y && A != MAX, but in this case we know that
12168 A < X <= MAX. */
12170 if (!TREE_SIDE_EFFECTS (arg0)
12171 && !TREE_SIDE_EFFECTS (arg1))
12173 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12174 if (tem && !operand_equal_p (tem, arg0, 0))
12175 return fold_build2_loc (loc, code, type, tem, arg1);
12177 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12178 if (tem && !operand_equal_p (tem, arg1, 0))
12179 return fold_build2_loc (loc, code, type, arg0, tem);
12182 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12183 != NULL_TREE)
12184 return tem;
12186 return NULL_TREE;
12188 case TRUTH_ORIF_EXPR:
12189 /* Note that the operands of this must be ints
12190 and their values must be 0 or true.
12191 ("true" is a fixed value perhaps depending on the language.) */
12192 /* If first arg is constant true, return it. */
12193 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12194 return fold_convert_loc (loc, type, arg0);
12195 case TRUTH_OR_EXPR:
12196 /* If either arg is constant zero, drop it. */
12197 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12198 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12199 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12200 /* Preserve sequence points. */
12201 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12202 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12203 /* If second arg is constant true, result is true, but we must
12204 evaluate first arg. */
12205 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12206 return omit_one_operand_loc (loc, type, arg1, arg0);
12207 /* Likewise for first arg, but note this only occurs here for
12208 TRUTH_OR_EXPR. */
12209 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12210 return omit_one_operand_loc (loc, type, arg0, arg1);
12212 /* !X || X is always true. */
12213 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12214 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12215 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12216 /* X || !X is always true. */
12217 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12218 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12219 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12221 /* (X && !Y) || (!X && Y) is X ^ Y */
12222 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12223 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12225 tree a0, a1, l0, l1, n0, n1;
12227 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12228 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12230 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12231 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12233 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12234 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12236 if ((operand_equal_p (n0, a0, 0)
12237 && operand_equal_p (n1, a1, 0))
12238 || (operand_equal_p (n0, a1, 0)
12239 && operand_equal_p (n1, a0, 0)))
12240 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12243 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12244 != NULL_TREE)
12245 return tem;
12247 return NULL_TREE;
12249 case TRUTH_XOR_EXPR:
12250 /* If the second arg is constant zero, drop it. */
12251 if (integer_zerop (arg1))
12252 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12253 /* If the second arg is constant true, this is a logical inversion. */
12254 if (integer_onep (arg1))
12256 tem = invert_truthvalue_loc (loc, arg0);
12257 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12259 /* Identical arguments cancel to zero. */
12260 if (operand_equal_p (arg0, arg1, 0))
12261 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12263 /* !X ^ X is always true. */
12264 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12265 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12266 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12268 /* X ^ !X is always true. */
12269 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12270 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12271 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12273 return NULL_TREE;
12275 case EQ_EXPR:
12276 case NE_EXPR:
12277 STRIP_NOPS (arg0);
12278 STRIP_NOPS (arg1);
12280 tem = fold_comparison (loc, code, type, op0, op1);
12281 if (tem != NULL_TREE)
12282 return tem;
12284 /* bool_var != 0 becomes bool_var. */
12285 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12286 && code == NE_EXPR)
12287 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12289 /* bool_var == 1 becomes bool_var. */
12290 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12291 && code == EQ_EXPR)
12292 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12294 /* bool_var != 1 becomes !bool_var. */
12295 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12296 && code == NE_EXPR)
12297 return fold_convert_loc (loc, type,
12298 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12299 TREE_TYPE (arg0), arg0));
12301 /* bool_var == 0 becomes !bool_var. */
12302 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12303 && code == EQ_EXPR)
12304 return fold_convert_loc (loc, type,
12305 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12306 TREE_TYPE (arg0), arg0));
12308 /* !exp != 0 becomes !exp */
12309 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12310 && code == NE_EXPR)
12311 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12313 /* If this is an equality comparison of the address of two non-weak,
12314 unaliased symbols, neither of which is extern (since we do not
12315 have access to attributes for externs), then we know the result. */
12316 if (TREE_CODE (arg0) == ADDR_EXPR
12317 && DECL_P (TREE_OPERAND (arg0, 0))
12318 && TREE_CODE (arg1) == ADDR_EXPR
12319 && DECL_P (TREE_OPERAND (arg1, 0)))
12321 int equal;
12323 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12324 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12325 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12326 ->equal_address_to (symtab_node::get_create
12327 (TREE_OPERAND (arg1, 0)));
12328 else
12329 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12330 if (equal != 2)
12331 return constant_boolean_node (equal
12332 ? code == EQ_EXPR : code != EQ_EXPR,
12333 type);
12336 /* Similarly for a NEGATE_EXPR. */
12337 if (TREE_CODE (arg0) == NEGATE_EXPR
12338 && TREE_CODE (arg1) == INTEGER_CST
12339 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12340 arg1)))
12341 && TREE_CODE (tem) == INTEGER_CST
12342 && !TREE_OVERFLOW (tem))
12343 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12345 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12346 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12347 && TREE_CODE (arg1) == INTEGER_CST
12348 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12349 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12350 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12351 fold_convert_loc (loc,
12352 TREE_TYPE (arg0),
12353 arg1),
12354 TREE_OPERAND (arg0, 1)));
12356 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12357 if ((TREE_CODE (arg0) == PLUS_EXPR
12358 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12359 || TREE_CODE (arg0) == MINUS_EXPR)
12360 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12361 0)),
12362 arg1, 0)
12363 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12364 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12366 tree val = TREE_OPERAND (arg0, 1);
12367 return omit_two_operands_loc (loc, type,
12368 fold_build2_loc (loc, code, type,
12369 val,
12370 build_int_cst (TREE_TYPE (val),
12371 0)),
12372 TREE_OPERAND (arg0, 0), arg1);
12375 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12376 if (TREE_CODE (arg0) == MINUS_EXPR
12377 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12378 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12379 1)),
12380 arg1, 0)
12381 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12383 return omit_two_operands_loc (loc, type,
12384 code == NE_EXPR
12385 ? boolean_true_node : boolean_false_node,
12386 TREE_OPERAND (arg0, 1), arg1);
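/* For instance, 7 - X == X would require 2*X == 7, which has no
   integer solution, so for odd C the equality folds to false and the
   inequality to true, independently of X. */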
12389 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12390 if (TREE_CODE (arg0) == ABS_EXPR
12391 && (integer_zerop (arg1) || real_zerop (arg1)))
12392 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12394 /* If this is an EQ or NE comparison with zero and ARG0 is
12395 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12396 two operations, but the latter can be done in one less insn
12397 on machines that have only two-operand insns or on which a
12398 constant cannot be the first operand. */
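/* For instance (illustrative):

     int f (int flags, int n) { return ((1 << n) & flags) != 0; }

   becomes the equivalent of

     int f (int flags, int n) { return ((flags >> n) & 1) != 0; }

   Both test bit N of FLAGS, but the rewritten form ANDs with the
   constant 1 rather than with a computed value. */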
12399 if (TREE_CODE (arg0) == BIT_AND_EXPR
12400 && integer_zerop (arg1))
12402 tree arg00 = TREE_OPERAND (arg0, 0);
12403 tree arg01 = TREE_OPERAND (arg0, 1);
12404 if (TREE_CODE (arg00) == LSHIFT_EXPR
12405 && integer_onep (TREE_OPERAND (arg00, 0)))
12407 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12408 arg01, TREE_OPERAND (arg00, 1));
12409 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12410 build_int_cst (TREE_TYPE (arg0), 1));
12411 return fold_build2_loc (loc, code, type,
12412 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12413 arg1);
12415 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12416 && integer_onep (TREE_OPERAND (arg01, 0)))
12418 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12419 arg00, TREE_OPERAND (arg01, 1));
12420 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12421 build_int_cst (TREE_TYPE (arg0), 1));
12422 return fold_build2_loc (loc, code, type,
12423 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12424 arg1);
12428 /* If this is an NE or EQ comparison of zero against the result of a
12429 signed MOD operation whose second operand is a power of 2, make
12430 the MOD operation unsigned since it is simpler and equivalent. */
12431 if (integer_zerop (arg1)
12432 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12433 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12434 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12435 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12436 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12437 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12439 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12440 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12441 fold_convert_loc (loc, newtype,
12442 TREE_OPERAND (arg0, 0)),
12443 fold_convert_loc (loc, newtype,
12444 TREE_OPERAND (arg0, 1)));
12446 return fold_build2_loc (loc, code, type, newmod,
12447 fold_convert_loc (loc, newtype, arg1));
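/* For instance, for signed X, X % 8 == 0 holds exactly when
   (unsigned) X % 8 == 0, because 8 divides 2**prec; the unsigned
   modulus then reduces to the mask test (X & 7) == 0. */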
12450 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12451 C1 is a valid shift constant, and C2 is a power of two, i.e.
12452 a single bit. */
12453 if (TREE_CODE (arg0) == BIT_AND_EXPR
12454 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12455 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12456 == INTEGER_CST
12457 && integer_pow2p (TREE_OPERAND (arg0, 1))
12458 && integer_zerop (arg1))
12460 tree itype = TREE_TYPE (arg0);
12461 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12462 prec = TYPE_PRECISION (itype);
12464 /* Check for a valid shift count. */
12465 if (wi::ltu_p (arg001, prec))
12467 tree arg01 = TREE_OPERAND (arg0, 1);
12468 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12469 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12470 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12471 can be rewritten as (X & (C2 << C1)) != 0. */
12472 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12474 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12475 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12476 return fold_build2_loc (loc, code, type, tem,
12477 fold_convert_loc (loc, itype, arg1));
12479 /* Otherwise, for signed (arithmetic) shifts,
12480 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12481 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12482 else if (!TYPE_UNSIGNED (itype))
12483 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12484 arg000, build_int_cst (itype, 0));
12485 /* Otherwise, for unsigned (logical) shifts,
12486 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12487 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12488 else
12489 return omit_one_operand_loc (loc, type,
12490 code == EQ_EXPR ? integer_one_node
12491 : integer_zero_node,
12492 arg000);
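/* For instance, for 32-bit signed X, ((X >> 31) & 2) != 0 tests a
   copy of the sign bit produced by the arithmetic shift and so folds
   to X < 0; for unsigned X the shifted-in bits are zero and the same
   test folds to constant false. */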
12496 /* If we have (A & C) == C where C is a power of 2, convert this into
12497 (A & C) != 0. Similarly for NE_EXPR. */
12498 if (TREE_CODE (arg0) == BIT_AND_EXPR
12499 && integer_pow2p (TREE_OPERAND (arg0, 1))
12500 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12501 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12502 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12503 integer_zero_node));
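/* For instance, (A & 8) == 8 holds exactly when bit 3 of A is set,
   which is exactly (A & 8) != 0: a single bit is either wholly
   present or wholly absent. */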
12505 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12506 bit, then fold the expression into A < 0 or A >= 0. */
12507 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12508 if (tem)
12509 return tem;
12511 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12512 Similarly for NE_EXPR. */
12513 if (TREE_CODE (arg0) == BIT_AND_EXPR
12514 && TREE_CODE (arg1) == INTEGER_CST
12515 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12517 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12518 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12519 TREE_OPERAND (arg0, 1));
12520 tree dandnotc
12521 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12522 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12523 notc);
12524 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12525 if (integer_nonzerop (dandnotc))
12526 return omit_one_operand_loc (loc, type, rslt, arg0);
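/* For instance, (A & 5) == 2 can never hold: the AND clears every
   bit outside 5 (binary 101), yet 2 (binary 010) has such a bit set,
   so the equality folds to 0 and the inequality to 1. */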
12529 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12530 Similarly for NE_EXPR. */
12531 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12532 && TREE_CODE (arg1) == INTEGER_CST
12533 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12535 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12536 tree candnotd
12537 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12538 TREE_OPERAND (arg0, 1),
12539 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12540 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12541 if (integer_nonzerop (candnotd))
12542 return omit_one_operand_loc (loc, type, rslt, arg0);
12545 /* If this is a comparison of a field, we may be able to simplify it. */
12546 if ((TREE_CODE (arg0) == COMPONENT_REF
12547 || TREE_CODE (arg0) == BIT_FIELD_REF)
12548 /* Handle the constant case even without -O
12549 to make sure the warnings are given. */
12550 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12552 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12553 if (t1)
12554 return t1;
12557 /* Optimize comparisons of strlen vs zero to a compare of the
12558 first character of the string vs zero. To wit,
12559 strlen(ptr) == 0 => *ptr == 0
12560 strlen(ptr) != 0 => *ptr != 0
12561 Other cases should reduce to one of these two (or a constant)
12562 due to the return value of strlen being unsigned. */
12563 if (TREE_CODE (arg0) == CALL_EXPR
12564 && integer_zerop (arg1))
12566 tree fndecl = get_callee_fndecl (arg0);
12568 if (fndecl
12569 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12570 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12571 && call_expr_nargs (arg0) == 1
12572 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12574 tree iref = build_fold_indirect_ref_loc (loc,
12575 CALL_EXPR_ARG (arg0, 0));
12576 return fold_build2_loc (loc, code, type, iref,
12577 build_int_cst (TREE_TYPE (iref), 0));
12581 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12582 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12583 if (TREE_CODE (arg0) == RSHIFT_EXPR
12584 && integer_zerop (arg1)
12585 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12587 tree arg00 = TREE_OPERAND (arg0, 0);
12588 tree arg01 = TREE_OPERAND (arg0, 1);
12589 tree itype = TREE_TYPE (arg00);
12590 if (wi::eq_p (arg01, element_precision (itype) - 1))
12592 if (TYPE_UNSIGNED (itype))
12594 itype = signed_type_for (itype);
12595 arg00 = fold_convert_loc (loc, itype, arg00);
12597 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12598 type, arg00, build_zero_cst (itype));
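/* For instance, for 32-bit X, X >> 31 leaves only the sign bit, so
   (X >> 31) != 0 folds to (int) X < 0 and (X >> 31) == 0 folds to
   (int) X >= 0, converting an unsigned X to the signed type first. */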
12602 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12603 if (integer_zerop (arg1)
12604 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12605 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12606 TREE_OPERAND (arg0, 1));
12608 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12609 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12610 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12611 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12612 build_zero_cst (TREE_TYPE (arg0)));
12613 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12614 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12615 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12616 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12617 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12618 build_zero_cst (TREE_TYPE (arg0)));
12620 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12621 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12622 && TREE_CODE (arg1) == INTEGER_CST
12623 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12624 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12625 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12626 TREE_OPERAND (arg0, 1), arg1));
12628 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12629 (X & C) == 0 when C is a single bit. */
12630 if (TREE_CODE (arg0) == BIT_AND_EXPR
12631 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12632 && integer_zerop (arg1)
12633 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12635 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12636 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12637 TREE_OPERAND (arg0, 1));
12638 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12639 type, tem,
12640 fold_convert_loc (loc, TREE_TYPE (arg0),
12641 arg1));
12644 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12645 constant C is a power of two, i.e. a single bit. */
12646 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12647 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12648 && integer_zerop (arg1)
12649 && integer_pow2p (TREE_OPERAND (arg0, 1))
12650 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12651 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12653 tree arg00 = TREE_OPERAND (arg0, 0);
12654 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12655 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12658 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12659 when C is a power of two, i.e. a single bit. */
12660 if (TREE_CODE (arg0) == BIT_AND_EXPR
12661 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12662 && integer_zerop (arg1)
12663 && integer_pow2p (TREE_OPERAND (arg0, 1))
12664 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12665 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12667 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12668 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12669 arg000, TREE_OPERAND (arg0, 1));
12670 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12671 tem, build_int_cst (TREE_TYPE (tem), 0));
12674 if (integer_zerop (arg1)
12675 && tree_expr_nonzero_p (arg0))
12677 tree res = constant_boolean_node (code == NE_EXPR, type);
12678 return omit_one_operand_loc (loc, type, res, arg0);
12681 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12682 if (TREE_CODE (arg0) == NEGATE_EXPR
12683 && TREE_CODE (arg1) == NEGATE_EXPR)
12684 return fold_build2_loc (loc, code, type,
12685 TREE_OPERAND (arg0, 0),
12686 fold_convert_loc (loc, TREE_TYPE (arg0),
12687 TREE_OPERAND (arg1, 0)));
12689 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12690 if (TREE_CODE (arg0) == BIT_AND_EXPR
12691 && TREE_CODE (arg1) == BIT_AND_EXPR)
12693 tree arg00 = TREE_OPERAND (arg0, 0);
12694 tree arg01 = TREE_OPERAND (arg0, 1);
12695 tree arg10 = TREE_OPERAND (arg1, 0);
12696 tree arg11 = TREE_OPERAND (arg1, 1);
12697 tree itype = TREE_TYPE (arg0);
12699 if (operand_equal_p (arg01, arg11, 0))
12700 return fold_build2_loc (loc, code, type,
12701 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12702 fold_build2_loc (loc,
12703 BIT_XOR_EXPR, itype,
12704 arg00, arg10),
12705 arg01),
12706 build_zero_cst (itype));
12708 if (operand_equal_p (arg01, arg10, 0))
12709 return fold_build2_loc (loc, code, type,
12710 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12711 fold_build2_loc (loc,
12712 BIT_XOR_EXPR, itype,
12713 arg00, arg11),
12714 arg01),
12715 build_zero_cst (itype));
12717 if (operand_equal_p (arg00, arg11, 0))
12718 return fold_build2_loc (loc, code, type,
12719 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12720 fold_build2_loc (loc,
12721 BIT_XOR_EXPR, itype,
12722 arg01, arg10),
12723 arg00),
12724 build_zero_cst (itype));
12726 if (operand_equal_p (arg00, arg10, 0))
12727 return fold_build2_loc (loc, code, type,
12728 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12729 fold_build2_loc (loc,
12730 BIT_XOR_EXPR, itype,
12731 arg01, arg11),
12732 arg00),
12733 build_zero_cst (itype));
12736 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12737 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12739 tree arg00 = TREE_OPERAND (arg0, 0);
12740 tree arg01 = TREE_OPERAND (arg0, 1);
12741 tree arg10 = TREE_OPERAND (arg1, 0);
12742 tree arg11 = TREE_OPERAND (arg1, 1);
12743 tree itype = TREE_TYPE (arg0);
12745 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12746 operand_equal_p guarantees no side-effects so we don't need
12747 to use omit_one_operand on Z. */
12748 if (operand_equal_p (arg01, arg11, 0))
12749 return fold_build2_loc (loc, code, type, arg00,
12750 fold_convert_loc (loc, TREE_TYPE (arg00),
12751 arg10));
12752 if (operand_equal_p (arg01, arg10, 0))
12753 return fold_build2_loc (loc, code, type, arg00,
12754 fold_convert_loc (loc, TREE_TYPE (arg00),
12755 arg11));
12756 if (operand_equal_p (arg00, arg11, 0))
12757 return fold_build2_loc (loc, code, type, arg01,
12758 fold_convert_loc (loc, TREE_TYPE (arg01),
12759 arg10));
12760 if (operand_equal_p (arg00, arg10, 0))
12761 return fold_build2_loc (loc, code, type, arg01,
12762 fold_convert_loc (loc, TREE_TYPE (arg01),
12763 arg11));
12765 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12766 if (TREE_CODE (arg01) == INTEGER_CST
12767 && TREE_CODE (arg11) == INTEGER_CST)
12769 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12770 fold_convert_loc (loc, itype, arg11));
12771 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12772 return fold_build2_loc (loc, code, type, tem,
12773 fold_convert_loc (loc, itype, arg10));
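/* For instance, (X ^ 3) == (Y ^ 5) folds to (X ^ 6) == Y: XORing
   both sides with 5 cancels it on the right-hand side, and
   3 ^ 5 == 6. */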
12777 /* Attempt to simplify equality/inequality comparisons of complex
12778 values. Only lower the comparison if the result is known or
12779 can be simplified to a single scalar comparison. */
12780 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12781 || TREE_CODE (arg0) == COMPLEX_CST)
12782 && (TREE_CODE (arg1) == COMPLEX_EXPR
12783 || TREE_CODE (arg1) == COMPLEX_CST))
12785 tree real0, imag0, real1, imag1;
12786 tree rcond, icond;
12788 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12790 real0 = TREE_OPERAND (arg0, 0);
12791 imag0 = TREE_OPERAND (arg0, 1);
12793 else
12795 real0 = TREE_REALPART (arg0);
12796 imag0 = TREE_IMAGPART (arg0);
12799 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12801 real1 = TREE_OPERAND (arg1, 0);
12802 imag1 = TREE_OPERAND (arg1, 1);
12804 else
12806 real1 = TREE_REALPART (arg1);
12807 imag1 = TREE_IMAGPART (arg1);
12810 rcond = fold_binary_loc (loc, code, type, real0, real1);
12811 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12813 if (integer_zerop (rcond))
12815 if (code == EQ_EXPR)
12816 return omit_two_operands_loc (loc, type, boolean_false_node,
12817 imag0, imag1);
12818 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12820 else
12822 if (code == NE_EXPR)
12823 return omit_two_operands_loc (loc, type, boolean_true_node,
12824 imag0, imag1);
12825 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
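/* For instance, COMPLEX_EXPR <2, x> == COMPLEX_EXPR <2, y> folds to
   x == y: the real parts compare as the constant true, so equality
   of the complex values reduces to equality of the imaginary parts. */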
12829 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12830 if (icond && TREE_CODE (icond) == INTEGER_CST)
12832 if (integer_zerop (icond))
12834 if (code == EQ_EXPR)
12835 return omit_two_operands_loc (loc, type, boolean_false_node,
12836 real0, real1);
12837 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12839 else
12841 if (code == NE_EXPR)
12842 return omit_two_operands_loc (loc, type, boolean_true_node,
12843 real0, real1);
12844 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12849 return NULL_TREE;
12851 case LT_EXPR:
12852 case GT_EXPR:
12853 case LE_EXPR:
12854 case GE_EXPR:
12855 tem = fold_comparison (loc, code, type, op0, op1);
12856 if (tem != NULL_TREE)
12857 return tem;
12859 /* Transform comparisons of the form X +- C CMP X. */
12860 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12861 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12862 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12863 && !HONOR_SNANS (arg0))
12864 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12865 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12867 tree arg01 = TREE_OPERAND (arg0, 1);
12868 enum tree_code code0 = TREE_CODE (arg0);
12869 int is_positive;
12871 if (TREE_CODE (arg01) == REAL_CST)
12872 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12873 else
12874 is_positive = tree_int_cst_sgn (arg01);
12876 /* (X - c) > X becomes false. */
12877 if (code == GT_EXPR
12878 && ((code0 == MINUS_EXPR && is_positive >= 0)
12879 || (code0 == PLUS_EXPR && is_positive <= 0)))
12881 if (TREE_CODE (arg01) == INTEGER_CST
12882 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12883 fold_overflow_warning (("assuming signed overflow does not "
12884 "occur when assuming that (X - c) > X "
12885 "is always false"),
12886 WARN_STRICT_OVERFLOW_ALL);
12887 return constant_boolean_node (0, type);
12890 /* Likewise (X + c) < X becomes false. */
12891 if (code == LT_EXPR
12892 && ((code0 == PLUS_EXPR && is_positive >= 0)
12893 || (code0 == MINUS_EXPR && is_positive <= 0)))
12895 if (TREE_CODE (arg01) == INTEGER_CST
12896 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12897 fold_overflow_warning (("assuming signed overflow does not "
12898 "occur when assuming that "
12899 "(X + c) < X is always false"),
12900 WARN_STRICT_OVERFLOW_ALL);
12901 return constant_boolean_node (0, type);
12904 /* Convert (X - c) <= X to true. */
12905 if (!HONOR_NANS (arg1)
12906 && code == LE_EXPR
12907 && ((code0 == MINUS_EXPR && is_positive >= 0)
12908 || (code0 == PLUS_EXPR && is_positive <= 0)))
12910 if (TREE_CODE (arg01) == INTEGER_CST
12911 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12912 fold_overflow_warning (("assuming signed overflow does not "
12913 "occur when assuming that "
12914 "(X - c) <= X is always true"),
12915 WARN_STRICT_OVERFLOW_ALL);
12916 return constant_boolean_node (1, type);
12919 /* Convert (X + c) >= X to true. */
12920 if (!HONOR_NANS (arg1)
12921 && code == GE_EXPR
12922 && ((code0 == PLUS_EXPR && is_positive >= 0)
12923 || (code0 == MINUS_EXPR && is_positive <= 0)))
12925 if (TREE_CODE (arg01) == INTEGER_CST
12926 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12927 fold_overflow_warning (("assuming signed overflow does not "
12928 "occur when assuming that "
12929 "(X + c) >= X is always true"),
12930 WARN_STRICT_OVERFLOW_ALL);
12931 return constant_boolean_node (1, type);
12934 if (TREE_CODE (arg01) == INTEGER_CST)
12936 /* Convert X + c > X and X - c < X to true for integers. */
12937 if (code == GT_EXPR
12938 && ((code0 == PLUS_EXPR && is_positive > 0)
12939 || (code0 == MINUS_EXPR && is_positive < 0)))
12941 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12942 fold_overflow_warning (("assuming signed overflow does "
12943 "not occur when assuming that "
12944 "(X + c) > X is always true"),
12945 WARN_STRICT_OVERFLOW_ALL);
12946 return constant_boolean_node (1, type);
12949 if (code == LT_EXPR
12950 && ((code0 == MINUS_EXPR && is_positive > 0)
12951 || (code0 == PLUS_EXPR && is_positive < 0)))
12953 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12954 fold_overflow_warning (("assuming signed overflow does "
12955 "not occur when assuming that "
12956 "(X - c) < X is always true"),
12957 WARN_STRICT_OVERFLOW_ALL);
12958 return constant_boolean_node (1, type);
12961 /* Convert X + c <= X and X - c >= X to false for integers. */
12962 if (code == LE_EXPR
12963 && ((code0 == PLUS_EXPR && is_positive > 0)
12964 || (code0 == MINUS_EXPR && is_positive < 0)))
12966 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12967 fold_overflow_warning (("assuming signed overflow does "
12968 "not occur when assuming that "
12969 "(X + c) <= X is always false"),
12970 WARN_STRICT_OVERFLOW_ALL);
12971 return constant_boolean_node (0, type);
12974 if (code == GE_EXPR
12975 && ((code0 == MINUS_EXPR && is_positive > 0)
12976 || (code0 == PLUS_EXPR && is_positive < 0)))
12978 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12979 fold_overflow_warning (("assuming signed overflow does "
12980 "not occur when assuming that "
12981 "(X - c) >= X is always false"),
12982 WARN_STRICT_OVERFLOW_ALL);
12983 return constant_boolean_node (0, type);
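/* For instance, for signed X under -fstrict-overflow semantics,
   X + 1 > X folds to constant true (queueing a -Wstrict-overflow
   warning), whereas for unsigned X it must not, since X + 1 wraps
   to 0 when X == UINT_MAX. */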
12988 /* Comparisons with the highest or lowest possible integer of
12989 the specified precision will have known values. */
12991 tree arg1_type = TREE_TYPE (arg1);
12992 unsigned int prec = TYPE_PRECISION (arg1_type);
12994 if (TREE_CODE (arg1) == INTEGER_CST
12995 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12997 wide_int max = wi::max_value (arg1_type);
12998 wide_int signed_max = wi::max_value (prec, SIGNED);
12999 wide_int min = wi::min_value (arg1_type);
13001 if (wi::eq_p (arg1, max))
13002 switch (code)
13004 case GT_EXPR:
13005 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13007 case GE_EXPR:
13008 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13010 case LE_EXPR:
13011 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13013 case LT_EXPR:
13014 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13016 /* The GE_EXPR and LT_EXPR cases above are not normally
13017 reached because of previous transformations. */
13019 default:
13020 break;
13022 else if (wi::eq_p (arg1, max - 1))
13023 switch (code)
13025 case GT_EXPR:
13026 arg1 = const_binop (PLUS_EXPR, arg1,
13027 build_int_cst (TREE_TYPE (arg1), 1));
13028 return fold_build2_loc (loc, EQ_EXPR, type,
13029 fold_convert_loc (loc,
13030 TREE_TYPE (arg1), arg0),
13031 arg1);
13032 case LE_EXPR:
13033 arg1 = const_binop (PLUS_EXPR, arg1,
13034 build_int_cst (TREE_TYPE (arg1), 1));
13035 return fold_build2_loc (loc, NE_EXPR, type,
13036 fold_convert_loc (loc, TREE_TYPE (arg1),
13037 arg0),
13038 arg1);
13039 default:
13040 break;
13042 else if (wi::eq_p (arg1, min))
13043 switch (code)
13045 case LT_EXPR:
13046 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13048 case LE_EXPR:
13049 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13051 case GE_EXPR:
13052 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13054 case GT_EXPR:
13055 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13057 default:
13058 break;
13060 else if (wi::eq_p (arg1, min + 1))
13061 switch (code)
13063 case GE_EXPR:
13064 arg1 = const_binop (MINUS_EXPR, arg1,
13065 build_int_cst (TREE_TYPE (arg1), 1));
13066 return fold_build2_loc (loc, NE_EXPR, type,
13067 fold_convert_loc (loc,
13068 TREE_TYPE (arg1), arg0),
13069 arg1);
13070 case LT_EXPR:
13071 arg1 = const_binop (MINUS_EXPR, arg1,
13072 build_int_cst (TREE_TYPE (arg1), 1));
13073 return fold_build2_loc (loc, EQ_EXPR, type,
13074 fold_convert_loc (loc, TREE_TYPE (arg1),
13075 arg0),
13076 arg1);
13077 default:
13078 break;
13081 else if (wi::eq_p (arg1, signed_max)
13082 && TYPE_UNSIGNED (arg1_type)
13083 /* We will flip the signedness of the comparison operator
13084 associated with the mode of arg1, so the sign bit is
13085 specified by this mode. Check that arg1 is the signed
13086 max associated with this sign bit. */
13087 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13088 /* signed_type does not work on pointer types. */
13089 && INTEGRAL_TYPE_P (arg1_type))
13091 /* The following case also applies to X < signed_max+1
13092 and X >= signed_max+1 because of previous transformations. */
13093 if (code == LE_EXPR || code == GT_EXPR)
13095 tree st = signed_type_for (arg1_type);
13096 return fold_build2_loc (loc,
13097 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13098 type, fold_convert_loc (loc, st, arg0),
13099 build_int_cst (st, 0));
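/* For instance, for unsigned 32-bit X, X <= 0x7fffffff holds exactly
   when the sign bit is clear, so it folds to (int) X >= 0, and
   X > 0x7fffffff folds to (int) X < 0. */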
13105 /* If we are comparing an ABS_EXPR with a constant, we can
13106 convert all the cases into explicit comparisons, but they may
13107 well not be faster than doing the ABS and one comparison.
13108 But ABS (X) <= C is a range comparison, which becomes a subtraction
13109 and a comparison, and is probably faster. */
13110 if (code == LE_EXPR
13111 && TREE_CODE (arg1) == INTEGER_CST
13112 && TREE_CODE (arg0) == ABS_EXPR
13113 && ! TREE_SIDE_EFFECTS (arg0)
13114 && (0 != (tem = negate_expr (arg1)))
13115 && TREE_CODE (tem) == INTEGER_CST
13116 && !TREE_OVERFLOW (tem))
13117 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13118 build2 (GE_EXPR, type,
13119 TREE_OPERAND (arg0, 0), tem),
13120 build2 (LE_EXPR, type,
13121 TREE_OPERAND (arg0, 0), arg1));
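/* For instance, ABS (X) <= 5 folds to X >= -5 && X <= 5, which later
   folding can turn into the single unsigned range check
   (unsigned) (X + 5) <= 10. */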
13123 /* Convert ABS_EXPR<x> >= 0 to true. */
13124 strict_overflow_p = false;
13125 if (code == GE_EXPR
13126 && (integer_zerop (arg1)
13127 || (! HONOR_NANS (arg0)
13128 && real_zerop (arg1)))
13129 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13131 if (strict_overflow_p)
13132 fold_overflow_warning (("assuming signed overflow does not occur "
13133 "when simplifying comparison of "
13134 "absolute value and zero"),
13135 WARN_STRICT_OVERFLOW_CONDITIONAL);
13136 return omit_one_operand_loc (loc, type,
13137 constant_boolean_node (true, type),
13138 arg0);
13141 /* Convert ABS_EXPR<x> < 0 to false. */
13142 strict_overflow_p = false;
13143 if (code == LT_EXPR
13144 && (integer_zerop (arg1) || real_zerop (arg1))
13145 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13147 if (strict_overflow_p)
13148 fold_overflow_warning (("assuming signed overflow does not occur "
13149 "when simplifying comparison of "
13150 "absolute value and zero"),
13151 WARN_STRICT_OVERFLOW_CONDITIONAL);
13152 return omit_one_operand_loc (loc, type,
13153 constant_boolean_node (false, type),
13154 arg0);
13157 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13158 and similarly for >= into !=. */
13159 if ((code == LT_EXPR || code == GE_EXPR)
13160 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13161 && TREE_CODE (arg1) == LSHIFT_EXPR
13162 && integer_onep (TREE_OPERAND (arg1, 0)))
13163 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13164 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13165 TREE_OPERAND (arg1, 1)),
13166 build_zero_cst (TREE_TYPE (arg0)));
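/* For instance, for unsigned X, X < (1 << Y) says no bit of X at
   position Y or above is set, which is exactly X >> Y == 0: with
   X == 100, Y == 7 gives 100 < 128 and 100 >> 7 == 0, while Y == 5
   gives 100 < 32 false and 100 >> 5 == 3. */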
13168 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13169 otherwise Y might be >= # of bits in X's type and thus e.g.
13170 (unsigned char) (1 << Y) for Y == 15 might be 0.
13171 If the cast is widening, then 1 << Y should have unsigned type,
13172 otherwise if Y is number of bits in the signed shift type minus 1,
13173 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13174 Y == 31 might be 0xffffffff80000000. */
13175 if ((code == LT_EXPR || code == GE_EXPR)
13176 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13177 && CONVERT_EXPR_P (arg1)
13178 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13179 && (element_precision (TREE_TYPE (arg1))
13180 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13181 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13182 || (element_precision (TREE_TYPE (arg1))
13183 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13184 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13186 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13187 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13188 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13189 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13190 build_zero_cst (TREE_TYPE (arg0)));
13193 return NULL_TREE;
13195 case UNORDERED_EXPR:
13196 case ORDERED_EXPR:
13197 case UNLT_EXPR:
13198 case UNLE_EXPR:
13199 case UNGT_EXPR:
13200 case UNGE_EXPR:
13201 case UNEQ_EXPR:
13202 case LTGT_EXPR:
13203 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13205 t1 = fold_relational_const (code, type, arg0, arg1);
13206 if (t1 != NULL_TREE)
13207 return t1;
13210 /* If the first operand is NaN, the result is constant. */
13211 if (TREE_CODE (arg0) == REAL_CST
13212 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13213 && (code != LTGT_EXPR || ! flag_trapping_math))
13215 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13216 ? integer_zero_node
13217 : integer_one_node;
13218 return omit_one_operand_loc (loc, type, t1, arg1);
13221 /* If the second operand is NaN, the result is constant. */
13222 if (TREE_CODE (arg1) == REAL_CST
13223 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13224 && (code != LTGT_EXPR || ! flag_trapping_math))
13226 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13227 ? integer_zero_node
13228 : integer_one_node;
13229 return omit_one_operand_loc (loc, type, t1, arg0);
13232 /* Simplify unordered comparison of something with itself. */
13233 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13234 && operand_equal_p (arg0, arg1, 0))
13235 return constant_boolean_node (1, type);
13237 if (code == LTGT_EXPR
13238 && !flag_trapping_math
13239 && operand_equal_p (arg0, arg1, 0))
13240 return constant_boolean_node (0, type);
13242 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13244 tree targ0 = strip_float_extensions (arg0);
13245 tree targ1 = strip_float_extensions (arg1);
13246 tree newtype = TREE_TYPE (targ0);
13248 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13249 newtype = TREE_TYPE (targ1);
13251 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13252 return fold_build2_loc (loc, code, type,
13253 fold_convert_loc (loc, newtype, targ0),
13254 fold_convert_loc (loc, newtype, targ1));
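/* For instance (illustrative), with float f1, f2:

     (double) f1 < (double) f2

   folds to f1 < f2, because the widening conversion is exact and
   therefore preserves the ordering (a NaN stays a NaN). */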
13257 return NULL_TREE;
13259 case COMPOUND_EXPR:
13260 /* When pedantic, a compound expression can be neither an lvalue
13261 nor an integer constant expression. */
13262 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13263 return NULL_TREE;
13264 /* Don't let (0, 0) be a null pointer constant. */
13265 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13266 : fold_convert_loc (loc, type, arg1);
13267 return pedantic_non_lvalue_loc (loc, tem);
13269 case ASSERT_EXPR:
13270 /* An ASSERT_EXPR should never be passed to fold_binary. */
13271 gcc_unreachable ();
13273 default:
13274 return NULL_TREE;
13275 } /* switch (code) */
13278 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13279 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13280 of GOTO_EXPR. */
13282 static tree
13283 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13285 switch (TREE_CODE (*tp))
13287 case LABEL_EXPR:
13288 return *tp;
13290 case GOTO_EXPR:
13291 *walk_subtrees = 0;
13293 /* ... fall through ... */
13295 default:
13296 return NULL_TREE;
13300 /* Return whether the sub-tree ST contains a label which is accessible from
13301 outside the sub-tree. */
13303 static bool
13304 contains_label_p (tree st)
13306 return
13307 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13310 /* Fold a ternary expression of code CODE and type TYPE with operands
13311 OP0, OP1, and OP2. Return the folded expression if folding is
13312 successful. Otherwise, return NULL_TREE. */
13314 tree
13315 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13316 tree op0, tree op1, tree op2)
13318 tree tem;
13319 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13320 enum tree_code_class kind = TREE_CODE_CLASS (code);
13322 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13323 && TREE_CODE_LENGTH (code) == 3);
13325 /* If this is a commutative operation, and OP0 is a constant, move it
13326 to OP1 to reduce the number of tests below. */
13327 if (commutative_ternary_tree_code (code)
13328 && tree_swap_operands_p (op0, op1, true))
13329 return fold_build3_loc (loc, code, type, op1, op0, op2);
13331 tem = generic_simplify (loc, code, type, op0, op1, op2);
13332 if (tem)
13333 return tem;
13335 /* Strip any conversions that don't change the mode. This is safe
13336 for every expression, except for a comparison expression because
13337 its signedness is derived from its operands. So, in the latter
13338 case, only strip conversions that don't change the signedness.
13340 Note that this is done as an internal manipulation within the
13341 constant folder, in order to find the simplest representation of
13342 the arguments so that their form can be studied. In any case,
13343 the appropriate type conversions should be put back in the tree
13344 that will get out of the constant folder. */
13345 if (op0)
13347 arg0 = op0;
13348 STRIP_NOPS (arg0);
13351 if (op1)
13353 arg1 = op1;
13354 STRIP_NOPS (arg1);
13357 if (op2)
13359 arg2 = op2;
13360 STRIP_NOPS (arg2);
13363 switch (code)
13365 case COMPONENT_REF:
13366 if (TREE_CODE (arg0) == CONSTRUCTOR
13367 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13369 unsigned HOST_WIDE_INT idx;
13370 tree field, value;
13371 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13372 if (field == arg1)
13373 return value;
13375 return NULL_TREE;
13377 case COND_EXPR:
13378 case VEC_COND_EXPR:
13379 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13380 so all simple results must be passed through pedantic_non_lvalue. */
13381 if (TREE_CODE (arg0) == INTEGER_CST)
13383 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13384 tem = integer_zerop (arg0) ? op2 : op1;
13385 /* Only optimize constant conditions when the selected branch
13386 has the same type as the COND_EXPR. This avoids optimizing
13387 away "c ? x : throw", where the throw has a void type.
13388 Avoid throwing away an operand that contains a label. */
13389 if ((!TREE_SIDE_EFFECTS (unused_op)
13390 || !contains_label_p (unused_op))
13391 && (! VOID_TYPE_P (TREE_TYPE (tem))
13392 || VOID_TYPE_P (type)))
13393 return pedantic_non_lvalue_loc (loc, tem);
13394 return NULL_TREE;
13396 else if (TREE_CODE (arg0) == VECTOR_CST)
13398 if ((TREE_CODE (arg1) == VECTOR_CST
13399 || TREE_CODE (arg1) == CONSTRUCTOR)
13400 && (TREE_CODE (arg2) == VECTOR_CST
13401 || TREE_CODE (arg2) == CONSTRUCTOR))
13403 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13404 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13405 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13406 for (i = 0; i < nelts; i++)
13408 tree val = VECTOR_CST_ELT (arg0, i);
13409 if (integer_all_onesp (val))
13410 sel[i] = i;
13411 else if (integer_zerop (val))
13412 sel[i] = nelts + i;
13413 else /* Currently unreachable. */
13414 return NULL_TREE;
13416 tree t = fold_vec_perm (type, arg1, arg2, sel);
13417 if (t != NULL_TREE)
13418 return t;
13422 /* If we have A op B ? A : C, we may be able to convert this to a
13423 simpler expression, depending on the operation and the values
13424 of B and C. Signed zeros prevent all of these transformations,
13425 for reasons given above each one.
13427 Also try swapping the arguments and inverting the conditional. */
13428 if (COMPARISON_CLASS_P (arg0)
13429 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13430 arg1, TREE_OPERAND (arg0, 1))
13431 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13433 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13434 if (tem)
13435 return tem;
13438 if (COMPARISON_CLASS_P (arg0)
13439 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13440 op2,
13441 TREE_OPERAND (arg0, 1))
13442 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13444 location_t loc0 = expr_location_or (arg0, loc);
13445 tem = fold_invert_truthvalue (loc0, arg0);
13446 if (tem && COMPARISON_CLASS_P (tem))
13448 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13449 if (tem)
13450 return tem;
13454 /* If the second operand is simpler than the third, swap them
13455 since that produces better jump optimization results. */
13456 if (truth_value_p (TREE_CODE (arg0))
13457 && tree_swap_operands_p (op1, op2, false))
13459 location_t loc0 = expr_location_or (arg0, loc);
13460 /* See if this can be inverted. If it can't, possibly because
13461 it was a floating-point inequality comparison, don't do
13462 anything. */
13463 tem = fold_invert_truthvalue (loc0, arg0);
13464 if (tem)
13465 return fold_build3_loc (loc, code, type, tem, op2, op1);
13468 /* Convert A ? 1 : 0 to simply A. */
13469 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13470 : (integer_onep (op1)
13471 && !VECTOR_TYPE_P (type)))
13472 && integer_zerop (op2)
13473 /* If we try to convert OP0 to our type, the
13474 call to fold will try to move the conversion inside
13475 a COND, which will recurse. In that case, the COND_EXPR
13476 is probably the best choice, so leave it alone. */
13477 && type == TREE_TYPE (arg0))
13478 return pedantic_non_lvalue_loc (loc, arg0);
13480 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13481 over COND_EXPR in cases such as floating point comparisons. */
13482 if (integer_zerop (op1)
13483 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13484 : (integer_onep (op2)
13485 && !VECTOR_TYPE_P (type)))
13486 && truth_value_p (TREE_CODE (arg0)))
13487 return pedantic_non_lvalue_loc (loc,
13488 fold_convert_loc (loc, type,
13489 invert_truthvalue_loc (loc,
13490 arg0)));
13492 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13493 if (TREE_CODE (arg0) == LT_EXPR
13494 && integer_zerop (TREE_OPERAND (arg0, 1))
13495 && integer_zerop (op2)
13496 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13498 /* sign_bit_p looks through both zero and sign extensions,
13499 but for this optimization only sign extensions are
13500 usable. */
13501 tree tem2 = TREE_OPERAND (arg0, 0);
13502 while (tem != tem2)
13504 if (TREE_CODE (tem2) != NOP_EXPR
13505 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13507 tem = NULL_TREE;
13508 break;
13510 tem2 = TREE_OPERAND (tem2, 0);
13512 /* sign_bit_p only checks ARG1 bits within A's precision.
13513 If <sign bit of A> has wider type than A, bits outside
13514 of A's precision in <sign bit of A> need to be checked.
13515 If they are all 0, this optimization needs to be done
13516 in unsigned A's type; if they are all 1, in signed A's type;
13517 otherwise this can't be done. */
13518 if (tem
13519 && TYPE_PRECISION (TREE_TYPE (tem))
13520 < TYPE_PRECISION (TREE_TYPE (arg1))
13521 && TYPE_PRECISION (TREE_TYPE (tem))
13522 < TYPE_PRECISION (type))
13524 int inner_width, outer_width;
13525 tree tem_type;
13527 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13528 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13529 if (outer_width > TYPE_PRECISION (type))
13530 outer_width = TYPE_PRECISION (type);
13532 wide_int mask = wi::shifted_mask
13533 (inner_width, outer_width - inner_width, false,
13534 TYPE_PRECISION (TREE_TYPE (arg1)));
13536 wide_int common = mask & arg1;
13537 if (common == mask)
13539 tem_type = signed_type_for (TREE_TYPE (tem));
13540 tem = fold_convert_loc (loc, tem_type, tem);
13542 else if (common == 0)
13544 tem_type = unsigned_type_for (TREE_TYPE (tem));
13545 tem = fold_convert_loc (loc, tem_type, tem);
13547 else
13548 tem = NULL;
13551 if (tem)
13552 return
13553 fold_convert_loc (loc, type,
13554 fold_build2_loc (loc, BIT_AND_EXPR,
13555 TREE_TYPE (tem), tem,
13556 fold_convert_loc (loc,
13557 TREE_TYPE (tem),
13558 arg1)));
13561 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13562 already handled above. */
13563 if (TREE_CODE (arg0) == BIT_AND_EXPR
13564 && integer_onep (TREE_OPERAND (arg0, 1))
13565 && integer_zerop (op2)
13566 && integer_pow2p (arg1))
13568 tree tem = TREE_OPERAND (arg0, 0);
13569 STRIP_NOPS (tem);
13570 if (TREE_CODE (tem) == RSHIFT_EXPR
13571 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13572 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13573 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13574 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13575 TREE_OPERAND (tem, 0), arg1);
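/* For instance, with N == 3, (A >> 3) & 1 extracts bit 3 of A and
   the conditional re-materializes it at position 3, so the whole
   expression is just A & 8. */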
13578 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13579 is probably obsolete because the first operand should be a
13580 truth value (that's why we have the two cases above), but let's
13581 leave it in until we can confirm this for all front-ends. */
13582 if (integer_zerop (op2)
13583 && TREE_CODE (arg0) == NE_EXPR
13584 && integer_zerop (TREE_OPERAND (arg0, 1))
13585 && integer_pow2p (arg1)
13586 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13587 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13588 arg1, OEP_ONLY_CONST))
13589 return pedantic_non_lvalue_loc (loc,
13590 fold_convert_loc (loc, type,
13591 TREE_OPERAND (arg0, 0)));
13593 /* Disable the transformations below for vectors, since
13594 fold_binary_op_with_conditional_arg may undo them immediately,
13595 yielding an infinite loop. */
13596 if (code == VEC_COND_EXPR)
13597 return NULL_TREE;
13599 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13600 if (integer_zerop (op2)
13601 && truth_value_p (TREE_CODE (arg0))
13602 && truth_value_p (TREE_CODE (arg1))
13603 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13604 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13605 : TRUTH_ANDIF_EXPR,
13606 type, fold_convert_loc (loc, type, arg0), arg1);
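/* For instance, a ? b : 0 with truth-valued a and b folds to a && b;
   the short-circuiting TRUTH_ANDIF_EXPR preserves the conditional's
   property of not evaluating b when a is false. */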
13608 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13609 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13610 && truth_value_p (TREE_CODE (arg0))
13611 && truth_value_p (TREE_CODE (arg1))
13612 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13614 location_t loc0 = expr_location_or (arg0, loc);
13615 /* Only perform transformation if ARG0 is easily inverted. */
13616 tem = fold_invert_truthvalue (loc0, arg0);
13617 if (tem)
13618 return fold_build2_loc (loc, code == VEC_COND_EXPR
13619 ? BIT_IOR_EXPR
13620 : TRUTH_ORIF_EXPR,
13621 type, fold_convert_loc (loc, type, tem),
13622 arg1);
13625 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13626 if (integer_zerop (arg1)
13627 && truth_value_p (TREE_CODE (arg0))
13628 && truth_value_p (TREE_CODE (op2))
13629 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13631 location_t loc0 = expr_location_or (arg0, loc);
13632 /* Only perform transformation if ARG0 is easily inverted. */
13633 tem = fold_invert_truthvalue (loc0, arg0);
13634 if (tem)
13635 return fold_build2_loc (loc, code == VEC_COND_EXPR
13636 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13637 type, fold_convert_loc (loc, type, tem),
13638 op2);
13641 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13642 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13643 && truth_value_p (TREE_CODE (arg0))
13644 && truth_value_p (TREE_CODE (op2))
13645 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13646 return fold_build2_loc (loc, code == VEC_COND_EXPR
13647 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13648 type, fold_convert_loc (loc, type, arg0), op2);
13650 return NULL_TREE;
13652 case CALL_EXPR:
13653 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13654 of fold_ternary on them. */
13655 gcc_unreachable ();
13657 case BIT_FIELD_REF:
13658 if ((TREE_CODE (arg0) == VECTOR_CST
13659 || (TREE_CODE (arg0) == CONSTRUCTOR
13660 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13661 && (type == TREE_TYPE (TREE_TYPE (arg0))
13662 || (TREE_CODE (type) == VECTOR_TYPE
13663 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13665 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13666 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13667 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13668 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13670 if (n != 0
13671 && (idx % width) == 0
13672 && (n % width) == 0
13673 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13675 idx = idx / width;
13676 n = n / width;
13678 if (TREE_CODE (arg0) == VECTOR_CST)
13680 if (n == 1)
13681 return VECTOR_CST_ELT (arg0, idx);
13683 tree *vals = XALLOCAVEC (tree, n);
13684 for (unsigned i = 0; i < n; ++i)
13685 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13686 return build_vector (type, vals);
13689 /* Constructor elements can be subvectors. */
13690 unsigned HOST_WIDE_INT k = 1;
13691 if (CONSTRUCTOR_NELTS (arg0) != 0)
13693 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13694 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13695 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13698 /* We keep an exact subset of the constructor elements. */
13699 if ((idx % k) == 0 && (n % k) == 0)
13701 if (CONSTRUCTOR_NELTS (arg0) == 0)
13702 return build_constructor (type, NULL);
13703 idx /= k;
13704 n /= k;
13705 if (n == 1)
13707 if (idx < CONSTRUCTOR_NELTS (arg0))
13708 return CONSTRUCTOR_ELT (arg0, idx)->value;
13709 return build_zero_cst (type);
13712 vec<constructor_elt, va_gc> *vals;
13713 vec_alloc (vals, n);
13714 for (unsigned i = 0;
13715 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13716 ++i)
13717 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13718 CONSTRUCTOR_ELT
13719 (arg0, idx + i)->value);
13720 return build_constructor (type, vals);
13722 /* The bitfield references a single constructor element. */
13723 else if (idx + n <= (idx / k + 1) * k)
13725 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13726 return build_zero_cst (type);
13727 else if (n == k)
13728 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13729 else
13730 return fold_build3_loc (loc, code, type,
13731 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13732 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
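/* For instance, BIT_FIELD_REF <v, 64, 64> on a VECTOR_CST with four
   32-bit lanes has width == 32, n == 2 and idx == 2 after scaling,
   and extracts lanes 2 and 3 as a new two-element vector. */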
13737 /* A bit-field-ref that references the full argument can be stripped. */
13738 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13739 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13740 && integer_zerop (op2))
13741 return fold_convert_loc (loc, type, arg0);
13743 /* On constants we can use native encode/interpret to constant
13744 fold (nearly) all BIT_FIELD_REFs. */
13745 if (CONSTANT_CLASS_P (arg0)
13746 && can_native_interpret_type_p (type)
13747 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13748 /* This limitation should not be necessary; we just need to
13749 round this up to the mode size. */
13750 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13751 /* Need bit-shifting of the buffer to relax the following. */
13752 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13754 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13755 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13756 unsigned HOST_WIDE_INT clen;
13757 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13758 /* ??? We cannot tell native_encode_expr to start at
13759 some random byte only. So limit us to a reasonable amount
13760 of work. */
13761 if (clen <= 4096)
13763 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13764 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13765 if (len > 0
13766 && len * BITS_PER_UNIT >= bitpos + bitsize)
13768 tree v = native_interpret_expr (type,
13769 b + bitpos / BITS_PER_UNIT,
13770 bitsize / BITS_PER_UNIT);
13771 if (v)
13772 return v;
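/* For instance, a BIT_FIELD_REF taking bits 32..63 of a 64-bit
   constant is folded by serializing the constant into a byte buffer
   with native_encode_expr and re-reading 4 bytes at offset 4 with
   native_interpret_expr, in target byte order. */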
13777 return NULL_TREE;
13779 case FMA_EXPR:
13780 /* For integers we can decompose the FMA if possible. */
13781 if (TREE_CODE (arg0) == INTEGER_CST
13782 && TREE_CODE (arg1) == INTEGER_CST)
13783 return fold_build2_loc (loc, PLUS_EXPR, type,
13784 const_binop (MULT_EXPR, arg0, arg1), arg2);
13785 if (integer_zerop (arg2))
13786 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13788 return fold_fma (loc, type, arg0, arg1, arg2);
13790 case VEC_PERM_EXPR:
13791 if (TREE_CODE (arg2) == VECTOR_CST)
13793 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13794 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13795 unsigned char *sel2 = sel + nelts;
13796 bool need_mask_canon = false;
13797 bool need_mask_canon2 = false;
13798 bool all_in_vec0 = true;
13799 bool all_in_vec1 = true;
13800 bool maybe_identity = true;
13801 bool single_arg = (op0 == op1);
13802 bool changed = false;
13804 mask2 = 2 * nelts - 1;
13805 mask = single_arg ? (nelts - 1) : mask2;
13806 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13807 for (i = 0; i < nelts; i++)
13809 tree val = VECTOR_CST_ELT (arg2, i);
13810 if (TREE_CODE (val) != INTEGER_CST)
13811 return NULL_TREE;
13813 /* Make sure that the perm value is in an acceptable
13814 range. */
13815 wide_int t = val;
13816 need_mask_canon |= wi::gtu_p (t, mask);
13817 need_mask_canon2 |= wi::gtu_p (t, mask2);
13818 sel[i] = t.to_uhwi () & mask;
13819 sel2[i] = t.to_uhwi () & mask2;
13821 if (sel[i] < nelts)
13822 all_in_vec1 = false;
13823 else
13824 all_in_vec0 = false;
13826 if ((sel[i] & (nelts-1)) != i)
13827 maybe_identity = false;
13830 if (maybe_identity)
13832 if (all_in_vec0)
13833 return op0;
13834 if (all_in_vec1)
13835 return op1;
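/* For instance, a mask of {0, 1, 2, 3} on four-lane vectors selects
   every lane of the first operand in order, so the permutation is
   the identity and op0 is returned unchanged. */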
13838 if (all_in_vec0)
13839 op1 = op0;
13840 else if (all_in_vec1)
13842 op0 = op1;
13843 for (i = 0; i < nelts; i++)
13844 sel[i] -= nelts;
13845 need_mask_canon = true;
13848 if ((TREE_CODE (op0) == VECTOR_CST
13849 || TREE_CODE (op0) == CONSTRUCTOR)
13850 && (TREE_CODE (op1) == VECTOR_CST
13851 || TREE_CODE (op1) == CONSTRUCTOR))
13853 tree t = fold_vec_perm (type, op0, op1, sel);
13854 if (t != NULL_TREE)
13855 return t;
13858 if (op0 == op1 && !single_arg)
13859 changed = true;
13861 /* Some targets are deficient and fail to expand a single
13862 argument permutation while still allowing an equivalent
13863 2-argument version. */
13864 if (need_mask_canon && arg2 == op2
13865 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13866 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13868 need_mask_canon = need_mask_canon2;
13869 sel = sel2;
13872 if (need_mask_canon && arg2 == op2)
13874 tree *tsel = XALLOCAVEC (tree, nelts);
13875 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13876 for (i = 0; i < nelts; i++)
13877 tsel[i] = build_int_cst (eltype, sel[i]);
13878 op2 = build_vector (TREE_TYPE (arg2), tsel);
13879 changed = true;
13882 if (changed)
13883 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13885 return NULL_TREE;
13887 default:
13888 return NULL_TREE;
13889 } /* switch (code) */
13892 /* Perform constant folding and related simplification of EXPR.
13893 The related simplifications include x*1 => x, x*0 => 0, etc.,
13894 and application of the associative law.
13895 NOP_EXPR conversions may be removed freely (as long as we
13896 are careful not to change the type of the overall expression).
13897 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13898 but we can constant-fold them if they have constant operands. */
13900 #ifdef ENABLE_FOLD_CHECKING
13901 # define fold(x) fold_1 (x)
13902 static tree fold_1 (tree);
13903 static
13904 #endif
13905 tree
13906 fold (tree expr)
13908 const tree t = expr;
13909 enum tree_code code = TREE_CODE (t);
13910 enum tree_code_class kind = TREE_CODE_CLASS (code);
13911 tree tem;
13912 location_t loc = EXPR_LOCATION (expr);
13914 /* Return right away if a constant. */
13915 if (kind == tcc_constant)
13916 return t;
13918 /* CALL_EXPR-like objects with variable numbers of operands are
13919 treated specially. */
13920 if (kind == tcc_vl_exp)
13922 if (code == CALL_EXPR)
13924 tem = fold_call_expr (loc, expr, false);
13925 return tem ? tem : expr;
13927 return expr;
13930 if (IS_EXPR_CODE_CLASS (kind))
13932 tree type = TREE_TYPE (t);
13933 tree op0, op1, op2;
13935 switch (TREE_CODE_LENGTH (code))
13937 case 1:
13938 op0 = TREE_OPERAND (t, 0);
13939 tem = fold_unary_loc (loc, code, type, op0);
13940 return tem ? tem : expr;
13941 case 2:
13942 op0 = TREE_OPERAND (t, 0);
13943 op1 = TREE_OPERAND (t, 1);
13944 tem = fold_binary_loc (loc, code, type, op0, op1);
13945 return tem ? tem : expr;
13946 case 3:
13947 op0 = TREE_OPERAND (t, 0);
13948 op1 = TREE_OPERAND (t, 1);
13949 op2 = TREE_OPERAND (t, 2);
13950 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13951 return tem ? tem : expr;
13952 default:
13953 break;
13957 switch (code)
13959 case ARRAY_REF:
13961 tree op0 = TREE_OPERAND (t, 0);
13962 tree op1 = TREE_OPERAND (t, 1);
13964 if (TREE_CODE (op1) == INTEGER_CST
13965 && TREE_CODE (op0) == CONSTRUCTOR
13966 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13968 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13969 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13970 unsigned HOST_WIDE_INT begin = 0;
13972 /* Find a matching index by means of a binary search. */
13973 while (begin != end)
13975 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13976 tree index = (*elts)[middle].index;
13978 if (TREE_CODE (index) == INTEGER_CST
13979 && tree_int_cst_lt (index, op1))
13980 begin = middle + 1;
13981 else if (TREE_CODE (index) == INTEGER_CST
13982 && tree_int_cst_lt (op1, index))
13983 end = middle;
13984 else if (TREE_CODE (index) == RANGE_EXPR
13985 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13986 begin = middle + 1;
13987 else if (TREE_CODE (index) == RANGE_EXPR
13988 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13989 end = middle;
13990 else
13991 return (*elts)[middle].value;
13995 return t;
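/* For instance, indexing {[0]=a, [1]=b, [4 ... 7]=c} with the
   constant 5 matches the RANGE_EXPR arm and yields c; if no element
   covers the index, the tree is returned unchanged. */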
13998 /* Return a VECTOR_CST if possible. */
13999 case CONSTRUCTOR:
14001 tree type = TREE_TYPE (t);
14002 if (TREE_CODE (type) != VECTOR_TYPE)
14003 return t;
14005 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14006 unsigned HOST_WIDE_INT idx, pos = 0;
14007 tree value;
14009 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14011 if (!CONSTANT_CLASS_P (value))
14012 return t;
14013 if (TREE_CODE (value) == VECTOR_CST)
14015 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14016 vec[pos++] = VECTOR_CST_ELT (value, i);
14018 else
14019 vec[pos++] = value;
14021 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14022 vec[pos] = build_zero_cst (TREE_TYPE (type));
14024 return build_vector (type, vec);
14027 case CONST_DECL:
14028 return fold (DECL_INITIAL (t));
14030 default:
14031 return t;
14032 } /* switch (code) */
14035 #ifdef ENABLE_FOLD_CHECKING
14036 #undef fold
14038 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14039 hash_table<pointer_hash<const tree_node> > *);
14040 static void fold_check_failed (const_tree, const_tree);
14041 void print_fold_checksum (const_tree);
14043 /* When --enable-checking=fold, compute a digest of expr before
14044 and after the actual fold call to verify that fold did not
14045 accidentally change the original expr. */
14047 tree
14048 fold (tree expr)
14050 tree ret;
14051 struct md5_ctx ctx;
14052 unsigned char checksum_before[16], checksum_after[16];
14053 hash_table<pointer_hash<const tree_node> > ht (32);
14055 md5_init_ctx (&ctx);
14056 fold_checksum_tree (expr, &ctx, &ht);
14057 md5_finish_ctx (&ctx, checksum_before);
14058 ht.empty ();
14060 ret = fold_1 (expr);
14062 md5_init_ctx (&ctx);
14063 fold_checksum_tree (expr, &ctx, &ht);
14064 md5_finish_ctx (&ctx, checksum_after);
14066 if (memcmp (checksum_before, checksum_after, 16))
14067 fold_check_failed (expr, ret);
14069 return ret;
14072 void
14073 print_fold_checksum (const_tree expr)
14075 struct md5_ctx ctx;
14076 unsigned char checksum[16], cnt;
14077 hash_table<pointer_hash<const tree_node> > ht (32);
14079 md5_init_ctx (&ctx);
14080 fold_checksum_tree (expr, &ctx, &ht);
14081 md5_finish_ctx (&ctx, checksum);
14082 for (cnt = 0; cnt < 16; ++cnt)
14083 fprintf (stderr, "%02x", checksum[cnt]);
14084 putc ('\n', stderr);
14087 static void
14088 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14090 internal_error ("fold check: original tree changed by fold");
14093 static void
14094 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14095 hash_table<pointer_hash <const tree_node> > *ht)
14097 const tree_node **slot;
14098 enum tree_code code;
14099 union tree_node buf;
14100 int i, len;
14102 recursive_label:
14103 if (expr == NULL)
14104 return;
14105 slot = ht->find_slot (expr, INSERT);
14106 if (*slot != NULL)
14107 return;
14108 *slot = expr;
14109 code = TREE_CODE (expr);
14110 if (TREE_CODE_CLASS (code) == tcc_declaration
14111 && HAS_DECL_ASSEMBLER_NAME_P (expr))
14113 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
14114 memcpy ((char *) &buf, expr, tree_size (expr));
14115 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14116 buf.decl_with_vis.symtab_node = NULL;
14117 expr = (tree) &buf;
14119 else if (TREE_CODE_CLASS (code) == tcc_type
14120 && (TYPE_POINTER_TO (expr)
14121 || TYPE_REFERENCE_TO (expr)
14122 || TYPE_CACHED_VALUES_P (expr)
14123 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14124 || TYPE_NEXT_VARIANT (expr)))
14126 /* Allow these fields to be modified. */
14127 tree tmp;
14128 memcpy ((char *) &buf, expr, tree_size (expr));
14129 expr = tmp = (tree) &buf;
14130 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14131 TYPE_POINTER_TO (tmp) = NULL;
14132 TYPE_REFERENCE_TO (tmp) = NULL;
14133 TYPE_NEXT_VARIANT (tmp) = NULL;
14134 if (TYPE_CACHED_VALUES_P (tmp))
14136 TYPE_CACHED_VALUES_P (tmp) = 0;
14137 TYPE_CACHED_VALUES (tmp) = NULL;
14140 md5_process_bytes (expr, tree_size (expr), ctx);
14141 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14142 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14143 if (TREE_CODE_CLASS (code) != tcc_type
14144 && TREE_CODE_CLASS (code) != tcc_declaration
14145 && code != TREE_LIST
14146 && code != SSA_NAME
14147 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14148 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14149 switch (TREE_CODE_CLASS (code))
14151 case tcc_constant:
14152 switch (code)
14154 case STRING_CST:
14155 md5_process_bytes (TREE_STRING_POINTER (expr),
14156 TREE_STRING_LENGTH (expr), ctx);
14157 break;
14158 case COMPLEX_CST:
14159 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14160 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14161 break;
14162 case VECTOR_CST:
14163 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14164 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14165 break;
14166 default:
14167 break;
14169 break;
14170 case tcc_exceptional:
14171 switch (code)
14173 case TREE_LIST:
14174 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14175 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14176 expr = TREE_CHAIN (expr);
14177 goto recursive_label;
14178 break;
14179 case TREE_VEC:
14180 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14181 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14182 break;
14183 default:
14184 break;
14186 break;
14187 case tcc_expression:
14188 case tcc_reference:
14189 case tcc_comparison:
14190 case tcc_unary:
14191 case tcc_binary:
14192 case tcc_statement:
14193 case tcc_vl_exp:
14194 len = TREE_OPERAND_LENGTH (expr);
14195 for (i = 0; i < len; ++i)
14196 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14197 break;
14198 case tcc_declaration:
14199 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14200 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14201 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14203 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14204 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14205 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14206 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14207 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14210 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14212 if (TREE_CODE (expr) == FUNCTION_DECL)
14214 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14215 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14217 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14219 break;
14220 case tcc_type:
14221 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14222 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14223 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14224 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14225 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14226 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14227 if (INTEGRAL_TYPE_P (expr)
14228 || SCALAR_FLOAT_TYPE_P (expr))
14230 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14231 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14233 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14234 if (TREE_CODE (expr) == RECORD_TYPE
14235 || TREE_CODE (expr) == UNION_TYPE
14236 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14237 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14238 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14239 break;
14240 default:
14241 break;
14245 /* Helper function for outputting the checksum of a tree T. When
14246 debugging with gdb, you can "define mynext" to be "next" followed
14247 by "call debug_fold_checksum (op0)", then just trace down till the
14248 outputs differ. */
14250 DEBUG_FUNCTION void
14251 debug_fold_checksum (const_tree t)
14253 int i;
14254 unsigned char checksum[16];
14255 struct md5_ctx ctx;
14256 hash_table<pointer_hash<const tree_node> > ht (32);
14258 md5_init_ctx (&ctx);
14259 fold_checksum_tree (t, &ctx, &ht);
14260 md5_finish_ctx (&ctx, checksum);
14261 ht.empty ();
14263 for (i = 0; i < 16; i++)
14264 fprintf (stderr, "%d ", checksum[i]);
14266 fprintf (stderr, "\n");
14269 #endif
14271 /* Fold a unary tree expression with code CODE of type TYPE with an
14272 operand OP0. LOC is the location of the resulting expression.
14273 Return a folded expression if successful. Otherwise, return a tree
14274 expression with code CODE of type TYPE with an operand OP0. */
14276 tree
14277 fold_build1_stat_loc (location_t loc,
14278 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14280 tree tem;
14281 #ifdef ENABLE_FOLD_CHECKING
14282 unsigned char checksum_before[16], checksum_after[16];
14283 struct md5_ctx ctx;
14284 hash_table<pointer_hash<const tree_node> > ht (32);
14286 md5_init_ctx (&ctx);
14287 fold_checksum_tree (op0, &ctx, &ht);
14288 md5_finish_ctx (&ctx, checksum_before);
14289 ht.empty ();
14290 #endif
14292 tem = fold_unary_loc (loc, code, type, op0);
14293 if (!tem)
14294 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14296 #ifdef ENABLE_FOLD_CHECKING
14297 md5_init_ctx (&ctx);
14298 fold_checksum_tree (op0, &ctx, &ht);
14299 md5_finish_ctx (&ctx, checksum_after);
14301 if (memcmp (checksum_before, checksum_after, 16))
14302 fold_check_failed (op0, tem);
14303 #endif
14304 return tem;
14307 /* Fold a binary tree expression with code CODE of type TYPE with
14308 operands OP0 and OP1. LOC is the location of the resulting
14309 expression. Return a folded expression if successful. Otherwise,
14310 return a tree expression with code CODE of type TYPE with operands
14311 OP0 and OP1. */
14313 tree
14314 fold_build2_stat_loc (location_t loc,
14315 enum tree_code code, tree type, tree op0, tree op1
14316 MEM_STAT_DECL)
14318 tree tem;
14319 #ifdef ENABLE_FOLD_CHECKING
14320 unsigned char checksum_before_op0[16],
14321 checksum_before_op1[16],
14322 checksum_after_op0[16],
14323 checksum_after_op1[16];
14324 struct md5_ctx ctx;
14325 hash_table<pointer_hash<const tree_node> > ht (32);
14327 md5_init_ctx (&ctx);
14328 fold_checksum_tree (op0, &ctx, &ht);
14329 md5_finish_ctx (&ctx, checksum_before_op0);
14330 ht.empty ();
14332 md5_init_ctx (&ctx);
14333 fold_checksum_tree (op1, &ctx, &ht);
14334 md5_finish_ctx (&ctx, checksum_before_op1);
14335 ht.empty ();
14336 #endif
14338 tem = fold_binary_loc (loc, code, type, op0, op1);
14339 if (!tem)
14340 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14342 #ifdef ENABLE_FOLD_CHECKING
14343 md5_init_ctx (&ctx);
14344 fold_checksum_tree (op0, &ctx, &ht);
14345 md5_finish_ctx (&ctx, checksum_after_op0);
14346 ht.empty ();
14348 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14349 fold_check_failed (op0, tem);
14351 md5_init_ctx (&ctx);
14352 fold_checksum_tree (op1, &ctx, &ht);
14353 md5_finish_ctx (&ctx, checksum_after_op1);
14355 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14356 fold_check_failed (op1, tem);
14357 #endif
14358 return tem;
14361 /* Fold a ternary tree expression with code CODE of type TYPE with
14362 operands OP0, OP1, and OP2. Return a folded expression if
14363 successful. Otherwise, return a tree expression with code CODE of
14364 type TYPE with operands OP0, OP1, and OP2. */
14366 tree
14367 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14368 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14370 tree tem;
14371 #ifdef ENABLE_FOLD_CHECKING
14372 unsigned char checksum_before_op0[16],
14373 checksum_before_op1[16],
14374 checksum_before_op2[16],
14375 checksum_after_op0[16],
14376 checksum_after_op1[16],
14377 checksum_after_op2[16];
14378 struct md5_ctx ctx;
14379 hash_table<pointer_hash<const tree_node> > ht (32);
14381 md5_init_ctx (&ctx);
14382 fold_checksum_tree (op0, &ctx, &ht);
14383 md5_finish_ctx (&ctx, checksum_before_op0);
14384 ht.empty ();
14386 md5_init_ctx (&ctx);
14387 fold_checksum_tree (op1, &ctx, &ht);
14388 md5_finish_ctx (&ctx, checksum_before_op1);
14389 ht.empty ();
14391 md5_init_ctx (&ctx);
14392 fold_checksum_tree (op2, &ctx, &ht);
14393 md5_finish_ctx (&ctx, checksum_before_op2);
14394 ht.empty ();
14395 #endif
14397 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14398 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14399 if (!tem)
14400 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14402 #ifdef ENABLE_FOLD_CHECKING
14403 md5_init_ctx (&ctx);
14404 fold_checksum_tree (op0, &ctx, &ht);
14405 md5_finish_ctx (&ctx, checksum_after_op0);
14406 ht.empty ();
14408 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14409 fold_check_failed (op0, tem);
14411 md5_init_ctx (&ctx);
14412 fold_checksum_tree (op1, &ctx, &ht);
14413 md5_finish_ctx (&ctx, checksum_after_op1);
14414 ht.empty ();
14416 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14417 fold_check_failed (op1, tem);
14419 md5_init_ctx (&ctx);
14420 fold_checksum_tree (op2, &ctx, &ht);
14421 md5_finish_ctx (&ctx, checksum_after_op2);
14423 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14424 fold_check_failed (op2, tem);
14425 #endif
14426 return tem;
14429 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
14430 arguments in ARGARRAY, and a null static chain.
14431 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14432 of type TYPE from the given operands as constructed by build_call_array. */
14434 tree
14435 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14436 int nargs, tree *argarray)
14438 tree tem;
14439 #ifdef ENABLE_FOLD_CHECKING
14440 unsigned char checksum_before_fn[16],
14441 checksum_before_arglist[16],
14442 checksum_after_fn[16],
14443 checksum_after_arglist[16];
14444 struct md5_ctx ctx;
14445 hash_table<pointer_hash<const tree_node> > ht (32);
14446 int i;
14448 md5_init_ctx (&ctx);
14449 fold_checksum_tree (fn, &ctx, &ht);
14450 md5_finish_ctx (&ctx, checksum_before_fn);
14451 ht.empty ();
14453 md5_init_ctx (&ctx);
14454 for (i = 0; i < nargs; i++)
14455 fold_checksum_tree (argarray[i], &ctx, &ht);
14456 md5_finish_ctx (&ctx, checksum_before_arglist);
14457 ht.empty ();
14458 #endif
14460 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14461 if (!tem)
14462 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14464 #ifdef ENABLE_FOLD_CHECKING
14465 md5_init_ctx (&ctx);
14466 fold_checksum_tree (fn, &ctx, &ht);
14467 md5_finish_ctx (&ctx, checksum_after_fn);
14468 ht.empty ();
14470 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14471 fold_check_failed (fn, tem);
14473 md5_init_ctx (&ctx);
14474 for (i = 0; i < nargs; i++)
14475 fold_checksum_tree (argarray[i], &ctx, &ht);
14476 md5_finish_ctx (&ctx, checksum_after_arglist);
14478 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14479 fold_check_failed (NULL_TREE, tem);
14480 #endif
14481 return tem;
14484 /* Perform constant folding and related simplification of initializer
14485 expression EXPR. These behave identically to "fold_buildN" but ignore
14486 potential run-time traps and exceptions that fold must preserve. */
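/* For instance (an illustrative case), a static initializer such as

     static double d = 1.0 / 3.0;

   must still fold to a constant when -ftrapping-math or -frounding-math
   is in effect, since no code runs for it at execution time; the macros
   below save, clear and restore the relevant flags around the fold.  */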
14488 #define START_FOLD_INIT \
14489 int saved_signaling_nans = flag_signaling_nans;\
14490 int saved_trapping_math = flag_trapping_math;\
14491 int saved_rounding_math = flag_rounding_math;\
14492 int saved_trapv = flag_trapv;\
14493 int saved_folding_initializer = folding_initializer;\
14494 flag_signaling_nans = 0;\
14495 flag_trapping_math = 0;\
14496 flag_rounding_math = 0;\
14497 flag_trapv = 0;\
14498 folding_initializer = 1;
14500 #define END_FOLD_INIT \
14501 flag_signaling_nans = saved_signaling_nans;\
14502 flag_trapping_math = saved_trapping_math;\
14503 flag_rounding_math = saved_rounding_math;\
14504 flag_trapv = saved_trapv;\
14505 folding_initializer = saved_folding_initializer;
14507 tree
14508 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14509 tree type, tree op)
14511 tree result;
14512 START_FOLD_INIT;
14514 result = fold_build1_loc (loc, code, type, op);
14516 END_FOLD_INIT;
14517 return result;
14520 tree
14521 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14522 tree type, tree op0, tree op1)
14524 tree result;
14525 START_FOLD_INIT;
14527 result = fold_build2_loc (loc, code, type, op0, op1);
14529 END_FOLD_INIT;
14530 return result;
14533 tree
14534 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14535 int nargs, tree *argarray)
14537 tree result;
14538 START_FOLD_INIT;
14540 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14542 END_FOLD_INIT;
14543 return result;
14546 #undef START_FOLD_INIT
14547 #undef END_FOLD_INIT
14549 /* Determine whether the first argument is a multiple of the second. Return 0
14550 if it is not, or if we cannot easily determine that it is.
14552 An example of the sort of thing we care about (at this point; this routine
14553 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14554 fold cases do now) is discovering that
14556 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14558 is a multiple of
14560 SAVE_EXPR (J * 8)
14562 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14564 This code also handles discovering that
14566 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14568 is a multiple of 8 so we don't have to worry about dealing with a
14569 possible remainder.
14571 Note that we *look* inside a SAVE_EXPR only to determine how it was
14572 calculated; it is not safe for fold to do much of anything else with the
14573 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14574 at run time. For example, the latter example above *cannot* be implemented
14575 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14576 evaluation time of the original SAVE_EXPR is not necessarily the same at
14577 the time the new expression is evaluated. The only optimization of this
14578 sort that would be valid is changing
14580 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14582 divided by 8 to
14584 SAVE_EXPR (I) * SAVE_EXPR (J)
14586 (where the same SAVE_EXPR (J) is used in the original and the
14587 transformed version). */
14589 int
14590 multiple_of_p (tree type, const_tree top, const_tree bottom)
14592 if (operand_equal_p (top, bottom, 0))
14593 return 1;
14595 if (TREE_CODE (type) != INTEGER_TYPE)
14596 return 0;
14598 switch (TREE_CODE (top))
14600 case BIT_AND_EXPR:
14601 /* Bitwise and provides a power of two multiple. If the mask is
14602 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14603 if (!integer_pow2p (bottom))
14604 return 0;
14605 /* FALLTHRU */
14607 case MULT_EXPR:
14608 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14609 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14611 case PLUS_EXPR:
14612 case MINUS_EXPR:
14613 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14614 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14616 case LSHIFT_EXPR:
14617 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14619 tree op1, t1;
14621 op1 = TREE_OPERAND (top, 1);
14622 /* const_binop may not detect overflow correctly,
14623 so check for it explicitly here. */
14624 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14625 && 0 != (t1 = fold_convert (type,
14626 const_binop (LSHIFT_EXPR,
14627 size_one_node,
14628 op1)))
14629 && !TREE_OVERFLOW (t1))
14630 return multiple_of_p (type, t1, bottom);
14632 return 0;
14634 case NOP_EXPR:
14635 /* Can't handle conversions from non-integral or wider integral type. */
14636 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14637 || (TYPE_PRECISION (type)
14638 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14639 return 0;
14641 /* .. fall through ... */
14643 case SAVE_EXPR:
14644 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14646 case COND_EXPR:
14647 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14648 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14650 case INTEGER_CST:
14651 if (TREE_CODE (bottom) != INTEGER_CST
14652 || integer_zerop (bottom)
14653 || (TYPE_UNSIGNED (type)
14654 && (tree_int_cst_sgn (top) < 0
14655 || tree_int_cst_sgn (bottom) < 0)))
14656 return 0;
14657 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14658 SIGNED);
14660 default:
14661 return 0;
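/* A minimal usage sketch (VALUE hypothetical), mirroring the calls made
   by round_up_loc and round_down_loc below:

     tree div = build_int_cst (TREE_TYPE (value), 8);
     if (multiple_of_p (TREE_TYPE (value), value, div))
       ... VALUE is provably a multiple of 8 ...  */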
14665 /* Return true if CODE or TYPE is known to be non-negative. */
14667 static bool
14668 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14670 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14671 && truth_value_p (code))
14672 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14673 have a signed:1 type (where the values are -1 and 0).
14674 return true;
14675 return false;
14678 /* Return true if (CODE OP0) is known to be non-negative. If the return
14679 value is based on the assumption that signed overflow is undefined,
14680 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14681 *STRICT_OVERFLOW_P. */
14683 bool
14684 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14685 bool *strict_overflow_p)
14687 if (TYPE_UNSIGNED (type))
14688 return true;
14690 switch (code)
14692 case ABS_EXPR:
14693 /* We can't return 1 if flag_wrapv is set because
14694 ABS_EXPR<INT_MIN> = INT_MIN. */
14695 if (!INTEGRAL_TYPE_P (type))
14696 return true;
14697 if (TYPE_OVERFLOW_UNDEFINED (type))
14699 *strict_overflow_p = true;
14700 return true;
14702 break;
14704 case NON_LVALUE_EXPR:
14705 case FLOAT_EXPR:
14706 case FIX_TRUNC_EXPR:
14707 return tree_expr_nonnegative_warnv_p (op0,
14708 strict_overflow_p);
14710 CASE_CONVERT:
14712 tree inner_type = TREE_TYPE (op0);
14713 tree outer_type = type;
14715 if (TREE_CODE (outer_type) == REAL_TYPE)
14717 if (TREE_CODE (inner_type) == REAL_TYPE)
14718 return tree_expr_nonnegative_warnv_p (op0,
14719 strict_overflow_p);
14720 if (INTEGRAL_TYPE_P (inner_type))
14722 if (TYPE_UNSIGNED (inner_type))
14723 return true;
14724 return tree_expr_nonnegative_warnv_p (op0,
14725 strict_overflow_p);
14728 else if (INTEGRAL_TYPE_P (outer_type))
14730 if (TREE_CODE (inner_type) == REAL_TYPE)
14731 return tree_expr_nonnegative_warnv_p (op0,
14732 strict_overflow_p);
14733 if (INTEGRAL_TYPE_P (inner_type))
14734 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14735 && TYPE_UNSIGNED (inner_type);
14738 break;
14740 default:
14741 return tree_simple_nonnegative_warnv_p (code, type);
14744 /* We don't know the sign of `t', so be conservative and return false. */
14745 return false;
14748 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14749 value is based on the assumption that signed overflow is undefined,
14750 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14751 *STRICT_OVERFLOW_P. */
14753 bool
14754 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14755 tree op1, bool *strict_overflow_p)
14757 if (TYPE_UNSIGNED (type))
14758 return true;
14760 switch (code)
14762 case POINTER_PLUS_EXPR:
14763 case PLUS_EXPR:
14764 if (FLOAT_TYPE_P (type))
14765 return (tree_expr_nonnegative_warnv_p (op0,
14766 strict_overflow_p)
14767 && tree_expr_nonnegative_warnv_p (op1,
14768 strict_overflow_p));
14770 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14771 both unsigned and at least 2 bits shorter than the result. */
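	 /* E.g. (int) (unsigned char) x + (int) (unsigned char) y:
	    MAX (8, 8) + 1 == 9 bits, less than 32, so the sum can neither
	    wrap nor set the sign bit.  */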
14772 if (TREE_CODE (type) == INTEGER_TYPE
14773 && TREE_CODE (op0) == NOP_EXPR
14774 && TREE_CODE (op1) == NOP_EXPR)
14776 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14777 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14778 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14779 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14781 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14782 TYPE_PRECISION (inner2)) + 1;
14783 return prec < TYPE_PRECISION (type);
14786 break;
14788 case MULT_EXPR:
14789 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14791 /* x * x is always non-negative for floating point x
14792 or without overflow. */
14793 if (operand_equal_p (op0, op1, 0)
14794 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14795 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14797 if (ANY_INTEGRAL_TYPE_P (type)
14798 && TYPE_OVERFLOW_UNDEFINED (type))
14799 *strict_overflow_p = true;
14800 return true;
14804 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14805 both unsigned and their combined precision is smaller than that of the result. */
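      /* E.g. (int) (unsigned char) x * (int) (unsigned char) y needs at
	 most 8 + 8 == 16 bits, less than 32, so the product is
	 representable and non-negative.  */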
14806 if (TREE_CODE (type) == INTEGER_TYPE
14807 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14808 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14810 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14811 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14812 : TREE_TYPE (op0);
14813 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14814 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14815 : TREE_TYPE (op1);
14817 bool unsigned0 = TYPE_UNSIGNED (inner0);
14818 bool unsigned1 = TYPE_UNSIGNED (inner1);
14820 if (TREE_CODE (op0) == INTEGER_CST)
14821 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14823 if (TREE_CODE (op1) == INTEGER_CST)
14824 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14826 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14827 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14829 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14830 ? tree_int_cst_min_precision (op0, UNSIGNED)
14831 : TYPE_PRECISION (inner0);
14833 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14834 ? tree_int_cst_min_precision (op1, UNSIGNED)
14835 : TYPE_PRECISION (inner1);
14837 return precision0 + precision1 < TYPE_PRECISION (type);
14840 return false;
14842 case BIT_AND_EXPR:
14843 case MAX_EXPR:
14844 return (tree_expr_nonnegative_warnv_p (op0,
14845 strict_overflow_p)
14846 || tree_expr_nonnegative_warnv_p (op1,
14847 strict_overflow_p));
14849 case BIT_IOR_EXPR:
14850 case BIT_XOR_EXPR:
14851 case MIN_EXPR:
14852 case RDIV_EXPR:
14853 case TRUNC_DIV_EXPR:
14854 case CEIL_DIV_EXPR:
14855 case FLOOR_DIV_EXPR:
14856 case ROUND_DIV_EXPR:
14857 return (tree_expr_nonnegative_warnv_p (op0,
14858 strict_overflow_p)
14859 && tree_expr_nonnegative_warnv_p (op1,
14860 strict_overflow_p));
14862 case TRUNC_MOD_EXPR:
14863 case CEIL_MOD_EXPR:
14864 case FLOOR_MOD_EXPR:
14865 case ROUND_MOD_EXPR:
14866 return tree_expr_nonnegative_warnv_p (op0,
14867 strict_overflow_p);
14868 default:
14869 return tree_simple_nonnegative_warnv_p (code, type);
14872 /* We don't know the sign of `t', so be conservative and return false. */
14873 return false;
14876 /* Return true if T is known to be non-negative. If the return
14877 value is based on the assumption that signed overflow is undefined,
14878 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14879 *STRICT_OVERFLOW_P. */
14881 bool
14882 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14884 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14885 return true;
14887 switch (TREE_CODE (t))
14889 case INTEGER_CST:
14890 return tree_int_cst_sgn (t) >= 0;
14892 case REAL_CST:
14893 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14895 case FIXED_CST:
14896 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14898 case COND_EXPR:
14899 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14900 strict_overflow_p)
14901 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14902 strict_overflow_p));
14903 default:
14904 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14905 TREE_TYPE (t));
14907 /* We don't know the sign of `t', so be conservative and return false. */
14908 return false;
14911 /* Return true if T is known to be non-negative. If the return
14912 value is based on the assumption that signed overflow is undefined,
14913 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14914 *STRICT_OVERFLOW_P. */
14916 bool
14917 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14918 tree arg0, tree arg1, bool *strict_overflow_p)
14920 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14921 switch (DECL_FUNCTION_CODE (fndecl))
14923 CASE_FLT_FN (BUILT_IN_ACOS):
14924 CASE_FLT_FN (BUILT_IN_ACOSH):
14925 CASE_FLT_FN (BUILT_IN_CABS):
14926 CASE_FLT_FN (BUILT_IN_COSH):
14927 CASE_FLT_FN (BUILT_IN_ERFC):
14928 CASE_FLT_FN (BUILT_IN_EXP):
14929 CASE_FLT_FN (BUILT_IN_EXP10):
14930 CASE_FLT_FN (BUILT_IN_EXP2):
14931 CASE_FLT_FN (BUILT_IN_FABS):
14932 CASE_FLT_FN (BUILT_IN_FDIM):
14933 CASE_FLT_FN (BUILT_IN_HYPOT):
14934 CASE_FLT_FN (BUILT_IN_POW10):
14935 CASE_INT_FN (BUILT_IN_FFS):
14936 CASE_INT_FN (BUILT_IN_PARITY):
14937 CASE_INT_FN (BUILT_IN_POPCOUNT):
14938 CASE_INT_FN (BUILT_IN_CLZ):
14939 CASE_INT_FN (BUILT_IN_CLRSB):
14940 case BUILT_IN_BSWAP32:
14941 case BUILT_IN_BSWAP64:
14942 /* Always true. */
14943 return true;
14945 CASE_FLT_FN (BUILT_IN_SQRT):
14946 /* sqrt(-0.0) is -0.0. */
14947 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14948 return true;
14949 return tree_expr_nonnegative_warnv_p (arg0,
14950 strict_overflow_p);
14952 CASE_FLT_FN (BUILT_IN_ASINH):
14953 CASE_FLT_FN (BUILT_IN_ATAN):
14954 CASE_FLT_FN (BUILT_IN_ATANH):
14955 CASE_FLT_FN (BUILT_IN_CBRT):
14956 CASE_FLT_FN (BUILT_IN_CEIL):
14957 CASE_FLT_FN (BUILT_IN_ERF):
14958 CASE_FLT_FN (BUILT_IN_EXPM1):
14959 CASE_FLT_FN (BUILT_IN_FLOOR):
14960 CASE_FLT_FN (BUILT_IN_FMOD):
14961 CASE_FLT_FN (BUILT_IN_FREXP):
14962 CASE_FLT_FN (BUILT_IN_ICEIL):
14963 CASE_FLT_FN (BUILT_IN_IFLOOR):
14964 CASE_FLT_FN (BUILT_IN_IRINT):
14965 CASE_FLT_FN (BUILT_IN_IROUND):
14966 CASE_FLT_FN (BUILT_IN_LCEIL):
14967 CASE_FLT_FN (BUILT_IN_LDEXP):
14968 CASE_FLT_FN (BUILT_IN_LFLOOR):
14969 CASE_FLT_FN (BUILT_IN_LLCEIL):
14970 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14971 CASE_FLT_FN (BUILT_IN_LLRINT):
14972 CASE_FLT_FN (BUILT_IN_LLROUND):
14973 CASE_FLT_FN (BUILT_IN_LRINT):
14974 CASE_FLT_FN (BUILT_IN_LROUND):
14975 CASE_FLT_FN (BUILT_IN_MODF):
14976 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14977 CASE_FLT_FN (BUILT_IN_RINT):
14978 CASE_FLT_FN (BUILT_IN_ROUND):
14979 CASE_FLT_FN (BUILT_IN_SCALB):
14980 CASE_FLT_FN (BUILT_IN_SCALBLN):
14981 CASE_FLT_FN (BUILT_IN_SCALBN):
14982 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14983 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14984 CASE_FLT_FN (BUILT_IN_SINH):
14985 CASE_FLT_FN (BUILT_IN_TANH):
14986 CASE_FLT_FN (BUILT_IN_TRUNC):
14987 /* True if the 1st argument is nonnegative. */
14988 return tree_expr_nonnegative_warnv_p (arg0,
14989 strict_overflow_p);
14991 CASE_FLT_FN (BUILT_IN_FMAX):
14992 /* True if the 1st OR 2nd arguments are nonnegative. */
14993 return (tree_expr_nonnegative_warnv_p (arg0,
14994 strict_overflow_p)
14995 || (tree_expr_nonnegative_warnv_p (arg1,
14996 strict_overflow_p)));
14998 CASE_FLT_FN (BUILT_IN_FMIN):
14999 /* True if the 1st AND 2nd arguments are nonnegative. */
15000 return (tree_expr_nonnegative_warnv_p (arg0,
15001 strict_overflow_p)
15002 && (tree_expr_nonnegative_warnv_p (arg1,
15003 strict_overflow_p)));
15005 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15006 /* True if the 2nd argument is nonnegative. */
15007 return tree_expr_nonnegative_warnv_p (arg1,
15008 strict_overflow_p);
15010 CASE_FLT_FN (BUILT_IN_POWI):
15011 /* True if the 1st argument is nonnegative or the second
15012 argument is an even integer. */
15013 if (TREE_CODE (arg1) == INTEGER_CST
15014 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15015 return true;
15016 return tree_expr_nonnegative_warnv_p (arg0,
15017 strict_overflow_p);
15019 CASE_FLT_FN (BUILT_IN_POW):
15020 /* True if the 1st argument is nonnegative or the second
15021 argument is an even integer valued real. */
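	/* E.g. pow (x, 2.0) is non-negative for every x because 2.0 is an
	   even integer-valued real; for other exponents we fall back to
	   checking whether the first argument is non-negative.  */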
15022 if (TREE_CODE (arg1) == REAL_CST)
15024 REAL_VALUE_TYPE c;
15025 HOST_WIDE_INT n;
15027 c = TREE_REAL_CST (arg1);
15028 n = real_to_integer (&c);
15029 if ((n & 1) == 0)
15031 REAL_VALUE_TYPE cint;
15032 real_from_integer (&cint, VOIDmode, n, SIGNED);
15033 if (real_identical (&c, &cint))
15034 return true;
15037 return tree_expr_nonnegative_warnv_p (arg0,
15038 strict_overflow_p);
15040 default:
15041 break;
15043 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15044 type);
15047 /* Return true if T is known to be non-negative. If the return
15048 value is based on the assumption that signed overflow is undefined,
15049 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15050 *STRICT_OVERFLOW_P. */
15052 static bool
15053 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15055 enum tree_code code = TREE_CODE (t);
15056 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15057 return true;
15059 switch (code)
15061 case TARGET_EXPR:
15063 tree temp = TARGET_EXPR_SLOT (t);
15064 t = TARGET_EXPR_INITIAL (t);
15066 /* If the initializer is non-void, then it's a normal expression
15067 that will be assigned to the slot. */
15068 if (!VOID_TYPE_P (t))
15069 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15071 /* Otherwise, the initializer sets the slot in some way. One common
15072 way is an assignment statement at the end of the initializer. */
15073 while (1)
15075 if (TREE_CODE (t) == BIND_EXPR)
15076 t = expr_last (BIND_EXPR_BODY (t));
15077 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15078 || TREE_CODE (t) == TRY_CATCH_EXPR)
15079 t = expr_last (TREE_OPERAND (t, 0));
15080 else if (TREE_CODE (t) == STATEMENT_LIST)
15081 t = expr_last (t);
15082 else
15083 break;
15085 if (TREE_CODE (t) == MODIFY_EXPR
15086 && TREE_OPERAND (t, 0) == temp)
15087 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15088 strict_overflow_p);
15090 return false;
15093 case CALL_EXPR:
15095 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15096 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15098 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15099 get_callee_fndecl (t),
15100 arg0,
15101 arg1,
15102 strict_overflow_p);
15104 case COMPOUND_EXPR:
15105 case MODIFY_EXPR:
15106 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15107 strict_overflow_p);
15108 case BIND_EXPR:
15109 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15110 strict_overflow_p);
15111 case SAVE_EXPR:
15112 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15113 strict_overflow_p);
15115 default:
15116 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15117 TREE_TYPE (t));
15120 /* We don't know the sign of `t', so be conservative and return false. */
15121 return false;
15124 /* Return true if T is known to be non-negative. If the return
15125 value is based on the assumption that signed overflow is undefined,
15126 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15127 *STRICT_OVERFLOW_P. */
15129 bool
15130 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15132 enum tree_code code;
15133 if (t == error_mark_node)
15134 return false;
15136 code = TREE_CODE (t);
15137 switch (TREE_CODE_CLASS (code))
15139 case tcc_binary:
15140 case tcc_comparison:
15141 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15142 TREE_TYPE (t),
15143 TREE_OPERAND (t, 0),
15144 TREE_OPERAND (t, 1),
15145 strict_overflow_p);
15147 case tcc_unary:
15148 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15149 TREE_TYPE (t),
15150 TREE_OPERAND (t, 0),
15151 strict_overflow_p);
15153 case tcc_constant:
15154 case tcc_declaration:
15155 case tcc_reference:
15156 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15158 default:
15159 break;
15162 switch (code)
15164 case TRUTH_AND_EXPR:
15165 case TRUTH_OR_EXPR:
15166 case TRUTH_XOR_EXPR:
15167 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15168 TREE_TYPE (t),
15169 TREE_OPERAND (t, 0),
15170 TREE_OPERAND (t, 1),
15171 strict_overflow_p);
15172 case TRUTH_NOT_EXPR:
15173 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15174 TREE_TYPE (t),
15175 TREE_OPERAND (t, 0),
15176 strict_overflow_p);
15178 case COND_EXPR:
15179 case CONSTRUCTOR:
15180 case OBJ_TYPE_REF:
15181 case ASSERT_EXPR:
15182 case ADDR_EXPR:
15183 case WITH_SIZE_EXPR:
15184 case SSA_NAME:
15185 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15187 default:
15188 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15192 /* Return true if `t' is known to be non-negative. Handle warnings
15193 about undefined signed overflow. */
15195 bool
15196 tree_expr_nonnegative_p (tree t)
15198 bool ret, strict_overflow_p;
15200 strict_overflow_p = false;
15201 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15202 if (strict_overflow_p)
15203 fold_overflow_warning (("assuming signed overflow does not occur when "
15204 "determining that expression is always "
15205 "non-negative"),
15206 WARN_STRICT_OVERFLOW_MISC);
15207 return ret;
15211 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15212 For floating point we further ensure that T is not denormal.
15213 Similar logic is present in nonzero_address in rtlanal.c.
15215 If the return value is based on the assumption that signed overflow
15216 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15217 change *STRICT_OVERFLOW_P. */
15219 bool
15220 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15221 bool *strict_overflow_p)
15223 switch (code)
15225 case ABS_EXPR:
15226 return tree_expr_nonzero_warnv_p (op0,
15227 strict_overflow_p);
15229 case NOP_EXPR:
15231 tree inner_type = TREE_TYPE (op0);
15232 tree outer_type = type;
15234 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15235 && tree_expr_nonzero_warnv_p (op0,
15236 strict_overflow_p));
15238 break;
15240 case NON_LVALUE_EXPR:
15241 return tree_expr_nonzero_warnv_p (op0,
15242 strict_overflow_p);
15244 default:
15245 break;
15248 return false;
15251 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15252 For floating point we further ensure that T is not denormal.
15253 Similar logic is present in nonzero_address in rtlanal.c.
15255 If the return value is based on the assumption that signed overflow
15256 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15257 change *STRICT_OVERFLOW_P. */
15259 bool
15260 tree_binary_nonzero_warnv_p (enum tree_code code,
15261 tree type,
15262 tree op0,
15263 tree op1, bool *strict_overflow_p)
15265 bool sub_strict_overflow_p;
15266 switch (code)
15268 case POINTER_PLUS_EXPR:
15269 case PLUS_EXPR:
15270 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15272 /* In the presence of negative values it is hard
15273 to say anything definite. */
15274 sub_strict_overflow_p = false;
15275 if (!tree_expr_nonnegative_warnv_p (op0,
15276 &sub_strict_overflow_p)
15277 || !tree_expr_nonnegative_warnv_p (op1,
15278 &sub_strict_overflow_p))
15279 return false;
15280 /* One of the operands must be positive and the other non-negative. */
15281 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15282 overflows, on a twos-complement machine the sum of two
15283 nonnegative numbers can never be zero. */
15284 return (tree_expr_nonzero_warnv_p (op0,
15285 strict_overflow_p)
15286 || tree_expr_nonzero_warnv_p (op1,
15287 strict_overflow_p));
15289 break;
15291 case MULT_EXPR:
15292 if (TYPE_OVERFLOW_UNDEFINED (type))
15294 if (tree_expr_nonzero_warnv_p (op0,
15295 strict_overflow_p)
15296 && tree_expr_nonzero_warnv_p (op1,
15297 strict_overflow_p))
15299 *strict_overflow_p = true;
15300 return true;
15303 break;
15305 case MIN_EXPR:
15306 sub_strict_overflow_p = false;
15307 if (tree_expr_nonzero_warnv_p (op0,
15308 &sub_strict_overflow_p)
15309 && tree_expr_nonzero_warnv_p (op1,
15310 &sub_strict_overflow_p))
15312 if (sub_strict_overflow_p)
15313 *strict_overflow_p = true;
15315 break;
15317 case MAX_EXPR:
15318 sub_strict_overflow_p = false;
15319 if (tree_expr_nonzero_warnv_p (op0,
15320 &sub_strict_overflow_p))
15322 if (sub_strict_overflow_p)
15323 *strict_overflow_p = true;
15325 /* When both operands are nonzero, MAX must be too. */
15326 if (tree_expr_nonzero_warnv_p (op1,
15327 strict_overflow_p))
15328 return true;
15330 /* MAX where operand 0 is positive is positive. */
15331 return tree_expr_nonnegative_warnv_p (op0,
15332 strict_overflow_p);
15334 /* MAX where operand 1 is positive is positive. */
15335 else if (tree_expr_nonzero_warnv_p (op1,
15336 &sub_strict_overflow_p)
15337 && tree_expr_nonnegative_warnv_p (op1,
15338 &sub_strict_overflow_p))
15340 if (sub_strict_overflow_p)
15341 *strict_overflow_p = true;
15342 return true;
15344 break;
15346 case BIT_IOR_EXPR:
15347 return (tree_expr_nonzero_warnv_p (op1,
15348 strict_overflow_p)
15349 || tree_expr_nonzero_warnv_p (op0,
15350 strict_overflow_p));
15352 default:
15353 break;
15356 return false;
15359 /* Return true when T is an address and is known to be nonzero.
15360 For floating point we further ensure that T is not denormal.
15361 Similar logic is present in nonzero_address in rtlanal.c.
15363 If the return value is based on the assumption that signed overflow
15364 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15365 change *STRICT_OVERFLOW_P. */
15367 bool
15368 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15370 bool sub_strict_overflow_p;
15371 switch (TREE_CODE (t))
15373 case INTEGER_CST:
15374 return !integer_zerop (t);
15376 case ADDR_EXPR:
15378 tree base = TREE_OPERAND (t, 0);
15380 if (!DECL_P (base))
15381 base = get_base_address (base);
15383 if (!base)
15384 return false;
15386 /* For objects in the symbol table, check whether we know they are non-zero.
15387 Don't do anything for variables and functions before symtab is built;
15388 it is quite possible that they will be declared weak later. */
15389 if (DECL_P (base) && decl_in_symtab_p (base))
15391 struct symtab_node *symbol;
15393 symbol = symtab_node::get_create (base);
15394 if (symbol)
15395 return symbol->nonzero_address ();
15396 else
15397 return false;
15400 /* Function local objects are never NULL. */
15401 if (DECL_P (base)
15402 && (DECL_CONTEXT (base)
15403 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15404 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15405 return true;
15407 /* Constants are never weak. */
15408 if (CONSTANT_CLASS_P (base))
15409 return true;
15411 return false;
15414 case COND_EXPR:
15415 sub_strict_overflow_p = false;
15416 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15417 &sub_strict_overflow_p)
15418 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15419 &sub_strict_overflow_p))
15421 if (sub_strict_overflow_p)
15422 *strict_overflow_p = true;
15423 return true;
15425 break;
15427 default:
15428 break;
15430 return false;
15433 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15434 attempt to fold the expression to a constant without modifying TYPE,
15435 OP0 or OP1.
15437 If the expression could be simplified to a constant, then return
15438 the constant. If the expression would not be simplified to a
15439 constant, then return NULL_TREE. */
15441 tree
15442 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15444 tree tem = fold_binary (code, type, op0, op1);
15445 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15448 /* Given the components of a unary expression CODE, TYPE and OP0,
15449 attempt to fold the expression to a constant without modifying
15450 TYPE or OP0.
15452 If the expression could be simplified to a constant, then return
15453 the constant. If the expression would not be simplified to a
15454 constant, then return NULL_TREE. */
15456 tree
15457 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15459 tree tem = fold_unary (code, type, op0);
15460 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15463 /* If EXP represents referencing an element in a constant string
15464 (either via pointer arithmetic or array indexing), return the
15465 tree representing the value accessed, otherwise return NULL. */
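/* E.g. (illustrative) both "abc"[1] and *("abc" + 1) fold to the
   character constant 'b', provided the index is a constant within
   TREE_STRING_LENGTH and the element is a one-byte integer mode.  */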
15467 tree
15468 fold_read_from_constant_string (tree exp)
15470 if ((TREE_CODE (exp) == INDIRECT_REF
15471 || TREE_CODE (exp) == ARRAY_REF)
15472 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15474 tree exp1 = TREE_OPERAND (exp, 0);
15475 tree index;
15476 tree string;
15477 location_t loc = EXPR_LOCATION (exp);
15479 if (TREE_CODE (exp) == INDIRECT_REF)
15480 string = string_constant (exp1, &index);
15481 else
15483 tree low_bound = array_ref_low_bound (exp);
15484 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15486 /* Optimize the special case of a zero lower bound.
15488 We convert the low_bound to sizetype to avoid some problems
15489 with constant folding. (E.g. suppose the lower bound is 1,
15490 and its mode is QI. Without the conversion, (ARRAY
15491 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15492 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15493 if (! integer_zerop (low_bound))
15494 index = size_diffop_loc (loc, index,
15495 fold_convert_loc (loc, sizetype, low_bound));
15497 string = exp1;
15500 if (string
15501 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15502 && TREE_CODE (string) == STRING_CST
15503 && TREE_CODE (index) == INTEGER_CST
15504 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15505 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15506 == MODE_INT)
15507 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15508 return build_int_cst_type (TREE_TYPE (exp),
15509 (TREE_STRING_POINTER (string)
15510 [TREE_INT_CST_LOW (index)]));
15512 return NULL;
15515 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15516 an integer, real, or fixed-point constant.
15518 TYPE is the type of the result. */
15520 static tree
15521 fold_negate_const (tree arg0, tree type)
15523 tree t = NULL_TREE;
15525 switch (TREE_CODE (arg0))
15527 case INTEGER_CST:
15529 bool overflow;
15530 wide_int val = wi::neg (arg0, &overflow);
15531 t = force_fit_type (type, val, 1,
15532 (overflow | TREE_OVERFLOW (arg0))
15533 && !TYPE_UNSIGNED (type));
15534 break;
15537 case REAL_CST:
15538 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15539 break;
15541 case FIXED_CST:
15543 FIXED_VALUE_TYPE f;
15544 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15545 &(TREE_FIXED_CST (arg0)), NULL,
15546 TYPE_SATURATING (type));
15547 t = build_fixed (type, f);
15548 /* Propagate overflow flags. */
15549 if (overflow_p | TREE_OVERFLOW (arg0))
15550 TREE_OVERFLOW (t) = 1;
15551 break;
15554 default:
15555 gcc_unreachable ();
15558 return t;
15561 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15562 an integer constant or real constant.
15564 TYPE is the type of the result. */
15566 tree
15567 fold_abs_const (tree arg0, tree type)
15569 tree t = NULL_TREE;
15571 switch (TREE_CODE (arg0))
15573 case INTEGER_CST:
15575 /* If the value is unsigned or non-negative, then the absolute value
15576 is the same as the ordinary value. */
15577 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15578 t = arg0;
15580 /* If the value is negative, then the absolute value is
15581 its negation. */
15582 else
15584 bool overflow;
15585 wide_int val = wi::neg (arg0, &overflow);
15586 t = force_fit_type (type, val, -1,
15587 overflow | TREE_OVERFLOW (arg0));
15590 break;
15592 case REAL_CST:
15593 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15594 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15595 else
15596 t = arg0;
15597 break;
15599 default:
15600 gcc_unreachable ();
15603 return t;
15606 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15607 constant. TYPE is the type of the result. */
15609 static tree
15610 fold_not_const (const_tree arg0, tree type)
15612 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15614 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15617 /* Given CODE, a relational operator, the target type TYPE, and two
15618 constant operands OP0 and OP1, return the result of the
15619 relational operation. If the result is not a compile time
15620 constant, then return NULL_TREE. */
15622 static tree
15623 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15625 int result, invert;
15627 /* From here on, the only cases we handle are when the result is
15628 known to be a constant. */
15630 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15632 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15633 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15635 /* Handle the cases where either operand is a NaN. */
15636 if (real_isnan (c0) || real_isnan (c1))
15638 switch (code)
15640 case EQ_EXPR:
15641 case ORDERED_EXPR:
15642 result = 0;
15643 break;
15645 case NE_EXPR:
15646 case UNORDERED_EXPR:
15647 case UNLT_EXPR:
15648 case UNLE_EXPR:
15649 case UNGT_EXPR:
15650 case UNGE_EXPR:
15651 case UNEQ_EXPR:
15652 result = 1;
15653 break;
15655 case LT_EXPR:
15656 case LE_EXPR:
15657 case GT_EXPR:
15658 case GE_EXPR:
15659 case LTGT_EXPR:
15660 if (flag_trapping_math)
15661 return NULL_TREE;
15662 result = 0;
15663 break;
15665 default:
15666 gcc_unreachable ();
15669 return constant_boolean_node (result, type);
15672 return constant_boolean_node (real_compare (code, c0, c1), type);
15675 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15677 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15678 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15679 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15682 /* Handle equality/inequality of complex constants. */
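	 /* (a + bi) == (c + di) folds to (a == c) && (b == d), and the
	    corresponding != to (a != c) || (b != d); other relational
	    codes on complex constants are not folded here.  */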
15683 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15685 tree rcond = fold_relational_const (code, type,
15686 TREE_REALPART (op0),
15687 TREE_REALPART (op1));
15688 tree icond = fold_relational_const (code, type,
15689 TREE_IMAGPART (op0),
15690 TREE_IMAGPART (op1));
15691 if (code == EQ_EXPR)
15692 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15693 else if (code == NE_EXPR)
15694 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15695 else
15696 return NULL_TREE;
15699 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15701 unsigned count = VECTOR_CST_NELTS (op0);
15702 tree *elts = XALLOCAVEC (tree, count);
15703 gcc_assert (VECTOR_CST_NELTS (op1) == count
15704 && TYPE_VECTOR_SUBPARTS (type) == count);
15706 for (unsigned i = 0; i < count; i++)
15708 tree elem_type = TREE_TYPE (type);
15709 tree elem0 = VECTOR_CST_ELT (op0, i);
15710 tree elem1 = VECTOR_CST_ELT (op1, i);
15712 tree tem = fold_relational_const (code, elem_type,
15713 elem0, elem1);
15715 if (tem == NULL_TREE)
15716 return NULL_TREE;
15718 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15721 return build_vector (type, elts);
15724 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15726 To compute GT, swap the arguments and do LT.
15727 To compute GE, do LT and invert the result.
15728 To compute LE, swap the arguments, do LT and invert the result.
15729 To compute NE, do EQ and invert the result.
15731 Therefore, the code below must handle only EQ and LT. */
15733 if (code == LE_EXPR || code == GT_EXPR)
15735 tree tem = op0;
15736 op0 = op1;
15737 op1 = tem;
15738 code = swap_tree_comparison (code);
15741 /* Note that it is safe to invert for real values here because we
15742 have already handled the one case where it matters. */
15744 invert = 0;
15745 if (code == NE_EXPR || code == GE_EXPR)
15747 invert = 1;
15748 code = invert_tree_comparison (code, false);
15751 /* Compute a result for LT or EQ if the arguments permit;
15752 otherwise return NULL_TREE. */
15753 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15755 if (code == EQ_EXPR)
15756 result = tree_int_cst_equal (op0, op1);
15757 else
15758 result = tree_int_cst_lt (op0, op1);
15760 else
15761 return NULL_TREE;
15763 if (invert)
15764 result ^= 1;
15765 return constant_boolean_node (result, type);
15768 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15769 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15770 itself. */
15772 tree
15773 fold_build_cleanup_point_expr (tree type, tree expr)
15775 /* If the expression does not have side effects then we don't have to wrap
15776 it with a cleanup point expression. */
15777 if (!TREE_SIDE_EFFECTS (expr))
15778 return expr;
15780 /* If the expression is a RETURN_EXPR, no cleanup point is needed when
15781 either the expression inside the return has no side effects, or, when
15782 that expression is a MODIFY_EXPR, its right-hand side has none. Note we
15783 don't check the left-hand side of the modify because it should always be
15784 the return decl. */
15785 if (TREE_CODE (expr) == RETURN_EXPR)
15787 tree op = TREE_OPERAND (expr, 0);
15788 if (!op || !TREE_SIDE_EFFECTS (op))
15789 return expr;
15790 op = TREE_OPERAND (op, 1);
15791 if (!TREE_SIDE_EFFECTS (op))
15792 return expr;
15795 return build1 (CLEANUP_POINT_EXPR, type, expr);
15798 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15799 of an indirection through OP0, or NULL_TREE if no simplification is
15800 possible. */
15802 tree
15803 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15805 tree sub = op0;
15806 tree subtype;
15808 STRIP_NOPS (sub);
15809 subtype = TREE_TYPE (sub);
15810 if (!POINTER_TYPE_P (subtype))
15811 return NULL_TREE;
15813 if (TREE_CODE (sub) == ADDR_EXPR)
15815 tree op = TREE_OPERAND (sub, 0);
15816 tree optype = TREE_TYPE (op);
15817 /* *&CONST_DECL -> to the value of the const decl. */
15818 if (TREE_CODE (op) == CONST_DECL)
15819 return DECL_INITIAL (op);
15820 /* *&p => p; make sure to handle *&"str"[cst] here. */
15821 if (type == optype)
15823 tree fop = fold_read_from_constant_string (op);
15824 if (fop)
15825 return fop;
15826 else
15827 return op;
15829 /* *(foo *)&fooarray => fooarray[0] */
15830 else if (TREE_CODE (optype) == ARRAY_TYPE
15831 && type == TREE_TYPE (optype)
15832 && (!in_gimple_form
15833 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15835 tree type_domain = TYPE_DOMAIN (optype);
15836 tree min_val = size_zero_node;
15837 if (type_domain && TYPE_MIN_VALUE (type_domain))
15838 min_val = TYPE_MIN_VALUE (type_domain);
15839 if (in_gimple_form
15840 && TREE_CODE (min_val) != INTEGER_CST)
15841 return NULL_TREE;
15842 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15843 NULL_TREE, NULL_TREE);
15845 /* *(foo *)&complexfoo => __real__ complexfoo */
15846 else if (TREE_CODE (optype) == COMPLEX_TYPE
15847 && type == TREE_TYPE (optype))
15848 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15849 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15850 else if (TREE_CODE (optype) == VECTOR_TYPE
15851 && type == TREE_TYPE (optype))
15853 tree part_width = TYPE_SIZE (type);
15854 tree index = bitsize_int (0);
15855 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15859 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15860 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15862 tree op00 = TREE_OPERAND (sub, 0);
15863 tree op01 = TREE_OPERAND (sub, 1);
15865 STRIP_NOPS (op00);
15866 if (TREE_CODE (op00) == ADDR_EXPR)
15868 tree op00type;
15869 op00 = TREE_OPERAND (op00, 0);
15870 op00type = TREE_TYPE (op00);
15872 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15873 if (TREE_CODE (op00type) == VECTOR_TYPE
15874 && type == TREE_TYPE (op00type))
15876 HOST_WIDE_INT offset = tree_to_shwi (op01);
15877 tree part_width = TYPE_SIZE (type);
15878 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15879 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15880 tree index = bitsize_int (indexi);
15882 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15883 return fold_build3_loc (loc,
15884 BIT_FIELD_REF, type, op00,
15885 part_width, index);
15888 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15889 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15890 && type == TREE_TYPE (op00type))
15892 tree size = TYPE_SIZE_UNIT (type);
15893 if (tree_int_cst_equal (size, op01))
15894 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15896 /* ((foo *)&fooarray)[1] => fooarray[1] */
15897 else if (TREE_CODE (op00type) == ARRAY_TYPE
15898 && type == TREE_TYPE (op00type))
15900 tree type_domain = TYPE_DOMAIN (op00type);
15901 tree min_val = size_zero_node;
15902 if (type_domain && TYPE_MIN_VALUE (type_domain))
15903 min_val = TYPE_MIN_VALUE (type_domain);
15904 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15905 TYPE_SIZE_UNIT (type));
15906 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15907 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15908 NULL_TREE, NULL_TREE);
15913 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15914 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15915 && type == TREE_TYPE (TREE_TYPE (subtype))
15916 && (!in_gimple_form
15917 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15919 tree type_domain;
15920 tree min_val = size_zero_node;
15921 sub = build_fold_indirect_ref_loc (loc, sub);
15922 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15923 if (type_domain && TYPE_MIN_VALUE (type_domain))
15924 min_val = TYPE_MIN_VALUE (type_domain);
15925 if (in_gimple_form
15926 && TREE_CODE (min_val) != INTEGER_CST)
15927 return NULL_TREE;
15928 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15929 NULL_TREE);
15932 return NULL_TREE;
15935 /* Builds an expression for an indirection through T, simplifying some
15936 cases. */
15938 tree
15939 build_fold_indirect_ref_loc (location_t loc, tree t)
15941 tree type = TREE_TYPE (TREE_TYPE (t));
15942 tree sub = fold_indirect_ref_1 (loc, type, t);
15944 if (sub)
15945 return sub;
15947 return build1_loc (loc, INDIRECT_REF, type, t);
15950 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15952 tree
15953 fold_indirect_ref_loc (location_t loc, tree t)
15955 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15957 if (sub)
15958 return sub;
15959 else
15960 return t;
15963 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15964 whose result is ignored. The type of the returned tree need not be
15965 the same as the original expression. */
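/* E.g. for an ignored (x + f ()), only f () has side effects, so the
   addition is stripped and f () alone is returned.  */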
15967 tree
15968 fold_ignored_result (tree t)
15970 if (!TREE_SIDE_EFFECTS (t))
15971 return integer_zero_node;
15973 for (;;)
15974 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15976 case tcc_unary:
15977 t = TREE_OPERAND (t, 0);
15978 break;
15980 case tcc_binary:
15981 case tcc_comparison:
15982 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15983 t = TREE_OPERAND (t, 0);
15984 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15985 t = TREE_OPERAND (t, 1);
15986 else
15987 return t;
15988 break;
15990 case tcc_expression:
15991 switch (TREE_CODE (t))
15993 case COMPOUND_EXPR:
15994 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15995 return t;
15996 t = TREE_OPERAND (t, 0);
15997 break;
15999 case COND_EXPR:
16000 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16001 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16002 return t;
16003 t = TREE_OPERAND (t, 0);
16004 break;
16006 default:
16007 return t;
16009 break;
16011 default:
16012 return t;
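
/* Worked example, not from the original source: for a hypothetical
   tree COMPOUND_EXPR <y++, x + 1> whose value is unused, the loop
   above discards the side-effect-free "x + 1" half and returns the
   "y++" subtree; a tree with no side effects at all comes back as
   integer_zero_node.  */
#if 0
tree stripped = fold_ignored_result (expr);
#endif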

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= - (int) divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
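
/* Worked example, not from the original source: for a power-of-two
   DIVISOR the code above computes (VALUE + DIVISOR - 1) & -DIVISOR,
   so e.g. 37 rounded up to a multiple of 8 is (37 + 7) & -8 == 40.
   A hypothetical caller aligning a size tree SIZE to 8 bytes:  */
#if 0
size = round_up_loc (input_location, size, 8);
#endif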

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     just doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
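
/* Worked example, not from the original source: the power-of-two path
   above is a single mask, VALUE & -DIVISOR, so 37 rounded down to a
   multiple of 8 is 37 & -8 == 32.  */
#if 0
size = round_down_loc (input_location, size, 8);
#endif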

/* Returns a pointer to the base of the object addressed by EXP and
   extracts information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
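
/* Usage sketch, not from the original source: EXP is a hypothetical
   ADDR_EXPR such as &s.f; CORE receives the invariant base address
   (&s), BITPOS the constant bit offset of the access, and OFFSET any
   remaining variable byte offset (or NULL_TREE).  */
#if 0
HOST_WIDE_INT bitpos;
tree offset;
tree core = split_address_to_core_and_offset (exp, &bitpos, &offset);
#endif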

/* Returns true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
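
/* Worked example, not from the original source: for hypothetical
   addresses ADDR1 == &a[3] and ADDR2 == &a[1] over an array of
   4-byte ints, the cores match and *DIFF is set to 8.  */
#if 0
HOST_WIDE_INT diff;
if (ptr_difference_const (addr1, addr2, &diff))
  gcc_assert (diff == 8);
#endif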

/* Simplify the floating-point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip a copysign call, returning its first argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
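
/* Usage sketch, not from the original source: a caller folding a call
   such as fabs (ARG), where the sign of ARG cannot matter, can strip
   negations and copysign calls first; e.g. "-x * y" becomes "x * y".
   ARG is a hypothetical operand tree.  */
#if 0
tree stripped = fold_strip_sign_ops (arg);
if (stripped)
  arg = stripped;
#endif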

/* Return OFF converted to a pointer offset type suitable as an offset
   for POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Likewise, but for a HOST_WIDE_INT offset OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
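
/* Usage sketch, not from the original source: the two helpers above
   are equivalent ways of offsetting a hypothetical pointer tree PTR
   by four bytes; in both cases the offset ends up converted to
   sizetype, as POINTER_PLUS_EXPR requires.  */
#if 0
tree p1 = fold_build_pointer_plus_hwi_loc (input_location, ptr, 4);
tree p2 = fold_build_pointer_plus_loc (input_location, ptr,
				       build_int_cst (integer_type_node, 4));
#endif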