PR jit/63854: Introduce xstrdup_for_dump
[official-gcc.git] / gcc / fold-const.c
blob94d1cbfc2b238f53ddbb9035646c0e19d1dacaa1
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "predict.h"
62 #include "vec.h"
63 #include "hashtab.h"
64 #include "hash-set.h"
65 #include "machmode.h"
66 #include "hard-reg-set.h"
67 #include "input.h"
68 #include "function.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "tree-eh.h"
73 #include "gimple-expr.h"
74 #include "is-a.h"
75 #include "gimple.h"
76 #include "gimplify.h"
77 #include "tree-dfa.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
79 #include "builtins.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "generic-match.h"
85 #include "optabs.h"
87 /* Nonzero if we are folding constants inside an initializer; zero
88 otherwise. */
89 int folding_initializer = 0;
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code {
95 COMPCODE_FALSE = 0,
96 COMPCODE_LT = 1,
97 COMPCODE_EQ = 2,
98 COMPCODE_LE = 3,
99 COMPCODE_GT = 4,
100 COMPCODE_LTGT = 5,
101 COMPCODE_GE = 6,
102 COMPCODE_ORD = 7,
103 COMPCODE_UNORD = 8,
104 COMPCODE_UNLT = 9,
105 COMPCODE_UNEQ = 10,
106 COMPCODE_UNLE = 11,
107 COMPCODE_UNGT = 12,
108 COMPCODE_NE = 13,
109 COMPCODE_UNGE = 14,
110 COMPCODE_TRUE = 15
113 static bool negate_mathfn_p (enum built_in_function);
114 static bool negate_expr_p (tree);
115 static tree negate_expr (tree);
116 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
117 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
118 static enum comparison_code comparison_to_compcode (enum tree_code);
119 static enum tree_code compcode_to_comparison (enum comparison_code);
120 static int operand_equal_for_comparison_p (tree, tree, tree);
121 static int twoval_comparison_p (tree, tree *, tree *, int *);
122 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
123 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
124 static tree make_bit_field_ref (location_t, tree, tree,
125 HOST_WIDE_INT, HOST_WIDE_INT, int);
126 static tree optimize_bit_field_compare (location_t, enum tree_code,
127 tree, tree, tree);
128 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
129 HOST_WIDE_INT *,
130 machine_mode *, int *, int *,
131 tree *, tree *);
132 static int simple_operand_p (const_tree);
133 static bool simple_operand_p_2 (tree);
134 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
135 static tree range_predecessor (tree);
136 static tree range_successor (tree);
137 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
138 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
139 static tree unextend (tree, int, int, tree);
140 static tree optimize_minmax_comparison (location_t, enum tree_code,
141 tree, tree, tree);
142 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
143 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
144 static tree fold_binary_op_with_conditional_arg (location_t,
145 enum tree_code, tree,
146 tree, tree,
147 tree, tree, int);
148 static tree fold_mathfn_compare (location_t,
149 enum built_in_function, enum tree_code,
150 tree, tree, tree);
151 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
152 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
153 static bool reorder_operands_p (const_tree, const_tree);
154 static tree fold_negate_const (tree, tree);
155 static tree fold_not_const (const_tree, tree);
156 static tree fold_relational_const (enum tree_code, tree, tree, tree);
157 static tree fold_convert_const (enum tree_code, tree, tree);
158 static tree fold_view_convert_expr (tree, tree);
159 static bool vec_cst_ctor_to_array (tree, tree *);
162 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
163 Otherwise, return LOC. */
165 static location_t
166 expr_location_or (tree t, location_t loc)
168 location_t tloc = EXPR_LOCATION (t);
169 return tloc == UNKNOWN_LOCATION ? loc : tloc;
172 /* Similar to protected_set_expr_location, but never modify x in place,
173 if location can and needs to be set, unshare it. */
175 static inline tree
176 protected_set_expr_location_unshare (tree x, location_t loc)
178 if (CAN_HAVE_LOCATION_P (x)
179 && EXPR_LOCATION (x) != loc
180 && !(TREE_CODE (x) == SAVE_EXPR
181 || TREE_CODE (x) == TARGET_EXPR
182 || TREE_CODE (x) == BIND_EXPR))
184 x = copy_node (x);
185 SET_EXPR_LOCATION (x, loc);
187 return x;
190 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
191 division and returns the quotient. Otherwise returns
192 NULL_TREE. */
194 tree
195 div_if_zero_remainder (const_tree arg1, const_tree arg2)
197 widest_int quo;
199 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
200 SIGNED, &quo))
201 return wide_int_to_tree (TREE_TYPE (arg1), quo);
203 return NULL_TREE;
206 /* This is nonzero if we should defer warnings about undefined
207 overflow. This facility exists because these warnings are a
208 special case. The code to estimate loop iterations does not want
209 to issue any warnings, since it works with expressions which do not
210 occur in user code. Various bits of cleanup code call fold(), but
211 only use the result if it has certain characteristics (e.g., is a
212 constant); that code only wants to issue a warning if the result is
213 used. */
215 static int fold_deferring_overflow_warnings;
217 /* If a warning about undefined overflow is deferred, this is the
218 warning. Note that this may cause us to turn two warnings into
219 one, but that is fine since it is sufficient to only give one
220 warning per expression. */
222 static const char* fold_deferred_overflow_warning;
224 /* If a warning about undefined overflow is deferred, this is the
225 level at which the warning should be emitted. */
227 static enum warn_strict_overflow_code fold_deferred_overflow_code;
229 /* Start deferring overflow warnings. We could use a stack here to
230 permit nested calls, but at present it is not necessary. */
232 void
233 fold_defer_overflow_warnings (void)
235 ++fold_deferring_overflow_warnings;
238 /* Stop deferring overflow warnings. If there is a pending warning,
239 and ISSUE is true, then issue the warning if appropriate. STMT is
240 the statement with which the warning should be associated (used for
241 location information); STMT may be NULL. CODE is the level of the
242 warning--a warn_strict_overflow_code value. This function will use
243 the smaller of CODE and the deferred code when deciding whether to
244 issue the warning. CODE may be zero to mean to always use the
245 deferred code. */
247 void
248 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
250 const char *warnmsg;
251 location_t locus;
253 gcc_assert (fold_deferring_overflow_warnings > 0);
254 --fold_deferring_overflow_warnings;
255 if (fold_deferring_overflow_warnings > 0)
257 if (fold_deferred_overflow_warning != NULL
258 && code != 0
259 && code < (int) fold_deferred_overflow_code)
260 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
261 return;
264 warnmsg = fold_deferred_overflow_warning;
265 fold_deferred_overflow_warning = NULL;
267 if (!issue || warnmsg == NULL)
268 return;
270 if (gimple_no_warning_p (stmt))
271 return;
273 /* Use the smallest code level when deciding to issue the
274 warning. */
275 if (code == 0 || code > (int) fold_deferred_overflow_code)
276 code = fold_deferred_overflow_code;
278 if (!issue_strict_overflow_warning (code))
279 return;
281 if (stmt == NULL)
282 locus = input_location;
283 else
284 locus = gimple_location (stmt);
285 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
288 /* Stop deferring overflow warnings, ignoring any deferred
289 warnings. */
291 void
292 fold_undefer_and_ignore_overflow_warnings (void)
294 fold_undefer_overflow_warnings (false, NULL, 0);
297 /* Whether we are deferring overflow warnings. */
299 bool
300 fold_deferring_overflow_warnings_p (void)
302 return fold_deferring_overflow_warnings > 0;
305 /* This is called when we fold something based on the fact that signed
306 overflow is undefined. */
308 static void
309 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
311 if (fold_deferring_overflow_warnings > 0)
313 if (fold_deferred_overflow_warning == NULL
314 || wc < fold_deferred_overflow_code)
316 fold_deferred_overflow_warning = gmsgid;
317 fold_deferred_overflow_code = wc;
320 else if (issue_strict_overflow_warning (wc))
321 warning (OPT_Wstrict_overflow, gmsgid);
324 /* Return true if the built-in mathematical function specified by CODE
325 is odd, i.e. -f(x) == f(-x). */
327 static bool
328 negate_mathfn_p (enum built_in_function code)
330 switch (code)
332 CASE_FLT_FN (BUILT_IN_ASIN):
333 CASE_FLT_FN (BUILT_IN_ASINH):
334 CASE_FLT_FN (BUILT_IN_ATAN):
335 CASE_FLT_FN (BUILT_IN_ATANH):
336 CASE_FLT_FN (BUILT_IN_CASIN):
337 CASE_FLT_FN (BUILT_IN_CASINH):
338 CASE_FLT_FN (BUILT_IN_CATAN):
339 CASE_FLT_FN (BUILT_IN_CATANH):
340 CASE_FLT_FN (BUILT_IN_CBRT):
341 CASE_FLT_FN (BUILT_IN_CPROJ):
342 CASE_FLT_FN (BUILT_IN_CSIN):
343 CASE_FLT_FN (BUILT_IN_CSINH):
344 CASE_FLT_FN (BUILT_IN_CTAN):
345 CASE_FLT_FN (BUILT_IN_CTANH):
346 CASE_FLT_FN (BUILT_IN_ERF):
347 CASE_FLT_FN (BUILT_IN_LLROUND):
348 CASE_FLT_FN (BUILT_IN_LROUND):
349 CASE_FLT_FN (BUILT_IN_ROUND):
350 CASE_FLT_FN (BUILT_IN_SIN):
351 CASE_FLT_FN (BUILT_IN_SINH):
352 CASE_FLT_FN (BUILT_IN_TAN):
353 CASE_FLT_FN (BUILT_IN_TANH):
354 CASE_FLT_FN (BUILT_IN_TRUNC):
355 return true;
357 CASE_FLT_FN (BUILT_IN_LLRINT):
358 CASE_FLT_FN (BUILT_IN_LRINT):
359 CASE_FLT_FN (BUILT_IN_NEARBYINT):
360 CASE_FLT_FN (BUILT_IN_RINT):
361 return !flag_rounding_math;
363 default:
364 break;
366 return false;
369 /* Check whether we may negate an integer constant T without causing
370 overflow. */
372 bool
373 may_negate_without_overflow_p (const_tree t)
375 tree type;
377 gcc_assert (TREE_CODE (t) == INTEGER_CST);
379 type = TREE_TYPE (t);
380 if (TYPE_UNSIGNED (type))
381 return false;
383 return !wi::only_sign_bit_p (t);
386 /* Determine whether an expression T can be cheaply negated using
387 the function negate_expr without introducing undefined overflow. */
389 static bool
390 negate_expr_p (tree t)
392 tree type;
394 if (t == 0)
395 return false;
397 type = TREE_TYPE (t);
399 STRIP_SIGN_NOPS (t);
400 switch (TREE_CODE (t))
402 case INTEGER_CST:
403 if (TYPE_OVERFLOW_WRAPS (type))
404 return true;
406 /* Check that -CST will not overflow type. */
407 return may_negate_without_overflow_p (t);
408 case BIT_NOT_EXPR:
409 return (INTEGRAL_TYPE_P (type)
410 && TYPE_OVERFLOW_WRAPS (type));
412 case FIXED_CST:
413 return true;
415 case NEGATE_EXPR:
416 return !TYPE_OVERFLOW_SANITIZED (type);
418 case REAL_CST:
419 /* We want to canonicalize to positive real constants. Pretend
420 that only negative ones can be easily negated. */
421 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
423 case COMPLEX_CST:
424 return negate_expr_p (TREE_REALPART (t))
425 && negate_expr_p (TREE_IMAGPART (t));
427 case VECTOR_CST:
429 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
430 return true;
432 int count = TYPE_VECTOR_SUBPARTS (type), i;
434 for (i = 0; i < count; i++)
435 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
436 return false;
438 return true;
441 case COMPLEX_EXPR:
442 return negate_expr_p (TREE_OPERAND (t, 0))
443 && negate_expr_p (TREE_OPERAND (t, 1));
445 case CONJ_EXPR:
446 return negate_expr_p (TREE_OPERAND (t, 0));
448 case PLUS_EXPR:
449 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
450 || HONOR_SIGNED_ZEROS (element_mode (type)))
451 return false;
452 /* -(A + B) -> (-B) - A. */
453 if (negate_expr_p (TREE_OPERAND (t, 1))
454 && reorder_operands_p (TREE_OPERAND (t, 0),
455 TREE_OPERAND (t, 1)))
456 return true;
457 /* -(A + B) -> (-A) - B. */
458 return negate_expr_p (TREE_OPERAND (t, 0));
460 case MINUS_EXPR:
461 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
462 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
463 && !HONOR_SIGNED_ZEROS (element_mode (type))
464 && reorder_operands_p (TREE_OPERAND (t, 0),
465 TREE_OPERAND (t, 1));
467 case MULT_EXPR:
468 if (TYPE_UNSIGNED (TREE_TYPE (t)))
469 break;
471 /* Fall through. */
473 case RDIV_EXPR:
474 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
475 return negate_expr_p (TREE_OPERAND (t, 1))
476 || negate_expr_p (TREE_OPERAND (t, 0));
477 break;
479 case TRUNC_DIV_EXPR:
480 case ROUND_DIV_EXPR:
481 case EXACT_DIV_EXPR:
482 /* In general we can't negate A / B, because if A is INT_MIN and
483 B is 1, we may turn this into INT_MIN / -1 which is undefined
484 and actually traps on some architectures. But if overflow is
485 undefined, we can negate, because - (INT_MIN / 1) is an
486 overflow. */
487 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
489 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
490 break;
491 /* If overflow is undefined then we have to be careful because
492 we ask whether it's ok to associate the negate with the
493 division which is not ok for example for
494 -((a - b) / c) where (-(a - b)) / c may invoke undefined
495 overflow because of negating INT_MIN. So do not use
496 negate_expr_p here but open-code the two important cases. */
497 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
498 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
499 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
500 return true;
502 else if (negate_expr_p (TREE_OPERAND (t, 0)))
503 return true;
504 return negate_expr_p (TREE_OPERAND (t, 1));
506 case NOP_EXPR:
507 /* Negate -((double)float) as (double)(-float). */
508 if (TREE_CODE (type) == REAL_TYPE)
510 tree tem = strip_float_extensions (t);
511 if (tem != t)
512 return negate_expr_p (tem);
514 break;
516 case CALL_EXPR:
517 /* Negate -f(x) as f(-x). */
518 if (negate_mathfn_p (builtin_mathfn_code (t)))
519 return negate_expr_p (CALL_EXPR_ARG (t, 0));
520 break;
522 case RSHIFT_EXPR:
523 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
524 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
526 tree op1 = TREE_OPERAND (t, 1);
527 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
528 return true;
530 break;
532 default:
533 break;
535 return false;
538 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
539 simplification is possible.
540 If negate_expr_p would return true for T, NULL_TREE will never be
541 returned. */
543 static tree
544 fold_negate_expr (location_t loc, tree t)
546 tree type = TREE_TYPE (t);
547 tree tem;
549 switch (TREE_CODE (t))
551 /* Convert - (~A) to A + 1. */
552 case BIT_NOT_EXPR:
553 if (INTEGRAL_TYPE_P (type))
554 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
555 build_one_cst (type));
556 break;
558 case INTEGER_CST:
559 tem = fold_negate_const (t, type);
560 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
561 || (!TYPE_OVERFLOW_TRAPS (type)
562 && TYPE_OVERFLOW_WRAPS (type))
563 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
564 return tem;
565 break;
567 case REAL_CST:
568 tem = fold_negate_const (t, type);
569 return tem;
571 case FIXED_CST:
572 tem = fold_negate_const (t, type);
573 return tem;
575 case COMPLEX_CST:
577 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
578 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
579 if (rpart && ipart)
580 return build_complex (type, rpart, ipart);
582 break;
584 case VECTOR_CST:
586 int count = TYPE_VECTOR_SUBPARTS (type), i;
587 tree *elts = XALLOCAVEC (tree, count);
589 for (i = 0; i < count; i++)
591 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
592 if (elts[i] == NULL_TREE)
593 return NULL_TREE;
596 return build_vector (type, elts);
599 case COMPLEX_EXPR:
600 if (negate_expr_p (t))
601 return fold_build2_loc (loc, COMPLEX_EXPR, type,
602 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
603 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
604 break;
606 case CONJ_EXPR:
607 if (negate_expr_p (t))
608 return fold_build1_loc (loc, CONJ_EXPR, type,
609 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
610 break;
612 case NEGATE_EXPR:
613 if (!TYPE_OVERFLOW_SANITIZED (type))
614 return TREE_OPERAND (t, 0);
615 break;
617 case PLUS_EXPR:
618 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
619 && !HONOR_SIGNED_ZEROS (element_mode (type)))
621 /* -(A + B) -> (-B) - A. */
622 if (negate_expr_p (TREE_OPERAND (t, 1))
623 && reorder_operands_p (TREE_OPERAND (t, 0),
624 TREE_OPERAND (t, 1)))
626 tem = negate_expr (TREE_OPERAND (t, 1));
627 return fold_build2_loc (loc, MINUS_EXPR, type,
628 tem, TREE_OPERAND (t, 0));
631 /* -(A + B) -> (-A) - B. */
632 if (negate_expr_p (TREE_OPERAND (t, 0)))
634 tem = negate_expr (TREE_OPERAND (t, 0));
635 return fold_build2_loc (loc, MINUS_EXPR, type,
636 tem, TREE_OPERAND (t, 1));
639 break;
641 case MINUS_EXPR:
642 /* - (A - B) -> B - A */
643 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
644 && !HONOR_SIGNED_ZEROS (element_mode (type))
645 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
646 return fold_build2_loc (loc, MINUS_EXPR, type,
647 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
648 break;
650 case MULT_EXPR:
651 if (TYPE_UNSIGNED (type))
652 break;
654 /* Fall through. */
656 case RDIV_EXPR:
657 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
659 tem = TREE_OPERAND (t, 1);
660 if (negate_expr_p (tem))
661 return fold_build2_loc (loc, TREE_CODE (t), type,
662 TREE_OPERAND (t, 0), negate_expr (tem));
663 tem = TREE_OPERAND (t, 0);
664 if (negate_expr_p (tem))
665 return fold_build2_loc (loc, TREE_CODE (t), type,
666 negate_expr (tem), TREE_OPERAND (t, 1));
668 break;
670 case TRUNC_DIV_EXPR:
671 case ROUND_DIV_EXPR:
672 case EXACT_DIV_EXPR:
673 /* In general we can't negate A / B, because if A is INT_MIN and
674 B is 1, we may turn this into INT_MIN / -1 which is undefined
675 and actually traps on some architectures. But if overflow is
676 undefined, we can negate, because - (INT_MIN / 1) is an
677 overflow. */
678 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
680 const char * const warnmsg = G_("assuming signed overflow does not "
681 "occur when negating a division");
682 tem = TREE_OPERAND (t, 1);
683 if (negate_expr_p (tem))
685 if (INTEGRAL_TYPE_P (type)
686 && (TREE_CODE (tem) != INTEGER_CST
687 || integer_onep (tem)))
688 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
689 return fold_build2_loc (loc, TREE_CODE (t), type,
690 TREE_OPERAND (t, 0), negate_expr (tem));
692 /* If overflow is undefined then we have to be careful because
693 we ask whether it's ok to associate the negate with the
694 division which is not ok for example for
695 -((a - b) / c) where (-(a - b)) / c may invoke undefined
696 overflow because of negating INT_MIN. So do not use
697 negate_expr_p here but open-code the two important cases. */
698 tem = TREE_OPERAND (t, 0);
699 if ((INTEGRAL_TYPE_P (type)
700 && (TREE_CODE (tem) == NEGATE_EXPR
701 || (TREE_CODE (tem) == INTEGER_CST
702 && may_negate_without_overflow_p (tem))))
703 || !INTEGRAL_TYPE_P (type))
704 return fold_build2_loc (loc, TREE_CODE (t), type,
705 negate_expr (tem), TREE_OPERAND (t, 1));
707 break;
709 case NOP_EXPR:
710 /* Convert -((double)float) into (double)(-float). */
711 if (TREE_CODE (type) == REAL_TYPE)
713 tem = strip_float_extensions (t);
714 if (tem != t && negate_expr_p (tem))
715 return fold_convert_loc (loc, type, negate_expr (tem));
717 break;
719 case CALL_EXPR:
720 /* Negate -f(x) as f(-x). */
721 if (negate_mathfn_p (builtin_mathfn_code (t))
722 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
724 tree fndecl, arg;
726 fndecl = get_callee_fndecl (t);
727 arg = negate_expr (CALL_EXPR_ARG (t, 0));
728 return build_call_expr_loc (loc, fndecl, 1, arg);
730 break;
732 case RSHIFT_EXPR:
733 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
734 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
736 tree op1 = TREE_OPERAND (t, 1);
737 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
739 tree ntype = TYPE_UNSIGNED (type)
740 ? signed_type_for (type)
741 : unsigned_type_for (type);
742 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
743 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
744 return fold_convert_loc (loc, type, temp);
747 break;
749 default:
750 break;
753 return NULL_TREE;
756 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
757 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
758 return NULL_TREE. */
760 static tree
761 negate_expr (tree t)
763 tree type, tem;
764 location_t loc;
766 if (t == NULL_TREE)
767 return NULL_TREE;
769 loc = EXPR_LOCATION (t);
770 type = TREE_TYPE (t);
771 STRIP_SIGN_NOPS (t);
773 tem = fold_negate_expr (loc, t);
774 if (!tem)
775 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
776 return fold_convert_loc (loc, type, tem);
779 /* Split a tree IN into a constant, literal and variable parts that could be
780 combined with CODE to make IN. "constant" means an expression with
781 TREE_CONSTANT but that isn't an actual constant. CODE must be a
782 commutative arithmetic operation. Store the constant part into *CONP,
783 the literal in *LITP and return the variable part. If a part isn't
784 present, set it to null. If the tree does not decompose in this way,
785 return the entire tree as the variable part and the other parts as null.
787 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
788 case, we negate an operand that was subtracted. Except if it is a
789 literal for which we use *MINUS_LITP instead.
791 If NEGATE_P is true, we are negating all of IN, again except a literal
792 for which we use *MINUS_LITP instead.
794 If IN is itself a literal or constant, return it as appropriate.
796 Note that we do not guarantee that any of the three values will be the
797 same type as IN, but they will have the same signedness and mode. */
799 static tree
800 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
801 tree *minus_litp, int negate_p)
803 tree var = 0;
805 *conp = 0;
806 *litp = 0;
807 *minus_litp = 0;
809 /* Strip any conversions that don't change the machine mode or signedness. */
810 STRIP_SIGN_NOPS (in);
812 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
813 || TREE_CODE (in) == FIXED_CST)
814 *litp = in;
815 else if (TREE_CODE (in) == code
816 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
817 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
818 /* We can associate addition and subtraction together (even
819 though the C standard doesn't say so) for integers because
820 the value is not affected. For reals, the value might be
821 affected, so we can't. */
822 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
823 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
825 tree op0 = TREE_OPERAND (in, 0);
826 tree op1 = TREE_OPERAND (in, 1);
827 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
828 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
830 /* First see if either of the operands is a literal, then a constant. */
831 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
832 || TREE_CODE (op0) == FIXED_CST)
833 *litp = op0, op0 = 0;
834 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
835 || TREE_CODE (op1) == FIXED_CST)
836 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
838 if (op0 != 0 && TREE_CONSTANT (op0))
839 *conp = op0, op0 = 0;
840 else if (op1 != 0 && TREE_CONSTANT (op1))
841 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
843 /* If we haven't dealt with either operand, this is not a case we can
844 decompose. Otherwise, VAR is either of the ones remaining, if any. */
845 if (op0 != 0 && op1 != 0)
846 var = in;
847 else if (op0 != 0)
848 var = op0;
849 else
850 var = op1, neg_var_p = neg1_p;
852 /* Now do any needed negations. */
853 if (neg_litp_p)
854 *minus_litp = *litp, *litp = 0;
855 if (neg_conp_p)
856 *conp = negate_expr (*conp);
857 if (neg_var_p)
858 var = negate_expr (var);
860 else if (TREE_CODE (in) == BIT_NOT_EXPR
861 && code == PLUS_EXPR)
863 /* -X - 1 is folded to ~X, undo that here. */
864 *minus_litp = build_one_cst (TREE_TYPE (in));
865 var = negate_expr (TREE_OPERAND (in, 0));
867 else if (TREE_CONSTANT (in))
868 *conp = in;
869 else
870 var = in;
872 if (negate_p)
874 if (*litp)
875 *minus_litp = *litp, *litp = 0;
876 else if (*minus_litp)
877 *litp = *minus_litp, *minus_litp = 0;
878 *conp = negate_expr (*conp);
879 var = negate_expr (var);
882 return var;
885 /* Re-associate trees split by the above function. T1 and T2 are
886 either expressions to associate or null. Return the new
887 expression, if any. LOC is the location of the new expression. If
888 we build an operation, do it in TYPE and with CODE. */
890 static tree
891 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
893 if (t1 == 0)
894 return t2;
895 else if (t2 == 0)
896 return t1;
898 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
899 try to fold this since we will have infinite recursion. But do
900 deal with any NEGATE_EXPRs. */
901 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
902 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
904 if (code == PLUS_EXPR)
906 if (TREE_CODE (t1) == NEGATE_EXPR)
907 return build2_loc (loc, MINUS_EXPR, type,
908 fold_convert_loc (loc, type, t2),
909 fold_convert_loc (loc, type,
910 TREE_OPERAND (t1, 0)));
911 else if (TREE_CODE (t2) == NEGATE_EXPR)
912 return build2_loc (loc, MINUS_EXPR, type,
913 fold_convert_loc (loc, type, t1),
914 fold_convert_loc (loc, type,
915 TREE_OPERAND (t2, 0)));
916 else if (integer_zerop (t2))
917 return fold_convert_loc (loc, type, t1);
919 else if (code == MINUS_EXPR)
921 if (integer_zerop (t2))
922 return fold_convert_loc (loc, type, t1);
925 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
926 fold_convert_loc (loc, type, t2));
929 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
930 fold_convert_loc (loc, type, t2));
933 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
934 for use in int_const_binop, size_binop and size_diffop. */
936 static bool
937 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
939 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
940 return false;
941 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
942 return false;
944 switch (code)
946 case LSHIFT_EXPR:
947 case RSHIFT_EXPR:
948 case LROTATE_EXPR:
949 case RROTATE_EXPR:
950 return true;
952 default:
953 break;
956 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
957 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
958 && TYPE_MODE (type1) == TYPE_MODE (type2);
962 /* Combine two integer constants ARG1 and ARG2 under operation CODE
963 to produce a new constant. Return NULL_TREE if we don't know how
964 to evaluate CODE at compile-time. */
966 static tree
967 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
968 int overflowable)
970 wide_int res;
971 tree t;
972 tree type = TREE_TYPE (arg1);
973 signop sign = TYPE_SIGN (type);
974 bool overflow = false;
976 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
977 TYPE_SIGN (TREE_TYPE (parg2)));
979 switch (code)
981 case BIT_IOR_EXPR:
982 res = wi::bit_or (arg1, arg2);
983 break;
985 case BIT_XOR_EXPR:
986 res = wi::bit_xor (arg1, arg2);
987 break;
989 case BIT_AND_EXPR:
990 res = wi::bit_and (arg1, arg2);
991 break;
993 case RSHIFT_EXPR:
994 case LSHIFT_EXPR:
995 if (wi::neg_p (arg2))
997 arg2 = -arg2;
998 if (code == RSHIFT_EXPR)
999 code = LSHIFT_EXPR;
1000 else
1001 code = RSHIFT_EXPR;
1004 if (code == RSHIFT_EXPR)
1005 /* It's unclear from the C standard whether shifts can overflow.
1006 The following code ignores overflow; perhaps a C standard
1007 interpretation ruling is needed. */
1008 res = wi::rshift (arg1, arg2, sign);
1009 else
1010 res = wi::lshift (arg1, arg2);
1011 break;
1013 case RROTATE_EXPR:
1014 case LROTATE_EXPR:
1015 if (wi::neg_p (arg2))
1017 arg2 = -arg2;
1018 if (code == RROTATE_EXPR)
1019 code = LROTATE_EXPR;
1020 else
1021 code = RROTATE_EXPR;
1024 if (code == RROTATE_EXPR)
1025 res = wi::rrotate (arg1, arg2);
1026 else
1027 res = wi::lrotate (arg1, arg2);
1028 break;
1030 case PLUS_EXPR:
1031 res = wi::add (arg1, arg2, sign, &overflow);
1032 break;
1034 case MINUS_EXPR:
1035 res = wi::sub (arg1, arg2, sign, &overflow);
1036 break;
1038 case MULT_EXPR:
1039 res = wi::mul (arg1, arg2, sign, &overflow);
1040 break;
1042 case MULT_HIGHPART_EXPR:
1043 res = wi::mul_high (arg1, arg2, sign);
1044 break;
1046 case TRUNC_DIV_EXPR:
1047 case EXACT_DIV_EXPR:
1048 if (arg2 == 0)
1049 return NULL_TREE;
1050 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1051 break;
1053 case FLOOR_DIV_EXPR:
1054 if (arg2 == 0)
1055 return NULL_TREE;
1056 res = wi::div_floor (arg1, arg2, sign, &overflow);
1057 break;
1059 case CEIL_DIV_EXPR:
1060 if (arg2 == 0)
1061 return NULL_TREE;
1062 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1063 break;
1065 case ROUND_DIV_EXPR:
1066 if (arg2 == 0)
1067 return NULL_TREE;
1068 res = wi::div_round (arg1, arg2, sign, &overflow);
1069 break;
1071 case TRUNC_MOD_EXPR:
1072 if (arg2 == 0)
1073 return NULL_TREE;
1074 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1075 break;
1077 case FLOOR_MOD_EXPR:
1078 if (arg2 == 0)
1079 return NULL_TREE;
1080 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1081 break;
1083 case CEIL_MOD_EXPR:
1084 if (arg2 == 0)
1085 return NULL_TREE;
1086 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1087 break;
1089 case ROUND_MOD_EXPR:
1090 if (arg2 == 0)
1091 return NULL_TREE;
1092 res = wi::mod_round (arg1, arg2, sign, &overflow);
1093 break;
1095 case MIN_EXPR:
1096 res = wi::min (arg1, arg2, sign);
1097 break;
1099 case MAX_EXPR:
1100 res = wi::max (arg1, arg2, sign);
1101 break;
1103 default:
1104 return NULL_TREE;
1107 t = force_fit_type (type, res, overflowable,
1108 (((sign == SIGNED || overflowable == -1)
1109 && overflow)
1110 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1112 return t;
1115 tree
1116 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1118 return int_const_binop_1 (code, arg1, arg2, 1);
1121 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1122 constant. We assume ARG1 and ARG2 have the same data type, or at least
1123 are the same kind of constant and the same machine mode. Return zero if
1124 combining the constants is not allowed in the current operating mode. */
1126 static tree
1127 const_binop (enum tree_code code, tree arg1, tree arg2)
1129 /* Sanity check for the recursive cases. */
1130 if (!arg1 || !arg2)
1131 return NULL_TREE;
1133 STRIP_NOPS (arg1);
1134 STRIP_NOPS (arg2);
1136 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1138 if (code == POINTER_PLUS_EXPR)
1139 return int_const_binop (PLUS_EXPR,
1140 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1142 return int_const_binop (code, arg1, arg2);
1145 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1147 machine_mode mode;
1148 REAL_VALUE_TYPE d1;
1149 REAL_VALUE_TYPE d2;
1150 REAL_VALUE_TYPE value;
1151 REAL_VALUE_TYPE result;
1152 bool inexact;
1153 tree t, type;
1155 /* The following codes are handled by real_arithmetic. */
1156 switch (code)
1158 case PLUS_EXPR:
1159 case MINUS_EXPR:
1160 case MULT_EXPR:
1161 case RDIV_EXPR:
1162 case MIN_EXPR:
1163 case MAX_EXPR:
1164 break;
1166 default:
1167 return NULL_TREE;
1170 d1 = TREE_REAL_CST (arg1);
1171 d2 = TREE_REAL_CST (arg2);
1173 type = TREE_TYPE (arg1);
1174 mode = TYPE_MODE (type);
1176 /* Don't perform operation if we honor signaling NaNs and
1177 either operand is a NaN. */
1178 if (HONOR_SNANS (mode)
1179 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1180 return NULL_TREE;
1182 /* Don't perform operation if it would raise a division
1183 by zero exception. */
1184 if (code == RDIV_EXPR
1185 && REAL_VALUES_EQUAL (d2, dconst0)
1186 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1187 return NULL_TREE;
1189 /* If either operand is a NaN, just return it. Otherwise, set up
1190 for floating-point trap; we return an overflow. */
1191 if (REAL_VALUE_ISNAN (d1))
1192 return arg1;
1193 else if (REAL_VALUE_ISNAN (d2))
1194 return arg2;
1196 inexact = real_arithmetic (&value, code, &d1, &d2);
1197 real_convert (&result, mode, &value);
1199 /* Don't constant fold this floating point operation if
1200 the result has overflowed and flag_trapping_math. */
1201 if (flag_trapping_math
1202 && MODE_HAS_INFINITIES (mode)
1203 && REAL_VALUE_ISINF (result)
1204 && !REAL_VALUE_ISINF (d1)
1205 && !REAL_VALUE_ISINF (d2))
1206 return NULL_TREE;
1208 /* Don't constant fold this floating point operation if the
1209 result may dependent upon the run-time rounding mode and
1210 flag_rounding_math is set, or if GCC's software emulation
1211 is unable to accurately represent the result. */
1212 if ((flag_rounding_math
1213 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1214 && (inexact || !real_identical (&result, &value)))
1215 return NULL_TREE;
1217 t = build_real (type, result);
1219 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1220 return t;
1223 if (TREE_CODE (arg1) == FIXED_CST)
1225 FIXED_VALUE_TYPE f1;
1226 FIXED_VALUE_TYPE f2;
1227 FIXED_VALUE_TYPE result;
1228 tree t, type;
1229 int sat_p;
1230 bool overflow_p;
1232 /* The following codes are handled by fixed_arithmetic. */
1233 switch (code)
1235 case PLUS_EXPR:
1236 case MINUS_EXPR:
1237 case MULT_EXPR:
1238 case TRUNC_DIV_EXPR:
1239 if (TREE_CODE (arg2) != FIXED_CST)
1240 return NULL_TREE;
1241 f2 = TREE_FIXED_CST (arg2);
1242 break;
1244 case LSHIFT_EXPR:
1245 case RSHIFT_EXPR:
1247 if (TREE_CODE (arg2) != INTEGER_CST)
1248 return NULL_TREE;
1249 wide_int w2 = arg2;
1250 f2.data.high = w2.elt (1);
1251 f2.data.low = w2.elt (0);
1252 f2.mode = SImode;
1254 break;
1256 default:
1257 return NULL_TREE;
1260 f1 = TREE_FIXED_CST (arg1);
1261 type = TREE_TYPE (arg1);
1262 sat_p = TYPE_SATURATING (type);
1263 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1264 t = build_fixed (type, result);
1265 /* Propagate overflow flags. */
1266 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1267 TREE_OVERFLOW (t) = 1;
1268 return t;
1271 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1273 tree type = TREE_TYPE (arg1);
1274 tree r1 = TREE_REALPART (arg1);
1275 tree i1 = TREE_IMAGPART (arg1);
1276 tree r2 = TREE_REALPART (arg2);
1277 tree i2 = TREE_IMAGPART (arg2);
1278 tree real, imag;
1280 switch (code)
1282 case PLUS_EXPR:
1283 case MINUS_EXPR:
1284 real = const_binop (code, r1, r2);
1285 imag = const_binop (code, i1, i2);
1286 break;
1288 case MULT_EXPR:
1289 if (COMPLEX_FLOAT_TYPE_P (type))
1290 return do_mpc_arg2 (arg1, arg2, type,
1291 /* do_nonfinite= */ folding_initializer,
1292 mpc_mul);
1294 real = const_binop (MINUS_EXPR,
1295 const_binop (MULT_EXPR, r1, r2),
1296 const_binop (MULT_EXPR, i1, i2));
1297 imag = const_binop (PLUS_EXPR,
1298 const_binop (MULT_EXPR, r1, i2),
1299 const_binop (MULT_EXPR, i1, r2));
1300 break;
1302 case RDIV_EXPR:
1303 if (COMPLEX_FLOAT_TYPE_P (type))
1304 return do_mpc_arg2 (arg1, arg2, type,
1305 /* do_nonfinite= */ folding_initializer,
1306 mpc_div);
1307 /* Fallthru ... */
1308 case TRUNC_DIV_EXPR:
1309 case CEIL_DIV_EXPR:
1310 case FLOOR_DIV_EXPR:
1311 case ROUND_DIV_EXPR:
1312 if (flag_complex_method == 0)
1314 /* Keep this algorithm in sync with
1315 tree-complex.c:expand_complex_div_straight().
1317 Expand complex division to scalars, straightforward algorithm.
1318 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1319 t = br*br + bi*bi
1321 tree magsquared
1322 = const_binop (PLUS_EXPR,
1323 const_binop (MULT_EXPR, r2, r2),
1324 const_binop (MULT_EXPR, i2, i2));
1325 tree t1
1326 = const_binop (PLUS_EXPR,
1327 const_binop (MULT_EXPR, r1, r2),
1328 const_binop (MULT_EXPR, i1, i2));
1329 tree t2
1330 = const_binop (MINUS_EXPR,
1331 const_binop (MULT_EXPR, i1, r2),
1332 const_binop (MULT_EXPR, r1, i2));
1334 real = const_binop (code, t1, magsquared);
1335 imag = const_binop (code, t2, magsquared);
1337 else
1339 /* Keep this algorithm in sync with
1340 tree-complex.c:expand_complex_div_wide().
1342 Expand complex division to scalars, modified algorithm to minimize
1343 overflow with wide input ranges. */
1344 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1345 fold_abs_const (r2, TREE_TYPE (type)),
1346 fold_abs_const (i2, TREE_TYPE (type)));
1348 if (integer_nonzerop (compare))
1350 /* In the TRUE branch, we compute
1351 ratio = br/bi;
1352 div = (br * ratio) + bi;
1353 tr = (ar * ratio) + ai;
1354 ti = (ai * ratio) - ar;
1355 tr = tr / div;
1356 ti = ti / div; */
1357 tree ratio = const_binop (code, r2, i2);
1358 tree div = const_binop (PLUS_EXPR, i2,
1359 const_binop (MULT_EXPR, r2, ratio));
1360 real = const_binop (MULT_EXPR, r1, ratio);
1361 real = const_binop (PLUS_EXPR, real, i1);
1362 real = const_binop (code, real, div);
1364 imag = const_binop (MULT_EXPR, i1, ratio);
1365 imag = const_binop (MINUS_EXPR, imag, r1);
1366 imag = const_binop (code, imag, div);
1368 else
1370 /* In the FALSE branch, we compute
1371 ratio = d/c;
1372 divisor = (d * ratio) + c;
1373 tr = (b * ratio) + a;
1374 ti = b - (a * ratio);
1375 tr = tr / div;
1376 ti = ti / div; */
1377 tree ratio = const_binop (code, i2, r2);
1378 tree div = const_binop (PLUS_EXPR, r2,
1379 const_binop (MULT_EXPR, i2, ratio));
1381 real = const_binop (MULT_EXPR, i1, ratio);
1382 real = const_binop (PLUS_EXPR, real, r1);
1383 real = const_binop (code, real, div);
1385 imag = const_binop (MULT_EXPR, r1, ratio);
1386 imag = const_binop (MINUS_EXPR, i1, imag);
1387 imag = const_binop (code, imag, div);
1390 break;
1392 default:
1393 return NULL_TREE;
1396 if (real && imag)
1397 return build_complex (type, real, imag);
1400 if (TREE_CODE (arg1) == VECTOR_CST
1401 && TREE_CODE (arg2) == VECTOR_CST)
1403 tree type = TREE_TYPE (arg1);
1404 int count = TYPE_VECTOR_SUBPARTS (type), i;
1405 tree *elts = XALLOCAVEC (tree, count);
1407 for (i = 0; i < count; i++)
1409 tree elem1 = VECTOR_CST_ELT (arg1, i);
1410 tree elem2 = VECTOR_CST_ELT (arg2, i);
1412 elts[i] = const_binop (code, elem1, elem2);
1414 /* It is possible that const_binop cannot handle the given
1415 code and return NULL_TREE */
1416 if (elts[i] == NULL_TREE)
1417 return NULL_TREE;
1420 return build_vector (type, elts);
1423 /* Shifts allow a scalar offset for a vector. */
1424 if (TREE_CODE (arg1) == VECTOR_CST
1425 && TREE_CODE (arg2) == INTEGER_CST)
1427 tree type = TREE_TYPE (arg1);
1428 int count = TYPE_VECTOR_SUBPARTS (type), i;
1429 tree *elts = XALLOCAVEC (tree, count);
1431 for (i = 0; i < count; i++)
1433 tree elem1 = VECTOR_CST_ELT (arg1, i);
1435 elts[i] = const_binop (code, elem1, arg2);
1437 /* It is possible that const_binop cannot handle the given
1438 code and return NULL_TREE. */
1439 if (elts[i] == NULL_TREE)
1440 return NULL_TREE;
1443 return build_vector (type, elts);
1445 return NULL_TREE;
1448 /* Overload that adds a TYPE parameter to be able to dispatch
1449 to fold_relational_const. */
1451 tree
1452 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1454 if (TREE_CODE_CLASS (code) == tcc_comparison)
1455 return fold_relational_const (code, type, arg1, arg2);
1457 /* ??? Until we make the const_binop worker take the type of the
1458 result as argument put those cases that need it here. */
1459 switch (code)
1461 case COMPLEX_EXPR:
1462 if ((TREE_CODE (arg1) == REAL_CST
1463 && TREE_CODE (arg2) == REAL_CST)
1464 || (TREE_CODE (arg1) == INTEGER_CST
1465 && TREE_CODE (arg2) == INTEGER_CST))
1466 return build_complex (type, arg1, arg2);
1467 return NULL_TREE;
1469 case VEC_PACK_TRUNC_EXPR:
1470 case VEC_PACK_FIX_TRUNC_EXPR:
1472 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1473 tree *elts;
1475 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1476 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1477 if (TREE_CODE (arg1) != VECTOR_CST
1478 || TREE_CODE (arg2) != VECTOR_CST)
1479 return NULL_TREE;
1481 elts = XALLOCAVEC (tree, nelts);
1482 if (!vec_cst_ctor_to_array (arg1, elts)
1483 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1484 return NULL_TREE;
1486 for (i = 0; i < nelts; i++)
1488 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1489 ? NOP_EXPR : FIX_TRUNC_EXPR,
1490 TREE_TYPE (type), elts[i]);
1491 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1492 return NULL_TREE;
1495 return build_vector (type, elts);
1498 case VEC_WIDEN_MULT_LO_EXPR:
1499 case VEC_WIDEN_MULT_HI_EXPR:
1500 case VEC_WIDEN_MULT_EVEN_EXPR:
1501 case VEC_WIDEN_MULT_ODD_EXPR:
1503 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1504 unsigned int out, ofs, scale;
1505 tree *elts;
1507 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1508 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1509 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1510 return NULL_TREE;
1512 elts = XALLOCAVEC (tree, nelts * 4);
1513 if (!vec_cst_ctor_to_array (arg1, elts)
1514 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1515 return NULL_TREE;
1517 if (code == VEC_WIDEN_MULT_LO_EXPR)
1518 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1519 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1520 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1521 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1522 scale = 1, ofs = 0;
1523 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1524 scale = 1, ofs = 1;
1526 for (out = 0; out < nelts; out++)
1528 unsigned int in1 = (out << scale) + ofs;
1529 unsigned int in2 = in1 + nelts * 2;
1530 tree t1, t2;
1532 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1533 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1535 if (t1 == NULL_TREE || t2 == NULL_TREE)
1536 return NULL_TREE;
1537 elts[out] = const_binop (MULT_EXPR, t1, t2);
1538 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1539 return NULL_TREE;
1542 return build_vector (type, elts);
1545 default:;
1548 /* Make sure type and arg0 have the same saturating flag. */
1549 gcc_checking_assert (TYPE_SATURATING (type)
1550 == TYPE_SATURATING (TREE_TYPE (arg1)));
1551 return const_binop (code, arg1, arg2);
1554 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1555 Return zero if computing the constants is not possible. */
1557 tree
1558 const_unop (enum tree_code code, tree type, tree arg0)
1560 switch (code)
1562 CASE_CONVERT:
1563 case FLOAT_EXPR:
1564 case FIX_TRUNC_EXPR:
1565 case FIXED_CONVERT_EXPR:
1566 return fold_convert_const (code, type, arg0);
1568 case ADDR_SPACE_CONVERT_EXPR:
1569 if (integer_zerop (arg0))
1570 return fold_convert_const (code, type, arg0);
1571 break;
1573 case VIEW_CONVERT_EXPR:
1574 return fold_view_convert_expr (type, arg0);
1576 case NEGATE_EXPR:
1578 /* Can't call fold_negate_const directly here as that doesn't
1579 handle all cases and we might not be able to negate some
1580 constants. */
1581 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1582 if (tem && CONSTANT_CLASS_P (tem))
1583 return tem;
1584 break;
1587 case ABS_EXPR:
1588 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1589 return fold_abs_const (arg0, type);
1590 break;
1592 case CONJ_EXPR:
1593 if (TREE_CODE (arg0) == COMPLEX_CST)
1595 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1596 TREE_TYPE (type));
1597 return build_complex (type, TREE_REALPART (arg0), ipart);
1599 break;
1601 case BIT_NOT_EXPR:
1602 if (TREE_CODE (arg0) == INTEGER_CST)
1603 return fold_not_const (arg0, type);
1604 /* Perform BIT_NOT_EXPR on each element individually. */
1605 else if (TREE_CODE (arg0) == VECTOR_CST)
1607 tree *elements;
1608 tree elem;
1609 unsigned count = VECTOR_CST_NELTS (arg0), i;
1611 elements = XALLOCAVEC (tree, count);
1612 for (i = 0; i < count; i++)
1614 elem = VECTOR_CST_ELT (arg0, i);
1615 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1616 if (elem == NULL_TREE)
1617 break;
1618 elements[i] = elem;
1620 if (i == count)
1621 return build_vector (type, elements);
1623 break;
1625 case TRUTH_NOT_EXPR:
1626 if (TREE_CODE (arg0) == INTEGER_CST)
1627 return constant_boolean_node (integer_zerop (arg0), type);
1628 break;
1630 case REALPART_EXPR:
1631 if (TREE_CODE (arg0) == COMPLEX_CST)
1632 return fold_convert (type, TREE_REALPART (arg0));
1633 break;
1635 case IMAGPART_EXPR:
1636 if (TREE_CODE (arg0) == COMPLEX_CST)
1637 return fold_convert (type, TREE_IMAGPART (arg0));
1638 break;
1640 case VEC_UNPACK_LO_EXPR:
1641 case VEC_UNPACK_HI_EXPR:
1642 case VEC_UNPACK_FLOAT_LO_EXPR:
1643 case VEC_UNPACK_FLOAT_HI_EXPR:
1645 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1646 tree *elts;
1647 enum tree_code subcode;
1649 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1650 if (TREE_CODE (arg0) != VECTOR_CST)
1651 return NULL_TREE;
1653 elts = XALLOCAVEC (tree, nelts * 2);
1654 if (!vec_cst_ctor_to_array (arg0, elts))
1655 return NULL_TREE;
1657 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1658 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1659 elts += nelts;
1661 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1662 subcode = NOP_EXPR;
1663 else
1664 subcode = FLOAT_EXPR;
1666 for (i = 0; i < nelts; i++)
1668 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1669 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1670 return NULL_TREE;
1673 return build_vector (type, elts);
1676 case REDUC_MIN_EXPR:
1677 case REDUC_MAX_EXPR:
1678 case REDUC_PLUS_EXPR:
1680 unsigned int nelts, i;
1681 tree *elts;
1682 enum tree_code subcode;
1684 if (TREE_CODE (arg0) != VECTOR_CST)
1685 return NULL_TREE;
1686 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1688 elts = XALLOCAVEC (tree, nelts);
1689 if (!vec_cst_ctor_to_array (arg0, elts))
1690 return NULL_TREE;
1692 switch (code)
1694 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1695 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1696 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1697 default: gcc_unreachable ();
1700 for (i = 1; i < nelts; i++)
1702 elts[0] = const_binop (subcode, elts[0], elts[i]);
1703 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1704 return NULL_TREE;
1707 return elts[0];
1710 default:
1711 break;
1714 return NULL_TREE;
1717 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1718 indicates which particular sizetype to create. */
1720 tree
1721 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1723 return build_int_cst (sizetype_tab[(int) kind], number);
1726 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1727 is a tree code. The type of the result is taken from the operands.
1728 Both must be equivalent integer types, ala int_binop_types_match_p.
1729 If the operands are constant, so is the result. */
1731 tree
1732 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1734 tree type = TREE_TYPE (arg0);
1736 if (arg0 == error_mark_node || arg1 == error_mark_node)
1737 return error_mark_node;
1739 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1740 TREE_TYPE (arg1)));
1742 /* Handle the special case of two integer constants faster. */
1743 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1745 /* And some specific cases even faster than that. */
1746 if (code == PLUS_EXPR)
1748 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1749 return arg1;
1750 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1751 return arg0;
1753 else if (code == MINUS_EXPR)
1755 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1756 return arg0;
1758 else if (code == MULT_EXPR)
1760 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1761 return arg1;
1764 /* Handle general case of two integer constants. For sizetype
1765 constant calculations we always want to know about overflow,
1766 even in the unsigned case. */
1767 return int_const_binop_1 (code, arg0, arg1, -1);
1770 return fold_build2_loc (loc, code, type, arg0, arg1);
1773 /* Given two values, either both of sizetype or both of bitsizetype,
1774 compute the difference between the two values. Return the value
1775 in signed type corresponding to the type of the operands. */
1777 tree
1778 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1780 tree type = TREE_TYPE (arg0);
1781 tree ctype;
1783 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1784 TREE_TYPE (arg1)));
1786 /* If the type is already signed, just do the simple thing. */
1787 if (!TYPE_UNSIGNED (type))
1788 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1790 if (type == sizetype)
1791 ctype = ssizetype;
1792 else if (type == bitsizetype)
1793 ctype = sbitsizetype;
1794 else
1795 ctype = signed_type_for (type);
1797 /* If either operand is not a constant, do the conversions to the signed
1798 type and subtract. The hardware will do the right thing with any
1799 overflow in the subtraction. */
1800 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1801 return size_binop_loc (loc, MINUS_EXPR,
1802 fold_convert_loc (loc, ctype, arg0),
1803 fold_convert_loc (loc, ctype, arg1));
1805 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1806 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1807 overflow) and negate (which can't either). Special-case a result
1808 of zero while we're here. */
1809 if (tree_int_cst_equal (arg0, arg1))
1810 return build_int_cst (ctype, 0);
1811 else if (tree_int_cst_lt (arg1, arg0))
1812 return fold_convert_loc (loc, ctype,
1813 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1814 else
1815 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1816 fold_convert_loc (loc, ctype,
1817 size_binop_loc (loc,
1818 MINUS_EXPR,
1819 arg1, arg0)));
1822 /* A subroutine of fold_convert_const handling conversions of an
1823 INTEGER_CST to another integer type. */
1825 static tree
1826 fold_convert_const_int_from_int (tree type, const_tree arg1)
1828 /* Given an integer constant, make new constant with new type,
1829 appropriately sign-extended or truncated. Use widest_int
1830 so that any extension is done according ARG1's type. */
1831 return force_fit_type (type, wi::to_widest (arg1),
1832 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1833 TREE_OVERFLOW (arg1));
1836 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1837 to an integer type. */
1839 static tree
1840 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1842 bool overflow = false;
1843 tree t;
1845 /* The following code implements the floating point to integer
1846 conversion rules required by the Java Language Specification,
1847 that IEEE NaNs are mapped to zero and values that overflow
1848 the target precision saturate, i.e. values greater than
1849 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1850 are mapped to INT_MIN. These semantics are allowed by the
1851 C and C++ standards that simply state that the behavior of
1852 FP-to-integer conversion is unspecified upon overflow. */
1854 wide_int val;
1855 REAL_VALUE_TYPE r;
1856 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1858 switch (code)
1860 case FIX_TRUNC_EXPR:
1861 real_trunc (&r, VOIDmode, &x);
1862 break;
1864 default:
1865 gcc_unreachable ();
1868 /* If R is NaN, return zero and show we have an overflow. */
1869 if (REAL_VALUE_ISNAN (r))
1871 overflow = true;
1872 val = wi::zero (TYPE_PRECISION (type));
1875 /* See if R is less than the lower bound or greater than the
1876 upper bound. */
1878 if (! overflow)
1880 tree lt = TYPE_MIN_VALUE (type);
1881 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1882 if (REAL_VALUES_LESS (r, l))
1884 overflow = true;
1885 val = lt;
1889 if (! overflow)
1891 tree ut = TYPE_MAX_VALUE (type);
1892 if (ut)
1894 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1895 if (REAL_VALUES_LESS (u, r))
1897 overflow = true;
1898 val = ut;
1903 if (! overflow)
1904 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1906 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1907 return t;
1910 /* A subroutine of fold_convert_const handling conversions of a
1911 FIXED_CST to an integer type. */
1913 static tree
1914 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1916 tree t;
1917 double_int temp, temp_trunc;
1918 unsigned int mode;
1920 /* Right shift FIXED_CST to temp by fbit. */
1921 temp = TREE_FIXED_CST (arg1).data;
1922 mode = TREE_FIXED_CST (arg1).mode;
1923 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1925 temp = temp.rshift (GET_MODE_FBIT (mode),
1926 HOST_BITS_PER_DOUBLE_INT,
1927 SIGNED_FIXED_POINT_MODE_P (mode));
1929 /* Left shift temp to temp_trunc by fbit. */
1930 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1931 HOST_BITS_PER_DOUBLE_INT,
1932 SIGNED_FIXED_POINT_MODE_P (mode));
1934 else
1936 temp = double_int_zero;
1937 temp_trunc = double_int_zero;
1940 /* If FIXED_CST is negative, we need to round the value toward 0.
1941 By checking if the fractional bits are not zero to add 1 to temp. */
1942 if (SIGNED_FIXED_POINT_MODE_P (mode)
1943 && temp_trunc.is_negative ()
1944 && TREE_FIXED_CST (arg1).data != temp_trunc)
1945 temp += double_int_one;
1947 /* Given a fixed-point constant, make new constant with new type,
1948 appropriately sign-extended or truncated. */
1949 t = force_fit_type (type, temp, -1,
1950 (temp.is_negative ()
1951 && (TYPE_UNSIGNED (type)
1952 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1953 | TREE_OVERFLOW (arg1));
1955 return t;
1958 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1959 to another floating point type. */
1961 static tree
1962 fold_convert_const_real_from_real (tree type, const_tree arg1)
1964 REAL_VALUE_TYPE value;
1965 tree t;
1967 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1968 t = build_real (type, value);
1970 /* If converting an infinity or NAN to a representation that doesn't
1971 have one, set the overflow bit so that we can produce some kind of
1972 error message at the appropriate point if necessary. It's not the
1973 most user-friendly message, but it's better than nothing. */
1974 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1975 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1976 TREE_OVERFLOW (t) = 1;
1977 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1978 && !MODE_HAS_NANS (TYPE_MODE (type)))
1979 TREE_OVERFLOW (t) = 1;
1980 /* Regular overflow, conversion produced an infinity in a mode that
1981 can't represent them. */
1982 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1983 && REAL_VALUE_ISINF (value)
1984 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1985 TREE_OVERFLOW (t) = 1;
1986 else
1987 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1988 return t;
1991 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1992 to a floating point type. */
1994 static tree
1995 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1997 REAL_VALUE_TYPE value;
1998 tree t;
2000 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2001 t = build_real (type, value);
2003 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2004 return t;
2007 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2008 to another fixed-point type. */
2010 static tree
2011 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2013 FIXED_VALUE_TYPE value;
2014 tree t;
2015 bool overflow_p;
2017 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2018 TYPE_SATURATING (type));
2019 t = build_fixed (type, value);
2021 /* Propagate overflow flags. */
2022 if (overflow_p | TREE_OVERFLOW (arg1))
2023 TREE_OVERFLOW (t) = 1;
2024 return t;
2027 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2028 to a fixed-point type. */
2030 static tree
2031 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2033 FIXED_VALUE_TYPE value;
2034 tree t;
2035 bool overflow_p;
2036 double_int di;
2038 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2040 di.low = TREE_INT_CST_ELT (arg1, 0);
2041 if (TREE_INT_CST_NUNITS (arg1) == 1)
2042 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2043 else
2044 di.high = TREE_INT_CST_ELT (arg1, 1);
2046 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2047 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2048 TYPE_SATURATING (type));
2049 t = build_fixed (type, value);
2051 /* Propagate overflow flags. */
2052 if (overflow_p | TREE_OVERFLOW (arg1))
2053 TREE_OVERFLOW (t) = 1;
2054 return t;
2057 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2058 to a fixed-point type. */
2060 static tree
2061 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2063 FIXED_VALUE_TYPE value;
2064 tree t;
2065 bool overflow_p;
2067 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2068 &TREE_REAL_CST (arg1),
2069 TYPE_SATURATING (type));
2070 t = build_fixed (type, value);
2072 /* Propagate overflow flags. */
2073 if (overflow_p | TREE_OVERFLOW (arg1))
2074 TREE_OVERFLOW (t) = 1;
2075 return t;
2078 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2079 type TYPE. If no simplification can be done return NULL_TREE. */
2081 static tree
2082 fold_convert_const (enum tree_code code, tree type, tree arg1)
2084 if (TREE_TYPE (arg1) == type)
2085 return arg1;
2087 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2088 || TREE_CODE (type) == OFFSET_TYPE)
2090 if (TREE_CODE (arg1) == INTEGER_CST)
2091 return fold_convert_const_int_from_int (type, arg1);
2092 else if (TREE_CODE (arg1) == REAL_CST)
2093 return fold_convert_const_int_from_real (code, type, arg1);
2094 else if (TREE_CODE (arg1) == FIXED_CST)
2095 return fold_convert_const_int_from_fixed (type, arg1);
2097 else if (TREE_CODE (type) == REAL_TYPE)
2099 if (TREE_CODE (arg1) == INTEGER_CST)
2100 return build_real_from_int_cst (type, arg1);
2101 else if (TREE_CODE (arg1) == REAL_CST)
2102 return fold_convert_const_real_from_real (type, arg1);
2103 else if (TREE_CODE (arg1) == FIXED_CST)
2104 return fold_convert_const_real_from_fixed (type, arg1);
2106 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2108 if (TREE_CODE (arg1) == FIXED_CST)
2109 return fold_convert_const_fixed_from_fixed (type, arg1);
2110 else if (TREE_CODE (arg1) == INTEGER_CST)
2111 return fold_convert_const_fixed_from_int (type, arg1);
2112 else if (TREE_CODE (arg1) == REAL_CST)
2113 return fold_convert_const_fixed_from_real (type, arg1);
2115 return NULL_TREE;
2118 /* Construct a vector of zero elements of vector type TYPE. */
2120 static tree
2121 build_zero_vector (tree type)
2123 tree t;
2125 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2126 return build_vector_from_val (type, t);
2129 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2131 bool
2132 fold_convertible_p (const_tree type, const_tree arg)
2134 tree orig = TREE_TYPE (arg);
2136 if (type == orig)
2137 return true;
2139 if (TREE_CODE (arg) == ERROR_MARK
2140 || TREE_CODE (type) == ERROR_MARK
2141 || TREE_CODE (orig) == ERROR_MARK)
2142 return false;
2144 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2145 return true;
2147 switch (TREE_CODE (type))
2149 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2150 case POINTER_TYPE: case REFERENCE_TYPE:
2151 case OFFSET_TYPE:
2152 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2153 || TREE_CODE (orig) == OFFSET_TYPE)
2154 return true;
2155 return (TREE_CODE (orig) == VECTOR_TYPE
2156 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2158 case REAL_TYPE:
2159 case FIXED_POINT_TYPE:
2160 case COMPLEX_TYPE:
2161 case VECTOR_TYPE:
2162 case VOID_TYPE:
2163 return TREE_CODE (type) == TREE_CODE (orig);
2165 default:
2166 return false;
2170 /* Convert expression ARG to type TYPE. Used by the middle-end for
2171 simple conversions in preference to calling the front-end's convert. */
2173 tree
2174 fold_convert_loc (location_t loc, tree type, tree arg)
2176 tree orig = TREE_TYPE (arg);
2177 tree tem;
2179 if (type == orig)
2180 return arg;
2182 if (TREE_CODE (arg) == ERROR_MARK
2183 || TREE_CODE (type) == ERROR_MARK
2184 || TREE_CODE (orig) == ERROR_MARK)
2185 return error_mark_node;
2187 switch (TREE_CODE (type))
2189 case POINTER_TYPE:
2190 case REFERENCE_TYPE:
2191 /* Handle conversions between pointers to different address spaces. */
2192 if (POINTER_TYPE_P (orig)
2193 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2194 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2195 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2196 /* fall through */
2198 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2199 case OFFSET_TYPE:
2200 if (TREE_CODE (arg) == INTEGER_CST)
2202 tem = fold_convert_const (NOP_EXPR, type, arg);
2203 if (tem != NULL_TREE)
2204 return tem;
2206 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2207 || TREE_CODE (orig) == OFFSET_TYPE)
2208 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2209 if (TREE_CODE (orig) == COMPLEX_TYPE)
2210 return fold_convert_loc (loc, type,
2211 fold_build1_loc (loc, REALPART_EXPR,
2212 TREE_TYPE (orig), arg));
2213 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2214 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2215 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2217 case REAL_TYPE:
2218 if (TREE_CODE (arg) == INTEGER_CST)
2220 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2221 if (tem != NULL_TREE)
2222 return tem;
2224 else if (TREE_CODE (arg) == REAL_CST)
2226 tem = fold_convert_const (NOP_EXPR, type, arg);
2227 if (tem != NULL_TREE)
2228 return tem;
2230 else if (TREE_CODE (arg) == FIXED_CST)
2232 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2233 if (tem != NULL_TREE)
2234 return tem;
2237 switch (TREE_CODE (orig))
2239 case INTEGER_TYPE:
2240 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2241 case POINTER_TYPE: case REFERENCE_TYPE:
2242 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2244 case REAL_TYPE:
2245 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2247 case FIXED_POINT_TYPE:
2248 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2250 case COMPLEX_TYPE:
2251 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2252 return fold_convert_loc (loc, type, tem);
2254 default:
2255 gcc_unreachable ();
2258 case FIXED_POINT_TYPE:
2259 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2260 || TREE_CODE (arg) == REAL_CST)
2262 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 goto fold_convert_exit;
2267 switch (TREE_CODE (orig))
2269 case FIXED_POINT_TYPE:
2270 case INTEGER_TYPE:
2271 case ENUMERAL_TYPE:
2272 case BOOLEAN_TYPE:
2273 case REAL_TYPE:
2274 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2276 case COMPLEX_TYPE:
2277 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2278 return fold_convert_loc (loc, type, tem);
2280 default:
2281 gcc_unreachable ();
2284 case COMPLEX_TYPE:
2285 switch (TREE_CODE (orig))
2287 case INTEGER_TYPE:
2288 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2289 case POINTER_TYPE: case REFERENCE_TYPE:
2290 case REAL_TYPE:
2291 case FIXED_POINT_TYPE:
2292 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2293 fold_convert_loc (loc, TREE_TYPE (type), arg),
2294 fold_convert_loc (loc, TREE_TYPE (type),
2295 integer_zero_node));
2296 case COMPLEX_TYPE:
2298 tree rpart, ipart;
2300 if (TREE_CODE (arg) == COMPLEX_EXPR)
2302 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2303 TREE_OPERAND (arg, 0));
2304 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2305 TREE_OPERAND (arg, 1));
2306 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2309 arg = save_expr (arg);
2310 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2311 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2312 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2313 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2314 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2317 default:
2318 gcc_unreachable ();
2321 case VECTOR_TYPE:
2322 if (integer_zerop (arg))
2323 return build_zero_vector (type);
2324 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2325 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2326 || TREE_CODE (orig) == VECTOR_TYPE);
2327 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2329 case VOID_TYPE:
2330 tem = fold_ignored_result (arg);
2331 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2333 default:
2334 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2335 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2336 gcc_unreachable ();
2338 fold_convert_exit:
2339 protected_set_expr_location_unshare (tem, loc);
2340 return tem;
2343 /* Return false if expr can be assumed not to be an lvalue, true
2344 otherwise. */
2346 static bool
2347 maybe_lvalue_p (const_tree x)
2349 /* We only need to wrap lvalue tree codes. */
2350 switch (TREE_CODE (x))
2352 case VAR_DECL:
2353 case PARM_DECL:
2354 case RESULT_DECL:
2355 case LABEL_DECL:
2356 case FUNCTION_DECL:
2357 case SSA_NAME:
2359 case COMPONENT_REF:
2360 case MEM_REF:
2361 case INDIRECT_REF:
2362 case ARRAY_REF:
2363 case ARRAY_RANGE_REF:
2364 case BIT_FIELD_REF:
2365 case OBJ_TYPE_REF:
2367 case REALPART_EXPR:
2368 case IMAGPART_EXPR:
2369 case PREINCREMENT_EXPR:
2370 case PREDECREMENT_EXPR:
2371 case SAVE_EXPR:
2372 case TRY_CATCH_EXPR:
2373 case WITH_CLEANUP_EXPR:
2374 case COMPOUND_EXPR:
2375 case MODIFY_EXPR:
2376 case TARGET_EXPR:
2377 case COND_EXPR:
2378 case BIND_EXPR:
2379 break;
2381 default:
2382 /* Assume the worst for front-end tree codes. */
2383 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2384 break;
2385 return false;
2388 return true;
2391 /* Return an expr equal to X but certainly not valid as an lvalue. */
2393 tree
2394 non_lvalue_loc (location_t loc, tree x)
2396 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2397 us. */
2398 if (in_gimple_form)
2399 return x;
2401 if (! maybe_lvalue_p (x))
2402 return x;
2403 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2406 /* When pedantic, return an expr equal to X but certainly not valid as a
2407 pedantic lvalue. Otherwise, return X. */
2409 static tree
2410 pedantic_non_lvalue_loc (location_t loc, tree x)
2412 return protected_set_expr_location_unshare (x, loc);
2415 /* Given a tree comparison code, return the code that is the logical inverse.
2416 It is generally not safe to do this for floating-point comparisons, except
2417 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2418 ERROR_MARK in this case. */
2420 enum tree_code
2421 invert_tree_comparison (enum tree_code code, bool honor_nans)
2423 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2424 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2425 return ERROR_MARK;
2427 switch (code)
2429 case EQ_EXPR:
2430 return NE_EXPR;
2431 case NE_EXPR:
2432 return EQ_EXPR;
2433 case GT_EXPR:
2434 return honor_nans ? UNLE_EXPR : LE_EXPR;
2435 case GE_EXPR:
2436 return honor_nans ? UNLT_EXPR : LT_EXPR;
2437 case LT_EXPR:
2438 return honor_nans ? UNGE_EXPR : GE_EXPR;
2439 case LE_EXPR:
2440 return honor_nans ? UNGT_EXPR : GT_EXPR;
2441 case LTGT_EXPR:
2442 return UNEQ_EXPR;
2443 case UNEQ_EXPR:
2444 return LTGT_EXPR;
2445 case UNGT_EXPR:
2446 return LE_EXPR;
2447 case UNGE_EXPR:
2448 return LT_EXPR;
2449 case UNLT_EXPR:
2450 return GE_EXPR;
2451 case UNLE_EXPR:
2452 return GT_EXPR;
2453 case ORDERED_EXPR:
2454 return UNORDERED_EXPR;
2455 case UNORDERED_EXPR:
2456 return ORDERED_EXPR;
2457 default:
2458 gcc_unreachable ();
2462 /* Similar, but return the comparison that results if the operands are
2463 swapped. This is safe for floating-point. */
2465 enum tree_code
2466 swap_tree_comparison (enum tree_code code)
2468 switch (code)
2470 case EQ_EXPR:
2471 case NE_EXPR:
2472 case ORDERED_EXPR:
2473 case UNORDERED_EXPR:
2474 case LTGT_EXPR:
2475 case UNEQ_EXPR:
2476 return code;
2477 case GT_EXPR:
2478 return LT_EXPR;
2479 case GE_EXPR:
2480 return LE_EXPR;
2481 case LT_EXPR:
2482 return GT_EXPR;
2483 case LE_EXPR:
2484 return GE_EXPR;
2485 case UNGT_EXPR:
2486 return UNLT_EXPR;
2487 case UNGE_EXPR:
2488 return UNLE_EXPR;
2489 case UNLT_EXPR:
2490 return UNGT_EXPR;
2491 case UNLE_EXPR:
2492 return UNGE_EXPR;
2493 default:
2494 gcc_unreachable ();
2499 /* Convert a comparison tree code from an enum tree_code representation
2500 into a compcode bit-based encoding. This function is the inverse of
2501 compcode_to_comparison. */
2503 static enum comparison_code
2504 comparison_to_compcode (enum tree_code code)
2506 switch (code)
2508 case LT_EXPR:
2509 return COMPCODE_LT;
2510 case EQ_EXPR:
2511 return COMPCODE_EQ;
2512 case LE_EXPR:
2513 return COMPCODE_LE;
2514 case GT_EXPR:
2515 return COMPCODE_GT;
2516 case NE_EXPR:
2517 return COMPCODE_NE;
2518 case GE_EXPR:
2519 return COMPCODE_GE;
2520 case ORDERED_EXPR:
2521 return COMPCODE_ORD;
2522 case UNORDERED_EXPR:
2523 return COMPCODE_UNORD;
2524 case UNLT_EXPR:
2525 return COMPCODE_UNLT;
2526 case UNEQ_EXPR:
2527 return COMPCODE_UNEQ;
2528 case UNLE_EXPR:
2529 return COMPCODE_UNLE;
2530 case UNGT_EXPR:
2531 return COMPCODE_UNGT;
2532 case LTGT_EXPR:
2533 return COMPCODE_LTGT;
2534 case UNGE_EXPR:
2535 return COMPCODE_UNGE;
2536 default:
2537 gcc_unreachable ();
2541 /* Convert a compcode bit-based encoding of a comparison operator back
2542 to GCC's enum tree_code representation. This function is the
2543 inverse of comparison_to_compcode. */
2545 static enum tree_code
2546 compcode_to_comparison (enum comparison_code code)
2548 switch (code)
2550 case COMPCODE_LT:
2551 return LT_EXPR;
2552 case COMPCODE_EQ:
2553 return EQ_EXPR;
2554 case COMPCODE_LE:
2555 return LE_EXPR;
2556 case COMPCODE_GT:
2557 return GT_EXPR;
2558 case COMPCODE_NE:
2559 return NE_EXPR;
2560 case COMPCODE_GE:
2561 return GE_EXPR;
2562 case COMPCODE_ORD:
2563 return ORDERED_EXPR;
2564 case COMPCODE_UNORD:
2565 return UNORDERED_EXPR;
2566 case COMPCODE_UNLT:
2567 return UNLT_EXPR;
2568 case COMPCODE_UNEQ:
2569 return UNEQ_EXPR;
2570 case COMPCODE_UNLE:
2571 return UNLE_EXPR;
2572 case COMPCODE_UNGT:
2573 return UNGT_EXPR;
2574 case COMPCODE_LTGT:
2575 return LTGT_EXPR;
2576 case COMPCODE_UNGE:
2577 return UNGE_EXPR;
2578 default:
2579 gcc_unreachable ();
2583 /* Return a tree for the comparison which is the combination of
2584 doing the AND or OR (depending on CODE) of the two operations LCODE
2585 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2586 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2587 if this makes the transformation invalid. */
2589 tree
2590 combine_comparisons (location_t loc,
2591 enum tree_code code, enum tree_code lcode,
2592 enum tree_code rcode, tree truth_type,
2593 tree ll_arg, tree lr_arg)
2595 bool honor_nans = HONOR_NANS (element_mode (ll_arg));
2596 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2597 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2598 int compcode;
2600 switch (code)
2602 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2603 compcode = lcompcode & rcompcode;
2604 break;
2606 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2607 compcode = lcompcode | rcompcode;
2608 break;
2610 default:
2611 return NULL_TREE;
2614 if (!honor_nans)
2616 /* Eliminate unordered comparisons, as well as LTGT and ORD
2617 which are not used unless the mode has NaNs. */
2618 compcode &= ~COMPCODE_UNORD;
2619 if (compcode == COMPCODE_LTGT)
2620 compcode = COMPCODE_NE;
2621 else if (compcode == COMPCODE_ORD)
2622 compcode = COMPCODE_TRUE;
2624 else if (flag_trapping_math)
2626 /* Check that the original operation and the optimized ones will trap
2627 under the same condition. */
2628 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2629 && (lcompcode != COMPCODE_EQ)
2630 && (lcompcode != COMPCODE_ORD);
2631 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2632 && (rcompcode != COMPCODE_EQ)
2633 && (rcompcode != COMPCODE_ORD);
2634 bool trap = (compcode & COMPCODE_UNORD) == 0
2635 && (compcode != COMPCODE_EQ)
2636 && (compcode != COMPCODE_ORD);
2638 /* In a short-circuited boolean expression the LHS might be
2639 such that the RHS, if evaluated, will never trap. For
2640 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2641 if neither x nor y is NaN. (This is a mixed blessing: for
2642 example, the expression above will never trap, hence
2643 optimizing it to x < y would be invalid). */
2644 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2645 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2646 rtrap = false;
2648 /* If the comparison was short-circuited, and only the RHS
2649 trapped, we may now generate a spurious trap. */
2650 if (rtrap && !ltrap
2651 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2652 return NULL_TREE;
2654 /* If we changed the conditions that cause a trap, we lose. */
2655 if ((ltrap || rtrap) != trap)
2656 return NULL_TREE;
2659 if (compcode == COMPCODE_TRUE)
2660 return constant_boolean_node (true, truth_type);
2661 else if (compcode == COMPCODE_FALSE)
2662 return constant_boolean_node (false, truth_type);
2663 else
2665 enum tree_code tcode;
2667 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2668 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2672 /* Return nonzero if two operands (typically of the same tree node)
2673 are necessarily equal. If either argument has side-effects this
2674 function returns zero. FLAGS modifies behavior as follows:
2676 If OEP_ONLY_CONST is set, only return nonzero for constants.
2677 This function tests whether the operands are indistinguishable;
2678 it does not test whether they are equal using C's == operation.
2679 The distinction is important for IEEE floating point, because
2680 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2681 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2683 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2684 even though it may hold multiple values during a function.
2685 This is because a GCC tree node guarantees that nothing else is
2686 executed between the evaluation of its "operands" (which may often
2687 be evaluated in arbitrary order). Hence if the operands themselves
2688 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2689 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2690 unset means assuming isochronic (or instantaneous) tree equivalence.
2691 Unless comparing arbitrary expression trees, such as from different
2692 statements, this flag can usually be left unset.
2694 If OEP_PURE_SAME is set, then pure functions with identical arguments
2695 are considered the same. It is used when the caller has other ways
2696 to ensure that global memory is unchanged in between. */
2699 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2701 /* If either is ERROR_MARK, they aren't equal. */
2702 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2703 || TREE_TYPE (arg0) == error_mark_node
2704 || TREE_TYPE (arg1) == error_mark_node)
2705 return 0;
2707 /* Similar, if either does not have a type (like a released SSA name),
2708 they aren't equal. */
2709 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2710 return 0;
2712 /* Check equality of integer constants before bailing out due to
2713 precision differences. */
2714 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2715 return tree_int_cst_equal (arg0, arg1);
2717 /* If both types don't have the same signedness, then we can't consider
2718 them equal. We must check this before the STRIP_NOPS calls
2719 because they may change the signedness of the arguments. As pointers
2720 strictly don't have a signedness, require either two pointers or
2721 two non-pointers as well. */
2722 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2723 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2724 return 0;
2726 /* We cannot consider pointers to different address space equal. */
2727 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2728 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2729 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2730 return 0;
2732 /* If both types don't have the same precision, then it is not safe
2733 to strip NOPs. */
2734 if (element_precision (TREE_TYPE (arg0))
2735 != element_precision (TREE_TYPE (arg1)))
2736 return 0;
2738 STRIP_NOPS (arg0);
2739 STRIP_NOPS (arg1);
2741 /* In case both args are comparisons but with different comparison
2742 code, try to swap the comparison operands of one arg to produce
2743 a match and compare that variant. */
2744 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2745 && COMPARISON_CLASS_P (arg0)
2746 && COMPARISON_CLASS_P (arg1))
2748 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2750 if (TREE_CODE (arg0) == swap_code)
2751 return operand_equal_p (TREE_OPERAND (arg0, 0),
2752 TREE_OPERAND (arg1, 1), flags)
2753 && operand_equal_p (TREE_OPERAND (arg0, 1),
2754 TREE_OPERAND (arg1, 0), flags);
2757 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2758 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2759 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2760 return 0;
2762 /* This is needed for conversions and for COMPONENT_REF.
2763 Might as well play it safe and always test this. */
2764 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2765 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2766 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2767 return 0;
2769 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2770 We don't care about side effects in that case because the SAVE_EXPR
2771 takes care of that for us. In all other cases, two expressions are
2772 equal if they have no side effects. If we have two identical
2773 expressions with side effects that should be treated the same due
2774 to the only side effects being identical SAVE_EXPR's, that will
2775 be detected in the recursive calls below.
2776 If we are taking an invariant address of two identical objects
2777 they are necessarily equal as well. */
2778 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2779 && (TREE_CODE (arg0) == SAVE_EXPR
2780 || (flags & OEP_CONSTANT_ADDRESS_OF)
2781 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2782 return 1;
2784 /* Next handle constant cases, those for which we can return 1 even
2785 if ONLY_CONST is set. */
2786 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2787 switch (TREE_CODE (arg0))
2789 case INTEGER_CST:
2790 return tree_int_cst_equal (arg0, arg1);
2792 case FIXED_CST:
2793 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2794 TREE_FIXED_CST (arg1));
2796 case REAL_CST:
2797 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2798 TREE_REAL_CST (arg1)))
2799 return 1;
2802 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2804 /* If we do not distinguish between signed and unsigned zero,
2805 consider them equal. */
2806 if (real_zerop (arg0) && real_zerop (arg1))
2807 return 1;
2809 return 0;
2811 case VECTOR_CST:
2813 unsigned i;
2815 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2816 return 0;
2818 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2820 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2821 VECTOR_CST_ELT (arg1, i), flags))
2822 return 0;
2824 return 1;
2827 case COMPLEX_CST:
2828 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2829 flags)
2830 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2831 flags));
2833 case STRING_CST:
2834 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2835 && ! memcmp (TREE_STRING_POINTER (arg0),
2836 TREE_STRING_POINTER (arg1),
2837 TREE_STRING_LENGTH (arg0)));
2839 case ADDR_EXPR:
2840 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2841 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2842 ? OEP_CONSTANT_ADDRESS_OF : 0);
2843 default:
2844 break;
2847 if (flags & OEP_ONLY_CONST)
2848 return 0;
2850 /* Define macros to test an operand from arg0 and arg1 for equality and a
2851 variant that allows null and views null as being different from any
2852 non-null value. In the latter case, if either is null, the both
2853 must be; otherwise, do the normal comparison. */
2854 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2855 TREE_OPERAND (arg1, N), flags)
2857 #define OP_SAME_WITH_NULL(N) \
2858 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2859 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2861 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2863 case tcc_unary:
2864 /* Two conversions are equal only if signedness and modes match. */
2865 switch (TREE_CODE (arg0))
2867 CASE_CONVERT:
2868 case FIX_TRUNC_EXPR:
2869 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2870 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2871 return 0;
2872 break;
2873 default:
2874 break;
2877 return OP_SAME (0);
2880 case tcc_comparison:
2881 case tcc_binary:
2882 if (OP_SAME (0) && OP_SAME (1))
2883 return 1;
2885 /* For commutative ops, allow the other order. */
2886 return (commutative_tree_code (TREE_CODE (arg0))
2887 && operand_equal_p (TREE_OPERAND (arg0, 0),
2888 TREE_OPERAND (arg1, 1), flags)
2889 && operand_equal_p (TREE_OPERAND (arg0, 1),
2890 TREE_OPERAND (arg1, 0), flags));
2892 case tcc_reference:
2893 /* If either of the pointer (or reference) expressions we are
2894 dereferencing contain a side effect, these cannot be equal,
2895 but their addresses can be. */
2896 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2897 && (TREE_SIDE_EFFECTS (arg0)
2898 || TREE_SIDE_EFFECTS (arg1)))
2899 return 0;
2901 switch (TREE_CODE (arg0))
2903 case INDIRECT_REF:
2904 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2905 return OP_SAME (0);
2907 case REALPART_EXPR:
2908 case IMAGPART_EXPR:
2909 return OP_SAME (0);
2911 case TARGET_MEM_REF:
2912 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2913 /* Require equal extra operands and then fall through to MEM_REF
2914 handling of the two common operands. */
2915 if (!OP_SAME_WITH_NULL (2)
2916 || !OP_SAME_WITH_NULL (3)
2917 || !OP_SAME_WITH_NULL (4))
2918 return 0;
2919 /* Fallthru. */
2920 case MEM_REF:
2921 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2922 /* Require equal access sizes, and similar pointer types.
2923 We can have incomplete types for array references of
2924 variable-sized arrays from the Fortran frontend
2925 though. Also verify the types are compatible. */
2926 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2927 || (TYPE_SIZE (TREE_TYPE (arg0))
2928 && TYPE_SIZE (TREE_TYPE (arg1))
2929 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2930 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2931 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2932 && alias_ptr_types_compatible_p
2933 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2934 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2935 && OP_SAME (0) && OP_SAME (1));
2937 case ARRAY_REF:
2938 case ARRAY_RANGE_REF:
2939 /* Operands 2 and 3 may be null.
2940 Compare the array index by value if it is constant first as we
2941 may have different types but same value here. */
2942 if (!OP_SAME (0))
2943 return 0;
2944 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2945 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2946 TREE_OPERAND (arg1, 1))
2947 || OP_SAME (1))
2948 && OP_SAME_WITH_NULL (2)
2949 && OP_SAME_WITH_NULL (3));
2951 case COMPONENT_REF:
2952 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2953 may be NULL when we're called to compare MEM_EXPRs. */
2954 if (!OP_SAME_WITH_NULL (0)
2955 || !OP_SAME (1))
2956 return 0;
2957 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2958 return OP_SAME_WITH_NULL (2);
2960 case BIT_FIELD_REF:
2961 if (!OP_SAME (0))
2962 return 0;
2963 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2964 return OP_SAME (1) && OP_SAME (2);
2966 default:
2967 return 0;
2970 case tcc_expression:
2971 switch (TREE_CODE (arg0))
2973 case ADDR_EXPR:
2974 case TRUTH_NOT_EXPR:
2975 return OP_SAME (0);
2977 case TRUTH_ANDIF_EXPR:
2978 case TRUTH_ORIF_EXPR:
2979 return OP_SAME (0) && OP_SAME (1);
2981 case FMA_EXPR:
2982 case WIDEN_MULT_PLUS_EXPR:
2983 case WIDEN_MULT_MINUS_EXPR:
2984 if (!OP_SAME (2))
2985 return 0;
2986 /* The multiplcation operands are commutative. */
2987 /* FALLTHRU */
2989 case TRUTH_AND_EXPR:
2990 case TRUTH_OR_EXPR:
2991 case TRUTH_XOR_EXPR:
2992 if (OP_SAME (0) && OP_SAME (1))
2993 return 1;
2995 /* Otherwise take into account this is a commutative operation. */
2996 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2997 TREE_OPERAND (arg1, 1), flags)
2998 && operand_equal_p (TREE_OPERAND (arg0, 1),
2999 TREE_OPERAND (arg1, 0), flags));
3001 case COND_EXPR:
3002 case VEC_COND_EXPR:
3003 case DOT_PROD_EXPR:
3004 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3006 default:
3007 return 0;
3010 case tcc_vl_exp:
3011 switch (TREE_CODE (arg0))
3013 case CALL_EXPR:
3014 /* If the CALL_EXPRs call different functions, then they
3015 clearly can not be equal. */
3016 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3017 flags))
3018 return 0;
3021 unsigned int cef = call_expr_flags (arg0);
3022 if (flags & OEP_PURE_SAME)
3023 cef &= ECF_CONST | ECF_PURE;
3024 else
3025 cef &= ECF_CONST;
3026 if (!cef)
3027 return 0;
3030 /* Now see if all the arguments are the same. */
3032 const_call_expr_arg_iterator iter0, iter1;
3033 const_tree a0, a1;
3034 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3035 a1 = first_const_call_expr_arg (arg1, &iter1);
3036 a0 && a1;
3037 a0 = next_const_call_expr_arg (&iter0),
3038 a1 = next_const_call_expr_arg (&iter1))
3039 if (! operand_equal_p (a0, a1, flags))
3040 return 0;
3042 /* If we get here and both argument lists are exhausted
3043 then the CALL_EXPRs are equal. */
3044 return ! (a0 || a1);
3046 default:
3047 return 0;
3050 case tcc_declaration:
3051 /* Consider __builtin_sqrt equal to sqrt. */
3052 return (TREE_CODE (arg0) == FUNCTION_DECL
3053 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3054 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3055 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3057 default:
3058 return 0;
3061 #undef OP_SAME
3062 #undef OP_SAME_WITH_NULL
3065 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3066 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3068 When in doubt, return 0. */
3070 static int
3071 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3073 int unsignedp1, unsignedpo;
3074 tree primarg0, primarg1, primother;
3075 unsigned int correct_width;
3077 if (operand_equal_p (arg0, arg1, 0))
3078 return 1;
3080 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3081 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3082 return 0;
3084 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3085 and see if the inner values are the same. This removes any
3086 signedness comparison, which doesn't matter here. */
3087 primarg0 = arg0, primarg1 = arg1;
3088 STRIP_NOPS (primarg0);
3089 STRIP_NOPS (primarg1);
3090 if (operand_equal_p (primarg0, primarg1, 0))
3091 return 1;
3093 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3094 actual comparison operand, ARG0.
3096 First throw away any conversions to wider types
3097 already present in the operands. */
3099 primarg1 = get_narrower (arg1, &unsignedp1);
3100 primother = get_narrower (other, &unsignedpo);
3102 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3103 if (unsignedp1 == unsignedpo
3104 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3105 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3107 tree type = TREE_TYPE (arg0);
3109 /* Make sure shorter operand is extended the right way
3110 to match the longer operand. */
3111 primarg1 = fold_convert (signed_or_unsigned_type_for
3112 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3114 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3115 return 1;
3118 return 0;
3121 /* See if ARG is an expression that is either a comparison or is performing
3122 arithmetic on comparisons. The comparisons must only be comparing
3123 two different values, which will be stored in *CVAL1 and *CVAL2; if
3124 they are nonzero it means that some operands have already been found.
3125 No variables may be used anywhere else in the expression except in the
3126 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3127 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3129 If this is true, return 1. Otherwise, return zero. */
3131 static int
3132 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3134 enum tree_code code = TREE_CODE (arg);
3135 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3137 /* We can handle some of the tcc_expression cases here. */
3138 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3139 tclass = tcc_unary;
3140 else if (tclass == tcc_expression
3141 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3142 || code == COMPOUND_EXPR))
3143 tclass = tcc_binary;
3145 else if (tclass == tcc_expression && code == SAVE_EXPR
3146 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3148 /* If we've already found a CVAL1 or CVAL2, this expression is
3149 two complex to handle. */
3150 if (*cval1 || *cval2)
3151 return 0;
3153 tclass = tcc_unary;
3154 *save_p = 1;
3157 switch (tclass)
3159 case tcc_unary:
3160 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3162 case tcc_binary:
3163 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3164 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3165 cval1, cval2, save_p));
3167 case tcc_constant:
3168 return 1;
3170 case tcc_expression:
3171 if (code == COND_EXPR)
3172 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3173 cval1, cval2, save_p)
3174 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3175 cval1, cval2, save_p)
3176 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3177 cval1, cval2, save_p));
3178 return 0;
3180 case tcc_comparison:
3181 /* First see if we can handle the first operand, then the second. For
3182 the second operand, we know *CVAL1 can't be zero. It must be that
3183 one side of the comparison is each of the values; test for the
3184 case where this isn't true by failing if the two operands
3185 are the same. */
3187 if (operand_equal_p (TREE_OPERAND (arg, 0),
3188 TREE_OPERAND (arg, 1), 0))
3189 return 0;
3191 if (*cval1 == 0)
3192 *cval1 = TREE_OPERAND (arg, 0);
3193 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3195 else if (*cval2 == 0)
3196 *cval2 = TREE_OPERAND (arg, 0);
3197 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3199 else
3200 return 0;
3202 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3204 else if (*cval2 == 0)
3205 *cval2 = TREE_OPERAND (arg, 1);
3206 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3208 else
3209 return 0;
3211 return 1;
3213 default:
3214 return 0;
3218 /* ARG is a tree that is known to contain just arithmetic operations and
3219 comparisons. Evaluate the operations in the tree substituting NEW0 for
3220 any occurrence of OLD0 as an operand of a comparison and likewise for
3221 NEW1 and OLD1. */
3223 static tree
3224 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3225 tree old1, tree new1)
3227 tree type = TREE_TYPE (arg);
3228 enum tree_code code = TREE_CODE (arg);
3229 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3231 /* We can handle some of the tcc_expression cases here. */
3232 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3233 tclass = tcc_unary;
3234 else if (tclass == tcc_expression
3235 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3236 tclass = tcc_binary;
3238 switch (tclass)
3240 case tcc_unary:
3241 return fold_build1_loc (loc, code, type,
3242 eval_subst (loc, TREE_OPERAND (arg, 0),
3243 old0, new0, old1, new1));
3245 case tcc_binary:
3246 return fold_build2_loc (loc, code, type,
3247 eval_subst (loc, TREE_OPERAND (arg, 0),
3248 old0, new0, old1, new1),
3249 eval_subst (loc, TREE_OPERAND (arg, 1),
3250 old0, new0, old1, new1));
3252 case tcc_expression:
3253 switch (code)
3255 case SAVE_EXPR:
3256 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3257 old1, new1);
3259 case COMPOUND_EXPR:
3260 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3261 old1, new1);
3263 case COND_EXPR:
3264 return fold_build3_loc (loc, code, type,
3265 eval_subst (loc, TREE_OPERAND (arg, 0),
3266 old0, new0, old1, new1),
3267 eval_subst (loc, TREE_OPERAND (arg, 1),
3268 old0, new0, old1, new1),
3269 eval_subst (loc, TREE_OPERAND (arg, 2),
3270 old0, new0, old1, new1));
3271 default:
3272 break;
3274 /* Fall through - ??? */
3276 case tcc_comparison:
3278 tree arg0 = TREE_OPERAND (arg, 0);
3279 tree arg1 = TREE_OPERAND (arg, 1);
3281 /* We need to check both for exact equality and tree equality. The
3282 former will be true if the operand has a side-effect. In that
3283 case, we know the operand occurred exactly once. */
3285 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3286 arg0 = new0;
3287 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3288 arg0 = new1;
3290 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3291 arg1 = new0;
3292 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3293 arg1 = new1;
3295 return fold_build2_loc (loc, code, type, arg0, arg1);
3298 default:
3299 return arg;
3303 /* Return a tree for the case when the result of an expression is RESULT
3304 converted to TYPE and OMITTED was previously an operand of the expression
3305 but is now not needed (e.g., we folded OMITTED * 0).
3307 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3308 the conversion of RESULT to TYPE. */
3310 tree
3311 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3313 tree t = fold_convert_loc (loc, type, result);
3315 /* If the resulting operand is an empty statement, just return the omitted
3316 statement casted to void. */
3317 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3318 return build1_loc (loc, NOP_EXPR, void_type_node,
3319 fold_ignored_result (omitted));
3321 if (TREE_SIDE_EFFECTS (omitted))
3322 return build2_loc (loc, COMPOUND_EXPR, type,
3323 fold_ignored_result (omitted), t);
3325 return non_lvalue_loc (loc, t);
3328 /* Return a tree for the case when the result of an expression is RESULT
3329 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3330 of the expression but are now not needed.
3332 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3333 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3334 evaluated before OMITTED2. Otherwise, if neither has side effects,
3335 just do the conversion of RESULT to TYPE. */
3337 tree
3338 omit_two_operands_loc (location_t loc, tree type, tree result,
3339 tree omitted1, tree omitted2)
3341 tree t = fold_convert_loc (loc, type, result);
3343 if (TREE_SIDE_EFFECTS (omitted2))
3344 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3345 if (TREE_SIDE_EFFECTS (omitted1))
3346 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3348 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3352 /* Return a simplified tree node for the truth-negation of ARG. This
3353 never alters ARG itself. We assume that ARG is an operation that
3354 returns a truth value (0 or 1).
3356 FIXME: one would think we would fold the result, but it causes
3357 problems with the dominator optimizer. */
3359 static tree
3360 fold_truth_not_expr (location_t loc, tree arg)
3362 tree type = TREE_TYPE (arg);
3363 enum tree_code code = TREE_CODE (arg);
3364 location_t loc1, loc2;
3366 /* If this is a comparison, we can simply invert it, except for
3367 floating-point non-equality comparisons, in which case we just
3368 enclose a TRUTH_NOT_EXPR around what we have. */
3370 if (TREE_CODE_CLASS (code) == tcc_comparison)
3372 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3373 if (FLOAT_TYPE_P (op_type)
3374 && flag_trapping_math
3375 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3376 && code != NE_EXPR && code != EQ_EXPR)
3377 return NULL_TREE;
3379 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3380 if (code == ERROR_MARK)
3381 return NULL_TREE;
3383 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3384 TREE_OPERAND (arg, 1));
3387 switch (code)
3389 case INTEGER_CST:
3390 return constant_boolean_node (integer_zerop (arg), type);
3392 case TRUTH_AND_EXPR:
3393 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3394 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3395 return build2_loc (loc, TRUTH_OR_EXPR, type,
3396 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3397 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3399 case TRUTH_OR_EXPR:
3400 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3401 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3402 return build2_loc (loc, TRUTH_AND_EXPR, type,
3403 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3404 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3406 case TRUTH_XOR_EXPR:
3407 /* Here we can invert either operand. We invert the first operand
3408 unless the second operand is a TRUTH_NOT_EXPR in which case our
3409 result is the XOR of the first operand with the inside of the
3410 negation of the second operand. */
3412 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3413 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3414 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3415 else
3416 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3417 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3418 TREE_OPERAND (arg, 1));
3420 case TRUTH_ANDIF_EXPR:
3421 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3422 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3423 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3424 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3425 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3427 case TRUTH_ORIF_EXPR:
3428 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3429 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3430 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3431 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3432 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3434 case TRUTH_NOT_EXPR:
3435 return TREE_OPERAND (arg, 0);
3437 case COND_EXPR:
3439 tree arg1 = TREE_OPERAND (arg, 1);
3440 tree arg2 = TREE_OPERAND (arg, 2);
3442 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3443 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3445 /* A COND_EXPR may have a throw as one operand, which
3446 then has void type. Just leave void operands
3447 as they are. */
3448 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3449 VOID_TYPE_P (TREE_TYPE (arg1))
3450 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3451 VOID_TYPE_P (TREE_TYPE (arg2))
3452 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3455 case COMPOUND_EXPR:
3456 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3457 return build2_loc (loc, COMPOUND_EXPR, type,
3458 TREE_OPERAND (arg, 0),
3459 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3461 case NON_LVALUE_EXPR:
3462 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3463 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3465 CASE_CONVERT:
3466 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3467 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3469 /* ... fall through ... */
3471 case FLOAT_EXPR:
3472 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3473 return build1_loc (loc, TREE_CODE (arg), type,
3474 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3476 case BIT_AND_EXPR:
3477 if (!integer_onep (TREE_OPERAND (arg, 1)))
3478 return NULL_TREE;
3479 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3481 case SAVE_EXPR:
3482 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3484 case CLEANUP_POINT_EXPR:
3485 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3486 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3487 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3489 default:
3490 return NULL_TREE;
3494 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3495 assume that ARG is an operation that returns a truth value (0 or 1
3496 for scalars, 0 or -1 for vectors). Return the folded expression if
3497 folding is successful. Otherwise, return NULL_TREE. */
3499 static tree
3500 fold_invert_truthvalue (location_t loc, tree arg)
3502 tree type = TREE_TYPE (arg);
3503 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3504 ? BIT_NOT_EXPR
3505 : TRUTH_NOT_EXPR,
3506 type, arg);
3509 /* Return a simplified tree node for the truth-negation of ARG. This
3510 never alters ARG itself. We assume that ARG is an operation that
3511 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3513 tree
3514 invert_truthvalue_loc (location_t loc, tree arg)
3516 if (TREE_CODE (arg) == ERROR_MARK)
3517 return arg;
3519 tree type = TREE_TYPE (arg);
3520 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3521 ? BIT_NOT_EXPR
3522 : TRUTH_NOT_EXPR,
3523 type, arg);
3526 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3527 operands are another bit-wise operation with a common input. If so,
3528 distribute the bit operations to save an operation and possibly two if
3529 constants are involved. For example, convert
3530 (A | B) & (A | C) into A | (B & C)
3531 Further simplification will occur if B and C are constants.
3533 If this optimization cannot be done, 0 will be returned. */
3535 static tree
3536 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3537 tree arg0, tree arg1)
3539 tree common;
3540 tree left, right;
3542 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3543 || TREE_CODE (arg0) == code
3544 || (TREE_CODE (arg0) != BIT_AND_EXPR
3545 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3546 return 0;
3548 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3550 common = TREE_OPERAND (arg0, 0);
3551 left = TREE_OPERAND (arg0, 1);
3552 right = TREE_OPERAND (arg1, 1);
3554 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3556 common = TREE_OPERAND (arg0, 0);
3557 left = TREE_OPERAND (arg0, 1);
3558 right = TREE_OPERAND (arg1, 0);
3560 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3562 common = TREE_OPERAND (arg0, 1);
3563 left = TREE_OPERAND (arg0, 0);
3564 right = TREE_OPERAND (arg1, 1);
3566 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3568 common = TREE_OPERAND (arg0, 1);
3569 left = TREE_OPERAND (arg0, 0);
3570 right = TREE_OPERAND (arg1, 0);
3572 else
3573 return 0;
3575 common = fold_convert_loc (loc, type, common);
3576 left = fold_convert_loc (loc, type, left);
3577 right = fold_convert_loc (loc, type, right);
3578 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3579 fold_build2_loc (loc, code, type, left, right));
3582 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3583 with code CODE. This optimization is unsafe. */
3584 static tree
3585 distribute_real_division (location_t loc, enum tree_code code, tree type,
3586 tree arg0, tree arg1)
3588 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3589 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3591 /* (A / C) +- (B / C) -> (A +- B) / C. */
3592 if (mul0 == mul1
3593 && operand_equal_p (TREE_OPERAND (arg0, 1),
3594 TREE_OPERAND (arg1, 1), 0))
3595 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3596 fold_build2_loc (loc, code, type,
3597 TREE_OPERAND (arg0, 0),
3598 TREE_OPERAND (arg1, 0)),
3599 TREE_OPERAND (arg0, 1));
3601 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3602 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3603 TREE_OPERAND (arg1, 0), 0)
3604 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3605 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3607 REAL_VALUE_TYPE r0, r1;
3608 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3609 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3610 if (!mul0)
3611 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3612 if (!mul1)
3613 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3614 real_arithmetic (&r0, code, &r0, &r1);
3615 return fold_build2_loc (loc, MULT_EXPR, type,
3616 TREE_OPERAND (arg0, 0),
3617 build_real (type, r0));
3620 return NULL_TREE;
3623 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3624 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3626 static tree
3627 make_bit_field_ref (location_t loc, tree inner, tree type,
3628 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3630 tree result, bftype;
3632 if (bitpos == 0)
3634 tree size = TYPE_SIZE (TREE_TYPE (inner));
3635 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3636 || POINTER_TYPE_P (TREE_TYPE (inner)))
3637 && tree_fits_shwi_p (size)
3638 && tree_to_shwi (size) == bitsize)
3639 return fold_convert_loc (loc, type, inner);
3642 bftype = type;
3643 if (TYPE_PRECISION (bftype) != bitsize
3644 || TYPE_UNSIGNED (bftype) == !unsignedp)
3645 bftype = build_nonstandard_integer_type (bitsize, 0);
3647 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3648 size_int (bitsize), bitsize_int (bitpos));
3650 if (bftype != type)
3651 result = fold_convert_loc (loc, type, result);
3653 return result;
3656 /* Optimize a bit-field compare.
3658 There are two cases: First is a compare against a constant and the
3659 second is a comparison of two items where the fields are at the same
3660 bit position relative to the start of a chunk (byte, halfword, word)
3661 large enough to contain it. In these cases we can avoid the shift
3662 implicit in bitfield extractions.
3664 For constants, we emit a compare of the shifted constant with the
3665 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3666 compared. For two fields at the same position, we do the ANDs with the
3667 similar mask and compare the result of the ANDs.
3669 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3670 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3671 are the left and right operands of the comparison, respectively.
3673 If the optimization described above can be done, we return the resulting
3674 tree. Otherwise we return zero. */
3676 static tree
3677 optimize_bit_field_compare (location_t loc, enum tree_code code,
3678 tree compare_type, tree lhs, tree rhs)
3680 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3681 tree type = TREE_TYPE (lhs);
3682 tree unsigned_type;
3683 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3684 machine_mode lmode, rmode, nmode;
3685 int lunsignedp, runsignedp;
3686 int lvolatilep = 0, rvolatilep = 0;
3687 tree linner, rinner = NULL_TREE;
3688 tree mask;
3689 tree offset;
3691 /* Get all the information about the extractions being done. If the bit size
3692 if the same as the size of the underlying object, we aren't doing an
3693 extraction at all and so can do nothing. We also don't want to
3694 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3695 then will no longer be able to replace it. */
3696 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3697 &lunsignedp, &lvolatilep, false);
3698 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3699 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3700 return 0;
3702 if (!const_p)
3704 /* If this is not a constant, we can only do something if bit positions,
3705 sizes, and signedness are the same. */
3706 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3707 &runsignedp, &rvolatilep, false);
3709 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3710 || lunsignedp != runsignedp || offset != 0
3711 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3712 return 0;
3715 /* See if we can find a mode to refer to this field. We should be able to,
3716 but fail if we can't. */
3717 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3718 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3719 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3720 TYPE_ALIGN (TREE_TYPE (rinner))),
3721 word_mode, false);
3722 if (nmode == VOIDmode)
3723 return 0;
3725 /* Set signed and unsigned types of the precision of this mode for the
3726 shifts below. */
3727 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3729 /* Compute the bit position and size for the new reference and our offset
3730 within it. If the new reference is the same size as the original, we
3731 won't optimize anything, so return zero. */
3732 nbitsize = GET_MODE_BITSIZE (nmode);
3733 nbitpos = lbitpos & ~ (nbitsize - 1);
3734 lbitpos -= nbitpos;
3735 if (nbitsize == lbitsize)
3736 return 0;
3738 if (BYTES_BIG_ENDIAN)
3739 lbitpos = nbitsize - lbitsize - lbitpos;
3741 /* Make the mask to be used against the extracted field. */
3742 mask = build_int_cst_type (unsigned_type, -1);
3743 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3744 mask = const_binop (RSHIFT_EXPR, mask,
3745 size_int (nbitsize - lbitsize - lbitpos));
3747 if (! const_p)
3748 /* If not comparing with constant, just rework the comparison
3749 and return. */
3750 return fold_build2_loc (loc, code, compare_type,
3751 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3752 make_bit_field_ref (loc, linner,
3753 unsigned_type,
3754 nbitsize, nbitpos,
3756 mask),
3757 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3758 make_bit_field_ref (loc, rinner,
3759 unsigned_type,
3760 nbitsize, nbitpos,
3762 mask));
3764 /* Otherwise, we are handling the constant case. See if the constant is too
3765 big for the field. Warn and return a tree of for 0 (false) if so. We do
3766 this not only for its own sake, but to avoid having to test for this
3767 error case below. If we didn't, we might generate wrong code.
3769 For unsigned fields, the constant shifted right by the field length should
3770 be all zero. For signed fields, the high-order bits should agree with
3771 the sign bit. */
3773 if (lunsignedp)
3775 if (wi::lrshift (rhs, lbitsize) != 0)
3777 warning (0, "comparison is always %d due to width of bit-field",
3778 code == NE_EXPR);
3779 return constant_boolean_node (code == NE_EXPR, compare_type);
3782 else
3784 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3785 if (tem != 0 && tem != -1)
3787 warning (0, "comparison is always %d due to width of bit-field",
3788 code == NE_EXPR);
3789 return constant_boolean_node (code == NE_EXPR, compare_type);
3793 /* Single-bit compares should always be against zero. */
3794 if (lbitsize == 1 && ! integer_zerop (rhs))
3796 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3797 rhs = build_int_cst (type, 0);
3800 /* Make a new bitfield reference, shift the constant over the
3801 appropriate number of bits and mask it with the computed mask
3802 (in case this was a signed field). If we changed it, make a new one. */
3803 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3805 rhs = const_binop (BIT_AND_EXPR,
3806 const_binop (LSHIFT_EXPR,
3807 fold_convert_loc (loc, unsigned_type, rhs),
3808 size_int (lbitpos)),
3809 mask);
3811 lhs = build2_loc (loc, code, compare_type,
3812 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3813 return lhs;
3816 /* Subroutine for fold_truth_andor_1: decode a field reference.
3818 If EXP is a comparison reference, we return the innermost reference.
3820 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3821 set to the starting bit number.
3823 If the innermost field can be completely contained in a mode-sized
3824 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3826 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3827 otherwise it is not changed.
3829 *PUNSIGNEDP is set to the signedness of the field.
3831 *PMASK is set to the mask used. This is either contained in a
3832 BIT_AND_EXPR or derived from the width of the field.
3834 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3836 Return 0 if this is not a component reference or is one that we can't
3837 do anything with. */
3839 static tree
3840 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3841 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3842 int *punsignedp, int *pvolatilep,
3843 tree *pmask, tree *pand_mask)
3845 tree outer_type = 0;
3846 tree and_mask = 0;
3847 tree mask, inner, offset;
3848 tree unsigned_type;
3849 unsigned int precision;
3851 /* All the optimizations using this function assume integer fields.
3852 There are problems with FP fields since the type_for_size call
3853 below can fail for, e.g., XFmode. */
3854 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3855 return 0;
3857 /* We are interested in the bare arrangement of bits, so strip everything
3858 that doesn't affect the machine mode. However, record the type of the
3859 outermost expression if it may matter below. */
3860 if (CONVERT_EXPR_P (exp)
3861 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3862 outer_type = TREE_TYPE (exp);
3863 STRIP_NOPS (exp);
3865 if (TREE_CODE (exp) == BIT_AND_EXPR)
3867 and_mask = TREE_OPERAND (exp, 1);
3868 exp = TREE_OPERAND (exp, 0);
3869 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3870 if (TREE_CODE (and_mask) != INTEGER_CST)
3871 return 0;
3874 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3875 punsignedp, pvolatilep, false);
3876 if ((inner == exp && and_mask == 0)
3877 || *pbitsize < 0 || offset != 0
3878 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3879 return 0;
3881 /* If the number of bits in the reference is the same as the bitsize of
3882 the outer type, then the outer type gives the signedness. Otherwise
3883 (in case of a small bitfield) the signedness is unchanged. */
3884 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3885 *punsignedp = TYPE_UNSIGNED (outer_type);
3887 /* Compute the mask to access the bitfield. */
3888 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3889 precision = TYPE_PRECISION (unsigned_type);
3891 mask = build_int_cst_type (unsigned_type, -1);
3893 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3894 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3896 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3897 if (and_mask != 0)
3898 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3899 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3901 *pmask = mask;
3902 *pand_mask = and_mask;
3903 return inner;
3906 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3907 bit positions and MASK is SIGNED. */
3909 static int
3910 all_ones_mask_p (const_tree mask, unsigned int size)
3912 tree type = TREE_TYPE (mask);
3913 unsigned int precision = TYPE_PRECISION (type);
3915 /* If this function returns true when the type of the mask is
3916 UNSIGNED, then there will be errors. In particular see
3917 gcc.c-torture/execute/990326-1.c. There does not appear to be
3918 any documentation paper trail as to why this is so. But the pre
3919 wide-int worked with that restriction and it has been preserved
3920 here. */
3921 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3922 return false;
3924 return wi::mask (size, false, precision) == mask;
3927 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3928 represents the sign bit of EXP's type. If EXP represents a sign
3929 or zero extension, also test VAL against the unextended type.
3930 The return value is the (sub)expression whose sign bit is VAL,
3931 or NULL_TREE otherwise. */
3933 tree
3934 sign_bit_p (tree exp, const_tree val)
3936 int width;
3937 tree t;
3939 /* Tree EXP must have an integral type. */
3940 t = TREE_TYPE (exp);
3941 if (! INTEGRAL_TYPE_P (t))
3942 return NULL_TREE;
3944 /* Tree VAL must be an integer constant. */
3945 if (TREE_CODE (val) != INTEGER_CST
3946 || TREE_OVERFLOW (val))
3947 return NULL_TREE;
3949 width = TYPE_PRECISION (t);
3950 if (wi::only_sign_bit_p (val, width))
3951 return exp;
3953 /* Handle extension from a narrower type. */
3954 if (TREE_CODE (exp) == NOP_EXPR
3955 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3956 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3958 return NULL_TREE;
3961 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3962 to be evaluated unconditionally. */
3964 static int
3965 simple_operand_p (const_tree exp)
3967 /* Strip any conversions that don't change the machine mode. */
3968 STRIP_NOPS (exp);
3970 return (CONSTANT_CLASS_P (exp)
3971 || TREE_CODE (exp) == SSA_NAME
3972 || (DECL_P (exp)
3973 && ! TREE_ADDRESSABLE (exp)
3974 && ! TREE_THIS_VOLATILE (exp)
3975 && ! DECL_NONLOCAL (exp)
3976 /* Don't regard global variables as simple. They may be
3977 allocated in ways unknown to the compiler (shared memory,
3978 #pragma weak, etc). */
3979 && ! TREE_PUBLIC (exp)
3980 && ! DECL_EXTERNAL (exp)
3981 /* Weakrefs are not safe to be read, since they can be NULL.
3982 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3983 have DECL_WEAK flag set. */
3984 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3985 /* Loading a static variable is unduly expensive, but global
3986 registers aren't expensive. */
3987 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3990 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3991 to be evaluated unconditionally.
3992 I addition to simple_operand_p, we assume that comparisons, conversions,
3993 and logic-not operations are simple, if their operands are simple, too. */
3995 static bool
3996 simple_operand_p_2 (tree exp)
3998 enum tree_code code;
4000 if (TREE_SIDE_EFFECTS (exp)
4001 || tree_could_trap_p (exp))
4002 return false;
4004 while (CONVERT_EXPR_P (exp))
4005 exp = TREE_OPERAND (exp, 0);
4007 code = TREE_CODE (exp);
4009 if (TREE_CODE_CLASS (code) == tcc_comparison)
4010 return (simple_operand_p (TREE_OPERAND (exp, 0))
4011 && simple_operand_p (TREE_OPERAND (exp, 1)));
4013 if (code == TRUTH_NOT_EXPR)
4014 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4016 return simple_operand_p (exp);
4020 /* The following functions are subroutines to fold_range_test and allow it to
4021 try to change a logical combination of comparisons into a range test.
4023 For example, both
4024 X == 2 || X == 3 || X == 4 || X == 5
4026 X >= 2 && X <= 5
4027 are converted to
4028 (unsigned) (X - 2) <= 3
4030 We describe each set of comparisons as being either inside or outside
4031 a range, using a variable named like IN_P, and then describe the
4032 range with a lower and upper bound. If one of the bounds is omitted,
4033 it represents either the highest or lowest value of the type.
4035 In the comments below, we represent a range by two numbers in brackets
4036 preceded by a "+" to designate being inside that range, or a "-" to
4037 designate being outside that range, so the condition can be inverted by
4038 flipping the prefix. An omitted bound is represented by a "-". For
4039 example, "- [-, 10]" means being outside the range starting at the lowest
4040 possible value and ending at 10, in other words, being greater than 10.
4041 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4042 always false.
4044 We set up things so that the missing bounds are handled in a consistent
4045 manner so neither a missing bound nor "true" and "false" need to be
4046 handled using a special case. */
4048 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4049 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4050 and UPPER1_P are nonzero if the respective argument is an upper bound
4051 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4052 must be specified for a comparison. ARG1 will be converted to ARG0's
4053 type if both are specified. */
4055 static tree
4056 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4057 tree arg1, int upper1_p)
4059 tree tem;
4060 int result;
4061 int sgn0, sgn1;
4063 /* If neither arg represents infinity, do the normal operation.
4064 Else, if not a comparison, return infinity. Else handle the special
4065 comparison rules. Note that most of the cases below won't occur, but
4066 are handled for consistency. */
4068 if (arg0 != 0 && arg1 != 0)
4070 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4071 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4072 STRIP_NOPS (tem);
4073 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4076 if (TREE_CODE_CLASS (code) != tcc_comparison)
4077 return 0;
4079 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4080 for neither. In real maths, we cannot assume open ended ranges are
4081 the same. But, this is computer arithmetic, where numbers are finite.
4082 We can therefore make the transformation of any unbounded range with
4083 the value Z, Z being greater than any representable number. This permits
4084 us to treat unbounded ranges as equal. */
4085 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4086 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4087 switch (code)
4089 case EQ_EXPR:
4090 result = sgn0 == sgn1;
4091 break;
4092 case NE_EXPR:
4093 result = sgn0 != sgn1;
4094 break;
4095 case LT_EXPR:
4096 result = sgn0 < sgn1;
4097 break;
4098 case LE_EXPR:
4099 result = sgn0 <= sgn1;
4100 break;
4101 case GT_EXPR:
4102 result = sgn0 > sgn1;
4103 break;
4104 case GE_EXPR:
4105 result = sgn0 >= sgn1;
4106 break;
4107 default:
4108 gcc_unreachable ();
4111 return constant_boolean_node (result, type);
4114 /* Helper routine for make_range. Perform one step for it, return
4115 new expression if the loop should continue or NULL_TREE if it should
4116 stop. */
4118 tree
4119 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4120 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4121 bool *strict_overflow_p)
4123 tree arg0_type = TREE_TYPE (arg0);
4124 tree n_low, n_high, low = *p_low, high = *p_high;
4125 int in_p = *p_in_p, n_in_p;
4127 switch (code)
4129 case TRUTH_NOT_EXPR:
4130 /* We can only do something if the range is testing for zero. */
4131 if (low == NULL_TREE || high == NULL_TREE
4132 || ! integer_zerop (low) || ! integer_zerop (high))
4133 return NULL_TREE;
4134 *p_in_p = ! in_p;
4135 return arg0;
4137 case EQ_EXPR: case NE_EXPR:
4138 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4139 /* We can only do something if the range is testing for zero
4140 and if the second operand is an integer constant. Note that
4141 saying something is "in" the range we make is done by
4142 complementing IN_P since it will set in the initial case of
4143 being not equal to zero; "out" is leaving it alone. */
4144 if (low == NULL_TREE || high == NULL_TREE
4145 || ! integer_zerop (low) || ! integer_zerop (high)
4146 || TREE_CODE (arg1) != INTEGER_CST)
4147 return NULL_TREE;
4149 switch (code)
4151 case NE_EXPR: /* - [c, c] */
4152 low = high = arg1;
4153 break;
4154 case EQ_EXPR: /* + [c, c] */
4155 in_p = ! in_p, low = high = arg1;
4156 break;
4157 case GT_EXPR: /* - [-, c] */
4158 low = 0, high = arg1;
4159 break;
4160 case GE_EXPR: /* + [c, -] */
4161 in_p = ! in_p, low = arg1, high = 0;
4162 break;
4163 case LT_EXPR: /* - [c, -] */
4164 low = arg1, high = 0;
4165 break;
4166 case LE_EXPR: /* + [-, c] */
4167 in_p = ! in_p, low = 0, high = arg1;
4168 break;
4169 default:
4170 gcc_unreachable ();
4173 /* If this is an unsigned comparison, we also know that EXP is
4174 greater than or equal to zero. We base the range tests we make
4175 on that fact, so we record it here so we can parse existing
4176 range tests. We test arg0_type since often the return type
4177 of, e.g. EQ_EXPR, is boolean. */
4178 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4180 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4181 in_p, low, high, 1,
4182 build_int_cst (arg0_type, 0),
4183 NULL_TREE))
4184 return NULL_TREE;
4186 in_p = n_in_p, low = n_low, high = n_high;
4188 /* If the high bound is missing, but we have a nonzero low
4189 bound, reverse the range so it goes from zero to the low bound
4190 minus 1. */
4191 if (high == 0 && low && ! integer_zerop (low))
4193 in_p = ! in_p;
4194 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4195 build_int_cst (TREE_TYPE (low), 1), 0);
4196 low = build_int_cst (arg0_type, 0);
4200 *p_low = low;
4201 *p_high = high;
4202 *p_in_p = in_p;
4203 return arg0;
4205 case NEGATE_EXPR:
4206 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4207 low and high are non-NULL, then normalize will DTRT. */
4208 if (!TYPE_UNSIGNED (arg0_type)
4209 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4211 if (low == NULL_TREE)
4212 low = TYPE_MIN_VALUE (arg0_type);
4213 if (high == NULL_TREE)
4214 high = TYPE_MAX_VALUE (arg0_type);
4217 /* (-x) IN [a,b] -> x in [-b, -a] */
4218 n_low = range_binop (MINUS_EXPR, exp_type,
4219 build_int_cst (exp_type, 0),
4220 0, high, 1);
4221 n_high = range_binop (MINUS_EXPR, exp_type,
4222 build_int_cst (exp_type, 0),
4223 0, low, 0);
4224 if (n_high != 0 && TREE_OVERFLOW (n_high))
4225 return NULL_TREE;
4226 goto normalize;
4228 case BIT_NOT_EXPR:
4229 /* ~ X -> -X - 1 */
4230 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4231 build_int_cst (exp_type, 1));
4233 case PLUS_EXPR:
4234 case MINUS_EXPR:
4235 if (TREE_CODE (arg1) != INTEGER_CST)
4236 return NULL_TREE;
4238 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4239 move a constant to the other side. */
4240 if (!TYPE_UNSIGNED (arg0_type)
4241 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4242 return NULL_TREE;
4244 /* If EXP is signed, any overflow in the computation is undefined,
4245 so we don't worry about it so long as our computations on
4246 the bounds don't overflow. For unsigned, overflow is defined
4247 and this is exactly the right thing. */
4248 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4249 arg0_type, low, 0, arg1, 0);
4250 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4251 arg0_type, high, 1, arg1, 0);
4252 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4253 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4254 return NULL_TREE;
4256 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4257 *strict_overflow_p = true;
4259 normalize:
4260 /* Check for an unsigned range which has wrapped around the maximum
4261 value thus making n_high < n_low, and normalize it. */
4262 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4264 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4265 build_int_cst (TREE_TYPE (n_high), 1), 0);
4266 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4267 build_int_cst (TREE_TYPE (n_low), 1), 0);
4269 /* If the range is of the form +/- [ x+1, x ], we won't
4270 be able to normalize it. But then, it represents the
4271 whole range or the empty set, so make it
4272 +/- [ -, - ]. */
4273 if (tree_int_cst_equal (n_low, low)
4274 && tree_int_cst_equal (n_high, high))
4275 low = high = 0;
4276 else
4277 in_p = ! in_p;
4279 else
4280 low = n_low, high = n_high;
4282 *p_low = low;
4283 *p_high = high;
4284 *p_in_p = in_p;
4285 return arg0;
4287 CASE_CONVERT:
4288 case NON_LVALUE_EXPR:
4289 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4290 return NULL_TREE;
4292 if (! INTEGRAL_TYPE_P (arg0_type)
4293 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4294 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4295 return NULL_TREE;
4297 n_low = low, n_high = high;
4299 if (n_low != 0)
4300 n_low = fold_convert_loc (loc, arg0_type, n_low);
4302 if (n_high != 0)
4303 n_high = fold_convert_loc (loc, arg0_type, n_high);
4305 /* If we're converting arg0 from an unsigned type, to exp,
4306 a signed type, we will be doing the comparison as unsigned.
4307 The tests above have already verified that LOW and HIGH
4308 are both positive.
4310 So we have to ensure that we will handle large unsigned
4311 values the same way that the current signed bounds treat
4312 negative values. */
4314 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4316 tree high_positive;
4317 tree equiv_type;
4318 /* For fixed-point modes, we need to pass the saturating flag
4319 as the 2nd parameter. */
4320 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4321 equiv_type
4322 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4323 TYPE_SATURATING (arg0_type));
4324 else
4325 equiv_type
4326 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4328 /* A range without an upper bound is, naturally, unbounded.
4329 Since convert would have cropped a very large value, use
4330 the max value for the destination type. */
4331 high_positive
4332 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4333 : TYPE_MAX_VALUE (arg0_type);
4335 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4336 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4337 fold_convert_loc (loc, arg0_type,
4338 high_positive),
4339 build_int_cst (arg0_type, 1));
4341 /* If the low bound is specified, "and" the range with the
4342 range for which the original unsigned value will be
4343 positive. */
4344 if (low != 0)
4346 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4347 1, fold_convert_loc (loc, arg0_type,
4348 integer_zero_node),
4349 high_positive))
4350 return NULL_TREE;
4352 in_p = (n_in_p == in_p);
4354 else
4356 /* Otherwise, "or" the range with the range of the input
4357 that will be interpreted as negative. */
4358 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4359 1, fold_convert_loc (loc, arg0_type,
4360 integer_zero_node),
4361 high_positive))
4362 return NULL_TREE;
4364 in_p = (in_p != n_in_p);
4368 *p_low = n_low;
4369 *p_high = n_high;
4370 *p_in_p = in_p;
4371 return arg0;
4373 default:
4374 return NULL_TREE;
4378 /* Given EXP, a logical expression, set the range it is testing into
4379 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4380 actually being tested. *PLOW and *PHIGH will be made of the same
4381 type as the returned expression. If EXP is not a comparison, we
4382 will most likely not be returning a useful value and range. Set
4383 *STRICT_OVERFLOW_P to true if the return value is only valid
4384 because signed overflow is undefined; otherwise, do not change
4385 *STRICT_OVERFLOW_P. */
4387 tree
4388 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4389 bool *strict_overflow_p)
4391 enum tree_code code;
4392 tree arg0, arg1 = NULL_TREE;
4393 tree exp_type, nexp;
4394 int in_p;
4395 tree low, high;
4396 location_t loc = EXPR_LOCATION (exp);
4398 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4399 and see if we can refine the range. Some of the cases below may not
4400 happen, but it doesn't seem worth worrying about this. We "continue"
4401 the outer loop when we've changed something; otherwise we "break"
4402 the switch, which will "break" the while. */
4404 in_p = 0;
4405 low = high = build_int_cst (TREE_TYPE (exp), 0);
4407 while (1)
4409 code = TREE_CODE (exp);
4410 exp_type = TREE_TYPE (exp);
4411 arg0 = NULL_TREE;
4413 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4415 if (TREE_OPERAND_LENGTH (exp) > 0)
4416 arg0 = TREE_OPERAND (exp, 0);
4417 if (TREE_CODE_CLASS (code) == tcc_binary
4418 || TREE_CODE_CLASS (code) == tcc_comparison
4419 || (TREE_CODE_CLASS (code) == tcc_expression
4420 && TREE_OPERAND_LENGTH (exp) > 1))
4421 arg1 = TREE_OPERAND (exp, 1);
4423 if (arg0 == NULL_TREE)
4424 break;
4426 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4427 &high, &in_p, strict_overflow_p);
4428 if (nexp == NULL_TREE)
4429 break;
4430 exp = nexp;
4433 /* If EXP is a constant, we can evaluate whether this is true or false. */
4434 if (TREE_CODE (exp) == INTEGER_CST)
4436 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4437 exp, 0, low, 0))
4438 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4439 exp, 1, high, 1)));
4440 low = high = 0;
4441 exp = 0;
4444 *pin_p = in_p, *plow = low, *phigh = high;
4445 return exp;
4448 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4449 type, TYPE, return an expression to test if EXP is in (or out of, depending
4450 on IN_P) the range. Return 0 if the test couldn't be created. */
4452 tree
4453 build_range_check (location_t loc, tree type, tree exp, int in_p,
4454 tree low, tree high)
4456 tree etype = TREE_TYPE (exp), value;
4458 #ifdef HAVE_canonicalize_funcptr_for_compare
4459 /* Disable this optimization for function pointer expressions
4460 on targets that require function pointer canonicalization. */
4461 if (HAVE_canonicalize_funcptr_for_compare
4462 && TREE_CODE (etype) == POINTER_TYPE
4463 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4464 return NULL_TREE;
4465 #endif
4467 if (! in_p)
4469 value = build_range_check (loc, type, exp, 1, low, high);
4470 if (value != 0)
4471 return invert_truthvalue_loc (loc, value);
4473 return 0;
4476 if (low == 0 && high == 0)
4477 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4479 if (low == 0)
4480 return fold_build2_loc (loc, LE_EXPR, type, exp,
4481 fold_convert_loc (loc, etype, high));
4483 if (high == 0)
4484 return fold_build2_loc (loc, GE_EXPR, type, exp,
4485 fold_convert_loc (loc, etype, low));
4487 if (operand_equal_p (low, high, 0))
4488 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4489 fold_convert_loc (loc, etype, low));
4491 if (integer_zerop (low))
4493 if (! TYPE_UNSIGNED (etype))
4495 etype = unsigned_type_for (etype);
4496 high = fold_convert_loc (loc, etype, high);
4497 exp = fold_convert_loc (loc, etype, exp);
4499 return build_range_check (loc, type, exp, 1, 0, high);
4502 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4503 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4505 int prec = TYPE_PRECISION (etype);
4507 if (wi::mask (prec - 1, false, prec) == high)
4509 if (TYPE_UNSIGNED (etype))
4511 tree signed_etype = signed_type_for (etype);
4512 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4513 etype
4514 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4515 else
4516 etype = signed_etype;
4517 exp = fold_convert_loc (loc, etype, exp);
4519 return fold_build2_loc (loc, GT_EXPR, type, exp,
4520 build_int_cst (etype, 0));
4524 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4525 This requires wrap-around arithmetics for the type of the expression.
4526 First make sure that arithmetics in this type is valid, then make sure
4527 that it wraps around. */
4528 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4529 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4530 TYPE_UNSIGNED (etype));
4532 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4534 tree utype, minv, maxv;
4536 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4537 for the type in question, as we rely on this here. */
4538 utype = unsigned_type_for (etype);
4539 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4540 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4541 build_int_cst (TREE_TYPE (maxv), 1), 1);
4542 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4544 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4545 minv, 1, maxv, 1)))
4546 etype = utype;
4547 else
4548 return 0;
4551 high = fold_convert_loc (loc, etype, high);
4552 low = fold_convert_loc (loc, etype, low);
4553 exp = fold_convert_loc (loc, etype, exp);
4555 value = const_binop (MINUS_EXPR, high, low);
4558 if (POINTER_TYPE_P (etype))
4560 if (value != 0 && !TREE_OVERFLOW (value))
4562 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4563 return build_range_check (loc, type,
4564 fold_build_pointer_plus_loc (loc, exp, low),
4565 1, build_int_cst (etype, 0), value);
4567 return 0;
4570 if (value != 0 && !TREE_OVERFLOW (value))
4571 return build_range_check (loc, type,
4572 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4573 1, build_int_cst (etype, 0), value);
4575 return 0;
4578 /* Return the predecessor of VAL in its type, handling the infinite case. */
4580 static tree
4581 range_predecessor (tree val)
4583 tree type = TREE_TYPE (val);
4585 if (INTEGRAL_TYPE_P (type)
4586 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4587 return 0;
4588 else
4589 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4590 build_int_cst (TREE_TYPE (val), 1), 0);
4593 /* Return the successor of VAL in its type, handling the infinite case. */
4595 static tree
4596 range_successor (tree val)
4598 tree type = TREE_TYPE (val);
4600 if (INTEGRAL_TYPE_P (type)
4601 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4602 return 0;
4603 else
4604 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4605 build_int_cst (TREE_TYPE (val), 1), 0);
4608 /* Given two ranges, see if we can merge them into one. Return 1 if we
4609 can, 0 if we can't. Set the output range into the specified parameters. */
4611 bool
4612 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4613 tree high0, int in1_p, tree low1, tree high1)
4615 int no_overlap;
4616 int subset;
4617 int temp;
4618 tree tem;
4619 int in_p;
4620 tree low, high;
4621 int lowequal = ((low0 == 0 && low1 == 0)
4622 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4623 low0, 0, low1, 0)));
4624 int highequal = ((high0 == 0 && high1 == 0)
4625 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4626 high0, 1, high1, 1)));
4628 /* Make range 0 be the range that starts first, or ends last if they
4629 start at the same value. Swap them if it isn't. */
4630 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4631 low0, 0, low1, 0))
4632 || (lowequal
4633 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4634 high1, 1, high0, 1))))
4636 temp = in0_p, in0_p = in1_p, in1_p = temp;
4637 tem = low0, low0 = low1, low1 = tem;
4638 tem = high0, high0 = high1, high1 = tem;
4641 /* Now flag two cases, whether the ranges are disjoint or whether the
4642 second range is totally subsumed in the first. Note that the tests
4643 below are simplified by the ones above. */
4644 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4645 high0, 1, low1, 0));
4646 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4647 high1, 1, high0, 1));
4649 /* We now have four cases, depending on whether we are including or
4650 excluding the two ranges. */
4651 if (in0_p && in1_p)
4653 /* If they don't overlap, the result is false. If the second range
4654 is a subset it is the result. Otherwise, the range is from the start
4655 of the second to the end of the first. */
4656 if (no_overlap)
4657 in_p = 0, low = high = 0;
4658 else if (subset)
4659 in_p = 1, low = low1, high = high1;
4660 else
4661 in_p = 1, low = low1, high = high0;
4664 else if (in0_p && ! in1_p)
4666 /* If they don't overlap, the result is the first range. If they are
4667 equal, the result is false. If the second range is a subset of the
4668 first, and the ranges begin at the same place, we go from just after
4669 the end of the second range to the end of the first. If the second
4670 range is not a subset of the first, or if it is a subset and both
4671 ranges end at the same place, the range starts at the start of the
4672 first range and ends just before the second range.
4673 Otherwise, we can't describe this as a single range. */
4674 if (no_overlap)
4675 in_p = 1, low = low0, high = high0;
4676 else if (lowequal && highequal)
4677 in_p = 0, low = high = 0;
4678 else if (subset && lowequal)
4680 low = range_successor (high1);
4681 high = high0;
4682 in_p = 1;
4683 if (low == 0)
4685 /* We are in the weird situation where high0 > high1 but
4686 high1 has no successor. Punt. */
4687 return 0;
4690 else if (! subset || highequal)
4692 low = low0;
4693 high = range_predecessor (low1);
4694 in_p = 1;
4695 if (high == 0)
4697 /* low0 < low1 but low1 has no predecessor. Punt. */
4698 return 0;
4701 else
4702 return 0;
4705 else if (! in0_p && in1_p)
4707 /* If they don't overlap, the result is the second range. If the second
4708 is a subset of the first, the result is false. Otherwise,
4709 the range starts just after the first range and ends at the
4710 end of the second. */
4711 if (no_overlap)
4712 in_p = 1, low = low1, high = high1;
4713 else if (subset || highequal)
4714 in_p = 0, low = high = 0;
4715 else
4717 low = range_successor (high0);
4718 high = high1;
4719 in_p = 1;
4720 if (low == 0)
4722 /* high1 > high0 but high0 has no successor. Punt. */
4723 return 0;
4728 else
4730 /* The case where we are excluding both ranges. Here the complex case
4731 is if they don't overlap. In that case, the only time we have a
4732 range is if they are adjacent. If the second is a subset of the
4733 first, the result is the first. Otherwise, the range to exclude
4734 starts at the beginning of the first range and ends at the end of the
4735 second. */
4736 if (no_overlap)
4738 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4739 range_successor (high0),
4740 1, low1, 0)))
4741 in_p = 0, low = low0, high = high1;
4742 else
4744 /* Canonicalize - [min, x] into - [-, x]. */
4745 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4746 switch (TREE_CODE (TREE_TYPE (low0)))
4748 case ENUMERAL_TYPE:
4749 if (TYPE_PRECISION (TREE_TYPE (low0))
4750 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4751 break;
4752 /* FALLTHROUGH */
4753 case INTEGER_TYPE:
4754 if (tree_int_cst_equal (low0,
4755 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4756 low0 = 0;
4757 break;
4758 case POINTER_TYPE:
4759 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4760 && integer_zerop (low0))
4761 low0 = 0;
4762 break;
4763 default:
4764 break;
4767 /* Canonicalize - [x, max] into - [x, -]. */
4768 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4769 switch (TREE_CODE (TREE_TYPE (high1)))
4771 case ENUMERAL_TYPE:
4772 if (TYPE_PRECISION (TREE_TYPE (high1))
4773 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4774 break;
4775 /* FALLTHROUGH */
4776 case INTEGER_TYPE:
4777 if (tree_int_cst_equal (high1,
4778 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4779 high1 = 0;
4780 break;
4781 case POINTER_TYPE:
4782 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4783 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4784 high1, 1,
4785 build_int_cst (TREE_TYPE (high1), 1),
4786 1)))
4787 high1 = 0;
4788 break;
4789 default:
4790 break;
4793 /* The ranges might be also adjacent between the maximum and
4794 minimum values of the given type. For
4795 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4796 return + [x + 1, y - 1]. */
4797 if (low0 == 0 && high1 == 0)
4799 low = range_successor (high0);
4800 high = range_predecessor (low1);
4801 if (low == 0 || high == 0)
4802 return 0;
4804 in_p = 1;
4806 else
4807 return 0;
4810 else if (subset)
4811 in_p = 0, low = low0, high = high0;
4812 else
4813 in_p = 0, low = low0, high = high1;
4816 *pin_p = in_p, *plow = low, *phigh = high;
4817 return 1;
4821 /* Subroutine of fold, looking inside expressions of the form
4822 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4823 of the COND_EXPR. This function is being used also to optimize
4824 A op B ? C : A, by reversing the comparison first.
4826 Return a folded expression whose code is not a COND_EXPR
4827 anymore, or NULL_TREE if no folding opportunity is found. */
4829 static tree
4830 fold_cond_expr_with_comparison (location_t loc, tree type,
4831 tree arg0, tree arg1, tree arg2)
4833 enum tree_code comp_code = TREE_CODE (arg0);
4834 tree arg00 = TREE_OPERAND (arg0, 0);
4835 tree arg01 = TREE_OPERAND (arg0, 1);
4836 tree arg1_type = TREE_TYPE (arg1);
4837 tree tem;
4839 STRIP_NOPS (arg1);
4840 STRIP_NOPS (arg2);
4842 /* If we have A op 0 ? A : -A, consider applying the following
4843 transformations:
4845 A == 0? A : -A same as -A
4846 A != 0? A : -A same as A
4847 A >= 0? A : -A same as abs (A)
4848 A > 0? A : -A same as abs (A)
4849 A <= 0? A : -A same as -abs (A)
4850 A < 0? A : -A same as -abs (A)
4852 None of these transformations work for modes with signed
4853 zeros. If A is +/-0, the first two transformations will
4854 change the sign of the result (from +0 to -0, or vice
4855 versa). The last four will fix the sign of the result,
4856 even though the original expressions could be positive or
4857 negative, depending on the sign of A.
4859 Note that all these transformations are correct if A is
4860 NaN, since the two alternatives (A and -A) are also NaNs. */
4861 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4862 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4863 ? real_zerop (arg01)
4864 : integer_zerop (arg01))
4865 && ((TREE_CODE (arg2) == NEGATE_EXPR
4866 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4867 /* In the case that A is of the form X-Y, '-A' (arg2) may
4868 have already been folded to Y-X, check for that. */
4869 || (TREE_CODE (arg1) == MINUS_EXPR
4870 && TREE_CODE (arg2) == MINUS_EXPR
4871 && operand_equal_p (TREE_OPERAND (arg1, 0),
4872 TREE_OPERAND (arg2, 1), 0)
4873 && operand_equal_p (TREE_OPERAND (arg1, 1),
4874 TREE_OPERAND (arg2, 0), 0))))
4875 switch (comp_code)
4877 case EQ_EXPR:
4878 case UNEQ_EXPR:
4879 tem = fold_convert_loc (loc, arg1_type, arg1);
4880 return pedantic_non_lvalue_loc (loc,
4881 fold_convert_loc (loc, type,
4882 negate_expr (tem)));
4883 case NE_EXPR:
4884 case LTGT_EXPR:
4885 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4886 case UNGE_EXPR:
4887 case UNGT_EXPR:
4888 if (flag_trapping_math)
4889 break;
4890 /* Fall through. */
4891 case GE_EXPR:
4892 case GT_EXPR:
4893 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4894 arg1 = fold_convert_loc (loc, signed_type_for
4895 (TREE_TYPE (arg1)), arg1);
4896 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4897 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4898 case UNLE_EXPR:
4899 case UNLT_EXPR:
4900 if (flag_trapping_math)
4901 break;
4902 case LE_EXPR:
4903 case LT_EXPR:
4904 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4905 arg1 = fold_convert_loc (loc, signed_type_for
4906 (TREE_TYPE (arg1)), arg1);
4907 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4908 return negate_expr (fold_convert_loc (loc, type, tem));
4909 default:
4910 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4911 break;
4914 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4915 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4916 both transformations are correct when A is NaN: A != 0
4917 is then true, and A == 0 is false. */
4919 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4920 && integer_zerop (arg01) && integer_zerop (arg2))
4922 if (comp_code == NE_EXPR)
4923 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4924 else if (comp_code == EQ_EXPR)
4925 return build_zero_cst (type);
4928 /* Try some transformations of A op B ? A : B.
4930 A == B? A : B same as B
4931 A != B? A : B same as A
4932 A >= B? A : B same as max (A, B)
4933 A > B? A : B same as max (B, A)
4934 A <= B? A : B same as min (A, B)
4935 A < B? A : B same as min (B, A)
4937 As above, these transformations don't work in the presence
4938 of signed zeros. For example, if A and B are zeros of
4939 opposite sign, the first two transformations will change
4940 the sign of the result. In the last four, the original
4941 expressions give different results for (A=+0, B=-0) and
4942 (A=-0, B=+0), but the transformed expressions do not.
4944 The first two transformations are correct if either A or B
4945 is a NaN. In the first transformation, the condition will
4946 be false, and B will indeed be chosen. In the case of the
4947 second transformation, the condition A != B will be true,
4948 and A will be chosen.
4950 The conversions to max() and min() are not correct if B is
4951 a number and A is not. The conditions in the original
4952 expressions will be false, so all four give B. The min()
4953 and max() versions would give a NaN instead. */
4954 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4955 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4956 /* Avoid these transformations if the COND_EXPR may be used
4957 as an lvalue in the C++ front-end. PR c++/19199. */
4958 && (in_gimple_form
4959 || VECTOR_TYPE_P (type)
4960 || (! lang_GNU_CXX ()
4961 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4962 || ! maybe_lvalue_p (arg1)
4963 || ! maybe_lvalue_p (arg2)))
4965 tree comp_op0 = arg00;
4966 tree comp_op1 = arg01;
4967 tree comp_type = TREE_TYPE (comp_op0);
4969 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4970 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4972 comp_type = type;
4973 comp_op0 = arg1;
4974 comp_op1 = arg2;
4977 switch (comp_code)
4979 case EQ_EXPR:
4980 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4981 case NE_EXPR:
4982 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4983 case LE_EXPR:
4984 case LT_EXPR:
4985 case UNLE_EXPR:
4986 case UNLT_EXPR:
4987 /* In C++ a ?: expression can be an lvalue, so put the
4988 operand which will be used if they are equal first
4989 so that we can convert this back to the
4990 corresponding COND_EXPR. */
4991 if (!HONOR_NANS (element_mode (arg1)))
4993 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4994 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4995 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4996 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4997 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4998 comp_op1, comp_op0);
4999 return pedantic_non_lvalue_loc (loc,
5000 fold_convert_loc (loc, type, tem));
5002 break;
5003 case GE_EXPR:
5004 case GT_EXPR:
5005 case UNGE_EXPR:
5006 case UNGT_EXPR:
5007 if (!HONOR_NANS (element_mode (arg1)))
5009 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5010 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5011 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5012 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5013 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5014 comp_op1, comp_op0);
5015 return pedantic_non_lvalue_loc (loc,
5016 fold_convert_loc (loc, type, tem));
5018 break;
5019 case UNEQ_EXPR:
5020 if (!HONOR_NANS (element_mode (arg1)))
5021 return pedantic_non_lvalue_loc (loc,
5022 fold_convert_loc (loc, type, arg2));
5023 break;
5024 case LTGT_EXPR:
5025 if (!HONOR_NANS (element_mode (arg1)))
5026 return pedantic_non_lvalue_loc (loc,
5027 fold_convert_loc (loc, type, arg1));
5028 break;
5029 default:
5030 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5031 break;
5035 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5036 we might still be able to simplify this. For example,
5037 if C1 is one less or one more than C2, this might have started
5038 out as a MIN or MAX and been transformed by this function.
5039 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5041 if (INTEGRAL_TYPE_P (type)
5042 && TREE_CODE (arg01) == INTEGER_CST
5043 && TREE_CODE (arg2) == INTEGER_CST)
5044 switch (comp_code)
5046 case EQ_EXPR:
5047 if (TREE_CODE (arg1) == INTEGER_CST)
5048 break;
5049 /* We can replace A with C1 in this case. */
5050 arg1 = fold_convert_loc (loc, type, arg01);
5051 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5053 case LT_EXPR:
5054 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5055 MIN_EXPR, to preserve the signedness of the comparison. */
5056 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5057 OEP_ONLY_CONST)
5058 && operand_equal_p (arg01,
5059 const_binop (PLUS_EXPR, arg2,
5060 build_int_cst (type, 1)),
5061 OEP_ONLY_CONST))
5063 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5064 fold_convert_loc (loc, TREE_TYPE (arg00),
5065 arg2));
5066 return pedantic_non_lvalue_loc (loc,
5067 fold_convert_loc (loc, type, tem));
5069 break;
5071 case LE_EXPR:
5072 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5073 as above. */
5074 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5075 OEP_ONLY_CONST)
5076 && operand_equal_p (arg01,
5077 const_binop (MINUS_EXPR, arg2,
5078 build_int_cst (type, 1)),
5079 OEP_ONLY_CONST))
5081 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5082 fold_convert_loc (loc, TREE_TYPE (arg00),
5083 arg2));
5084 return pedantic_non_lvalue_loc (loc,
5085 fold_convert_loc (loc, type, tem));
5087 break;
5089 case GT_EXPR:
5090 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5091 MAX_EXPR, to preserve the signedness of the comparison. */
5092 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5093 OEP_ONLY_CONST)
5094 && operand_equal_p (arg01,
5095 const_binop (MINUS_EXPR, arg2,
5096 build_int_cst (type, 1)),
5097 OEP_ONLY_CONST))
5099 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5100 fold_convert_loc (loc, TREE_TYPE (arg00),
5101 arg2));
5102 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5104 break;
5106 case GE_EXPR:
5107 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5108 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5109 OEP_ONLY_CONST)
5110 && operand_equal_p (arg01,
5111 const_binop (PLUS_EXPR, arg2,
5112 build_int_cst (type, 1)),
5113 OEP_ONLY_CONST))
5115 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5116 fold_convert_loc (loc, TREE_TYPE (arg00),
5117 arg2));
5118 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5120 break;
5121 case NE_EXPR:
5122 break;
5123 default:
5124 gcc_unreachable ();
5127 return NULL_TREE;
5132 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5133 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5134 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5135 false) >= 2)
5136 #endif
5138 /* EXP is some logical combination of boolean tests. See if we can
5139 merge it into some range test. Return the new tree if so. */
5141 static tree
5142 fold_range_test (location_t loc, enum tree_code code, tree type,
5143 tree op0, tree op1)
5145 int or_op = (code == TRUTH_ORIF_EXPR
5146 || code == TRUTH_OR_EXPR);
5147 int in0_p, in1_p, in_p;
5148 tree low0, low1, low, high0, high1, high;
5149 bool strict_overflow_p = false;
5150 tree tem, lhs, rhs;
5151 const char * const warnmsg = G_("assuming signed overflow does not occur "
5152 "when simplifying range test");
5154 if (!INTEGRAL_TYPE_P (type))
5155 return 0;
5157 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5158 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5160 /* If this is an OR operation, invert both sides; we will invert
5161 again at the end. */
5162 if (or_op)
5163 in0_p = ! in0_p, in1_p = ! in1_p;
5165 /* If both expressions are the same, if we can merge the ranges, and we
5166 can build the range test, return it or it inverted. If one of the
5167 ranges is always true or always false, consider it to be the same
5168 expression as the other. */
5169 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5170 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5171 in1_p, low1, high1)
5172 && 0 != (tem = (build_range_check (loc, type,
5173 lhs != 0 ? lhs
5174 : rhs != 0 ? rhs : integer_zero_node,
5175 in_p, low, high))))
5177 if (strict_overflow_p)
5178 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5179 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5182 /* On machines where the branch cost is expensive, if this is a
5183 short-circuited branch and the underlying object on both sides
5184 is the same, make a non-short-circuit operation. */
5185 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5186 && lhs != 0 && rhs != 0
5187 && (code == TRUTH_ANDIF_EXPR
5188 || code == TRUTH_ORIF_EXPR)
5189 && operand_equal_p (lhs, rhs, 0))
5191 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5192 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5193 which cases we can't do this. */
5194 if (simple_operand_p (lhs))
5195 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5196 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5197 type, op0, op1);
5199 else if (!lang_hooks.decls.global_bindings_p ()
5200 && !CONTAINS_PLACEHOLDER_P (lhs))
5202 tree common = save_expr (lhs);
5204 if (0 != (lhs = build_range_check (loc, type, common,
5205 or_op ? ! in0_p : in0_p,
5206 low0, high0))
5207 && (0 != (rhs = build_range_check (loc, type, common,
5208 or_op ? ! in1_p : in1_p,
5209 low1, high1))))
5211 if (strict_overflow_p)
5212 fold_overflow_warning (warnmsg,
5213 WARN_STRICT_OVERFLOW_COMPARISON);
5214 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5215 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5216 type, lhs, rhs);
5221 return 0;
5224 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5225 bit value. Arrange things so the extra bits will be set to zero if and
5226 only if C is signed-extended to its full width. If MASK is nonzero,
5227 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5229 static tree
5230 unextend (tree c, int p, int unsignedp, tree mask)
5232 tree type = TREE_TYPE (c);
5233 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5234 tree temp;
5236 if (p == modesize || unsignedp)
5237 return c;
5239 /* We work by getting just the sign bit into the low-order bit, then
5240 into the high-order bit, then sign-extend. We then XOR that value
5241 with C. */
5242 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5244 /* We must use a signed type in order to get an arithmetic right shift.
5245 However, we must also avoid introducing accidental overflows, so that
5246 a subsequent call to integer_zerop will work. Hence we must
5247 do the type conversion here. At this point, the constant is either
5248 zero or one, and the conversion to a signed type can never overflow.
5249 We could get an overflow if this conversion is done anywhere else. */
5250 if (TYPE_UNSIGNED (type))
5251 temp = fold_convert (signed_type_for (type), temp);
5253 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5254 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5255 if (mask != 0)
5256 temp = const_binop (BIT_AND_EXPR, temp,
5257 fold_convert (TREE_TYPE (c), mask));
5258 /* If necessary, convert the type back to match the type of C. */
5259 if (TYPE_UNSIGNED (type))
5260 temp = fold_convert (type, temp);
5262 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5265 /* For an expression that has the form
5266 (A && B) || ~B
5268 (A || B) && ~B,
5269 we can drop one of the inner expressions and simplify to
5270 A || ~B
5272 A && ~B
5273 LOC is the location of the resulting expression. OP is the inner
5274 logical operation; the left-hand side in the examples above, while CMPOP
5275 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5276 removing a condition that guards another, as in
5277 (A != NULL && A->...) || A == NULL
5278 which we must not transform. If RHS_ONLY is true, only eliminate the
5279 right-most operand of the inner logical operation. */
5281 static tree
5282 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5283 bool rhs_only)
5285 tree type = TREE_TYPE (cmpop);
5286 enum tree_code code = TREE_CODE (cmpop);
5287 enum tree_code truthop_code = TREE_CODE (op);
5288 tree lhs = TREE_OPERAND (op, 0);
5289 tree rhs = TREE_OPERAND (op, 1);
5290 tree orig_lhs = lhs, orig_rhs = rhs;
5291 enum tree_code rhs_code = TREE_CODE (rhs);
5292 enum tree_code lhs_code = TREE_CODE (lhs);
5293 enum tree_code inv_code;
5295 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5296 return NULL_TREE;
5298 if (TREE_CODE_CLASS (code) != tcc_comparison)
5299 return NULL_TREE;
5301 if (rhs_code == truthop_code)
5303 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5304 if (newrhs != NULL_TREE)
5306 rhs = newrhs;
5307 rhs_code = TREE_CODE (rhs);
5310 if (lhs_code == truthop_code && !rhs_only)
5312 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5313 if (newlhs != NULL_TREE)
5315 lhs = newlhs;
5316 lhs_code = TREE_CODE (lhs);
5320 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5321 if (inv_code == rhs_code
5322 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5323 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5324 return lhs;
5325 if (!rhs_only && inv_code == lhs_code
5326 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5327 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5328 return rhs;
5329 if (rhs != orig_rhs || lhs != orig_lhs)
5330 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5331 lhs, rhs);
5332 return NULL_TREE;
5335 /* Find ways of folding logical expressions of LHS and RHS:
5336 Try to merge two comparisons to the same innermost item.
5337 Look for range tests like "ch >= '0' && ch <= '9'".
5338 Look for combinations of simple terms on machines with expensive branches
5339 and evaluate the RHS unconditionally.
5341 For example, if we have p->a == 2 && p->b == 4 and we can make an
5342 object large enough to span both A and B, we can do this with a comparison
5343 against the object ANDed with the a mask.
5345 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5346 operations to do this with one comparison.
5348 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5349 function and the one above.
5351 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5352 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5354 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5355 two operands.
5357 We return the simplified tree or 0 if no optimization is possible. */
5359 static tree
5360 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5361 tree lhs, tree rhs)
5363 /* If this is the "or" of two comparisons, we can do something if
5364 the comparisons are NE_EXPR. If this is the "and", we can do something
5365 if the comparisons are EQ_EXPR. I.e.,
5366 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5368 WANTED_CODE is this operation code. For single bit fields, we can
5369 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5370 comparison for one-bit fields. */
5372 enum tree_code wanted_code;
5373 enum tree_code lcode, rcode;
5374 tree ll_arg, lr_arg, rl_arg, rr_arg;
5375 tree ll_inner, lr_inner, rl_inner, rr_inner;
5376 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5377 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5378 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5379 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5380 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5381 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5382 machine_mode lnmode, rnmode;
5383 tree ll_mask, lr_mask, rl_mask, rr_mask;
5384 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5385 tree l_const, r_const;
5386 tree lntype, rntype, result;
5387 HOST_WIDE_INT first_bit, end_bit;
5388 int volatilep;
5390 /* Start by getting the comparison codes. Fail if anything is volatile.
5391 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5392 it were surrounded with a NE_EXPR. */
5394 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5395 return 0;
5397 lcode = TREE_CODE (lhs);
5398 rcode = TREE_CODE (rhs);
5400 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5402 lhs = build2 (NE_EXPR, truth_type, lhs,
5403 build_int_cst (TREE_TYPE (lhs), 0));
5404 lcode = NE_EXPR;
5407 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5409 rhs = build2 (NE_EXPR, truth_type, rhs,
5410 build_int_cst (TREE_TYPE (rhs), 0));
5411 rcode = NE_EXPR;
5414 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5415 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5416 return 0;
5418 ll_arg = TREE_OPERAND (lhs, 0);
5419 lr_arg = TREE_OPERAND (lhs, 1);
5420 rl_arg = TREE_OPERAND (rhs, 0);
5421 rr_arg = TREE_OPERAND (rhs, 1);
5423 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5424 if (simple_operand_p (ll_arg)
5425 && simple_operand_p (lr_arg))
5427 if (operand_equal_p (ll_arg, rl_arg, 0)
5428 && operand_equal_p (lr_arg, rr_arg, 0))
5430 result = combine_comparisons (loc, code, lcode, rcode,
5431 truth_type, ll_arg, lr_arg);
5432 if (result)
5433 return result;
5435 else if (operand_equal_p (ll_arg, rr_arg, 0)
5436 && operand_equal_p (lr_arg, rl_arg, 0))
5438 result = combine_comparisons (loc, code, lcode,
5439 swap_tree_comparison (rcode),
5440 truth_type, ll_arg, lr_arg);
5441 if (result)
5442 return result;
5446 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5447 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5449 /* If the RHS can be evaluated unconditionally and its operands are
5450 simple, it wins to evaluate the RHS unconditionally on machines
5451 with expensive branches. In this case, this isn't a comparison
5452 that can be merged. */
5454 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5455 false) >= 2
5456 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5457 && simple_operand_p (rl_arg)
5458 && simple_operand_p (rr_arg))
5460 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5461 if (code == TRUTH_OR_EXPR
5462 && lcode == NE_EXPR && integer_zerop (lr_arg)
5463 && rcode == NE_EXPR && integer_zerop (rr_arg)
5464 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5465 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5466 return build2_loc (loc, NE_EXPR, truth_type,
5467 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5468 ll_arg, rl_arg),
5469 build_int_cst (TREE_TYPE (ll_arg), 0));
5471 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5472 if (code == TRUTH_AND_EXPR
5473 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5474 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5475 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5476 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5477 return build2_loc (loc, EQ_EXPR, truth_type,
5478 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5479 ll_arg, rl_arg),
5480 build_int_cst (TREE_TYPE (ll_arg), 0));
5483 /* See if the comparisons can be merged. Then get all the parameters for
5484 each side. */
5486 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5487 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5488 return 0;
5490 volatilep = 0;
5491 ll_inner = decode_field_reference (loc, ll_arg,
5492 &ll_bitsize, &ll_bitpos, &ll_mode,
5493 &ll_unsignedp, &volatilep, &ll_mask,
5494 &ll_and_mask);
5495 lr_inner = decode_field_reference (loc, lr_arg,
5496 &lr_bitsize, &lr_bitpos, &lr_mode,
5497 &lr_unsignedp, &volatilep, &lr_mask,
5498 &lr_and_mask);
5499 rl_inner = decode_field_reference (loc, rl_arg,
5500 &rl_bitsize, &rl_bitpos, &rl_mode,
5501 &rl_unsignedp, &volatilep, &rl_mask,
5502 &rl_and_mask);
5503 rr_inner = decode_field_reference (loc, rr_arg,
5504 &rr_bitsize, &rr_bitpos, &rr_mode,
5505 &rr_unsignedp, &volatilep, &rr_mask,
5506 &rr_and_mask);
5508 /* It must be true that the inner operation on the lhs of each
5509 comparison must be the same if we are to be able to do anything.
5510 Then see if we have constants. If not, the same must be true for
5511 the rhs's. */
5512 if (volatilep || ll_inner == 0 || rl_inner == 0
5513 || ! operand_equal_p (ll_inner, rl_inner, 0))
5514 return 0;
5516 if (TREE_CODE (lr_arg) == INTEGER_CST
5517 && TREE_CODE (rr_arg) == INTEGER_CST)
5518 l_const = lr_arg, r_const = rr_arg;
5519 else if (lr_inner == 0 || rr_inner == 0
5520 || ! operand_equal_p (lr_inner, rr_inner, 0))
5521 return 0;
5522 else
5523 l_const = r_const = 0;
5525 /* If either comparison code is not correct for our logical operation,
5526 fail. However, we can convert a one-bit comparison against zero into
5527 the opposite comparison against that bit being set in the field. */
5529 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5530 if (lcode != wanted_code)
5532 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5534 /* Make the left operand unsigned, since we are only interested
5535 in the value of one bit. Otherwise we are doing the wrong
5536 thing below. */
5537 ll_unsignedp = 1;
5538 l_const = ll_mask;
5540 else
5541 return 0;
5544 /* This is analogous to the code for l_const above. */
5545 if (rcode != wanted_code)
5547 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5549 rl_unsignedp = 1;
5550 r_const = rl_mask;
5552 else
5553 return 0;
5556 /* See if we can find a mode that contains both fields being compared on
5557 the left. If we can't, fail. Otherwise, update all constants and masks
5558 to be relative to a field of that size. */
5559 first_bit = MIN (ll_bitpos, rl_bitpos);
5560 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5561 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5562 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5563 volatilep);
5564 if (lnmode == VOIDmode)
5565 return 0;
5567 lnbitsize = GET_MODE_BITSIZE (lnmode);
5568 lnbitpos = first_bit & ~ (lnbitsize - 1);
5569 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5570 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5572 if (BYTES_BIG_ENDIAN)
5574 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5575 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5578 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5579 size_int (xll_bitpos));
5580 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5581 size_int (xrl_bitpos));
5583 if (l_const)
5585 l_const = fold_convert_loc (loc, lntype, l_const);
5586 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5587 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5588 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5589 fold_build1_loc (loc, BIT_NOT_EXPR,
5590 lntype, ll_mask))))
5592 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5594 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5597 if (r_const)
5599 r_const = fold_convert_loc (loc, lntype, r_const);
5600 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5601 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5602 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5603 fold_build1_loc (loc, BIT_NOT_EXPR,
5604 lntype, rl_mask))))
5606 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5608 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5612 /* If the right sides are not constant, do the same for it. Also,
5613 disallow this optimization if a size or signedness mismatch occurs
5614 between the left and right sides. */
5615 if (l_const == 0)
5617 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5618 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5619 /* Make sure the two fields on the right
5620 correspond to the left without being swapped. */
5621 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5622 return 0;
5624 first_bit = MIN (lr_bitpos, rr_bitpos);
5625 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5626 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5627 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5628 volatilep);
5629 if (rnmode == VOIDmode)
5630 return 0;
5632 rnbitsize = GET_MODE_BITSIZE (rnmode);
5633 rnbitpos = first_bit & ~ (rnbitsize - 1);
5634 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5635 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5637 if (BYTES_BIG_ENDIAN)
5639 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5640 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5643 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5644 rntype, lr_mask),
5645 size_int (xlr_bitpos));
5646 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5647 rntype, rr_mask),
5648 size_int (xrr_bitpos));
5650 /* Make a mask that corresponds to both fields being compared.
5651 Do this for both items being compared. If the operands are the
5652 same size and the bits being compared are in the same position
5653 then we can do this by masking both and comparing the masked
5654 results. */
5655 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5656 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5657 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5659 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5660 ll_unsignedp || rl_unsignedp);
5661 if (! all_ones_mask_p (ll_mask, lnbitsize))
5662 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5664 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5665 lr_unsignedp || rr_unsignedp);
5666 if (! all_ones_mask_p (lr_mask, rnbitsize))
5667 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5669 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5672 /* There is still another way we can do something: If both pairs of
5673 fields being compared are adjacent, we may be able to make a wider
5674 field containing them both.
5676 Note that we still must mask the lhs/rhs expressions. Furthermore,
5677 the mask must be shifted to account for the shift done by
5678 make_bit_field_ref. */
5679 if ((ll_bitsize + ll_bitpos == rl_bitpos
5680 && lr_bitsize + lr_bitpos == rr_bitpos)
5681 || (ll_bitpos == rl_bitpos + rl_bitsize
5682 && lr_bitpos == rr_bitpos + rr_bitsize))
5684 tree type;
5686 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5687 ll_bitsize + rl_bitsize,
5688 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5689 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5690 lr_bitsize + rr_bitsize,
5691 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5693 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5694 size_int (MIN (xll_bitpos, xrl_bitpos)));
5695 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5696 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5698 /* Convert to the smaller type before masking out unwanted bits. */
5699 type = lntype;
5700 if (lntype != rntype)
5702 if (lnbitsize > rnbitsize)
5704 lhs = fold_convert_loc (loc, rntype, lhs);
5705 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5706 type = rntype;
5708 else if (lnbitsize < rnbitsize)
5710 rhs = fold_convert_loc (loc, lntype, rhs);
5711 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5712 type = lntype;
5716 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5717 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5719 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5720 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5722 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5725 return 0;
5728 /* Handle the case of comparisons with constants. If there is something in
5729 common between the masks, those bits of the constants must be the same.
5730 If not, the condition is always false. Test for this to avoid generating
5731 incorrect code below. */
5732 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5733 if (! integer_zerop (result)
5734 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5735 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5737 if (wanted_code == NE_EXPR)
5739 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5740 return constant_boolean_node (true, truth_type);
5742 else
5744 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5745 return constant_boolean_node (false, truth_type);
5749 /* Construct the expression we will return. First get the component
5750 reference we will make. Unless the mask is all ones the width of
5751 that field, perform the mask operation. Then compare with the
5752 merged constant. */
5753 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5754 ll_unsignedp || rl_unsignedp);
5756 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5757 if (! all_ones_mask_p (ll_mask, lnbitsize))
5758 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5760 return build2_loc (loc, wanted_code, truth_type, result,
5761 const_binop (BIT_IOR_EXPR, l_const, r_const));
5764 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5765 constant. */
5767 static tree
5768 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5769 tree op0, tree op1)
5771 tree arg0 = op0;
5772 enum tree_code op_code;
5773 tree comp_const;
5774 tree minmax_const;
5775 int consts_equal, consts_lt;
5776 tree inner;
5778 STRIP_SIGN_NOPS (arg0);
5780 op_code = TREE_CODE (arg0);
5781 minmax_const = TREE_OPERAND (arg0, 1);
5782 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5783 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5784 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5785 inner = TREE_OPERAND (arg0, 0);
5787 /* If something does not permit us to optimize, return the original tree. */
5788 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5789 || TREE_CODE (comp_const) != INTEGER_CST
5790 || TREE_OVERFLOW (comp_const)
5791 || TREE_CODE (minmax_const) != INTEGER_CST
5792 || TREE_OVERFLOW (minmax_const))
5793 return NULL_TREE;
5795 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5796 and GT_EXPR, doing the rest with recursive calls using logical
5797 simplifications. */
5798 switch (code)
5800 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5802 tree tem
5803 = optimize_minmax_comparison (loc,
5804 invert_tree_comparison (code, false),
5805 type, op0, op1);
5806 if (tem)
5807 return invert_truthvalue_loc (loc, tem);
5808 return NULL_TREE;
5811 case GE_EXPR:
5812 return
5813 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5814 optimize_minmax_comparison
5815 (loc, EQ_EXPR, type, arg0, comp_const),
5816 optimize_minmax_comparison
5817 (loc, GT_EXPR, type, arg0, comp_const));
5819 case EQ_EXPR:
5820 if (op_code == MAX_EXPR && consts_equal)
5821 /* MAX (X, 0) == 0 -> X <= 0 */
5822 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5824 else if (op_code == MAX_EXPR && consts_lt)
5825 /* MAX (X, 0) == 5 -> X == 5 */
5826 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5828 else if (op_code == MAX_EXPR)
5829 /* MAX (X, 0) == -1 -> false */
5830 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5832 else if (consts_equal)
5833 /* MIN (X, 0) == 0 -> X >= 0 */
5834 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5836 else if (consts_lt)
5837 /* MIN (X, 0) == 5 -> false */
5838 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5840 else
5841 /* MIN (X, 0) == -1 -> X == -1 */
5842 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5844 case GT_EXPR:
5845 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5846 /* MAX (X, 0) > 0 -> X > 0
5847 MAX (X, 0) > 5 -> X > 5 */
5848 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5850 else if (op_code == MAX_EXPR)
5851 /* MAX (X, 0) > -1 -> true */
5852 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5854 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5855 /* MIN (X, 0) > 0 -> false
5856 MIN (X, 0) > 5 -> false */
5857 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5859 else
5860 /* MIN (X, 0) > -1 -> X > -1 */
5861 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5863 default:
5864 return NULL_TREE;
5868 /* T is an integer expression that is being multiplied, divided, or taken a
5869 modulus (CODE says which and what kind of divide or modulus) by a
5870 constant C. See if we can eliminate that operation by folding it with
5871 other operations already in T. WIDE_TYPE, if non-null, is a type that
5872 should be used for the computation if wider than our type.
5874 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5875 (X * 2) + (Y * 4). We must, however, be assured that either the original
5876 expression would not overflow or that overflow is undefined for the type
5877 in the language in question.
5879 If we return a non-null expression, it is an equivalent form of the
5880 original computation, but need not be in the original type.
5882 We set *STRICT_OVERFLOW_P to true if the return values depends on
5883 signed overflow being undefined. Otherwise we do not change
5884 *STRICT_OVERFLOW_P. */
5886 static tree
5887 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5888 bool *strict_overflow_p)
5890 /* To avoid exponential search depth, refuse to allow recursion past
5891 three levels. Beyond that (1) it's highly unlikely that we'll find
5892 something interesting and (2) we've probably processed it before
5893 when we built the inner expression. */
5895 static int depth;
5896 tree ret;
5898 if (depth > 3)
5899 return NULL;
5901 depth++;
5902 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5903 depth--;
5905 return ret;
5908 static tree
5909 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5910 bool *strict_overflow_p)
5912 tree type = TREE_TYPE (t);
5913 enum tree_code tcode = TREE_CODE (t);
5914 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5915 > GET_MODE_SIZE (TYPE_MODE (type)))
5916 ? wide_type : type);
5917 tree t1, t2;
5918 int same_p = tcode == code;
5919 tree op0 = NULL_TREE, op1 = NULL_TREE;
5920 bool sub_strict_overflow_p;
5922 /* Don't deal with constants of zero here; they confuse the code below. */
5923 if (integer_zerop (c))
5924 return NULL_TREE;
5926 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5927 op0 = TREE_OPERAND (t, 0);
5929 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5930 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5932 /* Note that we need not handle conditional operations here since fold
5933 already handles those cases. So just do arithmetic here. */
5934 switch (tcode)
5936 case INTEGER_CST:
5937 /* For a constant, we can always simplify if we are a multiply
5938 or (for divide and modulus) if it is a multiple of our constant. */
5939 if (code == MULT_EXPR
5940 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5941 return const_binop (code, fold_convert (ctype, t),
5942 fold_convert (ctype, c));
5943 break;
5945 CASE_CONVERT: case NON_LVALUE_EXPR:
5946 /* If op0 is an expression ... */
5947 if ((COMPARISON_CLASS_P (op0)
5948 || UNARY_CLASS_P (op0)
5949 || BINARY_CLASS_P (op0)
5950 || VL_EXP_CLASS_P (op0)
5951 || EXPRESSION_CLASS_P (op0))
5952 /* ... and has wrapping overflow, and its type is smaller
5953 than ctype, then we cannot pass through as widening. */
5954 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5955 && (TYPE_PRECISION (ctype)
5956 > TYPE_PRECISION (TREE_TYPE (op0))))
5957 /* ... or this is a truncation (t is narrower than op0),
5958 then we cannot pass through this narrowing. */
5959 || (TYPE_PRECISION (type)
5960 < TYPE_PRECISION (TREE_TYPE (op0)))
5961 /* ... or signedness changes for division or modulus,
5962 then we cannot pass through this conversion. */
5963 || (code != MULT_EXPR
5964 && (TYPE_UNSIGNED (ctype)
5965 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5966 /* ... or has undefined overflow while the converted to
5967 type has not, we cannot do the operation in the inner type
5968 as that would introduce undefined overflow. */
5969 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5970 && !TYPE_OVERFLOW_UNDEFINED (type))))
5971 break;
5973 /* Pass the constant down and see if we can make a simplification. If
5974 we can, replace this expression with the inner simplification for
5975 possible later conversion to our or some other type. */
5976 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5977 && TREE_CODE (t2) == INTEGER_CST
5978 && !TREE_OVERFLOW (t2)
5979 && (0 != (t1 = extract_muldiv (op0, t2, code,
5980 code == MULT_EXPR
5981 ? ctype : NULL_TREE,
5982 strict_overflow_p))))
5983 return t1;
5984 break;
5986 case ABS_EXPR:
5987 /* If widening the type changes it from signed to unsigned, then we
5988 must avoid building ABS_EXPR itself as unsigned. */
5989 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5991 tree cstype = (*signed_type_for) (ctype);
5992 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5993 != 0)
5995 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5996 return fold_convert (ctype, t1);
5998 break;
6000 /* If the constant is negative, we cannot simplify this. */
6001 if (tree_int_cst_sgn (c) == -1)
6002 break;
6003 /* FALLTHROUGH */
6004 case NEGATE_EXPR:
6005 /* For division and modulus, type can't be unsigned, as e.g.
6006 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6007 For signed types, even with wrapping overflow, this is fine. */
6008 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6009 break;
6010 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6011 != 0)
6012 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6013 break;
6015 case MIN_EXPR: case MAX_EXPR:
6016 /* If widening the type changes the signedness, then we can't perform
6017 this optimization as that changes the result. */
6018 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6019 break;
6021 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6022 sub_strict_overflow_p = false;
6023 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6024 &sub_strict_overflow_p)) != 0
6025 && (t2 = extract_muldiv (op1, c, code, wide_type,
6026 &sub_strict_overflow_p)) != 0)
6028 if (tree_int_cst_sgn (c) < 0)
6029 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6030 if (sub_strict_overflow_p)
6031 *strict_overflow_p = true;
6032 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6033 fold_convert (ctype, t2));
6035 break;
6037 case LSHIFT_EXPR: case RSHIFT_EXPR:
6038 /* If the second operand is constant, this is a multiplication
6039 or floor division, by a power of two, so we can treat it that
6040 way unless the multiplier or divisor overflows. Signed
6041 left-shift overflow is implementation-defined rather than
6042 undefined in C90, so do not convert signed left shift into
6043 multiplication. */
6044 if (TREE_CODE (op1) == INTEGER_CST
6045 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6046 /* const_binop may not detect overflow correctly,
6047 so check for it explicitly here. */
6048 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6049 && 0 != (t1 = fold_convert (ctype,
6050 const_binop (LSHIFT_EXPR,
6051 size_one_node,
6052 op1)))
6053 && !TREE_OVERFLOW (t1))
6054 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6055 ? MULT_EXPR : FLOOR_DIV_EXPR,
6056 ctype,
6057 fold_convert (ctype, op0),
6058 t1),
6059 c, code, wide_type, strict_overflow_p);
6060 break;
6062 case PLUS_EXPR: case MINUS_EXPR:
6063 /* See if we can eliminate the operation on both sides. If we can, we
6064 can return a new PLUS or MINUS. If we can't, the only remaining
6065 cases where we can do anything are if the second operand is a
6066 constant. */
6067 sub_strict_overflow_p = false;
6068 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6069 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6070 if (t1 != 0 && t2 != 0
6071 && (code == MULT_EXPR
6072 /* If not multiplication, we can only do this if both operands
6073 are divisible by c. */
6074 || (multiple_of_p (ctype, op0, c)
6075 && multiple_of_p (ctype, op1, c))))
6077 if (sub_strict_overflow_p)
6078 *strict_overflow_p = true;
6079 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6080 fold_convert (ctype, t2));
6083 /* If this was a subtraction, negate OP1 and set it to be an addition.
6084 This simplifies the logic below. */
6085 if (tcode == MINUS_EXPR)
6087 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6088 /* If OP1 was not easily negatable, the constant may be OP0. */
6089 if (TREE_CODE (op0) == INTEGER_CST)
6091 tree tem = op0;
6092 op0 = op1;
6093 op1 = tem;
6094 tem = t1;
6095 t1 = t2;
6096 t2 = tem;
6100 if (TREE_CODE (op1) != INTEGER_CST)
6101 break;
6103 /* If either OP1 or C are negative, this optimization is not safe for
6104 some of the division and remainder types while for others we need
6105 to change the code. */
6106 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6108 if (code == CEIL_DIV_EXPR)
6109 code = FLOOR_DIV_EXPR;
6110 else if (code == FLOOR_DIV_EXPR)
6111 code = CEIL_DIV_EXPR;
6112 else if (code != MULT_EXPR
6113 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6114 break;
6117 /* If it's a multiply or a division/modulus operation of a multiple
6118 of our constant, do the operation and verify it doesn't overflow. */
6119 if (code == MULT_EXPR
6120 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6122 op1 = const_binop (code, fold_convert (ctype, op1),
6123 fold_convert (ctype, c));
6124 /* We allow the constant to overflow with wrapping semantics. */
6125 if (op1 == 0
6126 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6127 break;
6129 else
6130 break;
6132 /* If we have an unsigned type, we cannot widen the operation since it
6133 will change the result if the original computation overflowed. */
6134 if (TYPE_UNSIGNED (ctype) && ctype != type)
6135 break;
6137 /* If we were able to eliminate our operation from the first side,
6138 apply our operation to the second side and reform the PLUS. */
6139 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6140 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6142 /* The last case is if we are a multiply. In that case, we can
6143 apply the distributive law to commute the multiply and addition
6144 if the multiplication of the constants doesn't overflow
6145 and overflow is defined. With undefined overflow
6146 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6147 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6148 return fold_build2 (tcode, ctype,
6149 fold_build2 (code, ctype,
6150 fold_convert (ctype, op0),
6151 fold_convert (ctype, c)),
6152 op1);
6154 break;
6156 case MULT_EXPR:
6157 /* We have a special case here if we are doing something like
6158 (C * 8) % 4 since we know that's zero. */
6159 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6160 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6161 /* If the multiplication can overflow we cannot optimize this. */
6162 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6163 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6164 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6166 *strict_overflow_p = true;
6167 return omit_one_operand (type, integer_zero_node, op0);
6170 /* ... fall through ... */
6172 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6173 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6174 /* If we can extract our operation from the LHS, do so and return a
6175 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6176 do something only if the second operand is a constant. */
6177 if (same_p
6178 && (t1 = extract_muldiv (op0, c, code, wide_type,
6179 strict_overflow_p)) != 0)
6180 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6181 fold_convert (ctype, op1));
6182 else if (tcode == MULT_EXPR && code == MULT_EXPR
6183 && (t1 = extract_muldiv (op1, c, code, wide_type,
6184 strict_overflow_p)) != 0)
6185 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6186 fold_convert (ctype, t1));
6187 else if (TREE_CODE (op1) != INTEGER_CST)
6188 return 0;
6190 /* If these are the same operation types, we can associate them
6191 assuming no overflow. */
6192 if (tcode == code)
6194 bool overflow_p = false;
6195 bool overflow_mul_p;
6196 signop sign = TYPE_SIGN (ctype);
6197 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6198 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6199 if (overflow_mul_p
6200 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6201 overflow_p = true;
6202 if (!overflow_p)
6203 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6204 wide_int_to_tree (ctype, mul));
6207 /* If these operations "cancel" each other, we have the main
6208 optimizations of this pass, which occur when either constant is a
6209 multiple of the other, in which case we replace this with either an
6210 operation or CODE or TCODE.
6212 If we have an unsigned type, we cannot do this since it will change
6213 the result if the original computation overflowed. */
6214 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6215 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6216 || (tcode == MULT_EXPR
6217 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6218 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6219 && code != MULT_EXPR)))
6221 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6223 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6224 *strict_overflow_p = true;
6225 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6226 fold_convert (ctype,
6227 const_binop (TRUNC_DIV_EXPR,
6228 op1, c)));
6230 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6232 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6233 *strict_overflow_p = true;
6234 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6235 fold_convert (ctype,
6236 const_binop (TRUNC_DIV_EXPR,
6237 c, op1)));
6240 break;
6242 default:
6243 break;
6246 return 0;
6249 /* Return a node which has the indicated constant VALUE (either 0 or
6250 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6251 and is of the indicated TYPE. */
6253 tree
6254 constant_boolean_node (bool value, tree type)
6256 if (type == integer_type_node)
6257 return value ? integer_one_node : integer_zero_node;
6258 else if (type == boolean_type_node)
6259 return value ? boolean_true_node : boolean_false_node;
6260 else if (TREE_CODE (type) == VECTOR_TYPE)
6261 return build_vector_from_val (type,
6262 build_int_cst (TREE_TYPE (type),
6263 value ? -1 : 0));
6264 else
6265 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6269 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6270 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6271 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6272 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6273 COND is the first argument to CODE; otherwise (as in the example
6274 given here), it is the second argument. TYPE is the type of the
6275 original expression. Return NULL_TREE if no simplification is
6276 possible. */
6278 static tree
6279 fold_binary_op_with_conditional_arg (location_t loc,
6280 enum tree_code code,
6281 tree type, tree op0, tree op1,
6282 tree cond, tree arg, int cond_first_p)
6284 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6285 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6286 tree test, true_value, false_value;
6287 tree lhs = NULL_TREE;
6288 tree rhs = NULL_TREE;
6289 enum tree_code cond_code = COND_EXPR;
6291 if (TREE_CODE (cond) == COND_EXPR
6292 || TREE_CODE (cond) == VEC_COND_EXPR)
6294 test = TREE_OPERAND (cond, 0);
6295 true_value = TREE_OPERAND (cond, 1);
6296 false_value = TREE_OPERAND (cond, 2);
6297 /* If this operand throws an expression, then it does not make
6298 sense to try to perform a logical or arithmetic operation
6299 involving it. */
6300 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6301 lhs = true_value;
6302 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6303 rhs = false_value;
6305 else
6307 tree testtype = TREE_TYPE (cond);
6308 test = cond;
6309 true_value = constant_boolean_node (true, testtype);
6310 false_value = constant_boolean_node (false, testtype);
6313 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6314 cond_code = VEC_COND_EXPR;
6316 /* This transformation is only worthwhile if we don't have to wrap ARG
6317 in a SAVE_EXPR and the operation can be simplified without recursing
6318 on at least one of the branches once its pushed inside the COND_EXPR. */
6319 if (!TREE_CONSTANT (arg)
6320 && (TREE_SIDE_EFFECTS (arg)
6321 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6322 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6323 return NULL_TREE;
6325 arg = fold_convert_loc (loc, arg_type, arg);
6326 if (lhs == 0)
6328 true_value = fold_convert_loc (loc, cond_type, true_value);
6329 if (cond_first_p)
6330 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6331 else
6332 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6334 if (rhs == 0)
6336 false_value = fold_convert_loc (loc, cond_type, false_value);
6337 if (cond_first_p)
6338 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6339 else
6340 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6343 /* Check that we have simplified at least one of the branches. */
6344 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6345 return NULL_TREE;
6347 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6351 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6353 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6354 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6355 ADDEND is the same as X.
6357 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6358 and finite. The problematic cases are when X is zero, and its mode
6359 has signed zeros. In the case of rounding towards -infinity,
6360 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6361 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6363 bool
6364 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6366 if (!real_zerop (addend))
6367 return false;
6369 /* Don't allow the fold with -fsignaling-nans. */
6370 if (HONOR_SNANS (element_mode (type)))
6371 return false;
6373 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6374 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6375 return true;
6377 /* In a vector or complex, we would need to check the sign of all zeros. */
6378 if (TREE_CODE (addend) != REAL_CST)
6379 return false;
6381 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6382 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6383 negate = !negate;
6385 /* The mode has signed zeros, and we have to honor their sign.
6386 In this situation, there is only one case we can return true for.
6387 X - 0 is the same as X unless rounding towards -infinity is
6388 supported. */
6389 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6392 /* Subroutine of fold() that checks comparisons of built-in math
6393 functions against real constants.
6395 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6396 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6397 is the type of the result and ARG0 and ARG1 are the operands of the
6398 comparison. ARG1 must be a TREE_REAL_CST.
6400 The function returns the constant folded tree if a simplification
6401 can be made, and NULL_TREE otherwise. */
6403 static tree
6404 fold_mathfn_compare (location_t loc,
6405 enum built_in_function fcode, enum tree_code code,
6406 tree type, tree arg0, tree arg1)
6408 REAL_VALUE_TYPE c;
6410 if (BUILTIN_SQRT_P (fcode))
6412 tree arg = CALL_EXPR_ARG (arg0, 0);
6413 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6415 c = TREE_REAL_CST (arg1);
6416 if (REAL_VALUE_NEGATIVE (c))
6418 /* sqrt(x) < y is always false, if y is negative. */
6419 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6420 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6422 /* sqrt(x) > y is always true, if y is negative and we
6423 don't care about NaNs, i.e. negative values of x. */
6424 if (code == NE_EXPR || !HONOR_NANS (mode))
6425 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6427 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6428 return fold_build2_loc (loc, GE_EXPR, type, arg,
6429 build_real (TREE_TYPE (arg), dconst0));
6431 else if (code == GT_EXPR || code == GE_EXPR)
6433 REAL_VALUE_TYPE c2;
6435 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6436 real_convert (&c2, mode, &c2);
6438 if (REAL_VALUE_ISINF (c2))
6440 /* sqrt(x) > y is x == +Inf, when y is very large. */
6441 if (HONOR_INFINITIES (mode))
6442 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6443 build_real (TREE_TYPE (arg), c2));
6445 /* sqrt(x) > y is always false, when y is very large
6446 and we don't care about infinities. */
6447 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6450 /* sqrt(x) > c is the same as x > c*c. */
6451 return fold_build2_loc (loc, code, type, arg,
6452 build_real (TREE_TYPE (arg), c2));
6454 else if (code == LT_EXPR || code == LE_EXPR)
6456 REAL_VALUE_TYPE c2;
6458 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6459 real_convert (&c2, mode, &c2);
6461 if (REAL_VALUE_ISINF (c2))
6463 /* sqrt(x) < y is always true, when y is a very large
6464 value and we don't care about NaNs or Infinities. */
6465 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6466 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6468 /* sqrt(x) < y is x != +Inf when y is very large and we
6469 don't care about NaNs. */
6470 if (! HONOR_NANS (mode))
6471 return fold_build2_loc (loc, NE_EXPR, type, arg,
6472 build_real (TREE_TYPE (arg), c2));
6474 /* sqrt(x) < y is x >= 0 when y is very large and we
6475 don't care about Infinities. */
6476 if (! HONOR_INFINITIES (mode))
6477 return fold_build2_loc (loc, GE_EXPR, type, arg,
6478 build_real (TREE_TYPE (arg), dconst0));
6480 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6481 arg = save_expr (arg);
6482 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6483 fold_build2_loc (loc, GE_EXPR, type, arg,
6484 build_real (TREE_TYPE (arg),
6485 dconst0)),
6486 fold_build2_loc (loc, NE_EXPR, type, arg,
6487 build_real (TREE_TYPE (arg),
6488 c2)));
6491 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6492 if (! HONOR_NANS (mode))
6493 return fold_build2_loc (loc, code, type, arg,
6494 build_real (TREE_TYPE (arg), c2));
6496 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6497 arg = save_expr (arg);
6498 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6499 fold_build2_loc (loc, GE_EXPR, type, arg,
6500 build_real (TREE_TYPE (arg),
6501 dconst0)),
6502 fold_build2_loc (loc, code, type, arg,
6503 build_real (TREE_TYPE (arg),
6504 c2)));
6508 return NULL_TREE;
6511 /* Subroutine of fold() that optimizes comparisons against Infinities,
6512 either +Inf or -Inf.
6514 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6515 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6516 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6518 The function returns the constant folded tree if a simplification
6519 can be made, and NULL_TREE otherwise. */
6521 static tree
6522 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6523 tree arg0, tree arg1)
6525 machine_mode mode;
6526 REAL_VALUE_TYPE max;
6527 tree temp;
6528 bool neg;
6530 mode = TYPE_MODE (TREE_TYPE (arg0));
6532 /* For negative infinity swap the sense of the comparison. */
6533 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6534 if (neg)
6535 code = swap_tree_comparison (code);
6537 switch (code)
6539 case GT_EXPR:
6540 /* x > +Inf is always false, if with ignore sNANs. */
6541 if (HONOR_SNANS (mode))
6542 return NULL_TREE;
6543 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6545 case LE_EXPR:
6546 /* x <= +Inf is always true, if we don't case about NaNs. */
6547 if (! HONOR_NANS (mode))
6548 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6550 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6551 arg0 = save_expr (arg0);
6552 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6554 case EQ_EXPR:
6555 case GE_EXPR:
6556 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6557 real_maxval (&max, neg, mode);
6558 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6559 arg0, build_real (TREE_TYPE (arg0), max));
6561 case LT_EXPR:
6562 /* x < +Inf is always equal to x <= DBL_MAX. */
6563 real_maxval (&max, neg, mode);
6564 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6565 arg0, build_real (TREE_TYPE (arg0), max));
6567 case NE_EXPR:
6568 /* x != +Inf is always equal to !(x > DBL_MAX). */
6569 real_maxval (&max, neg, mode);
6570 if (! HONOR_NANS (mode))
6571 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6572 arg0, build_real (TREE_TYPE (arg0), max));
6574 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6575 arg0, build_real (TREE_TYPE (arg0), max));
6576 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6578 default:
6579 break;
6582 return NULL_TREE;
6585 /* Subroutine of fold() that optimizes comparisons of a division by
6586 a nonzero integer constant against an integer constant, i.e.
6587 X/C1 op C2.
6589 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6590 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6591 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6593 The function returns the constant folded tree if a simplification
6594 can be made, and NULL_TREE otherwise. */
6596 static tree
6597 fold_div_compare (location_t loc,
6598 enum tree_code code, tree type, tree arg0, tree arg1)
6600 tree prod, tmp, hi, lo;
6601 tree arg00 = TREE_OPERAND (arg0, 0);
6602 tree arg01 = TREE_OPERAND (arg0, 1);
6603 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6604 bool neg_overflow = false;
6605 bool overflow;
6607 /* We have to do this the hard way to detect unsigned overflow.
6608 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6609 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6610 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6611 neg_overflow = false;
6613 if (sign == UNSIGNED)
6615 tmp = int_const_binop (MINUS_EXPR, arg01,
6616 build_int_cst (TREE_TYPE (arg01), 1));
6617 lo = prod;
6619 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6620 val = wi::add (prod, tmp, sign, &overflow);
6621 hi = force_fit_type (TREE_TYPE (arg00), val,
6622 -1, overflow | TREE_OVERFLOW (prod));
6624 else if (tree_int_cst_sgn (arg01) >= 0)
6626 tmp = int_const_binop (MINUS_EXPR, arg01,
6627 build_int_cst (TREE_TYPE (arg01), 1));
6628 switch (tree_int_cst_sgn (arg1))
6630 case -1:
6631 neg_overflow = true;
6632 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6633 hi = prod;
6634 break;
6636 case 0:
6637 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6638 hi = tmp;
6639 break;
6641 case 1:
6642 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6643 lo = prod;
6644 break;
6646 default:
6647 gcc_unreachable ();
6650 else
6652 /* A negative divisor reverses the relational operators. */
6653 code = swap_tree_comparison (code);
6655 tmp = int_const_binop (PLUS_EXPR, arg01,
6656 build_int_cst (TREE_TYPE (arg01), 1));
6657 switch (tree_int_cst_sgn (arg1))
6659 case -1:
6660 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6661 lo = prod;
6662 break;
6664 case 0:
6665 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6666 lo = tmp;
6667 break;
6669 case 1:
6670 neg_overflow = true;
6671 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6672 hi = prod;
6673 break;
6675 default:
6676 gcc_unreachable ();
6680 switch (code)
6682 case EQ_EXPR:
6683 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6684 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6685 if (TREE_OVERFLOW (hi))
6686 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6687 if (TREE_OVERFLOW (lo))
6688 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6689 return build_range_check (loc, type, arg00, 1, lo, hi);
6691 case NE_EXPR:
6692 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6693 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6694 if (TREE_OVERFLOW (hi))
6695 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6696 if (TREE_OVERFLOW (lo))
6697 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6698 return build_range_check (loc, type, arg00, 0, lo, hi);
6700 case LT_EXPR:
6701 if (TREE_OVERFLOW (lo))
6703 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6704 return omit_one_operand_loc (loc, type, tmp, arg00);
6706 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6708 case LE_EXPR:
6709 if (TREE_OVERFLOW (hi))
6711 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6712 return omit_one_operand_loc (loc, type, tmp, arg00);
6714 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6716 case GT_EXPR:
6717 if (TREE_OVERFLOW (hi))
6719 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6720 return omit_one_operand_loc (loc, type, tmp, arg00);
6722 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6724 case GE_EXPR:
6725 if (TREE_OVERFLOW (lo))
6727 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6728 return omit_one_operand_loc (loc, type, tmp, arg00);
6730 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6732 default:
6733 break;
6736 return NULL_TREE;
6740 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6741 equality/inequality test, then return a simplified form of the test
6742 using a sign testing. Otherwise return NULL. TYPE is the desired
6743 result type. */
6745 static tree
6746 fold_single_bit_test_into_sign_test (location_t loc,
6747 enum tree_code code, tree arg0, tree arg1,
6748 tree result_type)
6750 /* If this is testing a single bit, we can optimize the test. */
6751 if ((code == NE_EXPR || code == EQ_EXPR)
6752 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6753 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6755 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6756 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6757 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6759 if (arg00 != NULL_TREE
6760 /* This is only a win if casting to a signed type is cheap,
6761 i.e. when arg00's type is not a partial mode. */
6762 && TYPE_PRECISION (TREE_TYPE (arg00))
6763 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6765 tree stype = signed_type_for (TREE_TYPE (arg00));
6766 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6767 result_type,
6768 fold_convert_loc (loc, stype, arg00),
6769 build_int_cst (stype, 0));
6773 return NULL_TREE;
6776 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6777 equality/inequality test, then return a simplified form of
6778 the test using shifts and logical operations. Otherwise return
6779 NULL. TYPE is the desired result type. */
6781 tree
6782 fold_single_bit_test (location_t loc, enum tree_code code,
6783 tree arg0, tree arg1, tree result_type)
6785 /* If this is testing a single bit, we can optimize the test. */
6786 if ((code == NE_EXPR || code == EQ_EXPR)
6787 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6788 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6790 tree inner = TREE_OPERAND (arg0, 0);
6791 tree type = TREE_TYPE (arg0);
6792 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6793 machine_mode operand_mode = TYPE_MODE (type);
6794 int ops_unsigned;
6795 tree signed_type, unsigned_type, intermediate_type;
6796 tree tem, one;
6798 /* First, see if we can fold the single bit test into a sign-bit
6799 test. */
6800 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6801 result_type);
6802 if (tem)
6803 return tem;
6805 /* Otherwise we have (A & C) != 0 where C is a single bit,
6806 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6807 Similarly for (A & C) == 0. */
6809 /* If INNER is a right shift of a constant and it plus BITNUM does
6810 not overflow, adjust BITNUM and INNER. */
6811 if (TREE_CODE (inner) == RSHIFT_EXPR
6812 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6813 && bitnum < TYPE_PRECISION (type)
6814 && wi::ltu_p (TREE_OPERAND (inner, 1),
6815 TYPE_PRECISION (type) - bitnum))
6817 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6818 inner = TREE_OPERAND (inner, 0);
6821 /* If we are going to be able to omit the AND below, we must do our
6822 operations as unsigned. If we must use the AND, we have a choice.
6823 Normally unsigned is faster, but for some machines signed is. */
6824 #ifdef LOAD_EXTEND_OP
6825 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6826 && !flag_syntax_only) ? 0 : 1;
6827 #else
6828 ops_unsigned = 1;
6829 #endif
6831 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6832 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6833 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6834 inner = fold_convert_loc (loc, intermediate_type, inner);
6836 if (bitnum != 0)
6837 inner = build2 (RSHIFT_EXPR, intermediate_type,
6838 inner, size_int (bitnum));
6840 one = build_int_cst (intermediate_type, 1);
6842 if (code == EQ_EXPR)
6843 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6845 /* Put the AND last so it can combine with more things. */
6846 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6848 /* Make sure to return the proper type. */
6849 inner = fold_convert_loc (loc, result_type, inner);
6851 return inner;
6853 return NULL_TREE;
6856 /* Check whether we are allowed to reorder operands arg0 and arg1,
6857 such that the evaluation of arg1 occurs before arg0. */
6859 static bool
6860 reorder_operands_p (const_tree arg0, const_tree arg1)
6862 if (! flag_evaluation_order)
6863 return true;
6864 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6865 return true;
6866 return ! TREE_SIDE_EFFECTS (arg0)
6867 && ! TREE_SIDE_EFFECTS (arg1);
6870 /* Test whether it is preferable two swap two operands, ARG0 and
6871 ARG1, for example because ARG0 is an integer constant and ARG1
6872 isn't. If REORDER is true, only recommend swapping if we can
6873 evaluate the operands in reverse order. */
6875 bool
6876 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6878 if (CONSTANT_CLASS_P (arg1))
6879 return 0;
6880 if (CONSTANT_CLASS_P (arg0))
6881 return 1;
6883 STRIP_NOPS (arg0);
6884 STRIP_NOPS (arg1);
6886 if (TREE_CONSTANT (arg1))
6887 return 0;
6888 if (TREE_CONSTANT (arg0))
6889 return 1;
6891 if (reorder && flag_evaluation_order
6892 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6893 return 0;
6895 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6896 for commutative and comparison operators. Ensuring a canonical
6897 form allows the optimizers to find additional redundancies without
6898 having to explicitly check for both orderings. */
6899 if (TREE_CODE (arg0) == SSA_NAME
6900 && TREE_CODE (arg1) == SSA_NAME
6901 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6902 return 1;
6904 /* Put SSA_NAMEs last. */
6905 if (TREE_CODE (arg1) == SSA_NAME)
6906 return 0;
6907 if (TREE_CODE (arg0) == SSA_NAME)
6908 return 1;
6910 /* Put variables last. */
6911 if (DECL_P (arg1))
6912 return 0;
6913 if (DECL_P (arg0))
6914 return 1;
6916 return 0;
6919 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6920 ARG0 is extended to a wider type. */
6922 static tree
6923 fold_widened_comparison (location_t loc, enum tree_code code,
6924 tree type, tree arg0, tree arg1)
6926 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6927 tree arg1_unw;
6928 tree shorter_type, outer_type;
6929 tree min, max;
6930 bool above, below;
6932 if (arg0_unw == arg0)
6933 return NULL_TREE;
6934 shorter_type = TREE_TYPE (arg0_unw);
6936 #ifdef HAVE_canonicalize_funcptr_for_compare
6937 /* Disable this optimization if we're casting a function pointer
6938 type on targets that require function pointer canonicalization. */
6939 if (HAVE_canonicalize_funcptr_for_compare
6940 && TREE_CODE (shorter_type) == POINTER_TYPE
6941 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6942 return NULL_TREE;
6943 #endif
6945 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6946 return NULL_TREE;
6948 arg1_unw = get_unwidened (arg1, NULL_TREE);
6950 /* If possible, express the comparison in the shorter mode. */
6951 if ((code == EQ_EXPR || code == NE_EXPR
6952 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6953 && (TREE_TYPE (arg1_unw) == shorter_type
6954 || ((TYPE_PRECISION (shorter_type)
6955 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6956 && (TYPE_UNSIGNED (shorter_type)
6957 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6958 || (TREE_CODE (arg1_unw) == INTEGER_CST
6959 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6960 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6961 && int_fits_type_p (arg1_unw, shorter_type))))
6962 return fold_build2_loc (loc, code, type, arg0_unw,
6963 fold_convert_loc (loc, shorter_type, arg1_unw));
6965 if (TREE_CODE (arg1_unw) != INTEGER_CST
6966 || TREE_CODE (shorter_type) != INTEGER_TYPE
6967 || !int_fits_type_p (arg1_unw, shorter_type))
6968 return NULL_TREE;
6970 /* If we are comparing with the integer that does not fit into the range
6971 of the shorter type, the result is known. */
6972 outer_type = TREE_TYPE (arg1_unw);
6973 min = lower_bound_in_type (outer_type, shorter_type);
6974 max = upper_bound_in_type (outer_type, shorter_type);
6976 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6977 max, arg1_unw));
6978 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6979 arg1_unw, min));
6981 switch (code)
6983 case EQ_EXPR:
6984 if (above || below)
6985 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6986 break;
6988 case NE_EXPR:
6989 if (above || below)
6990 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6991 break;
6993 case LT_EXPR:
6994 case LE_EXPR:
6995 if (above)
6996 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6997 else if (below)
6998 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7000 case GT_EXPR:
7001 case GE_EXPR:
7002 if (above)
7003 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7004 else if (below)
7005 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7007 default:
7008 break;
7011 return NULL_TREE;
7014 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7015 ARG0 just the signedness is changed. */
7017 static tree
7018 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7019 tree arg0, tree arg1)
7021 tree arg0_inner;
7022 tree inner_type, outer_type;
7024 if (!CONVERT_EXPR_P (arg0))
7025 return NULL_TREE;
7027 outer_type = TREE_TYPE (arg0);
7028 arg0_inner = TREE_OPERAND (arg0, 0);
7029 inner_type = TREE_TYPE (arg0_inner);
7031 #ifdef HAVE_canonicalize_funcptr_for_compare
7032 /* Disable this optimization if we're casting a function pointer
7033 type on targets that require function pointer canonicalization. */
7034 if (HAVE_canonicalize_funcptr_for_compare
7035 && TREE_CODE (inner_type) == POINTER_TYPE
7036 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7037 return NULL_TREE;
7038 #endif
7040 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7041 return NULL_TREE;
7043 if (TREE_CODE (arg1) != INTEGER_CST
7044 && !(CONVERT_EXPR_P (arg1)
7045 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7046 return NULL_TREE;
7048 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7049 && code != NE_EXPR
7050 && code != EQ_EXPR)
7051 return NULL_TREE;
7053 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7054 return NULL_TREE;
7056 if (TREE_CODE (arg1) == INTEGER_CST)
7057 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7058 TREE_OVERFLOW (arg1));
7059 else
7060 arg1 = fold_convert_loc (loc, inner_type, arg1);
7062 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7066 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7067 means A >= Y && A != MAX, but in this case we know that
7068 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7070 static tree
7071 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7073 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7075 if (TREE_CODE (bound) == LT_EXPR)
7076 a = TREE_OPERAND (bound, 0);
7077 else if (TREE_CODE (bound) == GT_EXPR)
7078 a = TREE_OPERAND (bound, 1);
7079 else
7080 return NULL_TREE;
7082 typea = TREE_TYPE (a);
7083 if (!INTEGRAL_TYPE_P (typea)
7084 && !POINTER_TYPE_P (typea))
7085 return NULL_TREE;
7087 if (TREE_CODE (ineq) == LT_EXPR)
7089 a1 = TREE_OPERAND (ineq, 1);
7090 y = TREE_OPERAND (ineq, 0);
7092 else if (TREE_CODE (ineq) == GT_EXPR)
7094 a1 = TREE_OPERAND (ineq, 0);
7095 y = TREE_OPERAND (ineq, 1);
7097 else
7098 return NULL_TREE;
7100 if (TREE_TYPE (a1) != typea)
7101 return NULL_TREE;
7103 if (POINTER_TYPE_P (typea))
7105 /* Convert the pointer types into integer before taking the difference. */
7106 tree ta = fold_convert_loc (loc, ssizetype, a);
7107 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7108 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7110 else
7111 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7113 if (!diff || !integer_onep (diff))
7114 return NULL_TREE;
7116 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7119 /* Fold a sum or difference of at least one multiplication.
7120 Returns the folded tree or NULL if no simplification could be made. */
7122 static tree
7123 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7124 tree arg0, tree arg1)
7126 tree arg00, arg01, arg10, arg11;
7127 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7129 /* (A * C) +- (B * C) -> (A+-B) * C.
7130 (A * C) +- A -> A * (C+-1).
7131 We are most concerned about the case where C is a constant,
7132 but other combinations show up during loop reduction. Since
7133 it is not difficult, try all four possibilities. */
7135 if (TREE_CODE (arg0) == MULT_EXPR)
7137 arg00 = TREE_OPERAND (arg0, 0);
7138 arg01 = TREE_OPERAND (arg0, 1);
7140 else if (TREE_CODE (arg0) == INTEGER_CST)
7142 arg00 = build_one_cst (type);
7143 arg01 = arg0;
7145 else
7147 /* We cannot generate constant 1 for fract. */
7148 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7149 return NULL_TREE;
7150 arg00 = arg0;
7151 arg01 = build_one_cst (type);
7153 if (TREE_CODE (arg1) == MULT_EXPR)
7155 arg10 = TREE_OPERAND (arg1, 0);
7156 arg11 = TREE_OPERAND (arg1, 1);
7158 else if (TREE_CODE (arg1) == INTEGER_CST)
7160 arg10 = build_one_cst (type);
7161 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7162 the purpose of this canonicalization. */
7163 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7164 && negate_expr_p (arg1)
7165 && code == PLUS_EXPR)
7167 arg11 = negate_expr (arg1);
7168 code = MINUS_EXPR;
7170 else
7171 arg11 = arg1;
7173 else
7175 /* We cannot generate constant 1 for fract. */
7176 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7177 return NULL_TREE;
7178 arg10 = arg1;
7179 arg11 = build_one_cst (type);
7181 same = NULL_TREE;
7183 if (operand_equal_p (arg01, arg11, 0))
7184 same = arg01, alt0 = arg00, alt1 = arg10;
7185 else if (operand_equal_p (arg00, arg10, 0))
7186 same = arg00, alt0 = arg01, alt1 = arg11;
7187 else if (operand_equal_p (arg00, arg11, 0))
7188 same = arg00, alt0 = arg01, alt1 = arg10;
7189 else if (operand_equal_p (arg01, arg10, 0))
7190 same = arg01, alt0 = arg00, alt1 = arg11;
7192 /* No identical multiplicands; see if we can find a common
7193 power-of-two factor in non-power-of-two multiplies. This
7194 can help in multi-dimensional array access. */
7195 else if (tree_fits_shwi_p (arg01)
7196 && tree_fits_shwi_p (arg11))
7198 HOST_WIDE_INT int01, int11, tmp;
7199 bool swap = false;
7200 tree maybe_same;
7201 int01 = tree_to_shwi (arg01);
7202 int11 = tree_to_shwi (arg11);
7204 /* Move min of absolute values to int11. */
7205 if (absu_hwi (int01) < absu_hwi (int11))
7207 tmp = int01, int01 = int11, int11 = tmp;
7208 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7209 maybe_same = arg01;
7210 swap = true;
7212 else
7213 maybe_same = arg11;
7215 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7216 /* The remainder should not be a constant, otherwise we
7217 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7218 increased the number of multiplications necessary. */
7219 && TREE_CODE (arg10) != INTEGER_CST)
7221 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7222 build_int_cst (TREE_TYPE (arg00),
7223 int01 / int11));
7224 alt1 = arg10;
7225 same = maybe_same;
7226 if (swap)
7227 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7231 if (same)
7232 return fold_build2_loc (loc, MULT_EXPR, type,
7233 fold_build2_loc (loc, code, type,
7234 fold_convert_loc (loc, type, alt0),
7235 fold_convert_loc (loc, type, alt1)),
7236 fold_convert_loc (loc, type, same));
7238 return NULL_TREE;
7241 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7242 specified by EXPR into the buffer PTR of length LEN bytes.
7243 Return the number of bytes placed in the buffer, or zero
7244 upon failure. */
7246 static int
7247 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7249 tree type = TREE_TYPE (expr);
7250 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7251 int byte, offset, word, words;
7252 unsigned char value;
7254 if ((off == -1 && total_bytes > len)
7255 || off >= total_bytes)
7256 return 0;
7257 if (off == -1)
7258 off = 0;
7259 words = total_bytes / UNITS_PER_WORD;
7261 for (byte = 0; byte < total_bytes; byte++)
7263 int bitpos = byte * BITS_PER_UNIT;
7264 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7265 number of bytes. */
7266 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7268 if (total_bytes > UNITS_PER_WORD)
7270 word = byte / UNITS_PER_WORD;
7271 if (WORDS_BIG_ENDIAN)
7272 word = (words - 1) - word;
7273 offset = word * UNITS_PER_WORD;
7274 if (BYTES_BIG_ENDIAN)
7275 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7276 else
7277 offset += byte % UNITS_PER_WORD;
7279 else
7280 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7281 if (offset >= off
7282 && offset - off < len)
7283 ptr[offset - off] = value;
7285 return MIN (len, total_bytes - off);
7289 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7290 specified by EXPR into the buffer PTR of length LEN bytes.
7291 Return the number of bytes placed in the buffer, or zero
7292 upon failure. */
7294 static int
7295 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7297 tree type = TREE_TYPE (expr);
7298 machine_mode mode = TYPE_MODE (type);
7299 int total_bytes = GET_MODE_SIZE (mode);
7300 FIXED_VALUE_TYPE value;
7301 tree i_value, i_type;
7303 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7304 return 0;
7306 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7308 if (NULL_TREE == i_type
7309 || TYPE_PRECISION (i_type) != total_bytes)
7310 return 0;
7312 value = TREE_FIXED_CST (expr);
7313 i_value = double_int_to_tree (i_type, value.data);
7315 return native_encode_int (i_value, ptr, len, off);
7319 /* Subroutine of native_encode_expr. Encode the REAL_CST
7320 specified by EXPR into the buffer PTR of length LEN bytes.
7321 Return the number of bytes placed in the buffer, or zero
7322 upon failure. */
7324 static int
7325 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7327 tree type = TREE_TYPE (expr);
7328 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7329 int byte, offset, word, words, bitpos;
7330 unsigned char value;
7332 /* There are always 32 bits in each long, no matter the size of
7333 the hosts long. We handle floating point representations with
7334 up to 192 bits. */
7335 long tmp[6];
7337 if ((off == -1 && total_bytes > len)
7338 || off >= total_bytes)
7339 return 0;
7340 if (off == -1)
7341 off = 0;
7342 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7344 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7346 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7347 bitpos += BITS_PER_UNIT)
7349 byte = (bitpos / BITS_PER_UNIT) & 3;
7350 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7352 if (UNITS_PER_WORD < 4)
7354 word = byte / UNITS_PER_WORD;
7355 if (WORDS_BIG_ENDIAN)
7356 word = (words - 1) - word;
7357 offset = word * UNITS_PER_WORD;
7358 if (BYTES_BIG_ENDIAN)
7359 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7360 else
7361 offset += byte % UNITS_PER_WORD;
7363 else
7364 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7365 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7366 if (offset >= off
7367 && offset - off < len)
7368 ptr[offset - off] = value;
7370 return MIN (len, total_bytes - off);
7373 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7374 specified by EXPR into the buffer PTR of length LEN bytes.
7375 Return the number of bytes placed in the buffer, or zero
7376 upon failure. */
7378 static int
7379 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7381 int rsize, isize;
7382 tree part;
7384 part = TREE_REALPART (expr);
7385 rsize = native_encode_expr (part, ptr, len, off);
7386 if (off == -1
7387 && rsize == 0)
7388 return 0;
7389 part = TREE_IMAGPART (expr);
7390 if (off != -1)
7391 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7392 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7393 if (off == -1
7394 && isize != rsize)
7395 return 0;
7396 return rsize + isize;
7400 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7401 specified by EXPR into the buffer PTR of length LEN bytes.
7402 Return the number of bytes placed in the buffer, or zero
7403 upon failure. */
7405 static int
7406 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7408 unsigned i, count;
7409 int size, offset;
7410 tree itype, elem;
7412 offset = 0;
7413 count = VECTOR_CST_NELTS (expr);
7414 itype = TREE_TYPE (TREE_TYPE (expr));
7415 size = GET_MODE_SIZE (TYPE_MODE (itype));
7416 for (i = 0; i < count; i++)
7418 if (off >= size)
7420 off -= size;
7421 continue;
7423 elem = VECTOR_CST_ELT (expr, i);
7424 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7425 if ((off == -1 && res != size)
7426 || res == 0)
7427 return 0;
7428 offset += res;
7429 if (offset >= len)
7430 return offset;
7431 if (off != -1)
7432 off = 0;
7434 return offset;
7438 /* Subroutine of native_encode_expr. Encode the STRING_CST
7439 specified by EXPR into the buffer PTR of length LEN bytes.
7440 Return the number of bytes placed in the buffer, or zero
7441 upon failure. */
7443 static int
7444 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7446 tree type = TREE_TYPE (expr);
7447 HOST_WIDE_INT total_bytes;
7449 if (TREE_CODE (type) != ARRAY_TYPE
7450 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7451 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7452 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7453 return 0;
7454 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7455 if ((off == -1 && total_bytes > len)
7456 || off >= total_bytes)
7457 return 0;
7458 if (off == -1)
7459 off = 0;
7460 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7462 int written = 0;
7463 if (off < TREE_STRING_LENGTH (expr))
7465 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7466 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7468 memset (ptr + written, 0,
7469 MIN (total_bytes - written, len - written));
7471 else
7472 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7473 return MIN (total_bytes - off, len);
7477 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7478 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7479 buffer PTR of length LEN bytes. If OFF is not -1 then start
7480 the encoding at byte offset OFF and encode at most LEN bytes.
7481 Return the number of bytes placed in the buffer, or zero upon failure. */
7484 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7486 switch (TREE_CODE (expr))
7488 case INTEGER_CST:
7489 return native_encode_int (expr, ptr, len, off);
7491 case REAL_CST:
7492 return native_encode_real (expr, ptr, len, off);
7494 case FIXED_CST:
7495 return native_encode_fixed (expr, ptr, len, off);
7497 case COMPLEX_CST:
7498 return native_encode_complex (expr, ptr, len, off);
7500 case VECTOR_CST:
7501 return native_encode_vector (expr, ptr, len, off);
7503 case STRING_CST:
7504 return native_encode_string (expr, ptr, len, off);
7506 default:
7507 return 0;
7512 /* Subroutine of native_interpret_expr. Interpret the contents of
7513 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7514 If the buffer cannot be interpreted, return NULL_TREE. */
7516 static tree
7517 native_interpret_int (tree type, const unsigned char *ptr, int len)
7519 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7521 if (total_bytes > len
7522 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7523 return NULL_TREE;
7525 wide_int result = wi::from_buffer (ptr, total_bytes);
7527 return wide_int_to_tree (type, result);
7531 /* Subroutine of native_interpret_expr. Interpret the contents of
7532 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7533 If the buffer cannot be interpreted, return NULL_TREE. */
7535 static tree
7536 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7538 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7539 double_int result;
7540 FIXED_VALUE_TYPE fixed_value;
7542 if (total_bytes > len
7543 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7544 return NULL_TREE;
7546 result = double_int::from_buffer (ptr, total_bytes);
7547 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7549 return build_fixed (type, fixed_value);
7553 /* Subroutine of native_interpret_expr. Interpret the contents of
7554 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7555 If the buffer cannot be interpreted, return NULL_TREE. */
7557 static tree
7558 native_interpret_real (tree type, const unsigned char *ptr, int len)
7560 machine_mode mode = TYPE_MODE (type);
7561 int total_bytes = GET_MODE_SIZE (mode);
7562 int byte, offset, word, words, bitpos;
7563 unsigned char value;
7564 /* There are always 32 bits in each long, no matter the size of
7565 the hosts long. We handle floating point representations with
7566 up to 192 bits. */
7567 REAL_VALUE_TYPE r;
7568 long tmp[6];
7570 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7571 if (total_bytes > len || total_bytes > 24)
7572 return NULL_TREE;
7573 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7575 memset (tmp, 0, sizeof (tmp));
7576 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7577 bitpos += BITS_PER_UNIT)
7579 byte = (bitpos / BITS_PER_UNIT) & 3;
7580 if (UNITS_PER_WORD < 4)
7582 word = byte / UNITS_PER_WORD;
7583 if (WORDS_BIG_ENDIAN)
7584 word = (words - 1) - word;
7585 offset = word * UNITS_PER_WORD;
7586 if (BYTES_BIG_ENDIAN)
7587 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7588 else
7589 offset += byte % UNITS_PER_WORD;
7591 else
7592 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7593 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7595 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7598 real_from_target (&r, tmp, mode);
7599 return build_real (type, r);
7603 /* Subroutine of native_interpret_expr. Interpret the contents of
7604 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7605 If the buffer cannot be interpreted, return NULL_TREE. */
7607 static tree
7608 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7610 tree etype, rpart, ipart;
7611 int size;
7613 etype = TREE_TYPE (type);
7614 size = GET_MODE_SIZE (TYPE_MODE (etype));
7615 if (size * 2 > len)
7616 return NULL_TREE;
7617 rpart = native_interpret_expr (etype, ptr, size);
7618 if (!rpart)
7619 return NULL_TREE;
7620 ipart = native_interpret_expr (etype, ptr+size, size);
7621 if (!ipart)
7622 return NULL_TREE;
7623 return build_complex (type, rpart, ipart);
7627 /* Subroutine of native_interpret_expr. Interpret the contents of
7628 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7629 If the buffer cannot be interpreted, return NULL_TREE. */
7631 static tree
7632 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7634 tree etype, elem;
7635 int i, size, count;
7636 tree *elements;
7638 etype = TREE_TYPE (type);
7639 size = GET_MODE_SIZE (TYPE_MODE (etype));
7640 count = TYPE_VECTOR_SUBPARTS (type);
7641 if (size * count > len)
7642 return NULL_TREE;
7644 elements = XALLOCAVEC (tree, count);
7645 for (i = count - 1; i >= 0; i--)
7647 elem = native_interpret_expr (etype, ptr+(i*size), size);
7648 if (!elem)
7649 return NULL_TREE;
7650 elements[i] = elem;
7652 return build_vector (type, elements);
7656 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7657 the buffer PTR of length LEN as a constant of type TYPE. For
7658 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7659 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7660 return NULL_TREE. */
7662 tree
7663 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7665 switch (TREE_CODE (type))
7667 case INTEGER_TYPE:
7668 case ENUMERAL_TYPE:
7669 case BOOLEAN_TYPE:
7670 case POINTER_TYPE:
7671 case REFERENCE_TYPE:
7672 return native_interpret_int (type, ptr, len);
7674 case REAL_TYPE:
7675 return native_interpret_real (type, ptr, len);
7677 case FIXED_POINT_TYPE:
7678 return native_interpret_fixed (type, ptr, len);
7680 case COMPLEX_TYPE:
7681 return native_interpret_complex (type, ptr, len);
7683 case VECTOR_TYPE:
7684 return native_interpret_vector (type, ptr, len);
7686 default:
7687 return NULL_TREE;
7691 /* Returns true if we can interpret the contents of a native encoding
7692 as TYPE. */
7694 static bool
7695 can_native_interpret_type_p (tree type)
7697 switch (TREE_CODE (type))
7699 case INTEGER_TYPE:
7700 case ENUMERAL_TYPE:
7701 case BOOLEAN_TYPE:
7702 case POINTER_TYPE:
7703 case REFERENCE_TYPE:
7704 case FIXED_POINT_TYPE:
7705 case REAL_TYPE:
7706 case COMPLEX_TYPE:
7707 case VECTOR_TYPE:
7708 return true;
7709 default:
7710 return false;
7714 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7715 TYPE at compile-time. If we're unable to perform the conversion
7716 return NULL_TREE. */
7718 static tree
7719 fold_view_convert_expr (tree type, tree expr)
7721 /* We support up to 512-bit values (for V8DFmode). */
7722 unsigned char buffer[64];
7723 int len;
7725 /* Check that the host and target are sane. */
7726 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7727 return NULL_TREE;
7729 len = native_encode_expr (expr, buffer, sizeof (buffer));
7730 if (len == 0)
7731 return NULL_TREE;
7733 return native_interpret_expr (type, buffer, len);
7736 /* Build an expression for the address of T. Folds away INDIRECT_REF
7737 to avoid confusing the gimplify process. */
7739 tree
7740 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7742 /* The size of the object is not relevant when talking about its address. */
7743 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7744 t = TREE_OPERAND (t, 0);
7746 if (TREE_CODE (t) == INDIRECT_REF)
7748 t = TREE_OPERAND (t, 0);
7750 if (TREE_TYPE (t) != ptrtype)
7751 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7753 else if (TREE_CODE (t) == MEM_REF
7754 && integer_zerop (TREE_OPERAND (t, 1)))
7755 return TREE_OPERAND (t, 0);
7756 else if (TREE_CODE (t) == MEM_REF
7757 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7758 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7759 TREE_OPERAND (t, 0),
7760 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7761 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7763 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7765 if (TREE_TYPE (t) != ptrtype)
7766 t = fold_convert_loc (loc, ptrtype, t);
7768 else
7769 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7771 return t;
7774 /* Build an expression for the address of T. */
7776 tree
7777 build_fold_addr_expr_loc (location_t loc, tree t)
7779 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7781 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7784 /* Fold a unary expression of code CODE and type TYPE with operand
7785 OP0. Return the folded expression if folding is successful.
7786 Otherwise, return NULL_TREE. */
7788 tree
7789 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7791 tree tem;
7792 tree arg0;
7793 enum tree_code_class kind = TREE_CODE_CLASS (code);
7795 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7796 && TREE_CODE_LENGTH (code) == 1);
7798 arg0 = op0;
7799 if (arg0)
7801 if (CONVERT_EXPR_CODE_P (code)
7802 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7804 /* Don't use STRIP_NOPS, because signedness of argument type
7805 matters. */
7806 STRIP_SIGN_NOPS (arg0);
7808 else
7810 /* Strip any conversions that don't change the mode. This
7811 is safe for every expression, except for a comparison
7812 expression because its signedness is derived from its
7813 operands.
7815 Note that this is done as an internal manipulation within
7816 the constant folder, in order to find the simplest
7817 representation of the arguments so that their form can be
7818 studied. In any cases, the appropriate type conversions
7819 should be put back in the tree that will get out of the
7820 constant folder. */
7821 STRIP_NOPS (arg0);
7824 if (CONSTANT_CLASS_P (arg0))
7826 tree tem = const_unop (code, type, arg0);
7827 if (tem)
7829 if (TREE_TYPE (tem) != type)
7830 tem = fold_convert_loc (loc, type, tem);
7831 return tem;
7836 tem = generic_simplify (loc, code, type, op0);
7837 if (tem)
7838 return tem;
7840 if (TREE_CODE_CLASS (code) == tcc_unary)
7842 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7843 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7844 fold_build1_loc (loc, code, type,
7845 fold_convert_loc (loc, TREE_TYPE (op0),
7846 TREE_OPERAND (arg0, 1))));
7847 else if (TREE_CODE (arg0) == COND_EXPR)
7849 tree arg01 = TREE_OPERAND (arg0, 1);
7850 tree arg02 = TREE_OPERAND (arg0, 2);
7851 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7852 arg01 = fold_build1_loc (loc, code, type,
7853 fold_convert_loc (loc,
7854 TREE_TYPE (op0), arg01));
7855 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7856 arg02 = fold_build1_loc (loc, code, type,
7857 fold_convert_loc (loc,
7858 TREE_TYPE (op0), arg02));
7859 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7860 arg01, arg02);
7862 /* If this was a conversion, and all we did was to move into
7863 inside the COND_EXPR, bring it back out. But leave it if
7864 it is a conversion from integer to integer and the
7865 result precision is no wider than a word since such a
7866 conversion is cheap and may be optimized away by combine,
7867 while it couldn't if it were outside the COND_EXPR. Then return
7868 so we don't get into an infinite recursion loop taking the
7869 conversion out and then back in. */
7871 if ((CONVERT_EXPR_CODE_P (code)
7872 || code == NON_LVALUE_EXPR)
7873 && TREE_CODE (tem) == COND_EXPR
7874 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7875 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7876 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7877 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7878 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7879 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7880 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7881 && (INTEGRAL_TYPE_P
7882 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7883 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7884 || flag_syntax_only))
7885 tem = build1_loc (loc, code, type,
7886 build3 (COND_EXPR,
7887 TREE_TYPE (TREE_OPERAND
7888 (TREE_OPERAND (tem, 1), 0)),
7889 TREE_OPERAND (tem, 0),
7890 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7891 TREE_OPERAND (TREE_OPERAND (tem, 2),
7892 0)));
7893 return tem;
7897 switch (code)
7899 case NON_LVALUE_EXPR:
7900 if (!maybe_lvalue_p (op0))
7901 return fold_convert_loc (loc, type, op0);
7902 return NULL_TREE;
7904 CASE_CONVERT:
7905 case FLOAT_EXPR:
7906 case FIX_TRUNC_EXPR:
7907 if (COMPARISON_CLASS_P (op0))
7909 /* If we have (type) (a CMP b) and type is an integral type, return
7910 new expression involving the new type. Canonicalize
7911 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7912 non-integral type.
7913 Do not fold the result as that would not simplify further, also
7914 folding again results in recursions. */
7915 if (TREE_CODE (type) == BOOLEAN_TYPE)
7916 return build2_loc (loc, TREE_CODE (op0), type,
7917 TREE_OPERAND (op0, 0),
7918 TREE_OPERAND (op0, 1));
7919 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7920 && TREE_CODE (type) != VECTOR_TYPE)
7921 return build3_loc (loc, COND_EXPR, type, op0,
7922 constant_boolean_node (true, type),
7923 constant_boolean_node (false, type));
7926 /* Handle (T *)&A.B.C for A being of type T and B and C
7927 living at offset zero. This occurs frequently in
7928 C++ upcasting and then accessing the base. */
7929 if (TREE_CODE (op0) == ADDR_EXPR
7930 && POINTER_TYPE_P (type)
7931 && handled_component_p (TREE_OPERAND (op0, 0)))
7933 HOST_WIDE_INT bitsize, bitpos;
7934 tree offset;
7935 machine_mode mode;
7936 int unsignedp, volatilep;
7937 tree base = TREE_OPERAND (op0, 0);
7938 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7939 &mode, &unsignedp, &volatilep, false);
7940 /* If the reference was to a (constant) zero offset, we can use
7941 the address of the base if it has the same base type
7942 as the result type and the pointer type is unqualified. */
7943 if (! offset && bitpos == 0
7944 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7945 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7946 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7947 return fold_convert_loc (loc, type,
7948 build_fold_addr_expr_loc (loc, base));
7951 if (TREE_CODE (op0) == MODIFY_EXPR
7952 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7953 /* Detect assigning a bitfield. */
7954 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7955 && DECL_BIT_FIELD
7956 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7958 /* Don't leave an assignment inside a conversion
7959 unless assigning a bitfield. */
7960 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7961 /* First do the assignment, then return converted constant. */
7962 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7963 TREE_NO_WARNING (tem) = 1;
7964 TREE_USED (tem) = 1;
7965 return tem;
7968 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7969 constants (if x has signed type, the sign bit cannot be set
7970 in c). This folds extension into the BIT_AND_EXPR.
7971 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7972 very likely don't have maximal range for their precision and this
7973 transformation effectively doesn't preserve non-maximal ranges. */
7974 if (TREE_CODE (type) == INTEGER_TYPE
7975 && TREE_CODE (op0) == BIT_AND_EXPR
7976 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7978 tree and_expr = op0;
7979 tree and0 = TREE_OPERAND (and_expr, 0);
7980 tree and1 = TREE_OPERAND (and_expr, 1);
7981 int change = 0;
7983 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7984 || (TYPE_PRECISION (type)
7985 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7986 change = 1;
7987 else if (TYPE_PRECISION (TREE_TYPE (and1))
7988 <= HOST_BITS_PER_WIDE_INT
7989 && tree_fits_uhwi_p (and1))
7991 unsigned HOST_WIDE_INT cst;
7993 cst = tree_to_uhwi (and1);
7994 cst &= HOST_WIDE_INT_M1U
7995 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7996 change = (cst == 0);
7997 #ifdef LOAD_EXTEND_OP
7998 if (change
7999 && !flag_syntax_only
8000 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8001 == ZERO_EXTEND))
8003 tree uns = unsigned_type_for (TREE_TYPE (and0));
8004 and0 = fold_convert_loc (loc, uns, and0);
8005 and1 = fold_convert_loc (loc, uns, and1);
8007 #endif
8009 if (change)
8011 tem = force_fit_type (type, wi::to_widest (and1), 0,
8012 TREE_OVERFLOW (and1));
8013 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8014 fold_convert_loc (loc, type, and0), tem);
8018 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8019 when one of the new casts will fold away. Conservatively we assume
8020 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8021 if (POINTER_TYPE_P (type)
8022 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8023 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8024 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8025 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8026 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8028 tree arg00 = TREE_OPERAND (arg0, 0);
8029 tree arg01 = TREE_OPERAND (arg0, 1);
8031 return fold_build_pointer_plus_loc
8032 (loc, fold_convert_loc (loc, type, arg00), arg01);
8035 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8036 of the same precision, and X is an integer type not narrower than
8037 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8038 if (INTEGRAL_TYPE_P (type)
8039 && TREE_CODE (op0) == BIT_NOT_EXPR
8040 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8041 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8042 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8044 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8045 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8046 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8047 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8048 fold_convert_loc (loc, type, tem));
8051 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8052 type of X and Y (integer types only). */
8053 if (INTEGRAL_TYPE_P (type)
8054 && TREE_CODE (op0) == MULT_EXPR
8055 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8056 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8058 /* Be careful not to introduce new overflows. */
8059 tree mult_type;
8060 if (TYPE_OVERFLOW_WRAPS (type))
8061 mult_type = type;
8062 else
8063 mult_type = unsigned_type_for (type);
8065 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8067 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8068 fold_convert_loc (loc, mult_type,
8069 TREE_OPERAND (op0, 0)),
8070 fold_convert_loc (loc, mult_type,
8071 TREE_OPERAND (op0, 1)));
8072 return fold_convert_loc (loc, type, tem);
8076 return NULL_TREE;
8078 case VIEW_CONVERT_EXPR:
8079 if (TREE_CODE (op0) == MEM_REF)
8080 return fold_build2_loc (loc, MEM_REF, type,
8081 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8083 return NULL_TREE;
8085 case NEGATE_EXPR:
8086 tem = fold_negate_expr (loc, arg0);
8087 if (tem)
8088 return fold_convert_loc (loc, type, tem);
8089 return NULL_TREE;
8091 case ABS_EXPR:
8092 /* Convert fabs((double)float) into (double)fabsf(float). */
8093 if (TREE_CODE (arg0) == NOP_EXPR
8094 && TREE_CODE (type) == REAL_TYPE)
8096 tree targ0 = strip_float_extensions (arg0);
8097 if (targ0 != arg0)
8098 return fold_convert_loc (loc, type,
8099 fold_build1_loc (loc, ABS_EXPR,
8100 TREE_TYPE (targ0),
8101 targ0));
8103 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8104 else if (TREE_CODE (arg0) == ABS_EXPR)
8105 return arg0;
8107 /* Strip sign ops from argument. */
8108 if (TREE_CODE (type) == REAL_TYPE)
8110 tem = fold_strip_sign_ops (arg0);
8111 if (tem)
8112 return fold_build1_loc (loc, ABS_EXPR, type,
8113 fold_convert_loc (loc, type, tem));
8115 return NULL_TREE;
8117 case CONJ_EXPR:
8118 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8119 return fold_convert_loc (loc, type, arg0);
8120 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8122 tree itype = TREE_TYPE (type);
8123 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8124 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8125 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8126 negate_expr (ipart));
8128 if (TREE_CODE (arg0) == CONJ_EXPR)
8129 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8130 return NULL_TREE;
8132 case BIT_NOT_EXPR:
8133 /* Convert ~ (-A) to A - 1. */
8134 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8135 return fold_build2_loc (loc, MINUS_EXPR, type,
8136 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8137 build_int_cst (type, 1));
8138 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8139 else if (INTEGRAL_TYPE_P (type)
8140 && ((TREE_CODE (arg0) == MINUS_EXPR
8141 && integer_onep (TREE_OPERAND (arg0, 1)))
8142 || (TREE_CODE (arg0) == PLUS_EXPR
8143 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8145 /* Perform the negation in ARG0's type and only then convert
8146 to TYPE as to avoid introducing undefined behavior. */
8147 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8148 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8149 TREE_OPERAND (arg0, 0));
8150 return fold_convert_loc (loc, type, t);
8152 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8153 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8154 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8155 fold_convert_loc (loc, type,
8156 TREE_OPERAND (arg0, 0)))))
8157 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8158 fold_convert_loc (loc, type,
8159 TREE_OPERAND (arg0, 1)));
8160 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8161 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8162 fold_convert_loc (loc, type,
8163 TREE_OPERAND (arg0, 1)))))
8164 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8165 fold_convert_loc (loc, type,
8166 TREE_OPERAND (arg0, 0)), tem);
8168 return NULL_TREE;
8170 case TRUTH_NOT_EXPR:
8171 /* Note that the operand of this must be an int
8172 and its values must be 0 or 1.
8173 ("true" is a fixed value perhaps depending on the language,
8174 but we don't handle values other than 1 correctly yet.) */
8175 tem = fold_truth_not_expr (loc, arg0);
8176 if (!tem)
8177 return NULL_TREE;
8178 return fold_convert_loc (loc, type, tem);
8180 case REALPART_EXPR:
8181 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8182 return fold_convert_loc (loc, type, arg0);
8183 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8185 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8186 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8187 fold_build1_loc (loc, REALPART_EXPR, itype,
8188 TREE_OPERAND (arg0, 0)),
8189 fold_build1_loc (loc, REALPART_EXPR, itype,
8190 TREE_OPERAND (arg0, 1)));
8191 return fold_convert_loc (loc, type, tem);
8193 if (TREE_CODE (arg0) == CONJ_EXPR)
8195 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8196 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8197 TREE_OPERAND (arg0, 0));
8198 return fold_convert_loc (loc, type, tem);
8200 if (TREE_CODE (arg0) == CALL_EXPR)
8202 tree fn = get_callee_fndecl (arg0);
8203 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8204 switch (DECL_FUNCTION_CODE (fn))
8206 CASE_FLT_FN (BUILT_IN_CEXPI):
8207 fn = mathfn_built_in (type, BUILT_IN_COS);
8208 if (fn)
8209 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8210 break;
8212 default:
8213 break;
8216 return NULL_TREE;
8218 case IMAGPART_EXPR:
8219 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8220 return build_zero_cst (type);
8221 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8223 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8224 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8225 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8226 TREE_OPERAND (arg0, 0)),
8227 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8228 TREE_OPERAND (arg0, 1)));
8229 return fold_convert_loc (loc, type, tem);
8231 if (TREE_CODE (arg0) == CONJ_EXPR)
8233 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8234 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8235 return fold_convert_loc (loc, type, negate_expr (tem));
8237 if (TREE_CODE (arg0) == CALL_EXPR)
8239 tree fn = get_callee_fndecl (arg0);
8240 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8241 switch (DECL_FUNCTION_CODE (fn))
8243 CASE_FLT_FN (BUILT_IN_CEXPI):
8244 fn = mathfn_built_in (type, BUILT_IN_SIN);
8245 if (fn)
8246 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8247 break;
8249 default:
8250 break;
8253 return NULL_TREE;
8255 case INDIRECT_REF:
8256 /* Fold *&X to X if X is an lvalue. */
8257 if (TREE_CODE (op0) == ADDR_EXPR)
8259 tree op00 = TREE_OPERAND (op0, 0);
8260 if ((TREE_CODE (op00) == VAR_DECL
8261 || TREE_CODE (op00) == PARM_DECL
8262 || TREE_CODE (op00) == RESULT_DECL)
8263 && !TREE_READONLY (op00))
8264 return op00;
8266 return NULL_TREE;
8268 default:
8269 return NULL_TREE;
8270 } /* switch (code) */
8274 /* If the operation was a conversion do _not_ mark a resulting constant
8275 with TREE_OVERFLOW if the original constant was not. These conversions
8276 have implementation defined behavior and retaining the TREE_OVERFLOW
8277 flag here would confuse later passes such as VRP. */
8278 tree
8279 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8280 tree type, tree op0)
8282 tree res = fold_unary_loc (loc, code, type, op0);
8283 if (res
8284 && TREE_CODE (res) == INTEGER_CST
8285 && TREE_CODE (op0) == INTEGER_CST
8286 && CONVERT_EXPR_CODE_P (code))
8287 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8289 return res;
8292 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8293 operands OP0 and OP1. LOC is the location of the resulting expression.
8294 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8295 Return the folded expression if folding is successful. Otherwise,
8296 return NULL_TREE. */
8297 static tree
8298 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8299 tree arg0, tree arg1, tree op0, tree op1)
8301 tree tem;
8303 /* We only do these simplifications if we are optimizing. */
8304 if (!optimize)
8305 return NULL_TREE;
8307 /* Check for things like (A || B) && (A || C). We can convert this
8308 to A || (B && C). Note that either operator can be any of the four
8309 truth and/or operations and the transformation will still be
8310 valid. Also note that we only care about order for the
8311 ANDIF and ORIF operators. If B contains side effects, this
8312 might change the truth-value of A. */
8313 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8314 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8315 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8316 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8317 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8318 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8320 tree a00 = TREE_OPERAND (arg0, 0);
8321 tree a01 = TREE_OPERAND (arg0, 1);
8322 tree a10 = TREE_OPERAND (arg1, 0);
8323 tree a11 = TREE_OPERAND (arg1, 1);
8324 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8325 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8326 && (code == TRUTH_AND_EXPR
8327 || code == TRUTH_OR_EXPR));
8329 if (operand_equal_p (a00, a10, 0))
8330 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8331 fold_build2_loc (loc, code, type, a01, a11));
8332 else if (commutative && operand_equal_p (a00, a11, 0))
8333 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8334 fold_build2_loc (loc, code, type, a01, a10));
8335 else if (commutative && operand_equal_p (a01, a10, 0))
8336 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8337 fold_build2_loc (loc, code, type, a00, a11));
8339 /* This case if tricky because we must either have commutative
8340 operators or else A10 must not have side-effects. */
8342 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8343 && operand_equal_p (a01, a11, 0))
8344 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8345 fold_build2_loc (loc, code, type, a00, a10),
8346 a01);
8349 /* See if we can build a range comparison. */
8350 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8351 return tem;
8353 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8354 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8356 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8357 if (tem)
8358 return fold_build2_loc (loc, code, type, tem, arg1);
8361 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8362 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8364 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8365 if (tem)
8366 return fold_build2_loc (loc, code, type, arg0, tem);
8369 /* Check for the possibility of merging component references. If our
8370 lhs is another similar operation, try to merge its rhs with our
8371 rhs. Then try to merge our lhs and rhs. */
8372 if (TREE_CODE (arg0) == code
8373 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8374 TREE_OPERAND (arg0, 1), arg1)))
8375 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8377 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8378 return tem;
8380 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8381 && (code == TRUTH_AND_EXPR
8382 || code == TRUTH_ANDIF_EXPR
8383 || code == TRUTH_OR_EXPR
8384 || code == TRUTH_ORIF_EXPR))
8386 enum tree_code ncode, icode;
8388 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8389 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8390 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8392 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8393 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8394 We don't want to pack more than two leafs to a non-IF AND/OR
8395 expression.
8396 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8397 equal to IF-CODE, then we don't want to add right-hand operand.
8398 If the inner right-hand side of left-hand operand has
8399 side-effects, or isn't simple, then we can't add to it,
8400 as otherwise we might destroy if-sequence. */
8401 if (TREE_CODE (arg0) == icode
8402 && simple_operand_p_2 (arg1)
8403 /* Needed for sequence points to handle trappings, and
8404 side-effects. */
8405 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8407 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8408 arg1);
8409 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8410 tem);
8412 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8413 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8414 else if (TREE_CODE (arg1) == icode
8415 && simple_operand_p_2 (arg0)
8416 /* Needed for sequence points to handle trappings, and
8417 side-effects. */
8418 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8420 tem = fold_build2_loc (loc, ncode, type,
8421 arg0, TREE_OPERAND (arg1, 0));
8422 return fold_build2_loc (loc, icode, type, tem,
8423 TREE_OPERAND (arg1, 1));
8425 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8426 into (A OR B).
8427 For sequence point consistancy, we need to check for trapping,
8428 and side-effects. */
8429 else if (code == icode && simple_operand_p_2 (arg0)
8430 && simple_operand_p_2 (arg1))
8431 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8434 return NULL_TREE;
8437 /* Fold a binary expression of code CODE and type TYPE with operands
8438 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8439 Return the folded expression if folding is successful. Otherwise,
8440 return NULL_TREE. */
8442 static tree
8443 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8445 enum tree_code compl_code;
8447 if (code == MIN_EXPR)
8448 compl_code = MAX_EXPR;
8449 else if (code == MAX_EXPR)
8450 compl_code = MIN_EXPR;
8451 else
8452 gcc_unreachable ();
8454 /* MIN (MAX (a, b), b) == b. */
8455 if (TREE_CODE (op0) == compl_code
8456 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8457 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8459 /* MIN (MAX (b, a), b) == b. */
8460 if (TREE_CODE (op0) == compl_code
8461 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8462 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8463 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8465 /* MIN (a, MAX (a, b)) == a. */
8466 if (TREE_CODE (op1) == compl_code
8467 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8468 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8469 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8471 /* MIN (a, MAX (b, a)) == a. */
8472 if (TREE_CODE (op1) == compl_code
8473 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8474 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8475 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8477 return NULL_TREE;
8480 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8481 by changing CODE to reduce the magnitude of constants involved in
8482 ARG0 of the comparison.
8483 Returns a canonicalized comparison tree if a simplification was
8484 possible, otherwise returns NULL_TREE.
8485 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8486 valid if signed overflow is undefined. */
8488 static tree
8489 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8490 tree arg0, tree arg1,
8491 bool *strict_overflow_p)
8493 enum tree_code code0 = TREE_CODE (arg0);
8494 tree t, cst0 = NULL_TREE;
8495 int sgn0;
8496 bool swap = false;
8498 /* Match A +- CST code arg1 and CST code arg1. We can change the
8499 first form only if overflow is undefined. */
8500 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8501 /* In principle pointers also have undefined overflow behavior,
8502 but that causes problems elsewhere. */
8503 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8504 && (code0 == MINUS_EXPR
8505 || code0 == PLUS_EXPR)
8506 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8507 || code0 == INTEGER_CST))
8508 return NULL_TREE;
8510 /* Identify the constant in arg0 and its sign. */
8511 if (code0 == INTEGER_CST)
8512 cst0 = arg0;
8513 else
8514 cst0 = TREE_OPERAND (arg0, 1);
8515 sgn0 = tree_int_cst_sgn (cst0);
8517 /* Overflowed constants and zero will cause problems. */
8518 if (integer_zerop (cst0)
8519 || TREE_OVERFLOW (cst0))
8520 return NULL_TREE;
8522 /* See if we can reduce the magnitude of the constant in
8523 arg0 by changing the comparison code. */
8524 if (code0 == INTEGER_CST)
8526 /* CST <= arg1 -> CST-1 < arg1. */
8527 if (code == LE_EXPR && sgn0 == 1)
8528 code = LT_EXPR;
8529 /* -CST < arg1 -> -CST-1 <= arg1. */
8530 else if (code == LT_EXPR && sgn0 == -1)
8531 code = LE_EXPR;
8532 /* CST > arg1 -> CST-1 >= arg1. */
8533 else if (code == GT_EXPR && sgn0 == 1)
8534 code = GE_EXPR;
8535 /* -CST >= arg1 -> -CST-1 > arg1. */
8536 else if (code == GE_EXPR && sgn0 == -1)
8537 code = GT_EXPR;
8538 else
8539 return NULL_TREE;
8540 /* arg1 code' CST' might be more canonical. */
8541 swap = true;
8543 else
8545 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8546 if (code == LT_EXPR
8547 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8548 code = LE_EXPR;
8549 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8550 else if (code == GT_EXPR
8551 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8552 code = GE_EXPR;
8553 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8554 else if (code == LE_EXPR
8555 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8556 code = LT_EXPR;
8557 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8558 else if (code == GE_EXPR
8559 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8560 code = GT_EXPR;
8561 else
8562 return NULL_TREE;
8563 *strict_overflow_p = true;
8566 /* Now build the constant reduced in magnitude. But not if that
8567 would produce one outside of its types range. */
8568 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8569 && ((sgn0 == 1
8570 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8571 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8572 || (sgn0 == -1
8573 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8574 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8575 /* We cannot swap the comparison here as that would cause us to
8576 endlessly recurse. */
8577 return NULL_TREE;
8579 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8580 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8581 if (code0 != INTEGER_CST)
8582 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8583 t = fold_convert (TREE_TYPE (arg1), t);
8585 /* If swapping might yield to a more canonical form, do so. */
8586 if (swap)
8587 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8588 else
8589 return fold_build2_loc (loc, code, type, t, arg1);
8592 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8593 overflow further. Try to decrease the magnitude of constants involved
8594 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8595 and put sole constants at the second argument position.
8596 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8598 static tree
8599 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8600 tree arg0, tree arg1)
8602 tree t;
8603 bool strict_overflow_p;
8604 const char * const warnmsg = G_("assuming signed overflow does not occur "
8605 "when reducing constant in comparison");
8607 /* Try canonicalization by simplifying arg0. */
8608 strict_overflow_p = false;
8609 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8610 &strict_overflow_p);
8611 if (t)
8613 if (strict_overflow_p)
8614 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8615 return t;
8618 /* Try canonicalization by simplifying arg1 using the swapped
8619 comparison. */
8620 code = swap_tree_comparison (code);
8621 strict_overflow_p = false;
8622 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8623 &strict_overflow_p);
8624 if (t && strict_overflow_p)
8625 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8626 return t;
8629 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8630 space. This is used to avoid issuing overflow warnings for
8631 expressions like &p->x which can not wrap. */
8633 static bool
8634 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8636 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8637 return true;
8639 if (bitpos < 0)
8640 return true;
8642 wide_int wi_offset;
8643 int precision = TYPE_PRECISION (TREE_TYPE (base));
8644 if (offset == NULL_TREE)
8645 wi_offset = wi::zero (precision);
8646 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8647 return true;
8648 else
8649 wi_offset = offset;
8651 bool overflow;
8652 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8653 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8654 if (overflow)
8655 return true;
8657 if (!wi::fits_uhwi_p (total))
8658 return true;
8660 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8661 if (size <= 0)
8662 return true;
8664 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8665 array. */
8666 if (TREE_CODE (base) == ADDR_EXPR)
8668 HOST_WIDE_INT base_size;
8670 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8671 if (base_size > 0 && size < base_size)
8672 size = base_size;
8675 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8678 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8679 kind INTEGER_CST. This makes sure to properly sign-extend the
8680 constant. */
8682 static HOST_WIDE_INT
8683 size_low_cst (const_tree t)
8685 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8686 int prec = TYPE_PRECISION (TREE_TYPE (t));
8687 if (prec < HOST_BITS_PER_WIDE_INT)
8688 return sext_hwi (w, prec);
8689 return w;
8692 /* Subroutine of fold_binary. This routine performs all of the
8693 transformations that are common to the equality/inequality
8694 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8695 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8696 fold_binary should call fold_binary. Fold a comparison with
8697 tree code CODE and type TYPE with operands OP0 and OP1. Return
8698 the folded comparison or NULL_TREE. */
8700 static tree
8701 fold_comparison (location_t loc, enum tree_code code, tree type,
8702 tree op0, tree op1)
8704 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8705 tree arg0, arg1, tem;
8707 arg0 = op0;
8708 arg1 = op1;
8710 STRIP_SIGN_NOPS (arg0);
8711 STRIP_SIGN_NOPS (arg1);
8713 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8714 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8715 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8716 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8717 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8718 && TREE_CODE (arg1) == INTEGER_CST
8719 && !TREE_OVERFLOW (arg1))
8721 const enum tree_code
8722 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8723 tree const1 = TREE_OPERAND (arg0, 1);
8724 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8725 tree variable = TREE_OPERAND (arg0, 0);
8726 tree new_const = int_const_binop (reverse_op, const2, const1);
8728 /* If the constant operation overflowed this can be
8729 simplified as a comparison against INT_MAX/INT_MIN. */
8730 if (TREE_OVERFLOW (new_const)
8731 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8733 int const1_sgn = tree_int_cst_sgn (const1);
8734 enum tree_code code2 = code;
8736 /* Get the sign of the constant on the lhs if the
8737 operation were VARIABLE + CONST1. */
8738 if (TREE_CODE (arg0) == MINUS_EXPR)
8739 const1_sgn = -const1_sgn;
8741 /* The sign of the constant determines if we overflowed
8742 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8743 Canonicalize to the INT_MIN overflow by swapping the comparison
8744 if necessary. */
8745 if (const1_sgn == -1)
8746 code2 = swap_tree_comparison (code);
8748 /* We now can look at the canonicalized case
8749 VARIABLE + 1 CODE2 INT_MIN
8750 and decide on the result. */
8751 switch (code2)
8753 case EQ_EXPR:
8754 case LT_EXPR:
8755 case LE_EXPR:
8756 return
8757 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8759 case NE_EXPR:
8760 case GE_EXPR:
8761 case GT_EXPR:
8762 return
8763 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8765 default:
8766 gcc_unreachable ();
8769 else
8771 if (!equality_code)
8772 fold_overflow_warning ("assuming signed overflow does not occur "
8773 "when changing X +- C1 cmp C2 to "
8774 "X cmp C2 -+ C1",
8775 WARN_STRICT_OVERFLOW_COMPARISON);
8776 return fold_build2_loc (loc, code, type, variable, new_const);
8780 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8781 if (TREE_CODE (arg0) == MINUS_EXPR
8782 && equality_code
8783 && integer_zerop (arg1))
8785 /* ??? The transformation is valid for the other operators if overflow
8786 is undefined for the type, but performing it here badly interacts
8787 with the transformation in fold_cond_expr_with_comparison which
8788 attempts to synthetize ABS_EXPR. */
8789 if (!equality_code)
8790 fold_overflow_warning ("assuming signed overflow does not occur "
8791 "when changing X - Y cmp 0 to X cmp Y",
8792 WARN_STRICT_OVERFLOW_COMPARISON);
8793 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8794 TREE_OPERAND (arg0, 1));
8797 /* For comparisons of pointers we can decompose it to a compile time
8798 comparison of the base objects and the offsets into the object.
8799 This requires at least one operand being an ADDR_EXPR or a
8800 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8801 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8802 && (TREE_CODE (arg0) == ADDR_EXPR
8803 || TREE_CODE (arg1) == ADDR_EXPR
8804 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8805 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8807 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8808 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8809 machine_mode mode;
8810 int volatilep, unsignedp;
8811 bool indirect_base0 = false, indirect_base1 = false;
8813 /* Get base and offset for the access. Strip ADDR_EXPR for
8814 get_inner_reference, but put it back by stripping INDIRECT_REF
8815 off the base object if possible. indirect_baseN will be true
8816 if baseN is not an address but refers to the object itself. */
8817 base0 = arg0;
8818 if (TREE_CODE (arg0) == ADDR_EXPR)
8820 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8821 &bitsize, &bitpos0, &offset0, &mode,
8822 &unsignedp, &volatilep, false);
8823 if (TREE_CODE (base0) == INDIRECT_REF)
8824 base0 = TREE_OPERAND (base0, 0);
8825 else
8826 indirect_base0 = true;
8828 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8830 base0 = TREE_OPERAND (arg0, 0);
8831 STRIP_SIGN_NOPS (base0);
8832 if (TREE_CODE (base0) == ADDR_EXPR)
8834 base0 = TREE_OPERAND (base0, 0);
8835 indirect_base0 = true;
8837 offset0 = TREE_OPERAND (arg0, 1);
8838 if (tree_fits_shwi_p (offset0))
8840 HOST_WIDE_INT off = size_low_cst (offset0);
8841 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8842 * BITS_PER_UNIT)
8843 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8845 bitpos0 = off * BITS_PER_UNIT;
8846 offset0 = NULL_TREE;
8851 base1 = arg1;
8852 if (TREE_CODE (arg1) == ADDR_EXPR)
8854 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8855 &bitsize, &bitpos1, &offset1, &mode,
8856 &unsignedp, &volatilep, false);
8857 if (TREE_CODE (base1) == INDIRECT_REF)
8858 base1 = TREE_OPERAND (base1, 0);
8859 else
8860 indirect_base1 = true;
8862 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8864 base1 = TREE_OPERAND (arg1, 0);
8865 STRIP_SIGN_NOPS (base1);
8866 if (TREE_CODE (base1) == ADDR_EXPR)
8868 base1 = TREE_OPERAND (base1, 0);
8869 indirect_base1 = true;
8871 offset1 = TREE_OPERAND (arg1, 1);
8872 if (tree_fits_shwi_p (offset1))
8874 HOST_WIDE_INT off = size_low_cst (offset1);
8875 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8876 * BITS_PER_UNIT)
8877 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8879 bitpos1 = off * BITS_PER_UNIT;
8880 offset1 = NULL_TREE;
8885 /* A local variable can never be pointed to by
8886 the default SSA name of an incoming parameter. */
8887 if ((TREE_CODE (arg0) == ADDR_EXPR
8888 && indirect_base0
8889 && TREE_CODE (base0) == VAR_DECL
8890 && auto_var_in_fn_p (base0, current_function_decl)
8891 && !indirect_base1
8892 && TREE_CODE (base1) == SSA_NAME
8893 && SSA_NAME_IS_DEFAULT_DEF (base1)
8894 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8895 || (TREE_CODE (arg1) == ADDR_EXPR
8896 && indirect_base1
8897 && TREE_CODE (base1) == VAR_DECL
8898 && auto_var_in_fn_p (base1, current_function_decl)
8899 && !indirect_base0
8900 && TREE_CODE (base0) == SSA_NAME
8901 && SSA_NAME_IS_DEFAULT_DEF (base0)
8902 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8904 if (code == NE_EXPR)
8905 return constant_boolean_node (1, type);
8906 else if (code == EQ_EXPR)
8907 return constant_boolean_node (0, type);
8909 /* If we have equivalent bases we might be able to simplify. */
8910 else if (indirect_base0 == indirect_base1
8911 && operand_equal_p (base0, base1, 0))
8913 /* We can fold this expression to a constant if the non-constant
8914 offset parts are equal. */
8915 if ((offset0 == offset1
8916 || (offset0 && offset1
8917 && operand_equal_p (offset0, offset1, 0)))
8918 && (code == EQ_EXPR
8919 || code == NE_EXPR
8920 || (indirect_base0 && DECL_P (base0))
8921 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8924 if (!equality_code
8925 && bitpos0 != bitpos1
8926 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8927 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8928 fold_overflow_warning (("assuming pointer wraparound does not "
8929 "occur when comparing P +- C1 with "
8930 "P +- C2"),
8931 WARN_STRICT_OVERFLOW_CONDITIONAL);
8933 switch (code)
8935 case EQ_EXPR:
8936 return constant_boolean_node (bitpos0 == bitpos1, type);
8937 case NE_EXPR:
8938 return constant_boolean_node (bitpos0 != bitpos1, type);
8939 case LT_EXPR:
8940 return constant_boolean_node (bitpos0 < bitpos1, type);
8941 case LE_EXPR:
8942 return constant_boolean_node (bitpos0 <= bitpos1, type);
8943 case GE_EXPR:
8944 return constant_boolean_node (bitpos0 >= bitpos1, type);
8945 case GT_EXPR:
8946 return constant_boolean_node (bitpos0 > bitpos1, type);
8947 default:;
8950 /* We can simplify the comparison to a comparison of the variable
8951 offset parts if the constant offset parts are equal.
8952 Be careful to use signed sizetype here because otherwise we
8953 mess with array offsets in the wrong way. This is possible
8954 because pointer arithmetic is restricted to retain within an
8955 object and overflow on pointer differences is undefined as of
8956 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8957 else if (bitpos0 == bitpos1
8958 && (equality_code
8959 || (indirect_base0 && DECL_P (base0))
8960 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8962 /* By converting to signed sizetype we cover middle-end pointer
8963 arithmetic which operates on unsigned pointer types of size
8964 type size and ARRAY_REF offsets which are properly sign or
8965 zero extended from their type in case it is narrower than
8966 sizetype. */
8967 if (offset0 == NULL_TREE)
8968 offset0 = build_int_cst (ssizetype, 0);
8969 else
8970 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8971 if (offset1 == NULL_TREE)
8972 offset1 = build_int_cst (ssizetype, 0);
8973 else
8974 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8976 if (!equality_code
8977 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8978 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8979 fold_overflow_warning (("assuming pointer wraparound does not "
8980 "occur when comparing P +- C1 with "
8981 "P +- C2"),
8982 WARN_STRICT_OVERFLOW_COMPARISON);
8984 return fold_build2_loc (loc, code, type, offset0, offset1);
8987 /* For non-equal bases we can simplify if they are addresses
8988 declarations with different addresses. */
8989 else if (indirect_base0 && indirect_base1
8990 /* We know that !operand_equal_p (base0, base1, 0)
8991 because the if condition was false. But make
8992 sure two decls are not the same. */
8993 && base0 != base1
8994 && TREE_CODE (arg0) == ADDR_EXPR
8995 && TREE_CODE (arg1) == ADDR_EXPR
8996 && DECL_P (base0)
8997 && DECL_P (base1)
8998 /* Watch for aliases. */
8999 && (!decl_in_symtab_p (base0)
9000 || !decl_in_symtab_p (base1)
9001 || !symtab_node::get_create (base0)->equal_address_to
9002 (symtab_node::get_create (base1))))
9004 if (code == EQ_EXPR)
9005 return omit_two_operands_loc (loc, type, boolean_false_node,
9006 arg0, arg1);
9007 else if (code == NE_EXPR)
9008 return omit_two_operands_loc (loc, type, boolean_true_node,
9009 arg0, arg1);
9011 /* For equal offsets we can simplify to a comparison of the
9012 base addresses. */
9013 else if (bitpos0 == bitpos1
9014 && (indirect_base0
9015 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9016 && (indirect_base1
9017 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9018 && ((offset0 == offset1)
9019 || (offset0 && offset1
9020 && operand_equal_p (offset0, offset1, 0))))
9022 if (indirect_base0)
9023 base0 = build_fold_addr_expr_loc (loc, base0);
9024 if (indirect_base1)
9025 base1 = build_fold_addr_expr_loc (loc, base1);
9026 return fold_build2_loc (loc, code, type, base0, base1);
9030 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9031 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9032 the resulting offset is smaller in absolute value than the
9033 original one and has the same sign. */
9034 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9035 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9036 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9037 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9038 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9039 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9040 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9042 tree const1 = TREE_OPERAND (arg0, 1);
9043 tree const2 = TREE_OPERAND (arg1, 1);
9044 tree variable1 = TREE_OPERAND (arg0, 0);
9045 tree variable2 = TREE_OPERAND (arg1, 0);
9046 tree cst;
9047 const char * const warnmsg = G_("assuming signed overflow does not "
9048 "occur when combining constants around "
9049 "a comparison");
9051 /* Put the constant on the side where it doesn't overflow and is
9052 of lower absolute value and of same sign than before. */
9053 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9054 ? MINUS_EXPR : PLUS_EXPR,
9055 const2, const1);
9056 if (!TREE_OVERFLOW (cst)
9057 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9058 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9060 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9061 return fold_build2_loc (loc, code, type,
9062 variable1,
9063 fold_build2_loc (loc, TREE_CODE (arg1),
9064 TREE_TYPE (arg1),
9065 variable2, cst));
9068 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9069 ? MINUS_EXPR : PLUS_EXPR,
9070 const1, const2);
9071 if (!TREE_OVERFLOW (cst)
9072 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9073 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9075 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9076 return fold_build2_loc (loc, code, type,
9077 fold_build2_loc (loc, TREE_CODE (arg0),
9078 TREE_TYPE (arg0),
9079 variable1, cst),
9080 variable2);
9084 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9085 signed arithmetic case. That form is created by the compiler
9086 often enough for folding it to be of value. One example is in
9087 computing loop trip counts after Operator Strength Reduction. */
9088 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9089 && TREE_CODE (arg0) == MULT_EXPR
9090 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9091 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9092 && integer_zerop (arg1))
9094 tree const1 = TREE_OPERAND (arg0, 1);
9095 tree const2 = arg1; /* zero */
9096 tree variable1 = TREE_OPERAND (arg0, 0);
9097 enum tree_code cmp_code = code;
9099 /* Handle unfolded multiplication by zero. */
9100 if (integer_zerop (const1))
9101 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9103 fold_overflow_warning (("assuming signed overflow does not occur when "
9104 "eliminating multiplication in comparison "
9105 "with zero"),
9106 WARN_STRICT_OVERFLOW_COMPARISON);
9108 /* If const1 is negative we swap the sense of the comparison. */
9109 if (tree_int_cst_sgn (const1) < 0)
9110 cmp_code = swap_tree_comparison (cmp_code);
9112 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9115 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9116 if (tem)
9117 return tem;
9119 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9121 tree targ0 = strip_float_extensions (arg0);
9122 tree targ1 = strip_float_extensions (arg1);
9123 tree newtype = TREE_TYPE (targ0);
9125 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9126 newtype = TREE_TYPE (targ1);
9128 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9129 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9130 return fold_build2_loc (loc, code, type,
9131 fold_convert_loc (loc, newtype, targ0),
9132 fold_convert_loc (loc, newtype, targ1));
9134 /* (-a) CMP (-b) -> b CMP a */
9135 if (TREE_CODE (arg0) == NEGATE_EXPR
9136 && TREE_CODE (arg1) == NEGATE_EXPR)
9137 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9138 TREE_OPERAND (arg0, 0));
9140 if (TREE_CODE (arg1) == REAL_CST)
9142 REAL_VALUE_TYPE cst;
9143 cst = TREE_REAL_CST (arg1);
9145 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9146 if (TREE_CODE (arg0) == NEGATE_EXPR)
9147 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9148 TREE_OPERAND (arg0, 0),
9149 build_real (TREE_TYPE (arg1),
9150 real_value_negate (&cst)));
9152 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9153 /* a CMP (-0) -> a CMP 0 */
9154 if (REAL_VALUE_MINUS_ZERO (cst))
9155 return fold_build2_loc (loc, code, type, arg0,
9156 build_real (TREE_TYPE (arg1), dconst0));
9158 /* x != NaN is always true, other ops are always false. */
9159 if (REAL_VALUE_ISNAN (cst)
9160 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9162 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9163 return omit_one_operand_loc (loc, type, tem, arg0);
9166 /* Fold comparisons against infinity. */
9167 if (REAL_VALUE_ISINF (cst)
9168 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9170 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9171 if (tem != NULL_TREE)
9172 return tem;
9176 /* If this is a comparison of a real constant with a PLUS_EXPR
9177 or a MINUS_EXPR of a real constant, we can convert it into a
9178 comparison with a revised real constant as long as no overflow
9179 occurs when unsafe_math_optimizations are enabled. */
9180 if (flag_unsafe_math_optimizations
9181 && TREE_CODE (arg1) == REAL_CST
9182 && (TREE_CODE (arg0) == PLUS_EXPR
9183 || TREE_CODE (arg0) == MINUS_EXPR)
9184 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9185 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9186 ? MINUS_EXPR : PLUS_EXPR,
9187 arg1, TREE_OPERAND (arg0, 1)))
9188 && !TREE_OVERFLOW (tem))
9189 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9191 /* Likewise, we can simplify a comparison of a real constant with
9192 a MINUS_EXPR whose first operand is also a real constant, i.e.
9193 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9194 floating-point types only if -fassociative-math is set. */
9195 if (flag_associative_math
9196 && TREE_CODE (arg1) == REAL_CST
9197 && TREE_CODE (arg0) == MINUS_EXPR
9198 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9199 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9200 arg1))
9201 && !TREE_OVERFLOW (tem))
9202 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9203 TREE_OPERAND (arg0, 1), tem);
9205 /* Fold comparisons against built-in math functions. */
9206 if (TREE_CODE (arg1) == REAL_CST
9207 && flag_unsafe_math_optimizations
9208 && ! flag_errno_math)
9210 enum built_in_function fcode = builtin_mathfn_code (arg0);
9212 if (fcode != END_BUILTINS)
9214 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9215 if (tem != NULL_TREE)
9216 return tem;
9221 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9222 && CONVERT_EXPR_P (arg0))
9224 /* If we are widening one operand of an integer comparison,
9225 see if the other operand is similarly being widened. Perhaps we
9226 can do the comparison in the narrower type. */
9227 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9228 if (tem)
9229 return tem;
9231 /* Or if we are changing signedness. */
9232 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9233 if (tem)
9234 return tem;
9237 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9238 constant, we can simplify it. */
9239 if (TREE_CODE (arg1) == INTEGER_CST
9240 && (TREE_CODE (arg0) == MIN_EXPR
9241 || TREE_CODE (arg0) == MAX_EXPR)
9242 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9244 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9245 if (tem)
9246 return tem;
9249 /* Simplify comparison of something with itself. (For IEEE
9250 floating-point, we can only do some of these simplifications.) */
9251 if (operand_equal_p (arg0, arg1, 0))
9253 switch (code)
9255 case EQ_EXPR:
9256 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9257 || ! HONOR_NANS (element_mode (arg0)))
9258 return constant_boolean_node (1, type);
9259 break;
9261 case GE_EXPR:
9262 case LE_EXPR:
9263 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9264 || ! HONOR_NANS (element_mode (arg0)))
9265 return constant_boolean_node (1, type);
9266 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9268 case NE_EXPR:
9269 /* For NE, we can only do this simplification if integer
9270 or we don't honor IEEE floating point NaNs. */
9271 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9272 && HONOR_NANS (element_mode (arg0)))
9273 break;
9274 /* ... fall through ... */
9275 case GT_EXPR:
9276 case LT_EXPR:
9277 return constant_boolean_node (0, type);
9278 default:
9279 gcc_unreachable ();
9283 /* If we are comparing an expression that just has comparisons
9284 of two integer values, arithmetic expressions of those comparisons,
9285 and constants, we can simplify it. There are only three cases
9286 to check: the two values can either be equal, the first can be
9287 greater, or the second can be greater. Fold the expression for
9288 those three values. Since each value must be 0 or 1, we have
9289 eight possibilities, each of which corresponds to the constant 0
9290 or 1 or one of the six possible comparisons.
9292 This handles common cases like (a > b) == 0 but also handles
9293 expressions like ((x > y) - (y > x)) > 0, which supposedly
9294 occur in macroized code. */
9296 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9298 tree cval1 = 0, cval2 = 0;
9299 int save_p = 0;
9301 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9302 /* Don't handle degenerate cases here; they should already
9303 have been handled anyway. */
9304 && cval1 != 0 && cval2 != 0
9305 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9306 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9307 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9308 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9309 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9310 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9311 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9313 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9314 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9316 /* We can't just pass T to eval_subst in case cval1 or cval2
9317 was the same as ARG1. */
9319 tree high_result
9320 = fold_build2_loc (loc, code, type,
9321 eval_subst (loc, arg0, cval1, maxval,
9322 cval2, minval),
9323 arg1);
9324 tree equal_result
9325 = fold_build2_loc (loc, code, type,
9326 eval_subst (loc, arg0, cval1, maxval,
9327 cval2, maxval),
9328 arg1);
9329 tree low_result
9330 = fold_build2_loc (loc, code, type,
9331 eval_subst (loc, arg0, cval1, minval,
9332 cval2, maxval),
9333 arg1);
9335 /* All three of these results should be 0 or 1. Confirm they are.
9336 Then use those values to select the proper code to use. */
9338 if (TREE_CODE (high_result) == INTEGER_CST
9339 && TREE_CODE (equal_result) == INTEGER_CST
9340 && TREE_CODE (low_result) == INTEGER_CST)
9342 /* Make a 3-bit mask with the high-order bit being the
9343 value for `>', the next for '=', and the low for '<'. */
9344 switch ((integer_onep (high_result) * 4)
9345 + (integer_onep (equal_result) * 2)
9346 + integer_onep (low_result))
9348 case 0:
9349 /* Always false. */
9350 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9351 case 1:
9352 code = LT_EXPR;
9353 break;
9354 case 2:
9355 code = EQ_EXPR;
9356 break;
9357 case 3:
9358 code = LE_EXPR;
9359 break;
9360 case 4:
9361 code = GT_EXPR;
9362 break;
9363 case 5:
9364 code = NE_EXPR;
9365 break;
9366 case 6:
9367 code = GE_EXPR;
9368 break;
9369 case 7:
9370 /* Always true. */
9371 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9374 if (save_p)
9376 tem = save_expr (build2 (code, type, cval1, cval2));
9377 SET_EXPR_LOCATION (tem, loc);
9378 return tem;
9380 return fold_build2_loc (loc, code, type, cval1, cval2);
9385 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9386 into a single range test. */
9387 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9388 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9389 && TREE_CODE (arg1) == INTEGER_CST
9390 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9391 && !integer_zerop (TREE_OPERAND (arg0, 1))
9392 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9393 && !TREE_OVERFLOW (arg1))
9395 tem = fold_div_compare (loc, code, type, arg0, arg1);
9396 if (tem != NULL_TREE)
9397 return tem;
9400 /* Fold ~X op ~Y as Y op X. */
9401 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9402 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9404 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9405 return fold_build2_loc (loc, code, type,
9406 fold_convert_loc (loc, cmp_type,
9407 TREE_OPERAND (arg1, 0)),
9408 TREE_OPERAND (arg0, 0));
9411 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9412 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9413 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9415 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9416 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9417 TREE_OPERAND (arg0, 0),
9418 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9419 fold_convert_loc (loc, cmp_type, arg1)));
9422 return NULL_TREE;
9426 /* Subroutine of fold_binary. Optimize complex multiplications of the
9427 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9428 argument EXPR represents the expression "z" of type TYPE. */
9430 static tree
9431 fold_mult_zconjz (location_t loc, tree type, tree expr)
9433 tree itype = TREE_TYPE (type);
9434 tree rpart, ipart, tem;
9436 if (TREE_CODE (expr) == COMPLEX_EXPR)
9438 rpart = TREE_OPERAND (expr, 0);
9439 ipart = TREE_OPERAND (expr, 1);
9441 else if (TREE_CODE (expr) == COMPLEX_CST)
9443 rpart = TREE_REALPART (expr);
9444 ipart = TREE_IMAGPART (expr);
9446 else
9448 expr = save_expr (expr);
9449 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9450 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9453 rpart = save_expr (rpart);
9454 ipart = save_expr (ipart);
9455 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9456 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9457 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9458 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9459 build_zero_cst (itype));
9463 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9464 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9465 guarantees that P and N have the same least significant log2(M) bits.
9466 N is not otherwise constrained. In particular, N is not normalized to
9467 0 <= N < M as is common. In general, the precise value of P is unknown.
9468 M is chosen as large as possible such that constant N can be determined.
9470 Returns M and sets *RESIDUE to N.
9472 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9473 account. This is not always possible due to PR 35705.
9476 static unsigned HOST_WIDE_INT
9477 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9478 bool allow_func_align)
9480 enum tree_code code;
9482 *residue = 0;
9484 code = TREE_CODE (expr);
9485 if (code == ADDR_EXPR)
9487 unsigned int bitalign;
9488 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9489 *residue /= BITS_PER_UNIT;
9490 return bitalign / BITS_PER_UNIT;
9492 else if (code == POINTER_PLUS_EXPR)
9494 tree op0, op1;
9495 unsigned HOST_WIDE_INT modulus;
9496 enum tree_code inner_code;
9498 op0 = TREE_OPERAND (expr, 0);
9499 STRIP_NOPS (op0);
9500 modulus = get_pointer_modulus_and_residue (op0, residue,
9501 allow_func_align);
9503 op1 = TREE_OPERAND (expr, 1);
9504 STRIP_NOPS (op1);
9505 inner_code = TREE_CODE (op1);
9506 if (inner_code == INTEGER_CST)
9508 *residue += TREE_INT_CST_LOW (op1);
9509 return modulus;
9511 else if (inner_code == MULT_EXPR)
9513 op1 = TREE_OPERAND (op1, 1);
9514 if (TREE_CODE (op1) == INTEGER_CST)
9516 unsigned HOST_WIDE_INT align;
9518 /* Compute the greatest power-of-2 divisor of op1. */
9519 align = TREE_INT_CST_LOW (op1);
9520 align &= -align;
9522 /* If align is non-zero and less than *modulus, replace
9523 *modulus with align., If align is 0, then either op1 is 0
9524 or the greatest power-of-2 divisor of op1 doesn't fit in an
9525 unsigned HOST_WIDE_INT. In either case, no additional
9526 constraint is imposed. */
9527 if (align)
9528 modulus = MIN (modulus, align);
9530 return modulus;
9535 /* If we get here, we were unable to determine anything useful about the
9536 expression. */
9537 return 1;
9540 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9541 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9543 static bool
9544 vec_cst_ctor_to_array (tree arg, tree *elts)
9546 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9548 if (TREE_CODE (arg) == VECTOR_CST)
9550 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9551 elts[i] = VECTOR_CST_ELT (arg, i);
9553 else if (TREE_CODE (arg) == CONSTRUCTOR)
9555 constructor_elt *elt;
9557 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9558 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9559 return false;
9560 else
9561 elts[i] = elt->value;
9563 else
9564 return false;
9565 for (; i < nelts; i++)
9566 elts[i]
9567 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9568 return true;
9571 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9572 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9573 NULL_TREE otherwise. */
9575 static tree
9576 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9578 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9579 tree *elts;
9580 bool need_ctor = false;
9582 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9583 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9584 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9585 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9586 return NULL_TREE;
9588 elts = XALLOCAVEC (tree, nelts * 3);
9589 if (!vec_cst_ctor_to_array (arg0, elts)
9590 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9591 return NULL_TREE;
9593 for (i = 0; i < nelts; i++)
9595 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9596 need_ctor = true;
9597 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9600 if (need_ctor)
9602 vec<constructor_elt, va_gc> *v;
9603 vec_alloc (v, nelts);
9604 for (i = 0; i < nelts; i++)
9605 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9606 return build_constructor (type, v);
9608 else
9609 return build_vector (type, &elts[2 * nelts]);
9612 /* Try to fold a pointer difference of type TYPE two address expressions of
9613 array references AREF0 and AREF1 using location LOC. Return a
9614 simplified expression for the difference or NULL_TREE. */
9616 static tree
9617 fold_addr_of_array_ref_difference (location_t loc, tree type,
9618 tree aref0, tree aref1)
9620 tree base0 = TREE_OPERAND (aref0, 0);
9621 tree base1 = TREE_OPERAND (aref1, 0);
9622 tree base_offset = build_int_cst (type, 0);
9624 /* If the bases are array references as well, recurse. If the bases
9625 are pointer indirections compute the difference of the pointers.
9626 If the bases are equal, we are set. */
9627 if ((TREE_CODE (base0) == ARRAY_REF
9628 && TREE_CODE (base1) == ARRAY_REF
9629 && (base_offset
9630 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9631 || (INDIRECT_REF_P (base0)
9632 && INDIRECT_REF_P (base1)
9633 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9634 TREE_OPERAND (base0, 0),
9635 TREE_OPERAND (base1, 0))))
9636 || operand_equal_p (base0, base1, 0))
9638 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9639 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9640 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9641 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9642 return fold_build2_loc (loc, PLUS_EXPR, type,
9643 base_offset,
9644 fold_build2_loc (loc, MULT_EXPR, type,
9645 diff, esz));
9647 return NULL_TREE;
9650 /* If the real or vector real constant CST of type TYPE has an exact
9651 inverse, return it, else return NULL. */
9653 tree
9654 exact_inverse (tree type, tree cst)
9656 REAL_VALUE_TYPE r;
9657 tree unit_type, *elts;
9658 machine_mode mode;
9659 unsigned vec_nelts, i;
9661 switch (TREE_CODE (cst))
9663 case REAL_CST:
9664 r = TREE_REAL_CST (cst);
9666 if (exact_real_inverse (TYPE_MODE (type), &r))
9667 return build_real (type, r);
9669 return NULL_TREE;
9671 case VECTOR_CST:
9672 vec_nelts = VECTOR_CST_NELTS (cst);
9673 elts = XALLOCAVEC (tree, vec_nelts);
9674 unit_type = TREE_TYPE (type);
9675 mode = TYPE_MODE (unit_type);
9677 for (i = 0; i < vec_nelts; i++)
9679 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9680 if (!exact_real_inverse (mode, &r))
9681 return NULL_TREE;
9682 elts[i] = build_real (unit_type, r);
9685 return build_vector (type, elts);
9687 default:
9688 return NULL_TREE;
9692 /* Mask out the tz least significant bits of X of type TYPE where
9693 tz is the number of trailing zeroes in Y. */
9694 static wide_int
9695 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9697 int tz = wi::ctz (y);
9698 if (tz > 0)
9699 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9700 return x;
9703 /* Return true when T is an address and is known to be nonzero.
9704 For floating point we further ensure that T is not denormal.
9705 Similar logic is present in nonzero_address in rtlanal.h.
9707 If the return value is based on the assumption that signed overflow
9708 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9709 change *STRICT_OVERFLOW_P. */
9711 static bool
9712 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9714 tree type = TREE_TYPE (t);
9715 enum tree_code code;
9717 /* Doing something useful for floating point would need more work. */
9718 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9719 return false;
9721 code = TREE_CODE (t);
9722 switch (TREE_CODE_CLASS (code))
9724 case tcc_unary:
9725 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9726 strict_overflow_p);
9727 case tcc_binary:
9728 case tcc_comparison:
9729 return tree_binary_nonzero_warnv_p (code, type,
9730 TREE_OPERAND (t, 0),
9731 TREE_OPERAND (t, 1),
9732 strict_overflow_p);
9733 case tcc_constant:
9734 case tcc_declaration:
9735 case tcc_reference:
9736 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9738 default:
9739 break;
9742 switch (code)
9744 case TRUTH_NOT_EXPR:
9745 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9746 strict_overflow_p);
9748 case TRUTH_AND_EXPR:
9749 case TRUTH_OR_EXPR:
9750 case TRUTH_XOR_EXPR:
9751 return tree_binary_nonzero_warnv_p (code, type,
9752 TREE_OPERAND (t, 0),
9753 TREE_OPERAND (t, 1),
9754 strict_overflow_p);
9756 case COND_EXPR:
9757 case CONSTRUCTOR:
9758 case OBJ_TYPE_REF:
9759 case ASSERT_EXPR:
9760 case ADDR_EXPR:
9761 case WITH_SIZE_EXPR:
9762 case SSA_NAME:
9763 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9765 case COMPOUND_EXPR:
9766 case MODIFY_EXPR:
9767 case BIND_EXPR:
9768 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9769 strict_overflow_p);
9771 case SAVE_EXPR:
9772 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9773 strict_overflow_p);
9775 case CALL_EXPR:
9777 tree fndecl = get_callee_fndecl (t);
9778 if (!fndecl) return false;
9779 if (flag_delete_null_pointer_checks && !flag_check_new
9780 && DECL_IS_OPERATOR_NEW (fndecl)
9781 && !TREE_NOTHROW (fndecl))
9782 return true;
9783 if (flag_delete_null_pointer_checks
9784 && lookup_attribute ("returns_nonnull",
9785 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9786 return true;
9787 return alloca_call_p (t);
9790 default:
9791 break;
9793 return false;
9796 /* Return true when T is an address and is known to be nonzero.
9797 Handle warnings about undefined signed overflow. */
9799 static bool
9800 tree_expr_nonzero_p (tree t)
9802 bool ret, strict_overflow_p;
9804 strict_overflow_p = false;
9805 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9806 if (strict_overflow_p)
9807 fold_overflow_warning (("assuming signed overflow does not occur when "
9808 "determining that expression is always "
9809 "non-zero"),
9810 WARN_STRICT_OVERFLOW_MISC);
9811 return ret;
9814 /* Fold a binary expression of code CODE and type TYPE with operands
9815 OP0 and OP1. LOC is the location of the resulting expression.
9816 Return the folded expression if folding is successful. Otherwise,
9817 return NULL_TREE. */
9819 tree
9820 fold_binary_loc (location_t loc,
9821 enum tree_code code, tree type, tree op0, tree op1)
9823 enum tree_code_class kind = TREE_CODE_CLASS (code);
9824 tree arg0, arg1, tem;
9825 tree t1 = NULL_TREE;
9826 bool strict_overflow_p;
9827 unsigned int prec;
9829 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9830 && TREE_CODE_LENGTH (code) == 2
9831 && op0 != NULL_TREE
9832 && op1 != NULL_TREE);
9834 arg0 = op0;
9835 arg1 = op1;
9837 /* Strip any conversions that don't change the mode. This is
9838 safe for every expression, except for a comparison expression
9839 because its signedness is derived from its operands. So, in
9840 the latter case, only strip conversions that don't change the
9841 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9842 preserved.
9844 Note that this is done as an internal manipulation within the
9845 constant folder, in order to find the simplest representation
9846 of the arguments so that their form can be studied. In any
9847 cases, the appropriate type conversions should be put back in
9848 the tree that will get out of the constant folder. */
9850 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9852 STRIP_SIGN_NOPS (arg0);
9853 STRIP_SIGN_NOPS (arg1);
9855 else
9857 STRIP_NOPS (arg0);
9858 STRIP_NOPS (arg1);
9861 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9862 constant but we can't do arithmetic on them. */
9863 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9865 tem = const_binop (code, type, arg0, arg1);
9866 if (tem != NULL_TREE)
9868 if (TREE_TYPE (tem) != type)
9869 tem = fold_convert_loc (loc, type, tem);
9870 return tem;
9874 /* If this is a commutative operation, and ARG0 is a constant, move it
9875 to ARG1 to reduce the number of tests below. */
9876 if (commutative_tree_code (code)
9877 && tree_swap_operands_p (arg0, arg1, true))
9878 return fold_build2_loc (loc, code, type, op1, op0);
9880 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9881 to ARG1 to reduce the number of tests below. */
9882 if (kind == tcc_comparison
9883 && tree_swap_operands_p (arg0, arg1, true))
9884 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9886 tem = generic_simplify (loc, code, type, op0, op1);
9887 if (tem)
9888 return tem;
9890 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9892 First check for cases where an arithmetic operation is applied to a
9893 compound, conditional, or comparison operation. Push the arithmetic
9894 operation inside the compound or conditional to see if any folding
9895 can then be done. Convert comparison to conditional for this purpose.
9896 The also optimizes non-constant cases that used to be done in
9897 expand_expr.
9899 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9900 one of the operands is a comparison and the other is a comparison, a
9901 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9902 code below would make the expression more complex. Change it to a
9903 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9904 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9906 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9907 || code == EQ_EXPR || code == NE_EXPR)
9908 && TREE_CODE (type) != VECTOR_TYPE
9909 && ((truth_value_p (TREE_CODE (arg0))
9910 && (truth_value_p (TREE_CODE (arg1))
9911 || (TREE_CODE (arg1) == BIT_AND_EXPR
9912 && integer_onep (TREE_OPERAND (arg1, 1)))))
9913 || (truth_value_p (TREE_CODE (arg1))
9914 && (truth_value_p (TREE_CODE (arg0))
9915 || (TREE_CODE (arg0) == BIT_AND_EXPR
9916 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9918 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9919 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9920 : TRUTH_XOR_EXPR,
9921 boolean_type_node,
9922 fold_convert_loc (loc, boolean_type_node, arg0),
9923 fold_convert_loc (loc, boolean_type_node, arg1));
9925 if (code == EQ_EXPR)
9926 tem = invert_truthvalue_loc (loc, tem);
9928 return fold_convert_loc (loc, type, tem);
9931 if (TREE_CODE_CLASS (code) == tcc_binary
9932 || TREE_CODE_CLASS (code) == tcc_comparison)
9934 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9936 tem = fold_build2_loc (loc, code, type,
9937 fold_convert_loc (loc, TREE_TYPE (op0),
9938 TREE_OPERAND (arg0, 1)), op1);
9939 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9940 tem);
9942 if (TREE_CODE (arg1) == COMPOUND_EXPR
9943 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9945 tem = fold_build2_loc (loc, code, type, op0,
9946 fold_convert_loc (loc, TREE_TYPE (op1),
9947 TREE_OPERAND (arg1, 1)));
9948 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9949 tem);
9952 if (TREE_CODE (arg0) == COND_EXPR
9953 || TREE_CODE (arg0) == VEC_COND_EXPR
9954 || COMPARISON_CLASS_P (arg0))
9956 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9957 arg0, arg1,
9958 /*cond_first_p=*/1);
9959 if (tem != NULL_TREE)
9960 return tem;
9963 if (TREE_CODE (arg1) == COND_EXPR
9964 || TREE_CODE (arg1) == VEC_COND_EXPR
9965 || COMPARISON_CLASS_P (arg1))
9967 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9968 arg1, arg0,
9969 /*cond_first_p=*/0);
9970 if (tem != NULL_TREE)
9971 return tem;
9975 switch (code)
9977 case MEM_REF:
9978 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9979 if (TREE_CODE (arg0) == ADDR_EXPR
9980 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9982 tree iref = TREE_OPERAND (arg0, 0);
9983 return fold_build2 (MEM_REF, type,
9984 TREE_OPERAND (iref, 0),
9985 int_const_binop (PLUS_EXPR, arg1,
9986 TREE_OPERAND (iref, 1)));
9989 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9990 if (TREE_CODE (arg0) == ADDR_EXPR
9991 && handled_component_p (TREE_OPERAND (arg0, 0)))
9993 tree base;
9994 HOST_WIDE_INT coffset;
9995 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9996 &coffset);
9997 if (!base)
9998 return NULL_TREE;
9999 return fold_build2 (MEM_REF, type,
10000 build_fold_addr_expr (base),
10001 int_const_binop (PLUS_EXPR, arg1,
10002 size_int (coffset)));
10005 return NULL_TREE;
10007 case POINTER_PLUS_EXPR:
10008 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10009 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10010 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10011 return fold_convert_loc (loc, type,
10012 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10013 fold_convert_loc (loc, sizetype,
10014 arg1),
10015 fold_convert_loc (loc, sizetype,
10016 arg0)));
10018 return NULL_TREE;
10020 case PLUS_EXPR:
10021 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10023 /* X + (X / CST) * -CST is X % CST. */
10024 if (TREE_CODE (arg1) == MULT_EXPR
10025 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10026 && operand_equal_p (arg0,
10027 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10029 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10030 tree cst1 = TREE_OPERAND (arg1, 1);
10031 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10032 cst1, cst0);
10033 if (sum && integer_zerop (sum))
10034 return fold_convert_loc (loc, type,
10035 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10036 TREE_TYPE (arg0), arg0,
10037 cst0));
10041 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10042 one. Make sure the type is not saturating and has the signedness of
10043 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10044 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10045 if ((TREE_CODE (arg0) == MULT_EXPR
10046 || TREE_CODE (arg1) == MULT_EXPR)
10047 && !TYPE_SATURATING (type)
10048 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10049 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10050 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10052 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10053 if (tem)
10054 return tem;
10057 if (! FLOAT_TYPE_P (type))
10059 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10060 with a constant, and the two constants have no bits in common,
10061 we should treat this as a BIT_IOR_EXPR since this may produce more
10062 simplifications. */
10063 if (TREE_CODE (arg0) == BIT_AND_EXPR
10064 && TREE_CODE (arg1) == BIT_AND_EXPR
10065 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10066 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10067 && wi::bit_and (TREE_OPERAND (arg0, 1),
10068 TREE_OPERAND (arg1, 1)) == 0)
10070 code = BIT_IOR_EXPR;
10071 goto bit_ior;
10074 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10075 (plus (plus (mult) (mult)) (foo)) so that we can
10076 take advantage of the factoring cases below. */
10077 if (TYPE_OVERFLOW_WRAPS (type)
10078 && (((TREE_CODE (arg0) == PLUS_EXPR
10079 || TREE_CODE (arg0) == MINUS_EXPR)
10080 && TREE_CODE (arg1) == MULT_EXPR)
10081 || ((TREE_CODE (arg1) == PLUS_EXPR
10082 || TREE_CODE (arg1) == MINUS_EXPR)
10083 && TREE_CODE (arg0) == MULT_EXPR)))
10085 tree parg0, parg1, parg, marg;
10086 enum tree_code pcode;
10088 if (TREE_CODE (arg1) == MULT_EXPR)
10089 parg = arg0, marg = arg1;
10090 else
10091 parg = arg1, marg = arg0;
10092 pcode = TREE_CODE (parg);
10093 parg0 = TREE_OPERAND (parg, 0);
10094 parg1 = TREE_OPERAND (parg, 1);
10095 STRIP_NOPS (parg0);
10096 STRIP_NOPS (parg1);
10098 if (TREE_CODE (parg0) == MULT_EXPR
10099 && TREE_CODE (parg1) != MULT_EXPR)
10100 return fold_build2_loc (loc, pcode, type,
10101 fold_build2_loc (loc, PLUS_EXPR, type,
10102 fold_convert_loc (loc, type,
10103 parg0),
10104 fold_convert_loc (loc, type,
10105 marg)),
10106 fold_convert_loc (loc, type, parg1));
10107 if (TREE_CODE (parg0) != MULT_EXPR
10108 && TREE_CODE (parg1) == MULT_EXPR)
10109 return
10110 fold_build2_loc (loc, PLUS_EXPR, type,
10111 fold_convert_loc (loc, type, parg0),
10112 fold_build2_loc (loc, pcode, type,
10113 fold_convert_loc (loc, type, marg),
10114 fold_convert_loc (loc, type,
10115 parg1)));
10118 else
10120 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10121 to __complex__ ( x, y ). This is not the same for SNaNs or
10122 if signed zeros are involved. */
10123 if (!HONOR_SNANS (element_mode (arg0))
10124 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10125 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10127 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10128 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10129 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10130 bool arg0rz = false, arg0iz = false;
10131 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10132 || (arg0i && (arg0iz = real_zerop (arg0i))))
10134 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10135 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10136 if (arg0rz && arg1i && real_zerop (arg1i))
10138 tree rp = arg1r ? arg1r
10139 : build1 (REALPART_EXPR, rtype, arg1);
10140 tree ip = arg0i ? arg0i
10141 : build1 (IMAGPART_EXPR, rtype, arg0);
10142 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10144 else if (arg0iz && arg1r && real_zerop (arg1r))
10146 tree rp = arg0r ? arg0r
10147 : build1 (REALPART_EXPR, rtype, arg0);
10148 tree ip = arg1i ? arg1i
10149 : build1 (IMAGPART_EXPR, rtype, arg1);
10150 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10155 if (flag_unsafe_math_optimizations
10156 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10157 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10158 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10159 return tem;
10161 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10162 We associate floats only if the user has specified
10163 -fassociative-math. */
10164 if (flag_associative_math
10165 && TREE_CODE (arg1) == PLUS_EXPR
10166 && TREE_CODE (arg0) != MULT_EXPR)
10168 tree tree10 = TREE_OPERAND (arg1, 0);
10169 tree tree11 = TREE_OPERAND (arg1, 1);
10170 if (TREE_CODE (tree11) == MULT_EXPR
10171 && TREE_CODE (tree10) == MULT_EXPR)
10173 tree tree0;
10174 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10175 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10178 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10179 We associate floats only if the user has specified
10180 -fassociative-math. */
10181 if (flag_associative_math
10182 && TREE_CODE (arg0) == PLUS_EXPR
10183 && TREE_CODE (arg1) != MULT_EXPR)
10185 tree tree00 = TREE_OPERAND (arg0, 0);
10186 tree tree01 = TREE_OPERAND (arg0, 1);
10187 if (TREE_CODE (tree01) == MULT_EXPR
10188 && TREE_CODE (tree00) == MULT_EXPR)
10190 tree tree0;
10191 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10192 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10197 bit_rotate:
10198 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10199 is a rotate of A by C1 bits. */
10200 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10201 is a rotate of A by B bits. */
10203 enum tree_code code0, code1;
10204 tree rtype;
10205 code0 = TREE_CODE (arg0);
10206 code1 = TREE_CODE (arg1);
10207 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10208 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10209 && operand_equal_p (TREE_OPERAND (arg0, 0),
10210 TREE_OPERAND (arg1, 0), 0)
10211 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10212 TYPE_UNSIGNED (rtype))
10213 /* Only create rotates in complete modes. Other cases are not
10214 expanded properly. */
10215 && (element_precision (rtype)
10216 == element_precision (TYPE_MODE (rtype))))
10218 tree tree01, tree11;
10219 enum tree_code code01, code11;
10221 tree01 = TREE_OPERAND (arg0, 1);
10222 tree11 = TREE_OPERAND (arg1, 1);
10223 STRIP_NOPS (tree01);
10224 STRIP_NOPS (tree11);
10225 code01 = TREE_CODE (tree01);
10226 code11 = TREE_CODE (tree11);
10227 if (code01 == INTEGER_CST
10228 && code11 == INTEGER_CST
10229 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10230 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10232 tem = build2_loc (loc, LROTATE_EXPR,
10233 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10234 TREE_OPERAND (arg0, 0),
10235 code0 == LSHIFT_EXPR ? tree01 : tree11);
10236 return fold_convert_loc (loc, type, tem);
10238 else if (code11 == MINUS_EXPR)
10240 tree tree110, tree111;
10241 tree110 = TREE_OPERAND (tree11, 0);
10242 tree111 = TREE_OPERAND (tree11, 1);
10243 STRIP_NOPS (tree110);
10244 STRIP_NOPS (tree111);
10245 if (TREE_CODE (tree110) == INTEGER_CST
10246 && 0 == compare_tree_int (tree110,
10247 element_precision
10248 (TREE_TYPE (TREE_OPERAND
10249 (arg0, 0))))
10250 && operand_equal_p (tree01, tree111, 0))
10251 return
10252 fold_convert_loc (loc, type,
10253 build2 ((code0 == LSHIFT_EXPR
10254 ? LROTATE_EXPR
10255 : RROTATE_EXPR),
10256 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10257 TREE_OPERAND (arg0, 0), tree01));
10259 else if (code01 == MINUS_EXPR)
10261 tree tree010, tree011;
10262 tree010 = TREE_OPERAND (tree01, 0);
10263 tree011 = TREE_OPERAND (tree01, 1);
10264 STRIP_NOPS (tree010);
10265 STRIP_NOPS (tree011);
10266 if (TREE_CODE (tree010) == INTEGER_CST
10267 && 0 == compare_tree_int (tree010,
10268 element_precision
10269 (TREE_TYPE (TREE_OPERAND
10270 (arg0, 0))))
10271 && operand_equal_p (tree11, tree011, 0))
10272 return fold_convert_loc
10273 (loc, type,
10274 build2 ((code0 != LSHIFT_EXPR
10275 ? LROTATE_EXPR
10276 : RROTATE_EXPR),
10277 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10278 TREE_OPERAND (arg0, 0), tree11));
10283 associate:
10284 /* In most languages, can't associate operations on floats through
10285 parentheses. Rather than remember where the parentheses were, we
10286 don't associate floats at all, unless the user has specified
10287 -fassociative-math.
10288 And, we need to make sure type is not saturating. */
10290 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10291 && !TYPE_SATURATING (type))
10293 tree var0, con0, lit0, minus_lit0;
10294 tree var1, con1, lit1, minus_lit1;
10295 tree atype = type;
10296 bool ok = true;
10298 /* Split both trees into variables, constants, and literals. Then
10299 associate each group together, the constants with literals,
10300 then the result with variables. This increases the chances of
10301 literals being recombined later and of generating relocatable
10302 expressions for the sum of a constant and literal. */
10303 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10304 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10305 code == MINUS_EXPR);
10307 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10308 if (code == MINUS_EXPR)
10309 code = PLUS_EXPR;
10311 /* With undefined overflow prefer doing association in a type
10312 which wraps on overflow, if that is one of the operand types. */
10313 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10314 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10316 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10317 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10318 atype = TREE_TYPE (arg0);
10319 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10320 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10321 atype = TREE_TYPE (arg1);
10322 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10325 /* With undefined overflow we can only associate constants with one
10326 variable, and constants whose association doesn't overflow. */
10327 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10328 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10330 if (var0 && var1)
10332 tree tmp0 = var0;
10333 tree tmp1 = var1;
10335 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10336 tmp0 = TREE_OPERAND (tmp0, 0);
10337 if (CONVERT_EXPR_P (tmp0)
10338 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10339 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10340 <= TYPE_PRECISION (atype)))
10341 tmp0 = TREE_OPERAND (tmp0, 0);
10342 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10343 tmp1 = TREE_OPERAND (tmp1, 0);
10344 if (CONVERT_EXPR_P (tmp1)
10345 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10346 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10347 <= TYPE_PRECISION (atype)))
10348 tmp1 = TREE_OPERAND (tmp1, 0);
10349 /* The only case we can still associate with two variables
10350 is if they are the same, modulo negation and bit-pattern
10351 preserving conversions. */
10352 if (!operand_equal_p (tmp0, tmp1, 0))
10353 ok = false;
10357 /* Only do something if we found more than two objects. Otherwise,
10358 nothing has changed and we risk infinite recursion. */
10359 if (ok
10360 && (2 < ((var0 != 0) + (var1 != 0)
10361 + (con0 != 0) + (con1 != 0)
10362 + (lit0 != 0) + (lit1 != 0)
10363 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10365 bool any_overflows = false;
10366 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10367 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10368 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10369 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10370 var0 = associate_trees (loc, var0, var1, code, atype);
10371 con0 = associate_trees (loc, con0, con1, code, atype);
10372 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10373 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10374 code, atype);
10376 /* Preserve the MINUS_EXPR if the negative part of the literal is
10377 greater than the positive part. Otherwise, the multiplicative
10378 folding code (i.e extract_muldiv) may be fooled in case
10379 unsigned constants are subtracted, like in the following
10380 example: ((X*2 + 4) - 8U)/2. */
10381 if (minus_lit0 && lit0)
10383 if (TREE_CODE (lit0) == INTEGER_CST
10384 && TREE_CODE (minus_lit0) == INTEGER_CST
10385 && tree_int_cst_lt (lit0, minus_lit0))
10387 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10388 MINUS_EXPR, atype);
10389 lit0 = 0;
10391 else
10393 lit0 = associate_trees (loc, lit0, minus_lit0,
10394 MINUS_EXPR, atype);
10395 minus_lit0 = 0;
10399 /* Don't introduce overflows through reassociation. */
10400 if (!any_overflows
10401 && ((lit0 && TREE_OVERFLOW (lit0))
10402 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10403 return NULL_TREE;
10405 if (minus_lit0)
10407 if (con0 == 0)
10408 return
10409 fold_convert_loc (loc, type,
10410 associate_trees (loc, var0, minus_lit0,
10411 MINUS_EXPR, atype));
10412 else
10414 con0 = associate_trees (loc, con0, minus_lit0,
10415 MINUS_EXPR, atype);
10416 return
10417 fold_convert_loc (loc, type,
10418 associate_trees (loc, var0, con0,
10419 PLUS_EXPR, atype));
10423 con0 = associate_trees (loc, con0, lit0, code, atype);
10424 return
10425 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10426 code, atype));
10430 return NULL_TREE;
10432 case MINUS_EXPR:
10433 /* Pointer simplifications for subtraction, simple reassociations. */
10434 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10436 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10437 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10438 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10440 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10441 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10442 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10443 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10444 return fold_build2_loc (loc, PLUS_EXPR, type,
10445 fold_build2_loc (loc, MINUS_EXPR, type,
10446 arg00, arg10),
10447 fold_build2_loc (loc, MINUS_EXPR, type,
10448 arg01, arg11));
10450 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10451 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10453 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10454 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10455 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10456 fold_convert_loc (loc, type, arg1));
10457 if (tmp)
10458 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10460 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10461 simplifies. */
10462 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10464 tree arg10 = fold_convert_loc (loc, type,
10465 TREE_OPERAND (arg1, 0));
10466 tree arg11 = fold_convert_loc (loc, type,
10467 TREE_OPERAND (arg1, 1));
10468 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10469 fold_convert_loc (loc, type, arg0),
10470 arg10);
10471 if (tmp)
10472 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10475 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10476 if (TREE_CODE (arg0) == NEGATE_EXPR
10477 && negate_expr_p (arg1)
10478 && reorder_operands_p (arg0, arg1))
10479 return fold_build2_loc (loc, MINUS_EXPR, type,
10480 fold_convert_loc (loc, type,
10481 negate_expr (arg1)),
10482 fold_convert_loc (loc, type,
10483 TREE_OPERAND (arg0, 0)));
10485 /* X - (X / Y) * Y is X % Y. */
10486 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10487 && TREE_CODE (arg1) == MULT_EXPR
10488 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10489 && operand_equal_p (arg0,
10490 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10491 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10492 TREE_OPERAND (arg1, 1), 0))
10493 return
10494 fold_convert_loc (loc, type,
10495 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10496 arg0, TREE_OPERAND (arg1, 1)));
10498 if (! FLOAT_TYPE_P (type))
10500 /* Fold A - (A & B) into ~B & A. */
10501 if (!TREE_SIDE_EFFECTS (arg0)
10502 && TREE_CODE (arg1) == BIT_AND_EXPR)
10504 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10506 tree arg10 = fold_convert_loc (loc, type,
10507 TREE_OPERAND (arg1, 0));
10508 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10509 fold_build1_loc (loc, BIT_NOT_EXPR,
10510 type, arg10),
10511 fold_convert_loc (loc, type, arg0));
10513 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10515 tree arg11 = fold_convert_loc (loc,
10516 type, TREE_OPERAND (arg1, 1));
10517 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10518 fold_build1_loc (loc, BIT_NOT_EXPR,
10519 type, arg11),
10520 fold_convert_loc (loc, type, arg0));
10524 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10525 any power of 2 minus 1. */
10526 if (TREE_CODE (arg0) == BIT_AND_EXPR
10527 && TREE_CODE (arg1) == BIT_AND_EXPR
10528 && operand_equal_p (TREE_OPERAND (arg0, 0),
10529 TREE_OPERAND (arg1, 0), 0))
10531 tree mask0 = TREE_OPERAND (arg0, 1);
10532 tree mask1 = TREE_OPERAND (arg1, 1);
10533 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10535 if (operand_equal_p (tem, mask1, 0))
10537 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10538 TREE_OPERAND (arg0, 0), mask1);
10539 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10544 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10545 __complex__ ( x, -y ). This is not the same for SNaNs or if
10546 signed zeros are involved. */
10547 if (!HONOR_SNANS (element_mode (arg0))
10548 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10549 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10551 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10552 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10553 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10554 bool arg0rz = false, arg0iz = false;
10555 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10556 || (arg0i && (arg0iz = real_zerop (arg0i))))
10558 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10559 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10560 if (arg0rz && arg1i && real_zerop (arg1i))
10562 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10563 arg1r ? arg1r
10564 : build1 (REALPART_EXPR, rtype, arg1));
10565 tree ip = arg0i ? arg0i
10566 : build1 (IMAGPART_EXPR, rtype, arg0);
10567 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10569 else if (arg0iz && arg1r && real_zerop (arg1r))
10571 tree rp = arg0r ? arg0r
10572 : build1 (REALPART_EXPR, rtype, arg0);
10573 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10574 arg1i ? arg1i
10575 : build1 (IMAGPART_EXPR, rtype, arg1));
10576 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10581 /* A - B -> A + (-B) if B is easily negatable. */
10582 if (negate_expr_p (arg1)
10583 && !TYPE_OVERFLOW_SANITIZED (type)
10584 && ((FLOAT_TYPE_P (type)
10585 /* Avoid this transformation if B is a positive REAL_CST. */
10586 && (TREE_CODE (arg1) != REAL_CST
10587 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10588 || INTEGRAL_TYPE_P (type)))
10589 return fold_build2_loc (loc, PLUS_EXPR, type,
10590 fold_convert_loc (loc, type, arg0),
10591 fold_convert_loc (loc, type,
10592 negate_expr (arg1)));
10594 /* Try folding difference of addresses. */
10596 HOST_WIDE_INT diff;
10598 if ((TREE_CODE (arg0) == ADDR_EXPR
10599 || TREE_CODE (arg1) == ADDR_EXPR)
10600 && ptr_difference_const (arg0, arg1, &diff))
10601 return build_int_cst_type (type, diff);
10604 /* Fold &a[i] - &a[j] to i-j. */
10605 if (TREE_CODE (arg0) == ADDR_EXPR
10606 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10607 && TREE_CODE (arg1) == ADDR_EXPR
10608 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10610 tree tem = fold_addr_of_array_ref_difference (loc, type,
10611 TREE_OPERAND (arg0, 0),
10612 TREE_OPERAND (arg1, 0));
10613 if (tem)
10614 return tem;
10617 if (FLOAT_TYPE_P (type)
10618 && flag_unsafe_math_optimizations
10619 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10620 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10621 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10622 return tem;
10624 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10625 one. Make sure the type is not saturating and has the signedness of
10626 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10627 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10628 if ((TREE_CODE (arg0) == MULT_EXPR
10629 || TREE_CODE (arg1) == MULT_EXPR)
10630 && !TYPE_SATURATING (type)
10631 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10632 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10633 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10635 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10636 if (tem)
10637 return tem;
10640 goto associate;
10642 case MULT_EXPR:
10643 /* (-A) * (-B) -> A * B */
10644 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10645 return fold_build2_loc (loc, MULT_EXPR, type,
10646 fold_convert_loc (loc, type,
10647 TREE_OPERAND (arg0, 0)),
10648 fold_convert_loc (loc, type,
10649 negate_expr (arg1)));
10650 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10651 return fold_build2_loc (loc, MULT_EXPR, type,
10652 fold_convert_loc (loc, type,
10653 negate_expr (arg0)),
10654 fold_convert_loc (loc, type,
10655 TREE_OPERAND (arg1, 0)));
10657 if (! FLOAT_TYPE_P (type))
10659 /* Transform x * -C into -x * C if x is easily negatable. */
10660 if (TREE_CODE (arg1) == INTEGER_CST
10661 && tree_int_cst_sgn (arg1) == -1
10662 && negate_expr_p (arg0)
10663 && (tem = negate_expr (arg1)) != arg1
10664 && !TREE_OVERFLOW (tem))
10665 return fold_build2_loc (loc, MULT_EXPR, type,
10666 fold_convert_loc (loc, type,
10667 negate_expr (arg0)),
10668 tem);
10670 /* (a * (1 << b)) is (a << b) */
10671 if (TREE_CODE (arg1) == LSHIFT_EXPR
10672 && integer_onep (TREE_OPERAND (arg1, 0)))
10673 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10674 TREE_OPERAND (arg1, 1));
10675 if (TREE_CODE (arg0) == LSHIFT_EXPR
10676 && integer_onep (TREE_OPERAND (arg0, 0)))
10677 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10678 TREE_OPERAND (arg0, 1));
10680 /* (A + A) * C -> A * 2 * C */
10681 if (TREE_CODE (arg0) == PLUS_EXPR
10682 && TREE_CODE (arg1) == INTEGER_CST
10683 && operand_equal_p (TREE_OPERAND (arg0, 0),
10684 TREE_OPERAND (arg0, 1), 0))
10685 return fold_build2_loc (loc, MULT_EXPR, type,
10686 omit_one_operand_loc (loc, type,
10687 TREE_OPERAND (arg0, 0),
10688 TREE_OPERAND (arg0, 1)),
10689 fold_build2_loc (loc, MULT_EXPR, type,
10690 build_int_cst (type, 2) , arg1));
10692 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10693 sign-changing only. */
10694 if (TREE_CODE (arg1) == INTEGER_CST
10695 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10696 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10697 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10699 strict_overflow_p = false;
10700 if (TREE_CODE (arg1) == INTEGER_CST
10701 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10702 &strict_overflow_p)))
10704 if (strict_overflow_p)
10705 fold_overflow_warning (("assuming signed overflow does not "
10706 "occur when simplifying "
10707 "multiplication"),
10708 WARN_STRICT_OVERFLOW_MISC);
10709 return fold_convert_loc (loc, type, tem);
10712 /* Optimize z * conj(z) for integer complex numbers. */
10713 if (TREE_CODE (arg0) == CONJ_EXPR
10714 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10715 return fold_mult_zconjz (loc, type, arg1);
10716 if (TREE_CODE (arg1) == CONJ_EXPR
10717 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10718 return fold_mult_zconjz (loc, type, arg0);
10720 else
10722 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10723 the result for floating point types due to rounding so it is applied
10724 only if -fassociative-math was specify. */
10725 if (flag_associative_math
10726 && TREE_CODE (arg0) == RDIV_EXPR
10727 && TREE_CODE (arg1) == REAL_CST
10728 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10730 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10731 arg1);
10732 if (tem)
10733 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10734 TREE_OPERAND (arg0, 1));
10737 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10738 if (operand_equal_p (arg0, arg1, 0))
10740 tree tem = fold_strip_sign_ops (arg0);
10741 if (tem != NULL_TREE)
10743 tem = fold_convert_loc (loc, type, tem);
10744 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10748 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10749 This is not the same for NaNs or if signed zeros are
10750 involved. */
10751 if (!HONOR_NANS (element_mode (arg0))
10752 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10753 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10754 && TREE_CODE (arg1) == COMPLEX_CST
10755 && real_zerop (TREE_REALPART (arg1)))
10757 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10758 if (real_onep (TREE_IMAGPART (arg1)))
10759 return
10760 fold_build2_loc (loc, COMPLEX_EXPR, type,
10761 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10762 rtype, arg0)),
10763 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10764 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10765 return
10766 fold_build2_loc (loc, COMPLEX_EXPR, type,
10767 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10768 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10769 rtype, arg0)));
10772 /* Optimize z * conj(z) for floating point complex numbers.
10773 Guarded by flag_unsafe_math_optimizations as non-finite
10774 imaginary components don't produce scalar results. */
10775 if (flag_unsafe_math_optimizations
10776 && TREE_CODE (arg0) == CONJ_EXPR
10777 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10778 return fold_mult_zconjz (loc, type, arg1);
10779 if (flag_unsafe_math_optimizations
10780 && TREE_CODE (arg1) == CONJ_EXPR
10781 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10782 return fold_mult_zconjz (loc, type, arg0);
10784 if (flag_unsafe_math_optimizations)
10786 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10787 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10789 /* Optimizations of root(...)*root(...). */
10790 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10792 tree rootfn, arg;
10793 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10794 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10796 /* Optimize sqrt(x)*sqrt(x) as x. */
10797 if (BUILTIN_SQRT_P (fcode0)
10798 && operand_equal_p (arg00, arg10, 0)
10799 && ! HONOR_SNANS (element_mode (type)))
10800 return arg00;
10802 /* Optimize root(x)*root(y) as root(x*y). */
10803 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10804 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10805 return build_call_expr_loc (loc, rootfn, 1, arg);
10808 /* Optimize expN(x)*expN(y) as expN(x+y). */
10809 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10811 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10812 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10813 CALL_EXPR_ARG (arg0, 0),
10814 CALL_EXPR_ARG (arg1, 0));
10815 return build_call_expr_loc (loc, expfn, 1, arg);
10818 /* Optimizations of pow(...)*pow(...). */
10819 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10820 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10821 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10823 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10824 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10825 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10826 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10828 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10829 if (operand_equal_p (arg01, arg11, 0))
10831 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10832 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10833 arg00, arg10);
10834 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10837 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10838 if (operand_equal_p (arg00, arg10, 0))
10840 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10841 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10842 arg01, arg11);
10843 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10847 /* Optimize tan(x)*cos(x) as sin(x). */
10848 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10849 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10850 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10851 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10852 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10853 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10854 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10855 CALL_EXPR_ARG (arg1, 0), 0))
10857 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10859 if (sinfn != NULL_TREE)
10860 return build_call_expr_loc (loc, sinfn, 1,
10861 CALL_EXPR_ARG (arg0, 0));
10864 /* Optimize x*pow(x,c) as pow(x,c+1). */
10865 if (fcode1 == BUILT_IN_POW
10866 || fcode1 == BUILT_IN_POWF
10867 || fcode1 == BUILT_IN_POWL)
10869 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10870 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10871 if (TREE_CODE (arg11) == REAL_CST
10872 && !TREE_OVERFLOW (arg11)
10873 && operand_equal_p (arg0, arg10, 0))
10875 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10876 REAL_VALUE_TYPE c;
10877 tree arg;
10879 c = TREE_REAL_CST (arg11);
10880 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10881 arg = build_real (type, c);
10882 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10886 /* Optimize pow(x,c)*x as pow(x,c+1). */
10887 if (fcode0 == BUILT_IN_POW
10888 || fcode0 == BUILT_IN_POWF
10889 || fcode0 == BUILT_IN_POWL)
10891 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10892 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10893 if (TREE_CODE (arg01) == REAL_CST
10894 && !TREE_OVERFLOW (arg01)
10895 && operand_equal_p (arg1, arg00, 0))
10897 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10898 REAL_VALUE_TYPE c;
10899 tree arg;
10901 c = TREE_REAL_CST (arg01);
10902 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10903 arg = build_real (type, c);
10904 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10908 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10909 if (!in_gimple_form
10910 && optimize
10911 && operand_equal_p (arg0, arg1, 0))
10913 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10915 if (powfn)
10917 tree arg = build_real (type, dconst2);
10918 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10923 goto associate;
10925 case BIT_IOR_EXPR:
10926 bit_ior:
10927 /* ~X | X is -1. */
10928 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10929 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10931 t1 = build_zero_cst (type);
10932 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10933 return omit_one_operand_loc (loc, type, t1, arg1);
10936 /* X | ~X is -1. */
10937 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10938 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10940 t1 = build_zero_cst (type);
10941 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10942 return omit_one_operand_loc (loc, type, t1, arg0);
10945 /* Canonicalize (X & C1) | C2. */
10946 if (TREE_CODE (arg0) == BIT_AND_EXPR
10947 && TREE_CODE (arg1) == INTEGER_CST
10948 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10950 int width = TYPE_PRECISION (type), w;
10951 wide_int c1 = TREE_OPERAND (arg0, 1);
10952 wide_int c2 = arg1;
10954 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10955 if ((c1 & c2) == c1)
10956 return omit_one_operand_loc (loc, type, arg1,
10957 TREE_OPERAND (arg0, 0));
10959 wide_int msk = wi::mask (width, false,
10960 TYPE_PRECISION (TREE_TYPE (arg1)));
10962 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10963 if (msk.and_not (c1 | c2) == 0)
10964 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10965 TREE_OPERAND (arg0, 0), arg1);
10967 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10968 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10969 mode which allows further optimizations. */
10970 c1 &= msk;
10971 c2 &= msk;
10972 wide_int c3 = c1.and_not (c2);
10973 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10975 wide_int mask = wi::mask (w, false,
10976 TYPE_PRECISION (type));
10977 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10979 c3 = mask;
10980 break;
10984 if (c3 != c1)
10985 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10986 fold_build2_loc (loc, BIT_AND_EXPR, type,
10987 TREE_OPERAND (arg0, 0),
10988 wide_int_to_tree (type,
10989 c3)),
10990 arg1);
10993 /* (X & ~Y) | (~X & Y) is X ^ Y */
10994 if (TREE_CODE (arg0) == BIT_AND_EXPR
10995 && TREE_CODE (arg1) == BIT_AND_EXPR)
10997 tree a0, a1, l0, l1, n0, n1;
10999 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11000 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11002 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11003 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11005 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11006 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11008 if ((operand_equal_p (n0, a0, 0)
11009 && operand_equal_p (n1, a1, 0))
11010 || (operand_equal_p (n0, a1, 0)
11011 && operand_equal_p (n1, a0, 0)))
11012 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11015 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11016 if (t1 != NULL_TREE)
11017 return t1;
11019 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11021 This results in more efficient code for machines without a NAND
11022 instruction. Combine will canonicalize to the first form
11023 which will allow use of NAND instructions provided by the
11024 backend if they exist. */
11025 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11026 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11028 return
11029 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11030 build2 (BIT_AND_EXPR, type,
11031 fold_convert_loc (loc, type,
11032 TREE_OPERAND (arg0, 0)),
11033 fold_convert_loc (loc, type,
11034 TREE_OPERAND (arg1, 0))));
11037 /* See if this can be simplified into a rotate first. If that
11038 is unsuccessful continue in the association code. */
11039 goto bit_rotate;
11041 case BIT_XOR_EXPR:
11042 /* ~X ^ X is -1. */
11043 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11044 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11046 t1 = build_zero_cst (type);
11047 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11048 return omit_one_operand_loc (loc, type, t1, arg1);
11051 /* X ^ ~X is -1. */
11052 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11053 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11055 t1 = build_zero_cst (type);
11056 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11057 return omit_one_operand_loc (loc, type, t1, arg0);
11060 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11061 with a constant, and the two constants have no bits in common,
11062 we should treat this as a BIT_IOR_EXPR since this may produce more
11063 simplifications. */
11064 if (TREE_CODE (arg0) == BIT_AND_EXPR
11065 && TREE_CODE (arg1) == BIT_AND_EXPR
11066 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11067 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11068 && wi::bit_and (TREE_OPERAND (arg0, 1),
11069 TREE_OPERAND (arg1, 1)) == 0)
11071 code = BIT_IOR_EXPR;
11072 goto bit_ior;
11075 /* (X | Y) ^ X -> Y & ~ X*/
11076 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11077 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11079 tree t2 = TREE_OPERAND (arg0, 1);
11080 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11081 arg1);
11082 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11083 fold_convert_loc (loc, type, t2),
11084 fold_convert_loc (loc, type, t1));
11085 return t1;
11088 /* (Y | X) ^ X -> Y & ~ X*/
11089 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11090 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11092 tree t2 = TREE_OPERAND (arg0, 0);
11093 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11094 arg1);
11095 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11096 fold_convert_loc (loc, type, t2),
11097 fold_convert_loc (loc, type, t1));
11098 return t1;
11101 /* X ^ (X | Y) -> Y & ~ X*/
11102 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11103 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11105 tree t2 = TREE_OPERAND (arg1, 1);
11106 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11107 arg0);
11108 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11109 fold_convert_loc (loc, type, t2),
11110 fold_convert_loc (loc, type, t1));
11111 return t1;
11114 /* X ^ (Y | X) -> Y & ~ X*/
11115 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11116 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11118 tree t2 = TREE_OPERAND (arg1, 0);
11119 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11120 arg0);
11121 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11122 fold_convert_loc (loc, type, t2),
11123 fold_convert_loc (loc, type, t1));
11124 return t1;
11127 /* Convert ~X ^ ~Y to X ^ Y. */
11128 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11129 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11130 return fold_build2_loc (loc, code, type,
11131 fold_convert_loc (loc, type,
11132 TREE_OPERAND (arg0, 0)),
11133 fold_convert_loc (loc, type,
11134 TREE_OPERAND (arg1, 0)));
11136 /* Convert ~X ^ C to X ^ ~C. */
11137 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11138 && TREE_CODE (arg1) == INTEGER_CST)
11139 return fold_build2_loc (loc, code, type,
11140 fold_convert_loc (loc, type,
11141 TREE_OPERAND (arg0, 0)),
11142 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11144 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11145 if (TREE_CODE (arg0) == BIT_AND_EXPR
11146 && INTEGRAL_TYPE_P (type)
11147 && integer_onep (TREE_OPERAND (arg0, 1))
11148 && integer_onep (arg1))
11149 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11150 build_zero_cst (TREE_TYPE (arg0)));
11152 /* Fold (X & Y) ^ Y as ~X & Y. */
11153 if (TREE_CODE (arg0) == BIT_AND_EXPR
11154 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11156 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11157 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11158 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11159 fold_convert_loc (loc, type, arg1));
11161 /* Fold (X & Y) ^ X as ~Y & X. */
11162 if (TREE_CODE (arg0) == BIT_AND_EXPR
11163 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11164 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11166 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11167 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11168 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11169 fold_convert_loc (loc, type, arg1));
11171 /* Fold X ^ (X & Y) as X & ~Y. */
11172 if (TREE_CODE (arg1) == BIT_AND_EXPR
11173 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11175 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11176 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11177 fold_convert_loc (loc, type, arg0),
11178 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11180 /* Fold X ^ (Y & X) as ~Y & X. */
11181 if (TREE_CODE (arg1) == BIT_AND_EXPR
11182 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11183 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11185 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11186 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11187 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11188 fold_convert_loc (loc, type, arg0));
11191 /* See if this can be simplified into a rotate first. If that
11192 is unsuccessful continue in the association code. */
11193 goto bit_rotate;
11195 case BIT_AND_EXPR:
11196 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11197 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11198 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11199 || (TREE_CODE (arg0) == EQ_EXPR
11200 && integer_zerop (TREE_OPERAND (arg0, 1))))
11201 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11202 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11204 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11205 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11206 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11207 || (TREE_CODE (arg1) == EQ_EXPR
11208 && integer_zerop (TREE_OPERAND (arg1, 1))))
11209 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11210 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11212 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11213 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11214 && INTEGRAL_TYPE_P (type)
11215 && integer_onep (TREE_OPERAND (arg0, 1))
11216 && integer_onep (arg1))
11218 tree tem2;
11219 tem = TREE_OPERAND (arg0, 0);
11220 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11221 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11222 tem, tem2);
11223 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11224 build_zero_cst (TREE_TYPE (tem)));
11226 /* Fold ~X & 1 as (X & 1) == 0. */
11227 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11228 && INTEGRAL_TYPE_P (type)
11229 && integer_onep (arg1))
11231 tree tem2;
11232 tem = TREE_OPERAND (arg0, 0);
11233 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11234 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11235 tem, tem2);
11236 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11237 build_zero_cst (TREE_TYPE (tem)));
11239 /* Fold !X & 1 as X == 0. */
11240 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11241 && integer_onep (arg1))
11243 tem = TREE_OPERAND (arg0, 0);
11244 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11245 build_zero_cst (TREE_TYPE (tem)));
11248 /* Fold (X ^ Y) & Y as ~X & Y. */
11249 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11250 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11252 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11253 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11254 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11255 fold_convert_loc (loc, type, arg1));
11257 /* Fold (X ^ Y) & X as ~Y & X. */
11258 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11259 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11260 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11262 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11263 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11264 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11265 fold_convert_loc (loc, type, arg1));
11267 /* Fold X & (X ^ Y) as X & ~Y. */
11268 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11269 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11271 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11272 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11273 fold_convert_loc (loc, type, arg0),
11274 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11276 /* Fold X & (Y ^ X) as ~Y & X. */
11277 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11278 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11279 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11281 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11282 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11283 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11284 fold_convert_loc (loc, type, arg0));
11287 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11288 multiple of 1 << CST. */
11289 if (TREE_CODE (arg1) == INTEGER_CST)
11291 wide_int cst1 = arg1;
11292 wide_int ncst1 = -cst1;
11293 if ((cst1 & ncst1) == ncst1
11294 && multiple_of_p (type, arg0,
11295 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11296 return fold_convert_loc (loc, type, arg0);
11299 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11300 bits from CST2. */
11301 if (TREE_CODE (arg1) == INTEGER_CST
11302 && TREE_CODE (arg0) == MULT_EXPR
11303 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11305 wide_int warg1 = arg1;
11306 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11308 if (masked == 0)
11309 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11310 arg0, arg1);
11311 else if (masked != warg1)
11313 /* Avoid the transform if arg1 is a mask of some
11314 mode which allows further optimizations. */
11315 int pop = wi::popcount (warg1);
11316 if (!(pop >= BITS_PER_UNIT
11317 && exact_log2 (pop) != -1
11318 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11319 return fold_build2_loc (loc, code, type, op0,
11320 wide_int_to_tree (type, masked));
11324 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11325 ((A & N) + B) & M -> (A + B) & M
11326 Similarly if (N & M) == 0,
11327 ((A | N) + B) & M -> (A + B) & M
11328 and for - instead of + (or unary - instead of +)
11329 and/or ^ instead of |.
11330 If B is constant and (B & M) == 0, fold into A & M. */
11331 if (TREE_CODE (arg1) == INTEGER_CST)
11333 wide_int cst1 = arg1;
11334 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11335 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11336 && (TREE_CODE (arg0) == PLUS_EXPR
11337 || TREE_CODE (arg0) == MINUS_EXPR
11338 || TREE_CODE (arg0) == NEGATE_EXPR)
11339 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11340 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11342 tree pmop[2];
11343 int which = 0;
11344 wide_int cst0;
11346 /* Now we know that arg0 is (C + D) or (C - D) or
11347 -C and arg1 (M) is == (1LL << cst) - 1.
11348 Store C into PMOP[0] and D into PMOP[1]. */
11349 pmop[0] = TREE_OPERAND (arg0, 0);
11350 pmop[1] = NULL;
11351 if (TREE_CODE (arg0) != NEGATE_EXPR)
11353 pmop[1] = TREE_OPERAND (arg0, 1);
11354 which = 1;
11357 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11358 which = -1;
11360 for (; which >= 0; which--)
11361 switch (TREE_CODE (pmop[which]))
11363 case BIT_AND_EXPR:
11364 case BIT_IOR_EXPR:
11365 case BIT_XOR_EXPR:
11366 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11367 != INTEGER_CST)
11368 break;
11369 cst0 = TREE_OPERAND (pmop[which], 1);
11370 cst0 &= cst1;
11371 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11373 if (cst0 != cst1)
11374 break;
11376 else if (cst0 != 0)
11377 break;
11378 /* If C or D is of the form (A & N) where
11379 (N & M) == M, or of the form (A | N) or
11380 (A ^ N) where (N & M) == 0, replace it with A. */
11381 pmop[which] = TREE_OPERAND (pmop[which], 0);
11382 break;
11383 case INTEGER_CST:
11384 /* If C or D is a N where (N & M) == 0, it can be
11385 omitted (assumed 0). */
11386 if ((TREE_CODE (arg0) == PLUS_EXPR
11387 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11388 && (cst1 & pmop[which]) == 0)
11389 pmop[which] = NULL;
11390 break;
11391 default:
11392 break;
11395 /* Only build anything new if we optimized one or both arguments
11396 above. */
11397 if (pmop[0] != TREE_OPERAND (arg0, 0)
11398 || (TREE_CODE (arg0) != NEGATE_EXPR
11399 && pmop[1] != TREE_OPERAND (arg0, 1)))
11401 tree utype = TREE_TYPE (arg0);
11402 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11404 /* Perform the operations in a type that has defined
11405 overflow behavior. */
11406 utype = unsigned_type_for (TREE_TYPE (arg0));
11407 if (pmop[0] != NULL)
11408 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11409 if (pmop[1] != NULL)
11410 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11413 if (TREE_CODE (arg0) == NEGATE_EXPR)
11414 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11415 else if (TREE_CODE (arg0) == PLUS_EXPR)
11417 if (pmop[0] != NULL && pmop[1] != NULL)
11418 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11419 pmop[0], pmop[1]);
11420 else if (pmop[0] != NULL)
11421 tem = pmop[0];
11422 else if (pmop[1] != NULL)
11423 tem = pmop[1];
11424 else
11425 return build_int_cst (type, 0);
11427 else if (pmop[0] == NULL)
11428 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11429 else
11430 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11431 pmop[0], pmop[1]);
11432 /* TEM is now the new binary +, - or unary - replacement. */
11433 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11434 fold_convert_loc (loc, utype, arg1));
11435 return fold_convert_loc (loc, type, tem);
11440 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11441 if (t1 != NULL_TREE)
11442 return t1;
11443 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11444 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11445 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11447 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11449 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11450 if (mask == -1)
11451 return
11452 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11455 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11457 This results in more efficient code for machines without a NOR
11458 instruction. Combine will canonicalize to the first form
11459 which will allow use of NOR instructions provided by the
11460 backend if they exist. */
11461 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11462 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11464 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11465 build2 (BIT_IOR_EXPR, type,
11466 fold_convert_loc (loc, type,
11467 TREE_OPERAND (arg0, 0)),
11468 fold_convert_loc (loc, type,
11469 TREE_OPERAND (arg1, 0))));
11472 /* If arg0 is derived from the address of an object or function, we may
11473 be able to fold this expression using the object or function's
11474 alignment. */
11475 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11477 unsigned HOST_WIDE_INT modulus, residue;
11478 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11480 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11481 integer_onep (arg1));
11483 /* This works because modulus is a power of 2. If this weren't the
11484 case, we'd have to replace it by its greatest power-of-2
11485 divisor: modulus & -modulus. */
11486 if (low < modulus)
11487 return build_int_cst (type, residue & low);
11490 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11491 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11492 if the new mask might be further optimized. */
11493 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11494 || TREE_CODE (arg0) == RSHIFT_EXPR)
11495 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11496 && TREE_CODE (arg1) == INTEGER_CST
11497 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11498 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11499 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11500 < TYPE_PRECISION (TREE_TYPE (arg0))))
11502 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11503 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11504 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11505 tree shift_type = TREE_TYPE (arg0);
11507 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11508 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11509 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11510 && TYPE_PRECISION (TREE_TYPE (arg0))
11511 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11513 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11514 tree arg00 = TREE_OPERAND (arg0, 0);
11515 /* See if more bits can be proven as zero because of
11516 zero extension. */
11517 if (TREE_CODE (arg00) == NOP_EXPR
11518 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11520 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11521 if (TYPE_PRECISION (inner_type)
11522 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11523 && TYPE_PRECISION (inner_type) < prec)
11525 prec = TYPE_PRECISION (inner_type);
11526 /* See if we can shorten the right shift. */
11527 if (shiftc < prec)
11528 shift_type = inner_type;
11529 /* Otherwise X >> C1 is all zeros, so we'll optimize
11530 it into (X, 0) later on by making sure zerobits
11531 is all ones. */
11534 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11535 if (shiftc < prec)
11537 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11538 zerobits <<= prec - shiftc;
11540 /* For arithmetic shift if sign bit could be set, zerobits
11541 can contain actually sign bits, so no transformation is
11542 possible, unless MASK masks them all away. In that
11543 case the shift needs to be converted into logical shift. */
11544 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11545 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11547 if ((mask & zerobits) == 0)
11548 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11549 else
11550 zerobits = 0;
11554 /* ((X << 16) & 0xff00) is (X, 0). */
11555 if ((mask & zerobits) == mask)
11556 return omit_one_operand_loc (loc, type,
11557 build_int_cst (type, 0), arg0);
11559 newmask = mask | zerobits;
11560 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11562 /* Only do the transformation if NEWMASK is some integer
11563 mode's mask. */
11564 for (prec = BITS_PER_UNIT;
11565 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11566 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11567 break;
11568 if (prec < HOST_BITS_PER_WIDE_INT
11569 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11571 tree newmaskt;
11573 if (shift_type != TREE_TYPE (arg0))
11575 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11576 fold_convert_loc (loc, shift_type,
11577 TREE_OPERAND (arg0, 0)),
11578 TREE_OPERAND (arg0, 1));
11579 tem = fold_convert_loc (loc, type, tem);
11581 else
11582 tem = op0;
11583 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11584 if (!tree_int_cst_equal (newmaskt, arg1))
11585 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11590 goto associate;
11592 case RDIV_EXPR:
11593 /* Don't touch a floating-point divide by zero unless the mode
11594 of the constant can represent infinity. */
11595 if (TREE_CODE (arg1) == REAL_CST
11596 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11597 && real_zerop (arg1))
11598 return NULL_TREE;
11600 /* (-A) / (-B) -> A / B */
11601 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11602 return fold_build2_loc (loc, RDIV_EXPR, type,
11603 TREE_OPERAND (arg0, 0),
11604 negate_expr (arg1));
11605 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11606 return fold_build2_loc (loc, RDIV_EXPR, type,
11607 negate_expr (arg0),
11608 TREE_OPERAND (arg1, 0));
11610 /* Convert A/B/C to A/(B*C). */
11611 if (flag_reciprocal_math
11612 && TREE_CODE (arg0) == RDIV_EXPR)
11613 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11614 fold_build2_loc (loc, MULT_EXPR, type,
11615 TREE_OPERAND (arg0, 1), arg1));
11617 /* Convert A/(B/C) to (A/B)*C. */
11618 if (flag_reciprocal_math
11619 && TREE_CODE (arg1) == RDIV_EXPR)
11620 return fold_build2_loc (loc, MULT_EXPR, type,
11621 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11622 TREE_OPERAND (arg1, 0)),
11623 TREE_OPERAND (arg1, 1));
11625 /* Convert C1/(X*C2) into (C1/C2)/X. */
11626 if (flag_reciprocal_math
11627 && TREE_CODE (arg1) == MULT_EXPR
11628 && TREE_CODE (arg0) == REAL_CST
11629 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11631 tree tem = const_binop (RDIV_EXPR, arg0,
11632 TREE_OPERAND (arg1, 1));
11633 if (tem)
11634 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11635 TREE_OPERAND (arg1, 0));
11638 if (flag_unsafe_math_optimizations)
11640 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11641 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11643 /* Optimize sin(x)/cos(x) as tan(x). */
11644 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11645 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11646 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11647 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11648 CALL_EXPR_ARG (arg1, 0), 0))
11650 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11652 if (tanfn != NULL_TREE)
11653 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11656 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11657 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11658 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11659 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11660 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11661 CALL_EXPR_ARG (arg1, 0), 0))
11663 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11665 if (tanfn != NULL_TREE)
11667 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11668 CALL_EXPR_ARG (arg0, 0));
11669 return fold_build2_loc (loc, RDIV_EXPR, type,
11670 build_real (type, dconst1), tmp);
11674 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11675 NaNs or Infinities. */
11676 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11677 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11678 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11680 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11681 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11683 if (! HONOR_NANS (element_mode (arg00))
11684 && ! HONOR_INFINITIES (element_mode (arg00))
11685 && operand_equal_p (arg00, arg01, 0))
11687 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11689 if (cosfn != NULL_TREE)
11690 return build_call_expr_loc (loc, cosfn, 1, arg00);
11694 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11695 NaNs or Infinities. */
11696 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11697 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11698 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11700 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11701 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11703 if (! HONOR_NANS (element_mode (arg00))
11704 && ! HONOR_INFINITIES (element_mode (arg00))
11705 && operand_equal_p (arg00, arg01, 0))
11707 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11709 if (cosfn != NULL_TREE)
11711 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11712 return fold_build2_loc (loc, RDIV_EXPR, type,
11713 build_real (type, dconst1),
11714 tmp);
11719 /* Optimize pow(x,c)/x as pow(x,c-1). */
11720 if (fcode0 == BUILT_IN_POW
11721 || fcode0 == BUILT_IN_POWF
11722 || fcode0 == BUILT_IN_POWL)
11724 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11725 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11726 if (TREE_CODE (arg01) == REAL_CST
11727 && !TREE_OVERFLOW (arg01)
11728 && operand_equal_p (arg1, arg00, 0))
11730 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11731 REAL_VALUE_TYPE c;
11732 tree arg;
11734 c = TREE_REAL_CST (arg01);
11735 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11736 arg = build_real (type, c);
11737 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11741 /* Optimize a/root(b/c) into a*root(c/b). */
11742 if (BUILTIN_ROOT_P (fcode1))
11744 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11746 if (TREE_CODE (rootarg) == RDIV_EXPR)
11748 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11749 tree b = TREE_OPERAND (rootarg, 0);
11750 tree c = TREE_OPERAND (rootarg, 1);
11752 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11754 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11755 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11759 /* Optimize x/expN(y) into x*expN(-y). */
11760 if (BUILTIN_EXPONENT_P (fcode1))
11762 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11763 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11764 arg1 = build_call_expr_loc (loc,
11765 expfn, 1,
11766 fold_convert_loc (loc, type, arg));
11767 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11770 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11771 if (fcode1 == BUILT_IN_POW
11772 || fcode1 == BUILT_IN_POWF
11773 || fcode1 == BUILT_IN_POWL)
11775 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11776 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11777 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11778 tree neg11 = fold_convert_loc (loc, type,
11779 negate_expr (arg11));
11780 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11781 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11784 return NULL_TREE;
11786 case TRUNC_DIV_EXPR:
11787 /* Optimize (X & (-A)) / A where A is a power of 2,
11788 to X >> log2(A) */
11789 if (TREE_CODE (arg0) == BIT_AND_EXPR
11790 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11791 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11793 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11794 arg1, TREE_OPERAND (arg0, 1));
11795 if (sum && integer_zerop (sum)) {
11796 tree pow2 = build_int_cst (integer_type_node,
11797 wi::exact_log2 (arg1));
11798 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11799 TREE_OPERAND (arg0, 0), pow2);
11803 /* Fall through */
11805 case FLOOR_DIV_EXPR:
11806 /* Simplify A / (B << N) where A and B are positive and B is
11807 a power of 2, to A >> (N + log2(B)). */
11808 strict_overflow_p = false;
11809 if (TREE_CODE (arg1) == LSHIFT_EXPR
11810 && (TYPE_UNSIGNED (type)
11811 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11813 tree sval = TREE_OPERAND (arg1, 0);
11814 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11816 tree sh_cnt = TREE_OPERAND (arg1, 1);
11817 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11818 wi::exact_log2 (sval));
11820 if (strict_overflow_p)
11821 fold_overflow_warning (("assuming signed overflow does not "
11822 "occur when simplifying A / (B << N)"),
11823 WARN_STRICT_OVERFLOW_MISC);
11825 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11826 sh_cnt, pow2);
11827 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11828 fold_convert_loc (loc, type, arg0), sh_cnt);
11832 /* Fall through */
11834 case ROUND_DIV_EXPR:
11835 case CEIL_DIV_EXPR:
11836 case EXACT_DIV_EXPR:
11837 if (integer_zerop (arg1))
11838 return NULL_TREE;
11840 /* Convert -A / -B to A / B when the type is signed and overflow is
11841 undefined. */
11842 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11843 && TREE_CODE (arg0) == NEGATE_EXPR
11844 && negate_expr_p (arg1))
11846 if (INTEGRAL_TYPE_P (type))
11847 fold_overflow_warning (("assuming signed overflow does not occur "
11848 "when distributing negation across "
11849 "division"),
11850 WARN_STRICT_OVERFLOW_MISC);
11851 return fold_build2_loc (loc, code, type,
11852 fold_convert_loc (loc, type,
11853 TREE_OPERAND (arg0, 0)),
11854 fold_convert_loc (loc, type,
11855 negate_expr (arg1)));
11857 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11858 && TREE_CODE (arg1) == NEGATE_EXPR
11859 && negate_expr_p (arg0))
11861 if (INTEGRAL_TYPE_P (type))
11862 fold_overflow_warning (("assuming signed overflow does not occur "
11863 "when distributing negation across "
11864 "division"),
11865 WARN_STRICT_OVERFLOW_MISC);
11866 return fold_build2_loc (loc, code, type,
11867 fold_convert_loc (loc, type,
11868 negate_expr (arg0)),
11869 fold_convert_loc (loc, type,
11870 TREE_OPERAND (arg1, 0)));
11873 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11874 operation, EXACT_DIV_EXPR.
11876 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11877 At one time others generated faster code, it's not clear if they do
11878 after the last round to changes to the DIV code in expmed.c. */
11879 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11880 && multiple_of_p (type, arg0, arg1))
11881 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11883 strict_overflow_p = false;
11884 if (TREE_CODE (arg1) == INTEGER_CST
11885 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11886 &strict_overflow_p)))
11888 if (strict_overflow_p)
11889 fold_overflow_warning (("assuming signed overflow does not occur "
11890 "when simplifying division"),
11891 WARN_STRICT_OVERFLOW_MISC);
11892 return fold_convert_loc (loc, type, tem);
11895 return NULL_TREE;
11897 case CEIL_MOD_EXPR:
11898 case FLOOR_MOD_EXPR:
11899 case ROUND_MOD_EXPR:
11900 case TRUNC_MOD_EXPR:
11901 /* X % -Y is the same as X % Y. */
11902 if (code == TRUNC_MOD_EXPR
11903 && !TYPE_UNSIGNED (type)
11904 && TREE_CODE (arg1) == NEGATE_EXPR
11905 && !TYPE_OVERFLOW_TRAPS (type))
11906 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11907 fold_convert_loc (loc, type,
11908 TREE_OPERAND (arg1, 0)));
11910 strict_overflow_p = false;
11911 if (TREE_CODE (arg1) == INTEGER_CST
11912 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11913 &strict_overflow_p)))
11915 if (strict_overflow_p)
11916 fold_overflow_warning (("assuming signed overflow does not occur "
11917 "when simplifying modulus"),
11918 WARN_STRICT_OVERFLOW_MISC);
11919 return fold_convert_loc (loc, type, tem);
11922 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11923 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11924 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11925 && (TYPE_UNSIGNED (type)
11926 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11928 tree c = arg1;
11929 /* Also optimize A % (C << N) where C is a power of 2,
11930 to A & ((C << N) - 1). */
11931 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11932 c = TREE_OPERAND (arg1, 0);
11934 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11936 tree mask
11937 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11938 build_int_cst (TREE_TYPE (arg1), 1));
11939 if (strict_overflow_p)
11940 fold_overflow_warning (("assuming signed overflow does not "
11941 "occur when simplifying "
11942 "X % (power of two)"),
11943 WARN_STRICT_OVERFLOW_MISC);
11944 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11945 fold_convert_loc (loc, type, arg0),
11946 fold_convert_loc (loc, type, mask));
11950 return NULL_TREE;
11952 case LROTATE_EXPR:
11953 case RROTATE_EXPR:
11954 case RSHIFT_EXPR:
11955 case LSHIFT_EXPR:
11956 /* Since negative shift count is not well-defined,
11957 don't try to compute it in the compiler. */
11958 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11959 return NULL_TREE;
11961 prec = element_precision (type);
11963 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11964 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11965 && tree_to_uhwi (arg1) < prec
11966 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11967 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11969 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11970 + tree_to_uhwi (arg1));
11972 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11973 being well defined. */
11974 if (low >= prec)
11976 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11977 low = low % prec;
11978 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11979 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11980 TREE_OPERAND (arg0, 0));
11981 else
11982 low = prec - 1;
11985 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11986 build_int_cst (TREE_TYPE (arg1), low));
11989 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11990 into x & ((unsigned)-1 >> c) for unsigned types. */
11991 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11992 || (TYPE_UNSIGNED (type)
11993 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11994 && tree_fits_uhwi_p (arg1)
11995 && tree_to_uhwi (arg1) < prec
11996 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11997 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11999 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12000 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12001 tree lshift;
12002 tree arg00;
12004 if (low0 == low1)
12006 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12008 lshift = build_minus_one_cst (type);
12009 lshift = const_binop (code, lshift, arg1);
12011 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12015 /* If we have a rotate of a bit operation with the rotate count and
12016 the second operand of the bit operation both constant,
12017 permute the two operations. */
12018 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12019 && (TREE_CODE (arg0) == BIT_AND_EXPR
12020 || TREE_CODE (arg0) == BIT_IOR_EXPR
12021 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12022 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12023 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12024 fold_build2_loc (loc, code, type,
12025 TREE_OPERAND (arg0, 0), arg1),
12026 fold_build2_loc (loc, code, type,
12027 TREE_OPERAND (arg0, 1), arg1));
12029 /* Two consecutive rotates adding up to the some integer
12030 multiple of the precision of the type can be ignored. */
12031 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12032 && TREE_CODE (arg0) == RROTATE_EXPR
12033 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12034 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12035 prec) == 0)
12036 return TREE_OPERAND (arg0, 0);
12038 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12039 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12040 if the latter can be further optimized. */
12041 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12042 && TREE_CODE (arg0) == BIT_AND_EXPR
12043 && TREE_CODE (arg1) == INTEGER_CST
12044 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12046 tree mask = fold_build2_loc (loc, code, type,
12047 fold_convert_loc (loc, type,
12048 TREE_OPERAND (arg0, 1)),
12049 arg1);
12050 tree shift = fold_build2_loc (loc, code, type,
12051 fold_convert_loc (loc, type,
12052 TREE_OPERAND (arg0, 0)),
12053 arg1);
12054 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12055 if (tem)
12056 return tem;
12059 return NULL_TREE;
12061 case MIN_EXPR:
12062 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12063 if (tem)
12064 return tem;
12065 goto associate;
12067 case MAX_EXPR:
12068 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12069 if (tem)
12070 return tem;
12071 goto associate;
12073 case TRUTH_ANDIF_EXPR:
12074 /* Note that the operands of this must be ints
12075 and their values must be 0 or 1.
12076 ("true" is a fixed value perhaps depending on the language.) */
12077 /* If first arg is constant zero, return it. */
12078 if (integer_zerop (arg0))
12079 return fold_convert_loc (loc, type, arg0);
12080 case TRUTH_AND_EXPR:
12081 /* If either arg is constant true, drop it. */
12082 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12083 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12084 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12085 /* Preserve sequence points. */
12086 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12087 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12088 /* If second arg is constant zero, result is zero, but first arg
12089 must be evaluated. */
12090 if (integer_zerop (arg1))
12091 return omit_one_operand_loc (loc, type, arg1, arg0);
12092 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12093 case will be handled here. */
12094 if (integer_zerop (arg0))
12095 return omit_one_operand_loc (loc, type, arg0, arg1);
12097 /* !X && X is always false. */
12098 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12099 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12100 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12101 /* X && !X is always false. */
12102 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12103 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12104 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12106 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12107 means A >= Y && A != MAX, but in this case we know that
12108 A < X <= MAX. */
12110 if (!TREE_SIDE_EFFECTS (arg0)
12111 && !TREE_SIDE_EFFECTS (arg1))
12113 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12114 if (tem && !operand_equal_p (tem, arg0, 0))
12115 return fold_build2_loc (loc, code, type, tem, arg1);
12117 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12118 if (tem && !operand_equal_p (tem, arg1, 0))
12119 return fold_build2_loc (loc, code, type, arg0, tem);
12122 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12123 != NULL_TREE)
12124 return tem;
12126 return NULL_TREE;
12128 case TRUTH_ORIF_EXPR:
12129 /* Note that the operands of this must be ints
12130 and their values must be 0 or true.
12131 ("true" is a fixed value perhaps depending on the language.) */
12132 /* If first arg is constant true, return it. */
12133 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12134 return fold_convert_loc (loc, type, arg0);
12135 case TRUTH_OR_EXPR:
12136 /* If either arg is constant zero, drop it. */
12137 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12138 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12139 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12140 /* Preserve sequence points. */
12141 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12142 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12143 /* If second arg is constant true, result is true, but we must
12144 evaluate first arg. */
12145 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12146 return omit_one_operand_loc (loc, type, arg1, arg0);
12147 /* Likewise for first arg, but note this only occurs here for
12148 TRUTH_OR_EXPR. */
12149 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12150 return omit_one_operand_loc (loc, type, arg0, arg1);
12152 /* !X || X is always true. */
12153 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12154 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12155 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12156 /* X || !X is always true. */
12157 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12158 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12159 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12161 /* (X && !Y) || (!X && Y) is X ^ Y */
12162 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12163 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12165 tree a0, a1, l0, l1, n0, n1;
12167 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12168 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12170 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12171 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12173 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12174 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12176 if ((operand_equal_p (n0, a0, 0)
12177 && operand_equal_p (n1, a1, 0))
12178 || (operand_equal_p (n0, a1, 0)
12179 && operand_equal_p (n1, a0, 0)))
12180 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12183 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12184 != NULL_TREE)
12185 return tem;
12187 return NULL_TREE;
12189 case TRUTH_XOR_EXPR:
12190 /* If the second arg is constant zero, drop it. */
12191 if (integer_zerop (arg1))
12192 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12193 /* If the second arg is constant true, this is a logical inversion. */
12194 if (integer_onep (arg1))
12196 tem = invert_truthvalue_loc (loc, arg0);
12197 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12199 /* Identical arguments cancel to zero. */
12200 if (operand_equal_p (arg0, arg1, 0))
12201 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12203 /* !X ^ X is always true. */
12204 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12205 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12206 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12208 /* X ^ !X is always true. */
12209 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12210 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12211 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12213 return NULL_TREE;
12215 case EQ_EXPR:
12216 case NE_EXPR:
12217 STRIP_NOPS (arg0);
12218 STRIP_NOPS (arg1);
12220 tem = fold_comparison (loc, code, type, op0, op1);
12221 if (tem != NULL_TREE)
12222 return tem;
12224 /* bool_var != 0 becomes bool_var. */
12225 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12226 && code == NE_EXPR)
12227 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12229 /* bool_var == 1 becomes bool_var. */
12230 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12231 && code == EQ_EXPR)
12232 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12234 /* bool_var != 1 becomes !bool_var. */
12235 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12236 && code == NE_EXPR)
12237 return fold_convert_loc (loc, type,
12238 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12239 TREE_TYPE (arg0), arg0));
12241 /* bool_var == 0 becomes !bool_var. */
12242 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12243 && code == EQ_EXPR)
12244 return fold_convert_loc (loc, type,
12245 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12246 TREE_TYPE (arg0), arg0));
12248 /* !exp != 0 becomes !exp */
12249 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12250 && code == NE_EXPR)
12251 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12253 /* If this is an equality comparison of the address of two non-weak,
12254 unaliased symbols neither of which are extern (since we do not
12255 have access to attributes for externs), then we know the result. */
12256 if (TREE_CODE (arg0) == ADDR_EXPR
12257 && DECL_P (TREE_OPERAND (arg0, 0))
12258 && TREE_CODE (arg1) == ADDR_EXPR
12259 && DECL_P (TREE_OPERAND (arg1, 0)))
12261 int equal;
12263 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12264 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12265 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12266 ->equal_address_to (symtab_node::get_create
12267 (TREE_OPERAND (arg1, 0)));
12268 else
12269 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12270 if (equal != 2)
12271 return constant_boolean_node (equal
12272 ? code == EQ_EXPR : code != EQ_EXPR,
12273 type);
12276 /* Similarly for a NEGATE_EXPR. */
12277 if (TREE_CODE (arg0) == NEGATE_EXPR
12278 && TREE_CODE (arg1) == INTEGER_CST
12279 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12280 arg1)))
12281 && TREE_CODE (tem) == INTEGER_CST
12282 && !TREE_OVERFLOW (tem))
12283 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12285 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12286 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12287 && TREE_CODE (arg1) == INTEGER_CST
12288 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12289 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12290 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12291 fold_convert_loc (loc,
12292 TREE_TYPE (arg0),
12293 arg1),
12294 TREE_OPERAND (arg0, 1)));
12296 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12297 if ((TREE_CODE (arg0) == PLUS_EXPR
12298 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12299 || TREE_CODE (arg0) == MINUS_EXPR)
12300 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12301 0)),
12302 arg1, 0)
12303 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12304 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12306 tree val = TREE_OPERAND (arg0, 1);
12307 return omit_two_operands_loc (loc, type,
12308 fold_build2_loc (loc, code, type,
12309 val,
12310 build_int_cst (TREE_TYPE (val),
12311 0)),
12312 TREE_OPERAND (arg0, 0), arg1);
12315 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12316 if (TREE_CODE (arg0) == MINUS_EXPR
12317 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12318 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12319 1)),
12320 arg1, 0)
12321 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12323 return omit_two_operands_loc (loc, type,
12324 code == NE_EXPR
12325 ? boolean_true_node : boolean_false_node,
12326 TREE_OPERAND (arg0, 1), arg1);
12329 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12330 if (TREE_CODE (arg0) == ABS_EXPR
12331 && (integer_zerop (arg1) || real_zerop (arg1)))
12332 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12334 /* If this is an EQ or NE comparison with zero and ARG0 is
12335 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12336 two operations, but the latter can be done in one less insn
12337 on machines that have only two-operand insns or on which a
12338 constant cannot be the first operand. */
12339 if (TREE_CODE (arg0) == BIT_AND_EXPR
12340 && integer_zerop (arg1))
12342 tree arg00 = TREE_OPERAND (arg0, 0);
12343 tree arg01 = TREE_OPERAND (arg0, 1);
12344 if (TREE_CODE (arg00) == LSHIFT_EXPR
12345 && integer_onep (TREE_OPERAND (arg00, 0)))
12347 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12348 arg01, TREE_OPERAND (arg00, 1));
12349 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12350 build_int_cst (TREE_TYPE (arg0), 1));
12351 return fold_build2_loc (loc, code, type,
12352 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12353 arg1);
12355 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12356 && integer_onep (TREE_OPERAND (arg01, 0)))
12358 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12359 arg00, TREE_OPERAND (arg01, 1));
12360 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12361 build_int_cst (TREE_TYPE (arg0), 1));
12362 return fold_build2_loc (loc, code, type,
12363 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12364 arg1);
12368 /* If this is an NE or EQ comparison of zero against the result of a
12369 signed MOD operation whose second operand is a power of 2, make
12370 the MOD operation unsigned since it is simpler and equivalent. */
12371 if (integer_zerop (arg1)
12372 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12373 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12374 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12375 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12376 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12377 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12379 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12380 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12381 fold_convert_loc (loc, newtype,
12382 TREE_OPERAND (arg0, 0)),
12383 fold_convert_loc (loc, newtype,
12384 TREE_OPERAND (arg0, 1)));
12386 return fold_build2_loc (loc, code, type, newmod,
12387 fold_convert_loc (loc, newtype, arg1));
12390 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12391 C1 is a valid shift constant, and C2 is a power of two, i.e.
12392 a single bit. */
12393 if (TREE_CODE (arg0) == BIT_AND_EXPR
12394 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12395 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12396 == INTEGER_CST
12397 && integer_pow2p (TREE_OPERAND (arg0, 1))
12398 && integer_zerop (arg1))
12400 tree itype = TREE_TYPE (arg0);
12401 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12402 prec = TYPE_PRECISION (itype);
12404 /* Check for a valid shift count. */
12405 if (wi::ltu_p (arg001, prec))
12407 tree arg01 = TREE_OPERAND (arg0, 1);
12408 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12409 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12410 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12411 can be rewritten as (X & (C2 << C1)) != 0. */
12412 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12414 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12415 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12416 return fold_build2_loc (loc, code, type, tem,
12417 fold_convert_loc (loc, itype, arg1));
12419 /* Otherwise, for signed (arithmetic) shifts,
12420 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12421 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12422 else if (!TYPE_UNSIGNED (itype))
12423 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12424 arg000, build_int_cst (itype, 0));
12425 /* Otherwise, of unsigned (logical) shifts,
12426 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12427 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12428 else
12429 return omit_one_operand_loc (loc, type,
12430 code == EQ_EXPR ? integer_one_node
12431 : integer_zero_node,
12432 arg000);
12436 /* If we have (A & C) == C where C is a power of 2, convert this into
12437 (A & C) != 0. Similarly for NE_EXPR. */
12438 if (TREE_CODE (arg0) == BIT_AND_EXPR
12439 && integer_pow2p (TREE_OPERAND (arg0, 1))
12440 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12441 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12442 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12443 integer_zero_node));
12445 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12446 bit, then fold the expression into A < 0 or A >= 0. */
12447 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12448 if (tem)
12449 return tem;
12451 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12452 Similarly for NE_EXPR. */
12453 if (TREE_CODE (arg0) == BIT_AND_EXPR
12454 && TREE_CODE (arg1) == INTEGER_CST
12455 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12457 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12458 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12459 TREE_OPERAND (arg0, 1));
12460 tree dandnotc
12461 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12462 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12463 notc);
12464 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12465 if (integer_nonzerop (dandnotc))
12466 return omit_one_operand_loc (loc, type, rslt, arg0);
12469 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12470 Similarly for NE_EXPR. */
12471 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12472 && TREE_CODE (arg1) == INTEGER_CST
12473 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12475 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12476 tree candnotd
12477 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12478 TREE_OPERAND (arg0, 1),
12479 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12480 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12481 if (integer_nonzerop (candnotd))
12482 return omit_one_operand_loc (loc, type, rslt, arg0);
12485 /* If this is a comparison of a field, we may be able to simplify it. */
12486 if ((TREE_CODE (arg0) == COMPONENT_REF
12487 || TREE_CODE (arg0) == BIT_FIELD_REF)
12488 /* Handle the constant case even without -O
12489 to make sure the warnings are given. */
12490 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12492 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12493 if (t1)
12494 return t1;
12497 /* Optimize comparisons of strlen vs zero to a compare of the
12498 first character of the string vs zero. To wit,
12499 strlen(ptr) == 0 => *ptr == 0
12500 strlen(ptr) != 0 => *ptr != 0
12501 Other cases should reduce to one of these two (or a constant)
12502 due to the return value of strlen being unsigned. */
12503 if (TREE_CODE (arg0) == CALL_EXPR
12504 && integer_zerop (arg1))
12506 tree fndecl = get_callee_fndecl (arg0);
12508 if (fndecl
12509 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12510 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12511 && call_expr_nargs (arg0) == 1
12512 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12514 tree iref = build_fold_indirect_ref_loc (loc,
12515 CALL_EXPR_ARG (arg0, 0));
12516 return fold_build2_loc (loc, code, type, iref,
12517 build_int_cst (TREE_TYPE (iref), 0));
12521 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12522 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12523 if (TREE_CODE (arg0) == RSHIFT_EXPR
12524 && integer_zerop (arg1)
12525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12527 tree arg00 = TREE_OPERAND (arg0, 0);
12528 tree arg01 = TREE_OPERAND (arg0, 1);
12529 tree itype = TREE_TYPE (arg00);
12530 if (wi::eq_p (arg01, element_precision (itype) - 1))
12532 if (TYPE_UNSIGNED (itype))
12534 itype = signed_type_for (itype);
12535 arg00 = fold_convert_loc (loc, itype, arg00);
12537 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12538 type, arg00, build_zero_cst (itype));
12542 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12543 if (integer_zerop (arg1)
12544 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12545 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12546 TREE_OPERAND (arg0, 1));
12548 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12549 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12550 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12551 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12552 build_zero_cst (TREE_TYPE (arg0)));
12553 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12554 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12555 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12556 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12557 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12558 build_zero_cst (TREE_TYPE (arg0)));
12560 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12561 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12562 && TREE_CODE (arg1) == INTEGER_CST
12563 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12564 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12565 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12566 TREE_OPERAND (arg0, 1), arg1));
12568 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12569 (X & C) == 0 when C is a single bit. */
12570 if (TREE_CODE (arg0) == BIT_AND_EXPR
12571 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12572 && integer_zerop (arg1)
12573 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12575 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12576 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12577 TREE_OPERAND (arg0, 1));
12578 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12579 type, tem,
12580 fold_convert_loc (loc, TREE_TYPE (arg0),
12581 arg1));
12584 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12585 constant C is a power of two, i.e. a single bit. */
12586 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12587 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12588 && integer_zerop (arg1)
12589 && integer_pow2p (TREE_OPERAND (arg0, 1))
12590 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12591 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12593 tree arg00 = TREE_OPERAND (arg0, 0);
12594 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12595 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12598 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12599 when is C is a power of two, i.e. a single bit. */
12600 if (TREE_CODE (arg0) == BIT_AND_EXPR
12601 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12602 && integer_zerop (arg1)
12603 && integer_pow2p (TREE_OPERAND (arg0, 1))
12604 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12605 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12607 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12608 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12609 arg000, TREE_OPERAND (arg0, 1));
12610 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12611 tem, build_int_cst (TREE_TYPE (tem), 0));
12614 if (integer_zerop (arg1)
12615 && tree_expr_nonzero_p (arg0))
12617 tree res = constant_boolean_node (code==NE_EXPR, type);
12618 return omit_one_operand_loc (loc, type, res, arg0);
12621 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12622 if (TREE_CODE (arg0) == NEGATE_EXPR
12623 && TREE_CODE (arg1) == NEGATE_EXPR)
12624 return fold_build2_loc (loc, code, type,
12625 TREE_OPERAND (arg0, 0),
12626 fold_convert_loc (loc, TREE_TYPE (arg0),
12627 TREE_OPERAND (arg1, 0)));
12629 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12630 if (TREE_CODE (arg0) == BIT_AND_EXPR
12631 && TREE_CODE (arg1) == BIT_AND_EXPR)
12633 tree arg00 = TREE_OPERAND (arg0, 0);
12634 tree arg01 = TREE_OPERAND (arg0, 1);
12635 tree arg10 = TREE_OPERAND (arg1, 0);
12636 tree arg11 = TREE_OPERAND (arg1, 1);
12637 tree itype = TREE_TYPE (arg0);
12639 if (operand_equal_p (arg01, arg11, 0))
12640 return fold_build2_loc (loc, code, type,
12641 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12642 fold_build2_loc (loc,
12643 BIT_XOR_EXPR, itype,
12644 arg00, arg10),
12645 arg01),
12646 build_zero_cst (itype));
12648 if (operand_equal_p (arg01, arg10, 0))
12649 return fold_build2_loc (loc, code, type,
12650 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12651 fold_build2_loc (loc,
12652 BIT_XOR_EXPR, itype,
12653 arg00, arg11),
12654 arg01),
12655 build_zero_cst (itype));
12657 if (operand_equal_p (arg00, arg11, 0))
12658 return fold_build2_loc (loc, code, type,
12659 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12660 fold_build2_loc (loc,
12661 BIT_XOR_EXPR, itype,
12662 arg01, arg10),
12663 arg00),
12664 build_zero_cst (itype));
12666 if (operand_equal_p (arg00, arg10, 0))
12667 return fold_build2_loc (loc, code, type,
12668 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12669 fold_build2_loc (loc,
12670 BIT_XOR_EXPR, itype,
12671 arg01, arg11),
12672 arg00),
12673 build_zero_cst (itype));
12676 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12677 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12679 tree arg00 = TREE_OPERAND (arg0, 0);
12680 tree arg01 = TREE_OPERAND (arg0, 1);
12681 tree arg10 = TREE_OPERAND (arg1, 0);
12682 tree arg11 = TREE_OPERAND (arg1, 1);
12683 tree itype = TREE_TYPE (arg0);
12685 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12686 operand_equal_p guarantees no side-effects so we don't need
12687 to use omit_one_operand on Z. */
12688 if (operand_equal_p (arg01, arg11, 0))
12689 return fold_build2_loc (loc, code, type, arg00,
12690 fold_convert_loc (loc, TREE_TYPE (arg00),
12691 arg10));
12692 if (operand_equal_p (arg01, arg10, 0))
12693 return fold_build2_loc (loc, code, type, arg00,
12694 fold_convert_loc (loc, TREE_TYPE (arg00),
12695 arg11));
12696 if (operand_equal_p (arg00, arg11, 0))
12697 return fold_build2_loc (loc, code, type, arg01,
12698 fold_convert_loc (loc, TREE_TYPE (arg01),
12699 arg10));
12700 if (operand_equal_p (arg00, arg10, 0))
12701 return fold_build2_loc (loc, code, type, arg01,
12702 fold_convert_loc (loc, TREE_TYPE (arg01),
12703 arg11));
12705 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12706 if (TREE_CODE (arg01) == INTEGER_CST
12707 && TREE_CODE (arg11) == INTEGER_CST)
12709 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12710 fold_convert_loc (loc, itype, arg11));
12711 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12712 return fold_build2_loc (loc, code, type, tem,
12713 fold_convert_loc (loc, itype, arg10));
12717 /* Attempt to simplify equality/inequality comparisons of complex
12718 values. Only lower the comparison if the result is known or
12719 can be simplified to a single scalar comparison. */
12720 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12721 || TREE_CODE (arg0) == COMPLEX_CST)
12722 && (TREE_CODE (arg1) == COMPLEX_EXPR
12723 || TREE_CODE (arg1) == COMPLEX_CST))
12725 tree real0, imag0, real1, imag1;
12726 tree rcond, icond;
12728 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12730 real0 = TREE_OPERAND (arg0, 0);
12731 imag0 = TREE_OPERAND (arg0, 1);
12733 else
12735 real0 = TREE_REALPART (arg0);
12736 imag0 = TREE_IMAGPART (arg0);
12739 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12741 real1 = TREE_OPERAND (arg1, 0);
12742 imag1 = TREE_OPERAND (arg1, 1);
12744 else
12746 real1 = TREE_REALPART (arg1);
12747 imag1 = TREE_IMAGPART (arg1);
12750 rcond = fold_binary_loc (loc, code, type, real0, real1);
12751 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12753 if (integer_zerop (rcond))
12755 if (code == EQ_EXPR)
12756 return omit_two_operands_loc (loc, type, boolean_false_node,
12757 imag0, imag1);
12758 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12760 else
12762 if (code == NE_EXPR)
12763 return omit_two_operands_loc (loc, type, boolean_true_node,
12764 imag0, imag1);
12765 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12769 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12770 if (icond && TREE_CODE (icond) == INTEGER_CST)
12772 if (integer_zerop (icond))
12774 if (code == EQ_EXPR)
12775 return omit_two_operands_loc (loc, type, boolean_false_node,
12776 real0, real1);
12777 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12779 else
12781 if (code == NE_EXPR)
12782 return omit_two_operands_loc (loc, type, boolean_true_node,
12783 real0, real1);
12784 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12789 return NULL_TREE;
12791 case LT_EXPR:
12792 case GT_EXPR:
12793 case LE_EXPR:
12794 case GE_EXPR:
12795 tem = fold_comparison (loc, code, type, op0, op1);
12796 if (tem != NULL_TREE)
12797 return tem;
12799 /* Transform comparisons of the form X +- C CMP X. */
12800 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12801 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12802 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12803 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12804 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12805 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12807 tree arg01 = TREE_OPERAND (arg0, 1);
12808 enum tree_code code0 = TREE_CODE (arg0);
12809 int is_positive;
12811 if (TREE_CODE (arg01) == REAL_CST)
12812 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12813 else
12814 is_positive = tree_int_cst_sgn (arg01);
12816 /* (X - c) > X becomes false. */
12817 if (code == GT_EXPR
12818 && ((code0 == MINUS_EXPR && is_positive >= 0)
12819 || (code0 == PLUS_EXPR && is_positive <= 0)))
12821 if (TREE_CODE (arg01) == INTEGER_CST
12822 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12823 fold_overflow_warning (("assuming signed overflow does not "
12824 "occur when assuming that (X - c) > X "
12825 "is always false"),
12826 WARN_STRICT_OVERFLOW_ALL);
12827 return constant_boolean_node (0, type);
12830 /* Likewise (X + c) < X becomes false. */
12831 if (code == LT_EXPR
12832 && ((code0 == PLUS_EXPR && is_positive >= 0)
12833 || (code0 == MINUS_EXPR && is_positive <= 0)))
12835 if (TREE_CODE (arg01) == INTEGER_CST
12836 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12837 fold_overflow_warning (("assuming signed overflow does not "
12838 "occur when assuming that "
12839 "(X + c) < X is always false"),
12840 WARN_STRICT_OVERFLOW_ALL);
12841 return constant_boolean_node (0, type);
12844 /* Convert (X - c) <= X to true. */
12845 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12846 && code == LE_EXPR
12847 && ((code0 == MINUS_EXPR && is_positive >= 0)
12848 || (code0 == PLUS_EXPR && is_positive <= 0)))
12850 if (TREE_CODE (arg01) == INTEGER_CST
12851 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12852 fold_overflow_warning (("assuming signed overflow does not "
12853 "occur when assuming that "
12854 "(X - c) <= X is always true"),
12855 WARN_STRICT_OVERFLOW_ALL);
12856 return constant_boolean_node (1, type);
12859 /* Convert (X + c) >= X to true. */
12860 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12861 && code == GE_EXPR
12862 && ((code0 == PLUS_EXPR && is_positive >= 0)
12863 || (code0 == MINUS_EXPR && is_positive <= 0)))
12865 if (TREE_CODE (arg01) == INTEGER_CST
12866 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12867 fold_overflow_warning (("assuming signed overflow does not "
12868 "occur when assuming that "
12869 "(X + c) >= X is always true"),
12870 WARN_STRICT_OVERFLOW_ALL);
12871 return constant_boolean_node (1, type);
12874 if (TREE_CODE (arg01) == INTEGER_CST)
12876 /* Convert X + c > X and X - c < X to true for integers. */
12877 if (code == GT_EXPR
12878 && ((code0 == PLUS_EXPR && is_positive > 0)
12879 || (code0 == MINUS_EXPR && is_positive < 0)))
12881 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12882 fold_overflow_warning (("assuming signed overflow does "
12883 "not occur when assuming that "
12884 "(X + c) > X is always true"),
12885 WARN_STRICT_OVERFLOW_ALL);
12886 return constant_boolean_node (1, type);
12889 if (code == LT_EXPR
12890 && ((code0 == MINUS_EXPR && is_positive > 0)
12891 || (code0 == PLUS_EXPR && is_positive < 0)))
12893 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12894 fold_overflow_warning (("assuming signed overflow does "
12895 "not occur when assuming that "
12896 "(X - c) < X is always true"),
12897 WARN_STRICT_OVERFLOW_ALL);
12898 return constant_boolean_node (1, type);
12901 /* Convert X + c <= X and X - c >= X to false for integers. */
12902 if (code == LE_EXPR
12903 && ((code0 == PLUS_EXPR && is_positive > 0)
12904 || (code0 == MINUS_EXPR && is_positive < 0)))
12906 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12907 fold_overflow_warning (("assuming signed overflow does "
12908 "not occur when assuming that "
12909 "(X + c) <= X is always false"),
12910 WARN_STRICT_OVERFLOW_ALL);
12911 return constant_boolean_node (0, type);
12914 if (code == GE_EXPR
12915 && ((code0 == MINUS_EXPR && is_positive > 0)
12916 || (code0 == PLUS_EXPR && is_positive < 0)))
12918 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12919 fold_overflow_warning (("assuming signed overflow does "
12920 "not occur when assuming that "
12921 "(X - c) >= X is always false"),
12922 WARN_STRICT_OVERFLOW_ALL);
12923 return constant_boolean_node (0, type);
12928 /* Comparisons with the highest or lowest possible integer of
12929 the specified precision will have known values. */
12931 tree arg1_type = TREE_TYPE (arg1);
12932 unsigned int prec = TYPE_PRECISION (arg1_type);
12934 if (TREE_CODE (arg1) == INTEGER_CST
12935 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12937 wide_int max = wi::max_value (arg1_type);
12938 wide_int signed_max = wi::max_value (prec, SIGNED);
12939 wide_int min = wi::min_value (arg1_type);
12941 if (wi::eq_p (arg1, max))
12942 switch (code)
12944 case GT_EXPR:
12945 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12947 case GE_EXPR:
12948 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12950 case LE_EXPR:
12951 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12953 case LT_EXPR:
12954 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12956 /* The GE_EXPR and LT_EXPR cases above are not normally
12957 reached because of previous transformations. */
12959 default:
12960 break;
12962 else if (wi::eq_p (arg1, max - 1))
12963 switch (code)
12965 case GT_EXPR:
12966 arg1 = const_binop (PLUS_EXPR, arg1,
12967 build_int_cst (TREE_TYPE (arg1), 1));
12968 return fold_build2_loc (loc, EQ_EXPR, type,
12969 fold_convert_loc (loc,
12970 TREE_TYPE (arg1), arg0),
12971 arg1);
12972 case LE_EXPR:
12973 arg1 = const_binop (PLUS_EXPR, arg1,
12974 build_int_cst (TREE_TYPE (arg1), 1));
12975 return fold_build2_loc (loc, NE_EXPR, type,
12976 fold_convert_loc (loc, TREE_TYPE (arg1),
12977 arg0),
12978 arg1);
12979 default:
12980 break;
12982 else if (wi::eq_p (arg1, min))
12983 switch (code)
12985 case LT_EXPR:
12986 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12988 case LE_EXPR:
12989 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12991 case GE_EXPR:
12992 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12994 case GT_EXPR:
12995 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12997 default:
12998 break;
13000 else if (wi::eq_p (arg1, min + 1))
13001 switch (code)
13003 case GE_EXPR:
13004 arg1 = const_binop (MINUS_EXPR, arg1,
13005 build_int_cst (TREE_TYPE (arg1), 1));
13006 return fold_build2_loc (loc, NE_EXPR, type,
13007 fold_convert_loc (loc,
13008 TREE_TYPE (arg1), arg0),
13009 arg1);
13010 case LT_EXPR:
13011 arg1 = const_binop (MINUS_EXPR, arg1,
13012 build_int_cst (TREE_TYPE (arg1), 1));
13013 return fold_build2_loc (loc, EQ_EXPR, type,
13014 fold_convert_loc (loc, TREE_TYPE (arg1),
13015 arg0),
13016 arg1);
13017 default:
13018 break;
13021 else if (wi::eq_p (arg1, signed_max)
13022 && TYPE_UNSIGNED (arg1_type)
13023 /* We will flip the signedness of the comparison operator
13024 associated with the mode of arg1, so the sign bit is
13025 specified by this mode. Check that arg1 is the signed
13026 max associated with this sign bit. */
13027 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13028 /* signed_type does not work on pointer types. */
13029 && INTEGRAL_TYPE_P (arg1_type))
13031 /* The following case also applies to X < signed_max+1
13032 and X >= signed_max+1 because previous transformations. */
13033 if (code == LE_EXPR || code == GT_EXPR)
13035 tree st = signed_type_for (arg1_type);
13036 return fold_build2_loc (loc,
13037 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13038 type, fold_convert_loc (loc, st, arg0),
13039 build_int_cst (st, 0));
13045 /* If we are comparing an ABS_EXPR with a constant, we can
13046 convert all the cases into explicit comparisons, but they may
13047 well not be faster than doing the ABS and one comparison.
13048 But ABS (X) <= C is a range comparison, which becomes a subtraction
13049 and a comparison, and is probably faster. */
13050 if (code == LE_EXPR
13051 && TREE_CODE (arg1) == INTEGER_CST
13052 && TREE_CODE (arg0) == ABS_EXPR
13053 && ! TREE_SIDE_EFFECTS (arg0)
13054 && (0 != (tem = negate_expr (arg1)))
13055 && TREE_CODE (tem) == INTEGER_CST
13056 && !TREE_OVERFLOW (tem))
13057 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13058 build2 (GE_EXPR, type,
13059 TREE_OPERAND (arg0, 0), tem),
13060 build2 (LE_EXPR, type,
13061 TREE_OPERAND (arg0, 0), arg1));
13063 /* Convert ABS_EXPR<x> >= 0 to true. */
13064 strict_overflow_p = false;
13065 if (code == GE_EXPR
13066 && (integer_zerop (arg1)
13067 || (! HONOR_NANS (element_mode (arg0))
13068 && real_zerop (arg1)))
13069 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13071 if (strict_overflow_p)
13072 fold_overflow_warning (("assuming signed overflow does not occur "
13073 "when simplifying comparison of "
13074 "absolute value and zero"),
13075 WARN_STRICT_OVERFLOW_CONDITIONAL);
13076 return omit_one_operand_loc (loc, type,
13077 constant_boolean_node (true, type),
13078 arg0);
13081 /* Convert ABS_EXPR<x> < 0 to false. */
13082 strict_overflow_p = false;
13083 if (code == LT_EXPR
13084 && (integer_zerop (arg1) || real_zerop (arg1))
13085 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13087 if (strict_overflow_p)
13088 fold_overflow_warning (("assuming signed overflow does not occur "
13089 "when simplifying comparison of "
13090 "absolute value and zero"),
13091 WARN_STRICT_OVERFLOW_CONDITIONAL);
13092 return omit_one_operand_loc (loc, type,
13093 constant_boolean_node (false, type),
13094 arg0);
13097 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13098 and similarly for >= into !=. */
13099 if ((code == LT_EXPR || code == GE_EXPR)
13100 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13101 && TREE_CODE (arg1) == LSHIFT_EXPR
13102 && integer_onep (TREE_OPERAND (arg1, 0)))
13103 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13104 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13105 TREE_OPERAND (arg1, 1)),
13106 build_zero_cst (TREE_TYPE (arg0)));
13108 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13109 otherwise Y might be >= # of bits in X's type and thus e.g.
13110 (unsigned char) (1 << Y) for Y 15 might be 0.
13111 If the cast is widening, then 1 << Y should have unsigned type,
13112 otherwise if Y is number of bits in the signed shift type minus 1,
13113 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13114 31 might be 0xffffffff80000000. */
13115 if ((code == LT_EXPR || code == GE_EXPR)
13116 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13117 && CONVERT_EXPR_P (arg1)
13118 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13119 && (element_precision (TREE_TYPE (arg1))
13120 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13121 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13122 || (element_precision (TREE_TYPE (arg1))
13123 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13124 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13126 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13127 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13128 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13129 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13130 build_zero_cst (TREE_TYPE (arg0)));
13133 return NULL_TREE;
13135 case UNORDERED_EXPR:
13136 case ORDERED_EXPR:
13137 case UNLT_EXPR:
13138 case UNLE_EXPR:
13139 case UNGT_EXPR:
13140 case UNGE_EXPR:
13141 case UNEQ_EXPR:
13142 case LTGT_EXPR:
13143 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13145 t1 = fold_relational_const (code, type, arg0, arg1);
13146 if (t1 != NULL_TREE)
13147 return t1;
13150 /* If the first operand is NaN, the result is constant. */
13151 if (TREE_CODE (arg0) == REAL_CST
13152 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13153 && (code != LTGT_EXPR || ! flag_trapping_math))
13155 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13156 ? integer_zero_node
13157 : integer_one_node;
13158 return omit_one_operand_loc (loc, type, t1, arg1);
13161 /* If the second operand is NaN, the result is constant. */
13162 if (TREE_CODE (arg1) == REAL_CST
13163 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13164 && (code != LTGT_EXPR || ! flag_trapping_math))
13166 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13167 ? integer_zero_node
13168 : integer_one_node;
13169 return omit_one_operand_loc (loc, type, t1, arg0);
13172 /* Simplify unordered comparison of something with itself. */
13173 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13174 && operand_equal_p (arg0, arg1, 0))
13175 return constant_boolean_node (1, type);
13177 if (code == LTGT_EXPR
13178 && !flag_trapping_math
13179 && operand_equal_p (arg0, arg1, 0))
13180 return constant_boolean_node (0, type);
13182 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13184 tree targ0 = strip_float_extensions (arg0);
13185 tree targ1 = strip_float_extensions (arg1);
13186 tree newtype = TREE_TYPE (targ0);
13188 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13189 newtype = TREE_TYPE (targ1);
13191 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13192 return fold_build2_loc (loc, code, type,
13193 fold_convert_loc (loc, newtype, targ0),
13194 fold_convert_loc (loc, newtype, targ1));
13197 return NULL_TREE;
13199 case COMPOUND_EXPR:
13200 /* When pedantic, a compound expression can be neither an lvalue
13201 nor an integer constant expression. */
13202 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13203 return NULL_TREE;
13204 /* Don't let (0, 0) be null pointer constant. */
13205 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13206 : fold_convert_loc (loc, type, arg1);
13207 return pedantic_non_lvalue_loc (loc, tem);
13209 case ASSERT_EXPR:
13210 /* An ASSERT_EXPR should never be passed to fold_binary. */
13211 gcc_unreachable ();
13213 default:
13214 return NULL_TREE;
13215 } /* switch (code) */
13218 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13219 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13220 of GOTO_EXPR. */
13222 static tree
13223 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13225 switch (TREE_CODE (*tp))
13227 case LABEL_EXPR:
13228 return *tp;
13230 case GOTO_EXPR:
13231 *walk_subtrees = 0;
13233 /* ... fall through ... */
13235 default:
13236 return NULL_TREE;
13240 /* Return whether the sub-tree ST contains a label which is accessible from
13241 outside the sub-tree. */
13243 static bool
13244 contains_label_p (tree st)
13246 return
13247 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13250 /* Fold a ternary expression of code CODE and type TYPE with operands
13251 OP0, OP1, and OP2. Return the folded expression if folding is
13252 successful. Otherwise, return NULL_TREE. */
13254 tree
13255 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13256 tree op0, tree op1, tree op2)
13258 tree tem;
13259 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13260 enum tree_code_class kind = TREE_CODE_CLASS (code);
13262 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13263 && TREE_CODE_LENGTH (code) == 3);
13265 /* If this is a commutative operation, and OP0 is a constant, move it
13266 to OP1 to reduce the number of tests below. */
13267 if (commutative_ternary_tree_code (code)
13268 && tree_swap_operands_p (op0, op1, true))
13269 return fold_build3_loc (loc, code, type, op1, op0, op2);
13271 tem = generic_simplify (loc, code, type, op0, op1, op2);
13272 if (tem)
13273 return tem;
13275 /* Strip any conversions that don't change the mode. This is safe
13276 for every expression, except for a comparison expression because
13277 its signedness is derived from its operands. So, in the latter
13278 case, only strip conversions that don't change the signedness.
13280 Note that this is done as an internal manipulation within the
13281 constant folder, in order to find the simplest representation of
13282 the arguments so that their form can be studied. In any cases,
13283 the appropriate type conversions should be put back in the tree
13284 that will get out of the constant folder. */
13285 if (op0)
13287 arg0 = op0;
13288 STRIP_NOPS (arg0);
13291 if (op1)
13293 arg1 = op1;
13294 STRIP_NOPS (arg1);
13297 if (op2)
13299 arg2 = op2;
13300 STRIP_NOPS (arg2);
13303 switch (code)
13305 case COMPONENT_REF:
13306 if (TREE_CODE (arg0) == CONSTRUCTOR
13307 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13309 unsigned HOST_WIDE_INT idx;
13310 tree field, value;
13311 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13312 if (field == arg1)
13313 return value;
13315 return NULL_TREE;
13317 case COND_EXPR:
13318 case VEC_COND_EXPR:
13319 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13320 so all simple results must be passed through pedantic_non_lvalue. */
13321 if (TREE_CODE (arg0) == INTEGER_CST)
13323 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13324 tem = integer_zerop (arg0) ? op2 : op1;
13325 /* Only optimize constant conditions when the selected branch
13326 has the same type as the COND_EXPR. This avoids optimizing
13327 away "c ? x : throw", where the throw has a void type.
13328 Avoid throwing away that operand which contains label. */
13329 if ((!TREE_SIDE_EFFECTS (unused_op)
13330 || !contains_label_p (unused_op))
13331 && (! VOID_TYPE_P (TREE_TYPE (tem))
13332 || VOID_TYPE_P (type)))
13333 return pedantic_non_lvalue_loc (loc, tem);
13334 return NULL_TREE;
13336 else if (TREE_CODE (arg0) == VECTOR_CST)
13338 if ((TREE_CODE (arg1) == VECTOR_CST
13339 || TREE_CODE (arg1) == CONSTRUCTOR)
13340 && (TREE_CODE (arg2) == VECTOR_CST
13341 || TREE_CODE (arg2) == CONSTRUCTOR))
13343 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13344 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13345 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13346 for (i = 0; i < nelts; i++)
13348 tree val = VECTOR_CST_ELT (arg0, i);
13349 if (integer_all_onesp (val))
13350 sel[i] = i;
13351 else if (integer_zerop (val))
13352 sel[i] = nelts + i;
13353 else /* Currently unreachable. */
13354 return NULL_TREE;
13356 tree t = fold_vec_perm (type, arg1, arg2, sel);
13357 if (t != NULL_TREE)
13358 return t;
13362 /* If we have A op B ? A : C, we may be able to convert this to a
13363 simpler expression, depending on the operation and the values
13364 of B and C. Signed zeros prevent all of these transformations,
13365 for reasons given above each one.
13367 Also try swapping the arguments and inverting the conditional. */
13368 if (COMPARISON_CLASS_P (arg0)
13369 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13370 arg1, TREE_OPERAND (arg0, 1))
13371 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13373 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13374 if (tem)
13375 return tem;
13378 if (COMPARISON_CLASS_P (arg0)
13379 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13380 op2,
13381 TREE_OPERAND (arg0, 1))
13382 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13384 location_t loc0 = expr_location_or (arg0, loc);
13385 tem = fold_invert_truthvalue (loc0, arg0);
13386 if (tem && COMPARISON_CLASS_P (tem))
13388 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13389 if (tem)
13390 return tem;
13394 /* If the second operand is simpler than the third, swap them
13395 since that produces better jump optimization results. */
13396 if (truth_value_p (TREE_CODE (arg0))
13397 && tree_swap_operands_p (op1, op2, false))
13399 location_t loc0 = expr_location_or (arg0, loc);
13400 /* See if this can be inverted. If it can't, possibly because
13401 it was a floating-point inequality comparison, don't do
13402 anything. */
13403 tem = fold_invert_truthvalue (loc0, arg0);
13404 if (tem)
13405 return fold_build3_loc (loc, code, type, tem, op2, op1);
13408 /* Convert A ? 1 : 0 to simply A. */
13409 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13410 : (integer_onep (op1)
13411 && !VECTOR_TYPE_P (type)))
13412 && integer_zerop (op2)
13413 /* If we try to convert OP0 to our type, the
13414 call to fold will try to move the conversion inside
13415 a COND, which will recurse. In that case, the COND_EXPR
13416 is probably the best choice, so leave it alone. */
13417 && type == TREE_TYPE (arg0))
13418 return pedantic_non_lvalue_loc (loc, arg0);
13420 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13421 over COND_EXPR in cases such as floating point comparisons. */
13422 if (integer_zerop (op1)
13423 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13424 : (integer_onep (op2)
13425 && !VECTOR_TYPE_P (type)))
13426 && truth_value_p (TREE_CODE (arg0)))
13427 return pedantic_non_lvalue_loc (loc,
13428 fold_convert_loc (loc, type,
13429 invert_truthvalue_loc (loc,
13430 arg0)));
13432 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13433 if (TREE_CODE (arg0) == LT_EXPR
13434 && integer_zerop (TREE_OPERAND (arg0, 1))
13435 && integer_zerop (op2)
13436 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13438 /* sign_bit_p looks through both zero and sign extensions,
13439 but for this optimization only sign extensions are
13440 usable. */
13441 tree tem2 = TREE_OPERAND (arg0, 0);
13442 while (tem != tem2)
13444 if (TREE_CODE (tem2) != NOP_EXPR
13445 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13447 tem = NULL_TREE;
13448 break;
13450 tem2 = TREE_OPERAND (tem2, 0);
13452 /* sign_bit_p only checks ARG1 bits within A's precision.
13453 If <sign bit of A> has wider type than A, bits outside
13454 of A's precision in <sign bit of A> need to be checked.
13455 If they are all 0, this optimization needs to be done
13456 in unsigned A's type, if they are all 1 in signed A's type,
13457 otherwise this can't be done. */
13458 if (tem
13459 && TYPE_PRECISION (TREE_TYPE (tem))
13460 < TYPE_PRECISION (TREE_TYPE (arg1))
13461 && TYPE_PRECISION (TREE_TYPE (tem))
13462 < TYPE_PRECISION (type))
13464 int inner_width, outer_width;
13465 tree tem_type;
13467 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13468 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13469 if (outer_width > TYPE_PRECISION (type))
13470 outer_width = TYPE_PRECISION (type);
13472 wide_int mask = wi::shifted_mask
13473 (inner_width, outer_width - inner_width, false,
13474 TYPE_PRECISION (TREE_TYPE (arg1)));
13476 wide_int common = mask & arg1;
13477 if (common == mask)
13479 tem_type = signed_type_for (TREE_TYPE (tem));
13480 tem = fold_convert_loc (loc, tem_type, tem);
13482 else if (common == 0)
13484 tem_type = unsigned_type_for (TREE_TYPE (tem));
13485 tem = fold_convert_loc (loc, tem_type, tem);
13487 else
13488 tem = NULL;
13491 if (tem)
13492 return
13493 fold_convert_loc (loc, type,
13494 fold_build2_loc (loc, BIT_AND_EXPR,
13495 TREE_TYPE (tem), tem,
13496 fold_convert_loc (loc,
13497 TREE_TYPE (tem),
13498 arg1)));
13501 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13502 already handled above. */
13503 if (TREE_CODE (arg0) == BIT_AND_EXPR
13504 && integer_onep (TREE_OPERAND (arg0, 1))
13505 && integer_zerop (op2)
13506 && integer_pow2p (arg1))
13508 tree tem = TREE_OPERAND (arg0, 0);
13509 STRIP_NOPS (tem);
13510 if (TREE_CODE (tem) == RSHIFT_EXPR
13511 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13512 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13513 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13514 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13515 TREE_OPERAND (tem, 0), arg1);
13518 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13519 is probably obsolete because the first operand should be a
13520 truth value (that's why we have the two cases above), but let's
13521 leave it in until we can confirm this for all front-ends. */
13522 if (integer_zerop (op2)
13523 && TREE_CODE (arg0) == NE_EXPR
13524 && integer_zerop (TREE_OPERAND (arg0, 1))
13525 && integer_pow2p (arg1)
13526 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13527 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13528 arg1, OEP_ONLY_CONST))
13529 return pedantic_non_lvalue_loc (loc,
13530 fold_convert_loc (loc, type,
13531 TREE_OPERAND (arg0, 0)));
13533 /* Disable the transformations below for vectors, since
13534 fold_binary_op_with_conditional_arg may undo them immediately,
13535 yielding an infinite loop. */
13536 if (code == VEC_COND_EXPR)
13537 return NULL_TREE;
13539 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13540 if (integer_zerop (op2)
13541 && truth_value_p (TREE_CODE (arg0))
13542 && truth_value_p (TREE_CODE (arg1))
13543 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13544 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13545 : TRUTH_ANDIF_EXPR,
13546 type, fold_convert_loc (loc, type, arg0), arg1);
13548 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13549 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13550 && truth_value_p (TREE_CODE (arg0))
13551 && truth_value_p (TREE_CODE (arg1))
13552 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13554 location_t loc0 = expr_location_or (arg0, loc);
13555 /* Only perform transformation if ARG0 is easily inverted. */
13556 tem = fold_invert_truthvalue (loc0, arg0);
13557 if (tem)
13558 return fold_build2_loc (loc, code == VEC_COND_EXPR
13559 ? BIT_IOR_EXPR
13560 : TRUTH_ORIF_EXPR,
13561 type, fold_convert_loc (loc, type, tem),
13562 arg1);
13565 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13566 if (integer_zerop (arg1)
13567 && truth_value_p (TREE_CODE (arg0))
13568 && truth_value_p (TREE_CODE (op2))
13569 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13571 location_t loc0 = expr_location_or (arg0, loc);
13572 /* Only perform transformation if ARG0 is easily inverted. */
13573 tem = fold_invert_truthvalue (loc0, arg0);
13574 if (tem)
13575 return fold_build2_loc (loc, code == VEC_COND_EXPR
13576 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13577 type, fold_convert_loc (loc, type, tem),
13578 op2);
13581 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13582 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13583 && truth_value_p (TREE_CODE (arg0))
13584 && truth_value_p (TREE_CODE (op2))
13585 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13586 return fold_build2_loc (loc, code == VEC_COND_EXPR
13587 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13588 type, fold_convert_loc (loc, type, arg0), op2);
13590 return NULL_TREE;
13592 case CALL_EXPR:
13593 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13594 of fold_ternary on them. */
13595 gcc_unreachable ();
13597 case BIT_FIELD_REF:
13598 if ((TREE_CODE (arg0) == VECTOR_CST
13599 || (TREE_CODE (arg0) == CONSTRUCTOR
13600 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13601 && (type == TREE_TYPE (TREE_TYPE (arg0))
13602 || (TREE_CODE (type) == VECTOR_TYPE
13603 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13605 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13606 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13607 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13608 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13610 if (n != 0
13611 && (idx % width) == 0
13612 && (n % width) == 0
13613 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13615 idx = idx / width;
13616 n = n / width;
13618 if (TREE_CODE (arg0) == VECTOR_CST)
13620 if (n == 1)
13621 return VECTOR_CST_ELT (arg0, idx);
13623 tree *vals = XALLOCAVEC (tree, n);
13624 for (unsigned i = 0; i < n; ++i)
13625 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13626 return build_vector (type, vals);
13629 /* Constructor elements can be subvectors. */
13630 unsigned HOST_WIDE_INT k = 1;
13631 if (CONSTRUCTOR_NELTS (arg0) != 0)
13633 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13634 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13635 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13638 /* We keep an exact subset of the constructor elements. */
13639 if ((idx % k) == 0 && (n % k) == 0)
13641 if (CONSTRUCTOR_NELTS (arg0) == 0)
13642 return build_constructor (type, NULL);
13643 idx /= k;
13644 n /= k;
13645 if (n == 1)
13647 if (idx < CONSTRUCTOR_NELTS (arg0))
13648 return CONSTRUCTOR_ELT (arg0, idx)->value;
13649 return build_zero_cst (type);
13652 vec<constructor_elt, va_gc> *vals;
13653 vec_alloc (vals, n);
13654 for (unsigned i = 0;
13655 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13656 ++i)
13657 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13658 CONSTRUCTOR_ELT
13659 (arg0, idx + i)->value);
13660 return build_constructor (type, vals);
13662 /* The bitfield references a single constructor element. */
13663 else if (idx + n <= (idx / k + 1) * k)
13665 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13666 return build_zero_cst (type);
13667 else if (n == k)
13668 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13669 else
13670 return fold_build3_loc (loc, code, type,
13671 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13672 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13677 /* A bit-field-ref that referenced the full argument can be stripped. */
13678 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13679 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13680 && integer_zerop (op2))
13681 return fold_convert_loc (loc, type, arg0);
13683 /* On constants we can use native encode/interpret to constant
13684 fold (nearly) all BIT_FIELD_REFs. */
13685 if (CONSTANT_CLASS_P (arg0)
13686 && can_native_interpret_type_p (type)
13687 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13688 /* This limitation should not be necessary, we just need to
13689 round this up to mode size. */
13690 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13691 /* Need bit-shifting of the buffer to relax the following. */
13692 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13694 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13695 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13696 unsigned HOST_WIDE_INT clen;
13697 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13698 /* ??? We cannot tell native_encode_expr to start at
13699 some random byte only. So limit us to a reasonable amount
13700 of work. */
13701 if (clen <= 4096)
13703 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13704 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13705 if (len > 0
13706 && len * BITS_PER_UNIT >= bitpos + bitsize)
13708 tree v = native_interpret_expr (type,
13709 b + bitpos / BITS_PER_UNIT,
13710 bitsize / BITS_PER_UNIT);
13711 if (v)
13712 return v;
13717 return NULL_TREE;
13719 case FMA_EXPR:
13720 /* For integers we can decompose the FMA if possible. */
13721 if (TREE_CODE (arg0) == INTEGER_CST
13722 && TREE_CODE (arg1) == INTEGER_CST)
13723 return fold_build2_loc (loc, PLUS_EXPR, type,
13724 const_binop (MULT_EXPR, arg0, arg1), arg2);
13725 if (integer_zerop (arg2))
13726 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13728 return fold_fma (loc, type, arg0, arg1, arg2);
13730 case VEC_PERM_EXPR:
13731 if (TREE_CODE (arg2) == VECTOR_CST)
13733 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13734 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13735 unsigned char *sel2 = sel + nelts;
13736 bool need_mask_canon = false;
13737 bool need_mask_canon2 = false;
13738 bool all_in_vec0 = true;
13739 bool all_in_vec1 = true;
13740 bool maybe_identity = true;
13741 bool single_arg = (op0 == op1);
13742 bool changed = false;
13744 mask2 = 2 * nelts - 1;
13745 mask = single_arg ? (nelts - 1) : mask2;
13746 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13747 for (i = 0; i < nelts; i++)
13749 tree val = VECTOR_CST_ELT (arg2, i);
13750 if (TREE_CODE (val) != INTEGER_CST)
13751 return NULL_TREE;
13753 /* Make sure that the perm value is in an acceptable
13754 range. */
13755 wide_int t = val;
13756 need_mask_canon |= wi::gtu_p (t, mask);
13757 need_mask_canon2 |= wi::gtu_p (t, mask2);
13758 sel[i] = t.to_uhwi () & mask;
13759 sel2[i] = t.to_uhwi () & mask2;
13761 if (sel[i] < nelts)
13762 all_in_vec1 = false;
13763 else
13764 all_in_vec0 = false;
13766 if ((sel[i] & (nelts-1)) != i)
13767 maybe_identity = false;
13770 if (maybe_identity)
13772 if (all_in_vec0)
13773 return op0;
13774 if (all_in_vec1)
13775 return op1;
13778 if (all_in_vec0)
13779 op1 = op0;
13780 else if (all_in_vec1)
13782 op0 = op1;
13783 for (i = 0; i < nelts; i++)
13784 sel[i] -= nelts;
13785 need_mask_canon = true;
13788 if ((TREE_CODE (op0) == VECTOR_CST
13789 || TREE_CODE (op0) == CONSTRUCTOR)
13790 && (TREE_CODE (op1) == VECTOR_CST
13791 || TREE_CODE (op1) == CONSTRUCTOR))
13793 tree t = fold_vec_perm (type, op0, op1, sel);
13794 if (t != NULL_TREE)
13795 return t;
13798 if (op0 == op1 && !single_arg)
13799 changed = true;
13801 /* Some targets are deficient and fail to expand a single
13802 argument permutation while still allowing an equivalent
13803 2-argument version. */
13804 if (need_mask_canon && arg2 == op2
13805 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13806 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13808 need_mask_canon = need_mask_canon2;
13809 sel = sel2;
13812 if (need_mask_canon && arg2 == op2)
13814 tree *tsel = XALLOCAVEC (tree, nelts);
13815 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13816 for (i = 0; i < nelts; i++)
13817 tsel[i] = build_int_cst (eltype, sel[i]);
13818 op2 = build_vector (TREE_TYPE (arg2), tsel);
13819 changed = true;
13822 if (changed)
13823 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13825 return NULL_TREE;
13827 default:
13828 return NULL_TREE;
13829 } /* switch (code) */
13832 /* Perform constant folding and related simplification of EXPR.
13833 The related simplifications include x*1 => x, x*0 => 0, etc.,
13834 and application of the associative law.
13835 NOP_EXPR conversions may be removed freely (as long as we
13836 are careful not to change the type of the overall expression).
13837 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13838 but we can constant-fold them if they have constant operands. */
13840 #ifdef ENABLE_FOLD_CHECKING
13841 # define fold(x) fold_1 (x)
13842 static tree fold_1 (tree);
13843 static
13844 #endif
13845 tree
13846 fold (tree expr)
13848 const tree t = expr;
13849 enum tree_code code = TREE_CODE (t);
13850 enum tree_code_class kind = TREE_CODE_CLASS (code);
13851 tree tem;
13852 location_t loc = EXPR_LOCATION (expr);
13854 /* Return right away if a constant. */
13855 if (kind == tcc_constant)
13856 return t;
13858 /* CALL_EXPR-like objects with variable numbers of operands are
13859 treated specially. */
13860 if (kind == tcc_vl_exp)
13862 if (code == CALL_EXPR)
13864 tem = fold_call_expr (loc, expr, false);
13865 return tem ? tem : expr;
13867 return expr;
13870 if (IS_EXPR_CODE_CLASS (kind))
13872 tree type = TREE_TYPE (t);
13873 tree op0, op1, op2;
13875 switch (TREE_CODE_LENGTH (code))
13877 case 1:
13878 op0 = TREE_OPERAND (t, 0);
13879 tem = fold_unary_loc (loc, code, type, op0);
13880 return tem ? tem : expr;
13881 case 2:
13882 op0 = TREE_OPERAND (t, 0);
13883 op1 = TREE_OPERAND (t, 1);
13884 tem = fold_binary_loc (loc, code, type, op0, op1);
13885 return tem ? tem : expr;
13886 case 3:
13887 op0 = TREE_OPERAND (t, 0);
13888 op1 = TREE_OPERAND (t, 1);
13889 op2 = TREE_OPERAND (t, 2);
13890 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13891 return tem ? tem : expr;
13892 default:
13893 break;
13897 switch (code)
13899 case ARRAY_REF:
13901 tree op0 = TREE_OPERAND (t, 0);
13902 tree op1 = TREE_OPERAND (t, 1);
13904 if (TREE_CODE (op1) == INTEGER_CST
13905 && TREE_CODE (op0) == CONSTRUCTOR
13906 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13908 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13909 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13910 unsigned HOST_WIDE_INT begin = 0;
13912 /* Find a matching index by means of a binary search. */
13913 while (begin != end)
13915 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13916 tree index = (*elts)[middle].index;
13918 if (TREE_CODE (index) == INTEGER_CST
13919 && tree_int_cst_lt (index, op1))
13920 begin = middle + 1;
13921 else if (TREE_CODE (index) == INTEGER_CST
13922 && tree_int_cst_lt (op1, index))
13923 end = middle;
13924 else if (TREE_CODE (index) == RANGE_EXPR
13925 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13926 begin = middle + 1;
13927 else if (TREE_CODE (index) == RANGE_EXPR
13928 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13929 end = middle;
13930 else
13931 return (*elts)[middle].value;
13935 return t;
13938 /* Return a VECTOR_CST if possible. */
13939 case CONSTRUCTOR:
13941 tree type = TREE_TYPE (t);
13942 if (TREE_CODE (type) != VECTOR_TYPE)
13943 return t;
13945 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13946 unsigned HOST_WIDE_INT idx, pos = 0;
13947 tree value;
13949 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13951 if (!CONSTANT_CLASS_P (value))
13952 return t;
13953 if (TREE_CODE (value) == VECTOR_CST)
13955 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13956 vec[pos++] = VECTOR_CST_ELT (value, i);
13958 else
13959 vec[pos++] = value;
13961 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13962 vec[pos] = build_zero_cst (TREE_TYPE (type));
13964 return build_vector (type, vec);
13967 case CONST_DECL:
13968 return fold (DECL_INITIAL (t));
13970 default:
13971 return t;
13972 } /* switch (code) */
13975 #ifdef ENABLE_FOLD_CHECKING
13976 #undef fold
13978 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13979 hash_table<pointer_hash<const tree_node> > *);
13980 static void fold_check_failed (const_tree, const_tree);
13981 void print_fold_checksum (const_tree);
13983 /* When --enable-checking=fold, compute a digest of expr before
13984 and after actual fold call to see if fold did not accidentally
13985 change original expr. */
13987 tree
13988 fold (tree expr)
13990 tree ret;
13991 struct md5_ctx ctx;
13992 unsigned char checksum_before[16], checksum_after[16];
13993 hash_table<pointer_hash<const tree_node> > ht (32);
13995 md5_init_ctx (&ctx);
13996 fold_checksum_tree (expr, &ctx, &ht);
13997 md5_finish_ctx (&ctx, checksum_before);
13998 ht.empty ();
14000 ret = fold_1 (expr);
14002 md5_init_ctx (&ctx);
14003 fold_checksum_tree (expr, &ctx, &ht);
14004 md5_finish_ctx (&ctx, checksum_after);
14006 if (memcmp (checksum_before, checksum_after, 16))
14007 fold_check_failed (expr, ret);
14009 return ret;
14012 void
14013 print_fold_checksum (const_tree expr)
14015 struct md5_ctx ctx;
14016 unsigned char checksum[16], cnt;
14017 hash_table<pointer_hash<const tree_node> > ht (32);
14019 md5_init_ctx (&ctx);
14020 fold_checksum_tree (expr, &ctx, &ht);
14021 md5_finish_ctx (&ctx, checksum);
14022 for (cnt = 0; cnt < 16; ++cnt)
14023 fprintf (stderr, "%02x", checksum[cnt]);
14024 putc ('\n', stderr);
14027 static void
14028 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14030 internal_error ("fold check: original tree changed by fold");
14033 static void
14034 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14035 hash_table<pointer_hash <const tree_node> > *ht)
14037 const tree_node **slot;
14038 enum tree_code code;
14039 union tree_node buf;
14040 int i, len;
14042 recursive_label:
14043 if (expr == NULL)
14044 return;
14045 slot = ht->find_slot (expr, INSERT);
14046 if (*slot != NULL)
14047 return;
14048 *slot = expr;
14049 code = TREE_CODE (expr);
14050 if (TREE_CODE_CLASS (code) == tcc_declaration
14051 && DECL_ASSEMBLER_NAME_SET_P (expr))
14053 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14054 memcpy ((char *) &buf, expr, tree_size (expr));
14055 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14056 expr = (tree) &buf;
14058 else if (TREE_CODE_CLASS (code) == tcc_type
14059 && (TYPE_POINTER_TO (expr)
14060 || TYPE_REFERENCE_TO (expr)
14061 || TYPE_CACHED_VALUES_P (expr)
14062 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14063 || TYPE_NEXT_VARIANT (expr)))
14065 /* Allow these fields to be modified. */
14066 tree tmp;
14067 memcpy ((char *) &buf, expr, tree_size (expr));
14068 expr = tmp = (tree) &buf;
14069 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14070 TYPE_POINTER_TO (tmp) = NULL;
14071 TYPE_REFERENCE_TO (tmp) = NULL;
14072 TYPE_NEXT_VARIANT (tmp) = NULL;
14073 if (TYPE_CACHED_VALUES_P (tmp))
14075 TYPE_CACHED_VALUES_P (tmp) = 0;
14076 TYPE_CACHED_VALUES (tmp) = NULL;
14079 md5_process_bytes (expr, tree_size (expr), ctx);
14080 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14081 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14082 if (TREE_CODE_CLASS (code) != tcc_type
14083 && TREE_CODE_CLASS (code) != tcc_declaration
14084 && code != TREE_LIST
14085 && code != SSA_NAME
14086 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14087 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14088 switch (TREE_CODE_CLASS (code))
14090 case tcc_constant:
14091 switch (code)
14093 case STRING_CST:
14094 md5_process_bytes (TREE_STRING_POINTER (expr),
14095 TREE_STRING_LENGTH (expr), ctx);
14096 break;
14097 case COMPLEX_CST:
14098 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14099 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14100 break;
14101 case VECTOR_CST:
14102 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14103 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14104 break;
14105 default:
14106 break;
14108 break;
14109 case tcc_exceptional:
14110 switch (code)
14112 case TREE_LIST:
14113 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14114 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14115 expr = TREE_CHAIN (expr);
14116 goto recursive_label;
14117 break;
14118 case TREE_VEC:
14119 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14120 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14121 break;
14122 default:
14123 break;
14125 break;
14126 case tcc_expression:
14127 case tcc_reference:
14128 case tcc_comparison:
14129 case tcc_unary:
14130 case tcc_binary:
14131 case tcc_statement:
14132 case tcc_vl_exp:
14133 len = TREE_OPERAND_LENGTH (expr);
14134 for (i = 0; i < len; ++i)
14135 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14136 break;
14137 case tcc_declaration:
14138 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14139 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14140 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14142 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14143 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14144 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14145 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14146 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14149 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14151 if (TREE_CODE (expr) == FUNCTION_DECL)
14153 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14154 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14156 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14158 break;
14159 case tcc_type:
14160 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14161 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14162 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14163 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14164 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14165 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14166 if (INTEGRAL_TYPE_P (expr)
14167 || SCALAR_FLOAT_TYPE_P (expr))
14169 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14170 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14172 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14173 if (TREE_CODE (expr) == RECORD_TYPE
14174 || TREE_CODE (expr) == UNION_TYPE
14175 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14176 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14177 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14178 break;
14179 default:
14180 break;
14184 /* Helper function for outputting the checksum of a tree T. When
14185 debugging with gdb, you can "define mynext" to be "next" followed
14186 by "call debug_fold_checksum (op0)", then just trace down till the
14187 outputs differ. */
14189 DEBUG_FUNCTION void
14190 debug_fold_checksum (const_tree t)
14192 int i;
14193 unsigned char checksum[16];
14194 struct md5_ctx ctx;
14195 hash_table<pointer_hash<const tree_node> > ht (32);
14197 md5_init_ctx (&ctx);
14198 fold_checksum_tree (t, &ctx, &ht);
14199 md5_finish_ctx (&ctx, checksum);
14200 ht.empty ();
14202 for (i = 0; i < 16; i++)
14203 fprintf (stderr, "%d ", checksum[i]);
14205 fprintf (stderr, "\n");
14208 #endif
14210 /* Fold a unary tree expression with code CODE of type TYPE with an
14211 operand OP0. LOC is the location of the resulting expression.
14212 Return a folded expression if successful. Otherwise, return a tree
14213 expression with code CODE of type TYPE with an operand OP0. */
14215 tree
14216 fold_build1_stat_loc (location_t loc,
14217 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14219 tree tem;
14220 #ifdef ENABLE_FOLD_CHECKING
14221 unsigned char checksum_before[16], checksum_after[16];
14222 struct md5_ctx ctx;
14223 hash_table<pointer_hash<const tree_node> > ht (32);
14225 md5_init_ctx (&ctx);
14226 fold_checksum_tree (op0, &ctx, &ht);
14227 md5_finish_ctx (&ctx, checksum_before);
14228 ht.empty ();
14229 #endif
14231 tem = fold_unary_loc (loc, code, type, op0);
14232 if (!tem)
14233 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14235 #ifdef ENABLE_FOLD_CHECKING
14236 md5_init_ctx (&ctx);
14237 fold_checksum_tree (op0, &ctx, &ht);
14238 md5_finish_ctx (&ctx, checksum_after);
14240 if (memcmp (checksum_before, checksum_after, 16))
14241 fold_check_failed (op0, tem);
14242 #endif
14243 return tem;
14246 /* Fold a binary tree expression with code CODE of type TYPE with
14247 operands OP0 and OP1. LOC is the location of the resulting
14248 expression. Return a folded expression if successful. Otherwise,
14249 return a tree expression with code CODE of type TYPE with operands
14250 OP0 and OP1. */
14252 tree
14253 fold_build2_stat_loc (location_t loc,
14254 enum tree_code code, tree type, tree op0, tree op1
14255 MEM_STAT_DECL)
14257 tree tem;
14258 #ifdef ENABLE_FOLD_CHECKING
14259 unsigned char checksum_before_op0[16],
14260 checksum_before_op1[16],
14261 checksum_after_op0[16],
14262 checksum_after_op1[16];
14263 struct md5_ctx ctx;
14264 hash_table<pointer_hash<const tree_node> > ht (32);
14266 md5_init_ctx (&ctx);
14267 fold_checksum_tree (op0, &ctx, &ht);
14268 md5_finish_ctx (&ctx, checksum_before_op0);
14269 ht.empty ();
14271 md5_init_ctx (&ctx);
14272 fold_checksum_tree (op1, &ctx, &ht);
14273 md5_finish_ctx (&ctx, checksum_before_op1);
14274 ht.empty ();
14275 #endif
14277 tem = fold_binary_loc (loc, code, type, op0, op1);
14278 if (!tem)
14279 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14281 #ifdef ENABLE_FOLD_CHECKING
14282 md5_init_ctx (&ctx);
14283 fold_checksum_tree (op0, &ctx, &ht);
14284 md5_finish_ctx (&ctx, checksum_after_op0);
14285 ht.empty ();
14287 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14288 fold_check_failed (op0, tem);
14290 md5_init_ctx (&ctx);
14291 fold_checksum_tree (op1, &ctx, &ht);
14292 md5_finish_ctx (&ctx, checksum_after_op1);
14294 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14295 fold_check_failed (op1, tem);
14296 #endif
14297 return tem;
14300 /* Fold a ternary tree expression with code CODE of type TYPE with
14301 operands OP0, OP1, and OP2. Return a folded expression if
14302 successful. Otherwise, return a tree expression with code CODE of
14303 type TYPE with operands OP0, OP1, and OP2. */
14305 tree
14306 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14307 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14309 tree tem;
14310 #ifdef ENABLE_FOLD_CHECKING
14311 unsigned char checksum_before_op0[16],
14312 checksum_before_op1[16],
14313 checksum_before_op2[16],
14314 checksum_after_op0[16],
14315 checksum_after_op1[16],
14316 checksum_after_op2[16];
14317 struct md5_ctx ctx;
14318 hash_table<pointer_hash<const tree_node> > ht (32);
14320 md5_init_ctx (&ctx);
14321 fold_checksum_tree (op0, &ctx, &ht);
14322 md5_finish_ctx (&ctx, checksum_before_op0);
14323 ht.empty ();
14325 md5_init_ctx (&ctx);
14326 fold_checksum_tree (op1, &ctx, &ht);
14327 md5_finish_ctx (&ctx, checksum_before_op1);
14328 ht.empty ();
14330 md5_init_ctx (&ctx);
14331 fold_checksum_tree (op2, &ctx, &ht);
14332 md5_finish_ctx (&ctx, checksum_before_op2);
14333 ht.empty ();
14334 #endif
14336 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14337 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14338 if (!tem)
14339 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14341 #ifdef ENABLE_FOLD_CHECKING
14342 md5_init_ctx (&ctx);
14343 fold_checksum_tree (op0, &ctx, &ht);
14344 md5_finish_ctx (&ctx, checksum_after_op0);
14345 ht.empty ();
14347 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14348 fold_check_failed (op0, tem);
14350 md5_init_ctx (&ctx);
14351 fold_checksum_tree (op1, &ctx, &ht);
14352 md5_finish_ctx (&ctx, checksum_after_op1);
14353 ht.empty ();
14355 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14356 fold_check_failed (op1, tem);
14358 md5_init_ctx (&ctx);
14359 fold_checksum_tree (op2, &ctx, &ht);
14360 md5_finish_ctx (&ctx, checksum_after_op2);
14362 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14363 fold_check_failed (op2, tem);
14364 #endif
14365 return tem;
14368 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14369 arguments in ARGARRAY, and a null static chain.
14370 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14371 of type TYPE from the given operands as constructed by build_call_array. */
14373 tree
14374 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14375 int nargs, tree *argarray)
14377 tree tem;
14378 #ifdef ENABLE_FOLD_CHECKING
14379 unsigned char checksum_before_fn[16],
14380 checksum_before_arglist[16],
14381 checksum_after_fn[16],
14382 checksum_after_arglist[16];
14383 struct md5_ctx ctx;
14384 hash_table<pointer_hash<const tree_node> > ht (32);
14385 int i;
14387 md5_init_ctx (&ctx);
14388 fold_checksum_tree (fn, &ctx, &ht);
14389 md5_finish_ctx (&ctx, checksum_before_fn);
14390 ht.empty ();
14392 md5_init_ctx (&ctx);
14393 for (i = 0; i < nargs; i++)
14394 fold_checksum_tree (argarray[i], &ctx, &ht);
14395 md5_finish_ctx (&ctx, checksum_before_arglist);
14396 ht.empty ();
14397 #endif
14399 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14400 if (!tem)
14401 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14403 #ifdef ENABLE_FOLD_CHECKING
14404 md5_init_ctx (&ctx);
14405 fold_checksum_tree (fn, &ctx, &ht);
14406 md5_finish_ctx (&ctx, checksum_after_fn);
14407 ht.empty ();
14409 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14410 fold_check_failed (fn, tem);
14412 md5_init_ctx (&ctx);
14413 for (i = 0; i < nargs; i++)
14414 fold_checksum_tree (argarray[i], &ctx, &ht);
14415 md5_finish_ctx (&ctx, checksum_after_arglist);
14417 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14418 fold_check_failed (NULL_TREE, tem);
14419 #endif
14420 return tem;
14423 /* Perform constant folding and related simplification of initializer
14424 expression EXPR. These behave identically to "fold_buildN" but ignore
14425 potential run-time traps and exceptions that fold must preserve. */
14427 #define START_FOLD_INIT \
14428 int saved_signaling_nans = flag_signaling_nans;\
14429 int saved_trapping_math = flag_trapping_math;\
14430 int saved_rounding_math = flag_rounding_math;\
14431 int saved_trapv = flag_trapv;\
14432 int saved_folding_initializer = folding_initializer;\
14433 flag_signaling_nans = 0;\
14434 flag_trapping_math = 0;\
14435 flag_rounding_math = 0;\
14436 flag_trapv = 0;\
14437 folding_initializer = 1;
14439 #define END_FOLD_INIT \
14440 flag_signaling_nans = saved_signaling_nans;\
14441 flag_trapping_math = saved_trapping_math;\
14442 flag_rounding_math = saved_rounding_math;\
14443 flag_trapv = saved_trapv;\
14444 folding_initializer = saved_folding_initializer;
14446 tree
14447 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14448 tree type, tree op)
14450 tree result;
14451 START_FOLD_INIT;
14453 result = fold_build1_loc (loc, code, type, op);
14455 END_FOLD_INIT;
14456 return result;
14459 tree
14460 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14461 tree type, tree op0, tree op1)
14463 tree result;
14464 START_FOLD_INIT;
14466 result = fold_build2_loc (loc, code, type, op0, op1);
14468 END_FOLD_INIT;
14469 return result;
14472 tree
14473 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14474 int nargs, tree *argarray)
14476 tree result;
14477 START_FOLD_INIT;
14479 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14481 END_FOLD_INIT;
14482 return result;
14485 #undef START_FOLD_INIT
14486 #undef END_FOLD_INIT
14488 /* Determine if first argument is a multiple of second argument. Return 0 if
14489 it is not, or we cannot easily determined it to be.
14491 An example of the sort of thing we care about (at this point; this routine
14492 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14493 fold cases do now) is discovering that
14495 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14497 is a multiple of
14499 SAVE_EXPR (J * 8)
14501 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14503 This code also handles discovering that
14505 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14507 is a multiple of 8 so we don't have to worry about dealing with a
14508 possible remainder.
14510 Note that we *look* inside a SAVE_EXPR only to determine how it was
14511 calculated; it is not safe for fold to do much of anything else with the
14512 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14513 at run time. For example, the latter example above *cannot* be implemented
14514 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14515 evaluation time of the original SAVE_EXPR is not necessarily the same at
14516 the time the new expression is evaluated. The only optimization of this
14517 sort that would be valid is changing
14519 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14521 divided by 8 to
14523 SAVE_EXPR (I) * SAVE_EXPR (J)
14525 (where the same SAVE_EXPR (J) is used in the original and the
14526 transformed version). */
14529 multiple_of_p (tree type, const_tree top, const_tree bottom)
14531 if (operand_equal_p (top, bottom, 0))
14532 return 1;
14534 if (TREE_CODE (type) != INTEGER_TYPE)
14535 return 0;
14537 switch (TREE_CODE (top))
14539 case BIT_AND_EXPR:
14540 /* Bitwise and provides a power of two multiple. If the mask is
14541 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14542 if (!integer_pow2p (bottom))
14543 return 0;
14544 /* FALLTHRU */
14546 case MULT_EXPR:
14547 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14548 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14550 case PLUS_EXPR:
14551 case MINUS_EXPR:
14552 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14553 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14555 case LSHIFT_EXPR:
14556 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14558 tree op1, t1;
14560 op1 = TREE_OPERAND (top, 1);
14561 /* const_binop may not detect overflow correctly,
14562 so check for it explicitly here. */
14563 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14564 && 0 != (t1 = fold_convert (type,
14565 const_binop (LSHIFT_EXPR,
14566 size_one_node,
14567 op1)))
14568 && !TREE_OVERFLOW (t1))
14569 return multiple_of_p (type, t1, bottom);
14571 return 0;
14573 case NOP_EXPR:
14574 /* Can't handle conversions from non-integral or wider integral type. */
14575 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14576 || (TYPE_PRECISION (type)
14577 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14578 return 0;
14580 /* .. fall through ... */
14582 case SAVE_EXPR:
14583 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14585 case COND_EXPR:
14586 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14587 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14589 case INTEGER_CST:
14590 if (TREE_CODE (bottom) != INTEGER_CST
14591 || integer_zerop (bottom)
14592 || (TYPE_UNSIGNED (type)
14593 && (tree_int_cst_sgn (top) < 0
14594 || tree_int_cst_sgn (bottom) < 0)))
14595 return 0;
14596 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14597 SIGNED);
14599 default:
14600 return 0;
14604 /* Return true if CODE or TYPE is known to be non-negative. */
14606 static bool
14607 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14609 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14610 && truth_value_p (code))
14611 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14612 have a signed:1 type (where the value is -1 and 0). */
14613 return true;
14614 return false;
14617 /* Return true if (CODE OP0) is known to be non-negative. If the return
14618 value is based on the assumption that signed overflow is undefined,
14619 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14620 *STRICT_OVERFLOW_P. */
14622 bool
14623 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14624 bool *strict_overflow_p)
14626 if (TYPE_UNSIGNED (type))
14627 return true;
14629 switch (code)
14631 case ABS_EXPR:
14632 /* We can't return 1 if flag_wrapv is set because
14633 ABS_EXPR<INT_MIN> = INT_MIN. */
14634 if (!INTEGRAL_TYPE_P (type))
14635 return true;
14636 if (TYPE_OVERFLOW_UNDEFINED (type))
14638 *strict_overflow_p = true;
14639 return true;
14641 break;
14643 case NON_LVALUE_EXPR:
14644 case FLOAT_EXPR:
14645 case FIX_TRUNC_EXPR:
14646 return tree_expr_nonnegative_warnv_p (op0,
14647 strict_overflow_p);
14649 CASE_CONVERT:
14651 tree inner_type = TREE_TYPE (op0);
14652 tree outer_type = type;
14654 if (TREE_CODE (outer_type) == REAL_TYPE)
14656 if (TREE_CODE (inner_type) == REAL_TYPE)
14657 return tree_expr_nonnegative_warnv_p (op0,
14658 strict_overflow_p);
14659 if (INTEGRAL_TYPE_P (inner_type))
14661 if (TYPE_UNSIGNED (inner_type))
14662 return true;
14663 return tree_expr_nonnegative_warnv_p (op0,
14664 strict_overflow_p);
14667 else if (INTEGRAL_TYPE_P (outer_type))
14669 if (TREE_CODE (inner_type) == REAL_TYPE)
14670 return tree_expr_nonnegative_warnv_p (op0,
14671 strict_overflow_p);
14672 if (INTEGRAL_TYPE_P (inner_type))
14673 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14674 && TYPE_UNSIGNED (inner_type);
14677 break;
14679 default:
14680 return tree_simple_nonnegative_warnv_p (code, type);
14683 /* We don't know sign of `t', so be conservative and return false. */
14684 return false;
14687 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14688 value is based on the assumption that signed overflow is undefined,
14689 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14690 *STRICT_OVERFLOW_P. */
14692 bool
14693 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14694 tree op1, bool *strict_overflow_p)
14696 if (TYPE_UNSIGNED (type))
14697 return true;
14699 switch (code)
14701 case POINTER_PLUS_EXPR:
14702 case PLUS_EXPR:
14703 if (FLOAT_TYPE_P (type))
14704 return (tree_expr_nonnegative_warnv_p (op0,
14705 strict_overflow_p)
14706 && tree_expr_nonnegative_warnv_p (op1,
14707 strict_overflow_p));
14709 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14710 both unsigned and at least 2 bits shorter than the result. */
14711 if (TREE_CODE (type) == INTEGER_TYPE
14712 && TREE_CODE (op0) == NOP_EXPR
14713 && TREE_CODE (op1) == NOP_EXPR)
14715 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14716 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14717 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14718 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14720 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14721 TYPE_PRECISION (inner2)) + 1;
14722 return prec < TYPE_PRECISION (type);
14725 break;
14727 case MULT_EXPR:
14728 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14730 /* x * x is always non-negative for floating point x
14731 or without overflow. */
14732 if (operand_equal_p (op0, op1, 0)
14733 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14734 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14736 if (TYPE_OVERFLOW_UNDEFINED (type))
14737 *strict_overflow_p = true;
14738 return true;
14742 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14743 both unsigned and their total bits is shorter than the result. */
14744 if (TREE_CODE (type) == INTEGER_TYPE
14745 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14746 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14748 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14749 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14750 : TREE_TYPE (op0);
14751 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14752 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14753 : TREE_TYPE (op1);
14755 bool unsigned0 = TYPE_UNSIGNED (inner0);
14756 bool unsigned1 = TYPE_UNSIGNED (inner1);
14758 if (TREE_CODE (op0) == INTEGER_CST)
14759 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14761 if (TREE_CODE (op1) == INTEGER_CST)
14762 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14764 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14765 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14767 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14768 ? tree_int_cst_min_precision (op0, UNSIGNED)
14769 : TYPE_PRECISION (inner0);
14771 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14772 ? tree_int_cst_min_precision (op1, UNSIGNED)
14773 : TYPE_PRECISION (inner1);
14775 return precision0 + precision1 < TYPE_PRECISION (type);
14778 return false;
14780 case BIT_AND_EXPR:
14781 case MAX_EXPR:
14782 return (tree_expr_nonnegative_warnv_p (op0,
14783 strict_overflow_p)
14784 || tree_expr_nonnegative_warnv_p (op1,
14785 strict_overflow_p));
14787 case BIT_IOR_EXPR:
14788 case BIT_XOR_EXPR:
14789 case MIN_EXPR:
14790 case RDIV_EXPR:
14791 case TRUNC_DIV_EXPR:
14792 case CEIL_DIV_EXPR:
14793 case FLOOR_DIV_EXPR:
14794 case ROUND_DIV_EXPR:
14795 return (tree_expr_nonnegative_warnv_p (op0,
14796 strict_overflow_p)
14797 && tree_expr_nonnegative_warnv_p (op1,
14798 strict_overflow_p));
14800 case TRUNC_MOD_EXPR:
14801 case CEIL_MOD_EXPR:
14802 case FLOOR_MOD_EXPR:
14803 case ROUND_MOD_EXPR:
14804 return tree_expr_nonnegative_warnv_p (op0,
14805 strict_overflow_p);
14806 default:
14807 return tree_simple_nonnegative_warnv_p (code, type);
14810 /* We don't know sign of `t', so be conservative and return false. */
14811 return false;
14814 /* Return true if T is known to be non-negative. If the return
14815 value is based on the assumption that signed overflow is undefined,
14816 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14817 *STRICT_OVERFLOW_P. */
14819 bool
14820 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14822 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14823 return true;
14825 switch (TREE_CODE (t))
14827 case INTEGER_CST:
14828 return tree_int_cst_sgn (t) >= 0;
14830 case REAL_CST:
14831 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14833 case FIXED_CST:
14834 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14836 case COND_EXPR:
14837 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14838 strict_overflow_p)
14839 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14840 strict_overflow_p));
14841 default:
14842 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14843 TREE_TYPE (t));
14845 /* We don't know sign of `t', so be conservative and return false. */
14846 return false;
14849 /* Return true if T is known to be non-negative. If the return
14850 value is based on the assumption that signed overflow is undefined,
14851 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14852 *STRICT_OVERFLOW_P. */
14854 bool
14855 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14856 tree arg0, tree arg1, bool *strict_overflow_p)
14858 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14859 switch (DECL_FUNCTION_CODE (fndecl))
14861 CASE_FLT_FN (BUILT_IN_ACOS):
14862 CASE_FLT_FN (BUILT_IN_ACOSH):
14863 CASE_FLT_FN (BUILT_IN_CABS):
14864 CASE_FLT_FN (BUILT_IN_COSH):
14865 CASE_FLT_FN (BUILT_IN_ERFC):
14866 CASE_FLT_FN (BUILT_IN_EXP):
14867 CASE_FLT_FN (BUILT_IN_EXP10):
14868 CASE_FLT_FN (BUILT_IN_EXP2):
14869 CASE_FLT_FN (BUILT_IN_FABS):
14870 CASE_FLT_FN (BUILT_IN_FDIM):
14871 CASE_FLT_FN (BUILT_IN_HYPOT):
14872 CASE_FLT_FN (BUILT_IN_POW10):
14873 CASE_INT_FN (BUILT_IN_FFS):
14874 CASE_INT_FN (BUILT_IN_PARITY):
14875 CASE_INT_FN (BUILT_IN_POPCOUNT):
14876 CASE_INT_FN (BUILT_IN_CLZ):
14877 CASE_INT_FN (BUILT_IN_CLRSB):
14878 case BUILT_IN_BSWAP32:
14879 case BUILT_IN_BSWAP64:
14880 /* Always true. */
14881 return true;
14883 CASE_FLT_FN (BUILT_IN_SQRT):
14884 /* sqrt(-0.0) is -0.0. */
14885 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14886 return true;
14887 return tree_expr_nonnegative_warnv_p (arg0,
14888 strict_overflow_p);
14890 CASE_FLT_FN (BUILT_IN_ASINH):
14891 CASE_FLT_FN (BUILT_IN_ATAN):
14892 CASE_FLT_FN (BUILT_IN_ATANH):
14893 CASE_FLT_FN (BUILT_IN_CBRT):
14894 CASE_FLT_FN (BUILT_IN_CEIL):
14895 CASE_FLT_FN (BUILT_IN_ERF):
14896 CASE_FLT_FN (BUILT_IN_EXPM1):
14897 CASE_FLT_FN (BUILT_IN_FLOOR):
14898 CASE_FLT_FN (BUILT_IN_FMOD):
14899 CASE_FLT_FN (BUILT_IN_FREXP):
14900 CASE_FLT_FN (BUILT_IN_ICEIL):
14901 CASE_FLT_FN (BUILT_IN_IFLOOR):
14902 CASE_FLT_FN (BUILT_IN_IRINT):
14903 CASE_FLT_FN (BUILT_IN_IROUND):
14904 CASE_FLT_FN (BUILT_IN_LCEIL):
14905 CASE_FLT_FN (BUILT_IN_LDEXP):
14906 CASE_FLT_FN (BUILT_IN_LFLOOR):
14907 CASE_FLT_FN (BUILT_IN_LLCEIL):
14908 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14909 CASE_FLT_FN (BUILT_IN_LLRINT):
14910 CASE_FLT_FN (BUILT_IN_LLROUND):
14911 CASE_FLT_FN (BUILT_IN_LRINT):
14912 CASE_FLT_FN (BUILT_IN_LROUND):
14913 CASE_FLT_FN (BUILT_IN_MODF):
14914 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14915 CASE_FLT_FN (BUILT_IN_RINT):
14916 CASE_FLT_FN (BUILT_IN_ROUND):
14917 CASE_FLT_FN (BUILT_IN_SCALB):
14918 CASE_FLT_FN (BUILT_IN_SCALBLN):
14919 CASE_FLT_FN (BUILT_IN_SCALBN):
14920 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14921 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14922 CASE_FLT_FN (BUILT_IN_SINH):
14923 CASE_FLT_FN (BUILT_IN_TANH):
14924 CASE_FLT_FN (BUILT_IN_TRUNC):
14925 /* True if the 1st argument is nonnegative. */
14926 return tree_expr_nonnegative_warnv_p (arg0,
14927 strict_overflow_p);
14929 CASE_FLT_FN (BUILT_IN_FMAX):
14930 /* True if the 1st OR 2nd arguments are nonnegative. */
14931 return (tree_expr_nonnegative_warnv_p (arg0,
14932 strict_overflow_p)
14933 || (tree_expr_nonnegative_warnv_p (arg1,
14934 strict_overflow_p)));
14936 CASE_FLT_FN (BUILT_IN_FMIN):
14937 /* True if the 1st AND 2nd arguments are nonnegative. */
14938 return (tree_expr_nonnegative_warnv_p (arg0,
14939 strict_overflow_p)
14940 && (tree_expr_nonnegative_warnv_p (arg1,
14941 strict_overflow_p)));
14943 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14944 /* True if the 2nd argument is nonnegative. */
14945 return tree_expr_nonnegative_warnv_p (arg1,
14946 strict_overflow_p);
14948 CASE_FLT_FN (BUILT_IN_POWI):
14949 /* True if the 1st argument is nonnegative or the second
14950 argument is an even integer. */
14951 if (TREE_CODE (arg1) == INTEGER_CST
14952 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14953 return true;
14954 return tree_expr_nonnegative_warnv_p (arg0,
14955 strict_overflow_p);
14957 CASE_FLT_FN (BUILT_IN_POW):
14958 /* True if the 1st argument is nonnegative or the second
14959 argument is an even integer valued real. */
14960 if (TREE_CODE (arg1) == REAL_CST)
14962 REAL_VALUE_TYPE c;
14963 HOST_WIDE_INT n;
14965 c = TREE_REAL_CST (arg1);
14966 n = real_to_integer (&c);
14967 if ((n & 1) == 0)
14969 REAL_VALUE_TYPE cint;
14970 real_from_integer (&cint, VOIDmode, n, SIGNED);
14971 if (real_identical (&c, &cint))
14972 return true;
14975 return tree_expr_nonnegative_warnv_p (arg0,
14976 strict_overflow_p);
14978 default:
14979 break;
14981 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14982 type);
14985 /* Return true if T is known to be non-negative. If the return
14986 value is based on the assumption that signed overflow is undefined,
14987 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14988 *STRICT_OVERFLOW_P. */
14990 static bool
14991 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14993 enum tree_code code = TREE_CODE (t);
14994 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14995 return true;
14997 switch (code)
14999 case TARGET_EXPR:
15001 tree temp = TARGET_EXPR_SLOT (t);
15002 t = TARGET_EXPR_INITIAL (t);
15004 /* If the initializer is non-void, then it's a normal expression
15005 that will be assigned to the slot. */
15006 if (!VOID_TYPE_P (t))
15007 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15009 /* Otherwise, the initializer sets the slot in some way. One common
15010 way is an assignment statement at the end of the initializer. */
15011 while (1)
15013 if (TREE_CODE (t) == BIND_EXPR)
15014 t = expr_last (BIND_EXPR_BODY (t));
15015 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15016 || TREE_CODE (t) == TRY_CATCH_EXPR)
15017 t = expr_last (TREE_OPERAND (t, 0));
15018 else if (TREE_CODE (t) == STATEMENT_LIST)
15019 t = expr_last (t);
15020 else
15021 break;
15023 if (TREE_CODE (t) == MODIFY_EXPR
15024 && TREE_OPERAND (t, 0) == temp)
15025 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15026 strict_overflow_p);
15028 return false;
15031 case CALL_EXPR:
15033 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15034 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15036 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15037 get_callee_fndecl (t),
15038 arg0,
15039 arg1,
15040 strict_overflow_p);
15042 case COMPOUND_EXPR:
15043 case MODIFY_EXPR:
15044 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15045 strict_overflow_p);
15046 case BIND_EXPR:
15047 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15048 strict_overflow_p);
15049 case SAVE_EXPR:
15050 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15051 strict_overflow_p);
15053 default:
15054 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15055 TREE_TYPE (t));
15058 /* We don't know sign of `t', so be conservative and return false. */
15059 return false;
15062 /* Return true if T is known to be non-negative. If the return
15063 value is based on the assumption that signed overflow is undefined,
15064 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15065 *STRICT_OVERFLOW_P. */
15067 bool
15068 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15070 enum tree_code code;
15071 if (t == error_mark_node)
15072 return false;
15074 code = TREE_CODE (t);
15075 switch (TREE_CODE_CLASS (code))
15077 case tcc_binary:
15078 case tcc_comparison:
15079 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15080 TREE_TYPE (t),
15081 TREE_OPERAND (t, 0),
15082 TREE_OPERAND (t, 1),
15083 strict_overflow_p);
15085 case tcc_unary:
15086 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15087 TREE_TYPE (t),
15088 TREE_OPERAND (t, 0),
15089 strict_overflow_p);
15091 case tcc_constant:
15092 case tcc_declaration:
15093 case tcc_reference:
15094 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15096 default:
15097 break;
15100 switch (code)
15102 case TRUTH_AND_EXPR:
15103 case TRUTH_OR_EXPR:
15104 case TRUTH_XOR_EXPR:
15105 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15106 TREE_TYPE (t),
15107 TREE_OPERAND (t, 0),
15108 TREE_OPERAND (t, 1),
15109 strict_overflow_p);
15110 case TRUTH_NOT_EXPR:
15111 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15112 TREE_TYPE (t),
15113 TREE_OPERAND (t, 0),
15114 strict_overflow_p);
15116 case COND_EXPR:
15117 case CONSTRUCTOR:
15118 case OBJ_TYPE_REF:
15119 case ASSERT_EXPR:
15120 case ADDR_EXPR:
15121 case WITH_SIZE_EXPR:
15122 case SSA_NAME:
15123 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15125 default:
15126 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15130 /* Return true if `t' is known to be non-negative. Handle warnings
15131 about undefined signed overflow. */
15133 bool
15134 tree_expr_nonnegative_p (tree t)
15136 bool ret, strict_overflow_p;
15138 strict_overflow_p = false;
15139 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15140 if (strict_overflow_p)
15141 fold_overflow_warning (("assuming signed overflow does not occur when "
15142 "determining that expression is always "
15143 "non-negative"),
15144 WARN_STRICT_OVERFLOW_MISC);
15145 return ret;
15149 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15150 For floating point we further ensure that T is not denormal.
15151 Similar logic is present in nonzero_address in rtlanal.h.
15153 If the return value is based on the assumption that signed overflow
15154 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15155 change *STRICT_OVERFLOW_P. */
15157 bool
15158 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15159 bool *strict_overflow_p)
15161 switch (code)
15163 case ABS_EXPR:
15164 return tree_expr_nonzero_warnv_p (op0,
15165 strict_overflow_p);
15167 case NOP_EXPR:
15169 tree inner_type = TREE_TYPE (op0);
15170 tree outer_type = type;
15172 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15173 && tree_expr_nonzero_warnv_p (op0,
15174 strict_overflow_p));
15176 break;
15178 case NON_LVALUE_EXPR:
15179 return tree_expr_nonzero_warnv_p (op0,
15180 strict_overflow_p);
15182 default:
15183 break;
15186 return false;
15189 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15190 For floating point we further ensure that T is not denormal.
15191 Similar logic is present in nonzero_address in rtlanal.h.
15193 If the return value is based on the assumption that signed overflow
15194 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15195 change *STRICT_OVERFLOW_P. */
15197 bool
15198 tree_binary_nonzero_warnv_p (enum tree_code code,
15199 tree type,
15200 tree op0,
15201 tree op1, bool *strict_overflow_p)
15203 bool sub_strict_overflow_p;
15204 switch (code)
15206 case POINTER_PLUS_EXPR:
15207 case PLUS_EXPR:
15208 if (TYPE_OVERFLOW_UNDEFINED (type))
15210 /* With the presence of negative values it is hard
15211 to say something. */
15212 sub_strict_overflow_p = false;
15213 if (!tree_expr_nonnegative_warnv_p (op0,
15214 &sub_strict_overflow_p)
15215 || !tree_expr_nonnegative_warnv_p (op1,
15216 &sub_strict_overflow_p))
15217 return false;
15218 /* One of operands must be positive and the other non-negative. */
15219 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15220 overflows, on a twos-complement machine the sum of two
15221 nonnegative numbers can never be zero. */
15222 return (tree_expr_nonzero_warnv_p (op0,
15223 strict_overflow_p)
15224 || tree_expr_nonzero_warnv_p (op1,
15225 strict_overflow_p));
15227 break;
15229 case MULT_EXPR:
15230 if (TYPE_OVERFLOW_UNDEFINED (type))
15232 if (tree_expr_nonzero_warnv_p (op0,
15233 strict_overflow_p)
15234 && tree_expr_nonzero_warnv_p (op1,
15235 strict_overflow_p))
15237 *strict_overflow_p = true;
15238 return true;
15241 break;
15243 case MIN_EXPR:
15244 sub_strict_overflow_p = false;
15245 if (tree_expr_nonzero_warnv_p (op0,
15246 &sub_strict_overflow_p)
15247 && tree_expr_nonzero_warnv_p (op1,
15248 &sub_strict_overflow_p))
15250 if (sub_strict_overflow_p)
15251 *strict_overflow_p = true;
15253 break;
15255 case MAX_EXPR:
15256 sub_strict_overflow_p = false;
15257 if (tree_expr_nonzero_warnv_p (op0,
15258 &sub_strict_overflow_p))
15260 if (sub_strict_overflow_p)
15261 *strict_overflow_p = true;
15263 /* When both operands are nonzero, then MAX must be too. */
15264 if (tree_expr_nonzero_warnv_p (op1,
15265 strict_overflow_p))
15266 return true;
15268 /* MAX where operand 0 is positive is positive. */
15269 return tree_expr_nonnegative_warnv_p (op0,
15270 strict_overflow_p);
15272 /* MAX where operand 1 is positive is positive. */
15273 else if (tree_expr_nonzero_warnv_p (op1,
15274 &sub_strict_overflow_p)
15275 && tree_expr_nonnegative_warnv_p (op1,
15276 &sub_strict_overflow_p))
15278 if (sub_strict_overflow_p)
15279 *strict_overflow_p = true;
15280 return true;
15282 break;
15284 case BIT_IOR_EXPR:
15285 return (tree_expr_nonzero_warnv_p (op1,
15286 strict_overflow_p)
15287 || tree_expr_nonzero_warnv_p (op0,
15288 strict_overflow_p));
15290 default:
15291 break;
15294 return false;
15297 /* Return true when T is an address and is known to be nonzero.
15298 For floating point we further ensure that T is not denormal.
15299 Similar logic is present in nonzero_address in rtlanal.h.
15301 If the return value is based on the assumption that signed overflow
15302 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15303 change *STRICT_OVERFLOW_P. */
15305 bool
15306 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15308 bool sub_strict_overflow_p;
15309 switch (TREE_CODE (t))
15311 case INTEGER_CST:
15312 return !integer_zerop (t);
15314 case ADDR_EXPR:
15316 tree base = TREE_OPERAND (t, 0);
15318 if (!DECL_P (base))
15319 base = get_base_address (base);
15321 if (!base)
15322 return false;
15324 /* For objects in symbol table check if we know they are non-zero.
15325 Don't do anything for variables and functions before symtab is built;
15326 it is quite possible that they will be declared weak later. */
15327 if (DECL_P (base) && decl_in_symtab_p (base))
15329 struct symtab_node *symbol;
15331 symbol = symtab_node::get_create (base);
15332 if (symbol)
15333 return symbol->nonzero_address ();
15334 else
15335 return false;
15338 /* Function local objects are never NULL. */
15339 if (DECL_P (base)
15340 && (DECL_CONTEXT (base)
15341 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15342 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15343 return true;
15345 /* Constants are never weak. */
15346 if (CONSTANT_CLASS_P (base))
15347 return true;
15349 return false;
15352 case COND_EXPR:
15353 sub_strict_overflow_p = false;
15354 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15355 &sub_strict_overflow_p)
15356 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15357 &sub_strict_overflow_p))
15359 if (sub_strict_overflow_p)
15360 *strict_overflow_p = true;
15361 return true;
15363 break;
15365 default:
15366 break;
15368 return false;
15371 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15372 attempt to fold the expression to a constant without modifying TYPE,
15373 OP0 or OP1.
15375 If the expression could be simplified to a constant, then return
15376 the constant. If the expression would not be simplified to a
15377 constant, then return NULL_TREE. */
15379 tree
15380 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15382 tree tem = fold_binary (code, type, op0, op1);
15383 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15386 /* Given the components of a unary expression CODE, TYPE and OP0,
15387 attempt to fold the expression to a constant without modifying
15388 TYPE or OP0.
15390 If the expression could be simplified to a constant, then return
15391 the constant. If the expression would not be simplified to a
15392 constant, then return NULL_TREE. */
15394 tree
15395 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15397 tree tem = fold_unary (code, type, op0);
15398 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15401 /* If EXP represents referencing an element in a constant string
15402 (either via pointer arithmetic or array indexing), return the
15403 tree representing the value accessed, otherwise return NULL. */
15405 tree
15406 fold_read_from_constant_string (tree exp)
15408 if ((TREE_CODE (exp) == INDIRECT_REF
15409 || TREE_CODE (exp) == ARRAY_REF)
15410 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15412 tree exp1 = TREE_OPERAND (exp, 0);
15413 tree index;
15414 tree string;
15415 location_t loc = EXPR_LOCATION (exp);
15417 if (TREE_CODE (exp) == INDIRECT_REF)
15418 string = string_constant (exp1, &index);
15419 else
15421 tree low_bound = array_ref_low_bound (exp);
15422 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15424 /* Optimize the special-case of a zero lower bound.
15426 We convert the low_bound to sizetype to avoid some problems
15427 with constant folding. (E.g. suppose the lower bound is 1,
15428 and its mode is QI. Without the conversion,l (ARRAY
15429 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15430 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15431 if (! integer_zerop (low_bound))
15432 index = size_diffop_loc (loc, index,
15433 fold_convert_loc (loc, sizetype, low_bound));
15435 string = exp1;
15438 if (string
15439 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15440 && TREE_CODE (string) == STRING_CST
15441 && TREE_CODE (index) == INTEGER_CST
15442 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15443 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15444 == MODE_INT)
15445 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15446 return build_int_cst_type (TREE_TYPE (exp),
15447 (TREE_STRING_POINTER (string)
15448 [TREE_INT_CST_LOW (index)]));
15450 return NULL;
15453 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15454 an integer constant, real, or fixed-point constant.
15456 TYPE is the type of the result. */
15458 static tree
15459 fold_negate_const (tree arg0, tree type)
15461 tree t = NULL_TREE;
15463 switch (TREE_CODE (arg0))
15465 case INTEGER_CST:
15467 bool overflow;
15468 wide_int val = wi::neg (arg0, &overflow);
15469 t = force_fit_type (type, val, 1,
15470 (overflow | TREE_OVERFLOW (arg0))
15471 && !TYPE_UNSIGNED (type));
15472 break;
15475 case REAL_CST:
15476 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15477 break;
15479 case FIXED_CST:
15481 FIXED_VALUE_TYPE f;
15482 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15483 &(TREE_FIXED_CST (arg0)), NULL,
15484 TYPE_SATURATING (type));
15485 t = build_fixed (type, f);
15486 /* Propagate overflow flags. */
15487 if (overflow_p | TREE_OVERFLOW (arg0))
15488 TREE_OVERFLOW (t) = 1;
15489 break;
15492 default:
15493 gcc_unreachable ();
15496 return t;
15499 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15500 an integer constant or real constant.
15502 TYPE is the type of the result. */
15504 tree
15505 fold_abs_const (tree arg0, tree type)
15507 tree t = NULL_TREE;
15509 switch (TREE_CODE (arg0))
15511 case INTEGER_CST:
15513 /* If the value is unsigned or non-negative, then the absolute value
15514 is the same as the ordinary value. */
15515 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15516 t = arg0;
15518 /* If the value is negative, then the absolute value is
15519 its negation. */
15520 else
15522 bool overflow;
15523 wide_int val = wi::neg (arg0, &overflow);
15524 t = force_fit_type (type, val, -1,
15525 overflow | TREE_OVERFLOW (arg0));
15528 break;
15530 case REAL_CST:
15531 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15532 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15533 else
15534 t = arg0;
15535 break;
15537 default:
15538 gcc_unreachable ();
15541 return t;
15544 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15545 constant. TYPE is the type of the result. */
15547 static tree
15548 fold_not_const (const_tree arg0, tree type)
15550 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15552 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15555 /* Given CODE, a relational operator, the target type, TYPE and two
15556 constant operands OP0 and OP1, return the result of the
15557 relational operation. If the result is not a compile time
15558 constant, then return NULL_TREE. */
15560 static tree
15561 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15563 int result, invert;
15565 /* From here on, the only cases we handle are when the result is
15566 known to be a constant. */
15568 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15570 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15571 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15573 /* Handle the cases where either operand is a NaN. */
15574 if (real_isnan (c0) || real_isnan (c1))
15576 switch (code)
15578 case EQ_EXPR:
15579 case ORDERED_EXPR:
15580 result = 0;
15581 break;
15583 case NE_EXPR:
15584 case UNORDERED_EXPR:
15585 case UNLT_EXPR:
15586 case UNLE_EXPR:
15587 case UNGT_EXPR:
15588 case UNGE_EXPR:
15589 case UNEQ_EXPR:
15590 result = 1;
15591 break;
15593 case LT_EXPR:
15594 case LE_EXPR:
15595 case GT_EXPR:
15596 case GE_EXPR:
15597 case LTGT_EXPR:
15598 if (flag_trapping_math)
15599 return NULL_TREE;
15600 result = 0;
15601 break;
15603 default:
15604 gcc_unreachable ();
15607 return constant_boolean_node (result, type);
15610 return constant_boolean_node (real_compare (code, c0, c1), type);
15613 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15615 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15616 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15617 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15620 /* Handle equality/inequality of complex constants. */
15621 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15623 tree rcond = fold_relational_const (code, type,
15624 TREE_REALPART (op0),
15625 TREE_REALPART (op1));
15626 tree icond = fold_relational_const (code, type,
15627 TREE_IMAGPART (op0),
15628 TREE_IMAGPART (op1));
15629 if (code == EQ_EXPR)
15630 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15631 else if (code == NE_EXPR)
15632 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15633 else
15634 return NULL_TREE;
15637 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15639 unsigned count = VECTOR_CST_NELTS (op0);
15640 tree *elts = XALLOCAVEC (tree, count);
15641 gcc_assert (VECTOR_CST_NELTS (op1) == count
15642 && TYPE_VECTOR_SUBPARTS (type) == count);
15644 for (unsigned i = 0; i < count; i++)
15646 tree elem_type = TREE_TYPE (type);
15647 tree elem0 = VECTOR_CST_ELT (op0, i);
15648 tree elem1 = VECTOR_CST_ELT (op1, i);
15650 tree tem = fold_relational_const (code, elem_type,
15651 elem0, elem1);
15653 if (tem == NULL_TREE)
15654 return NULL_TREE;
15656 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15659 return build_vector (type, elts);
15662 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15664 To compute GT, swap the arguments and do LT.
15665 To compute GE, do LT and invert the result.
15666 To compute LE, swap the arguments, do LT and invert the result.
15667 To compute NE, do EQ and invert the result.
15669 Therefore, the code below must handle only EQ and LT. */
15671 if (code == LE_EXPR || code == GT_EXPR)
15673 tree tem = op0;
15674 op0 = op1;
15675 op1 = tem;
15676 code = swap_tree_comparison (code);
15679 /* Note that it is safe to invert for real values here because we
15680 have already handled the one case that it matters. */
15682 invert = 0;
15683 if (code == NE_EXPR || code == GE_EXPR)
15685 invert = 1;
15686 code = invert_tree_comparison (code, false);
15689 /* Compute a result for LT or EQ if args permit;
15690 Otherwise return T. */
15691 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15693 if (code == EQ_EXPR)
15694 result = tree_int_cst_equal (op0, op1);
15695 else
15696 result = tree_int_cst_lt (op0, op1);
15698 else
15699 return NULL_TREE;
15701 if (invert)
15702 result ^= 1;
15703 return constant_boolean_node (result, type);
15706 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15707 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15708 itself. */
15710 tree
15711 fold_build_cleanup_point_expr (tree type, tree expr)
15713 /* If the expression does not have side effects then we don't have to wrap
15714 it with a cleanup point expression. */
15715 if (!TREE_SIDE_EFFECTS (expr))
15716 return expr;
15718 /* If the expression is a return, check to see if the expression inside the
15719 return has no side effects or the right hand side of the modify expression
15720 inside the return. If either don't have side effects set we don't need to
15721 wrap the expression in a cleanup point expression. Note we don't check the
15722 left hand side of the modify because it should always be a return decl. */
15723 if (TREE_CODE (expr) == RETURN_EXPR)
15725 tree op = TREE_OPERAND (expr, 0);
15726 if (!op || !TREE_SIDE_EFFECTS (op))
15727 return expr;
15728 op = TREE_OPERAND (op, 1);
15729 if (!TREE_SIDE_EFFECTS (op))
15730 return expr;
15733 return build1 (CLEANUP_POINT_EXPR, type, expr);
15736 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15737 of an indirection through OP0, or NULL_TREE if no simplification is
15738 possible. */
15740 tree
15741 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15743 tree sub = op0;
15744 tree subtype;
15746 STRIP_NOPS (sub);
15747 subtype = TREE_TYPE (sub);
15748 if (!POINTER_TYPE_P (subtype))
15749 return NULL_TREE;
15751 if (TREE_CODE (sub) == ADDR_EXPR)
15753 tree op = TREE_OPERAND (sub, 0);
15754 tree optype = TREE_TYPE (op);
15755 /* *&CONST_DECL -> to the value of the const decl. */
15756 if (TREE_CODE (op) == CONST_DECL)
15757 return DECL_INITIAL (op);
15758 /* *&p => p; make sure to handle *&"str"[cst] here. */
15759 if (type == optype)
15761 tree fop = fold_read_from_constant_string (op);
15762 if (fop)
15763 return fop;
15764 else
15765 return op;
15767 /* *(foo *)&fooarray => fooarray[0] */
15768 else if (TREE_CODE (optype) == ARRAY_TYPE
15769 && type == TREE_TYPE (optype)
15770 && (!in_gimple_form
15771 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15773 tree type_domain = TYPE_DOMAIN (optype);
15774 tree min_val = size_zero_node;
15775 if (type_domain && TYPE_MIN_VALUE (type_domain))
15776 min_val = TYPE_MIN_VALUE (type_domain);
15777 if (in_gimple_form
15778 && TREE_CODE (min_val) != INTEGER_CST)
15779 return NULL_TREE;
15780 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15781 NULL_TREE, NULL_TREE);
15783 /* *(foo *)&complexfoo => __real__ complexfoo */
15784 else if (TREE_CODE (optype) == COMPLEX_TYPE
15785 && type == TREE_TYPE (optype))
15786 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15787 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15788 else if (TREE_CODE (optype) == VECTOR_TYPE
15789 && type == TREE_TYPE (optype))
15791 tree part_width = TYPE_SIZE (type);
15792 tree index = bitsize_int (0);
15793 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15797 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15798 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15800 tree op00 = TREE_OPERAND (sub, 0);
15801 tree op01 = TREE_OPERAND (sub, 1);
15803 STRIP_NOPS (op00);
15804 if (TREE_CODE (op00) == ADDR_EXPR)
15806 tree op00type;
15807 op00 = TREE_OPERAND (op00, 0);
15808 op00type = TREE_TYPE (op00);
15810 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15811 if (TREE_CODE (op00type) == VECTOR_TYPE
15812 && type == TREE_TYPE (op00type))
15814 HOST_WIDE_INT offset = tree_to_shwi (op01);
15815 tree part_width = TYPE_SIZE (type);
15816 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15817 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15818 tree index = bitsize_int (indexi);
15820 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15821 return fold_build3_loc (loc,
15822 BIT_FIELD_REF, type, op00,
15823 part_width, index);
15826 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15827 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15828 && type == TREE_TYPE (op00type))
15830 tree size = TYPE_SIZE_UNIT (type);
15831 if (tree_int_cst_equal (size, op01))
15832 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15834 /* ((foo *)&fooarray)[1] => fooarray[1] */
15835 else if (TREE_CODE (op00type) == ARRAY_TYPE
15836 && type == TREE_TYPE (op00type))
15838 tree type_domain = TYPE_DOMAIN (op00type);
15839 tree min_val = size_zero_node;
15840 if (type_domain && TYPE_MIN_VALUE (type_domain))
15841 min_val = TYPE_MIN_VALUE (type_domain);
15842 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15843 TYPE_SIZE_UNIT (type));
15844 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15845 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15846 NULL_TREE, NULL_TREE);
15851 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15852 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15853 && type == TREE_TYPE (TREE_TYPE (subtype))
15854 && (!in_gimple_form
15855 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15857 tree type_domain;
15858 tree min_val = size_zero_node;
15859 sub = build_fold_indirect_ref_loc (loc, sub);
15860 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15861 if (type_domain && TYPE_MIN_VALUE (type_domain))
15862 min_val = TYPE_MIN_VALUE (type_domain);
15863 if (in_gimple_form
15864 && TREE_CODE (min_val) != INTEGER_CST)
15865 return NULL_TREE;
15866 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15867 NULL_TREE);
15870 return NULL_TREE;
15873 /* Builds an expression for an indirection through T, simplifying some
15874 cases. */
15876 tree
15877 build_fold_indirect_ref_loc (location_t loc, tree t)
15879 tree type = TREE_TYPE (TREE_TYPE (t));
15880 tree sub = fold_indirect_ref_1 (loc, type, t);
15882 if (sub)
15883 return sub;
15885 return build1_loc (loc, INDIRECT_REF, type, t);
15888 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15890 tree
15891 fold_indirect_ref_loc (location_t loc, tree t)
15893 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15895 if (sub)
15896 return sub;
15897 else
15898 return t;
15901 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15902 whose result is ignored. The type of the returned tree need not be
15903 the same as the original expression. */
15905 tree
15906 fold_ignored_result (tree t)
15908 if (!TREE_SIDE_EFFECTS (t))
15909 return integer_zero_node;
15911 for (;;)
15912 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15914 case tcc_unary:
15915 t = TREE_OPERAND (t, 0);
15916 break;
15918 case tcc_binary:
15919 case tcc_comparison:
15920 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15921 t = TREE_OPERAND (t, 0);
15922 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15923 t = TREE_OPERAND (t, 1);
15924 else
15925 return t;
15926 break;
15928 case tcc_expression:
15929 switch (TREE_CODE (t))
15931 case COMPOUND_EXPR:
15932 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15933 return t;
15934 t = TREE_OPERAND (t, 0);
15935 break;
15937 case COND_EXPR:
15938 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15939 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15940 return t;
15941 t = TREE_OPERAND (t, 0);
15942 break;
15944 default:
15945 return t;
15947 break;
15949 default:
15950 return t;
15954 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15956 tree
15957 round_up_loc (location_t loc, tree value, unsigned int divisor)
15959 tree div = NULL_TREE;
15961 if (divisor == 1)
15962 return value;
15964 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15965 have to do anything. Only do this when we are not given a const,
15966 because in that case, this check is more expensive than just
15967 doing it. */
15968 if (TREE_CODE (value) != INTEGER_CST)
15970 div = build_int_cst (TREE_TYPE (value), divisor);
15972 if (multiple_of_p (TREE_TYPE (value), value, div))
15973 return value;
15976 /* If divisor is a power of two, simplify this to bit manipulation. */
15977 if (divisor == (divisor & -divisor))
15979 if (TREE_CODE (value) == INTEGER_CST)
15981 wide_int val = value;
15982 bool overflow_p;
15984 if ((val & (divisor - 1)) == 0)
15985 return value;
15987 overflow_p = TREE_OVERFLOW (value);
15988 val &= ~(divisor - 1);
15989 val += divisor;
15990 if (val == 0)
15991 overflow_p = true;
15993 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15995 else
15997 tree t;
15999 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16000 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16001 t = build_int_cst (TREE_TYPE (value), -divisor);
16002 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16005 else
16007 if (!div)
16008 div = build_int_cst (TREE_TYPE (value), divisor);
16009 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16010 value = size_binop_loc (loc, MULT_EXPR, value, div);
16013 return value;
16016 /* Likewise, but round down. */
16018 tree
16019 round_down_loc (location_t loc, tree value, int divisor)
16021 tree div = NULL_TREE;
16023 gcc_assert (divisor > 0);
16024 if (divisor == 1)
16025 return value;
16027 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16028 have to do anything. Only do this when we are not given a const,
16029 because in that case, this check is more expensive than just
16030 doing it. */
16031 if (TREE_CODE (value) != INTEGER_CST)
16033 div = build_int_cst (TREE_TYPE (value), divisor);
16035 if (multiple_of_p (TREE_TYPE (value), value, div))
16036 return value;
16039 /* If divisor is a power of two, simplify this to bit manipulation. */
16040 if (divisor == (divisor & -divisor))
16042 tree t;
16044 t = build_int_cst (TREE_TYPE (value), -divisor);
16045 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16047 else
16049 if (!div)
16050 div = build_int_cst (TREE_TYPE (value), divisor);
16051 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16052 value = size_binop_loc (loc, MULT_EXPR, value, div);
16055 return value;
16058 /* Returns the pointer to the base of the object addressed by EXP and
16059 extracts the information about the offset of the access, storing it
16060 to PBITPOS and POFFSET. */
16062 static tree
16063 split_address_to_core_and_offset (tree exp,
16064 HOST_WIDE_INT *pbitpos, tree *poffset)
16066 tree core;
16067 machine_mode mode;
16068 int unsignedp, volatilep;
16069 HOST_WIDE_INT bitsize;
16070 location_t loc = EXPR_LOCATION (exp);
16072 if (TREE_CODE (exp) == ADDR_EXPR)
16074 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16075 poffset, &mode, &unsignedp, &volatilep,
16076 false);
16077 core = build_fold_addr_expr_loc (loc, core);
16079 else
16081 core = exp;
16082 *pbitpos = 0;
16083 *poffset = NULL_TREE;
16086 return core;
16089 /* Returns true if addresses of E1 and E2 differ by a constant, false
16090 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16092 bool
16093 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16095 tree core1, core2;
16096 HOST_WIDE_INT bitpos1, bitpos2;
16097 tree toffset1, toffset2, tdiff, type;
16099 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16100 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16102 if (bitpos1 % BITS_PER_UNIT != 0
16103 || bitpos2 % BITS_PER_UNIT != 0
16104 || !operand_equal_p (core1, core2, 0))
16105 return false;
16107 if (toffset1 && toffset2)
16109 type = TREE_TYPE (toffset1);
16110 if (type != TREE_TYPE (toffset2))
16111 toffset2 = fold_convert (type, toffset2);
16113 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16114 if (!cst_and_fits_in_hwi (tdiff))
16115 return false;
16117 *diff = int_cst_value (tdiff);
16119 else if (toffset1 || toffset2)
16121 /* If only one of the offsets is non-constant, the difference cannot
16122 be a constant. */
16123 return false;
16125 else
16126 *diff = 0;
16128 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16129 return true;
16132 /* Simplify the floating point expression EXP when the sign of the
16133 result is not significant. Return NULL_TREE if no simplification
16134 is possible. */
16136 tree
16137 fold_strip_sign_ops (tree exp)
16139 tree arg0, arg1;
16140 location_t loc = EXPR_LOCATION (exp);
16142 switch (TREE_CODE (exp))
16144 case ABS_EXPR:
16145 case NEGATE_EXPR:
16146 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16147 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16149 case MULT_EXPR:
16150 case RDIV_EXPR:
16151 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16152 return NULL_TREE;
16153 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16154 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16155 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16156 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16157 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16158 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16159 break;
16161 case COMPOUND_EXPR:
16162 arg0 = TREE_OPERAND (exp, 0);
16163 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16164 if (arg1)
16165 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16166 break;
16168 case COND_EXPR:
16169 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16170 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16171 if (arg0 || arg1)
16172 return fold_build3_loc (loc,
16173 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16174 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16175 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16176 break;
16178 case CALL_EXPR:
16180 const enum built_in_function fcode = builtin_mathfn_code (exp);
16181 switch (fcode)
16183 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16184 /* Strip copysign function call, return the 1st argument. */
16185 arg0 = CALL_EXPR_ARG (exp, 0);
16186 arg1 = CALL_EXPR_ARG (exp, 1);
16187 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16189 default:
16190 /* Strip sign ops from the argument of "odd" math functions. */
16191 if (negate_mathfn_p (fcode))
16193 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16194 if (arg0)
16195 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16197 break;
16200 break;
16202 default:
16203 break;
16205 return NULL_TREE;