/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
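
/* For instance (illustrative only), folding a constant addition through
   the size_binop entry point:

     tree four = size_int (4);
     tree eight = size_binop (PLUS_EXPR, four, four);

   leaves EIGHT an INTEGER_CST of type sizetype with value 8, whereas
   fold_build2 on non-constant operands merely builds the (possibly
   simplified) expression tree.  */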
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
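
/* For example, COMPCODE_LT (1) | COMPCODE_EQ (2) == COMPCODE_LE (3),
   and COMPCODE_LT (1) | COMPCODE_GT (4) == COMPCODE_LTGT (5), so
   folding (a < b) || (a == b) into a <= b reduces to a bitwise OR of
   the two comparison codes.  */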

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
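
/* For instance, with ARG1 == 12 and ARG2 == 4 this returns the
   INTEGER_CST 3, while with ARG1 == 13 and ARG2 == 4 the remainder is
   nonzero and NULL_TREE is returned.  */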

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero, meaning always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
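
/* A typical (illustrative) use of the deferral API, as in the loop
   iteration estimators:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     ... decide whether FOLDED is actually used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   where USED_P says whether the possibly overflow-based result was
   relied upon, and STMT supplies the warning location.  */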

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
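
/* For a 32-bit signed int this returns false only for -2147483648
   (INT_MIN), the single value whose negation is not representable;
   every other signed value, and no unsigned value, may be negated.  */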

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
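
/* Illustrative example: for T = a + 10 in a signed type with undefined
   overflow, fold_negate_expr applies the -(A + B) -> (-B) - A rule, so
   negate_expr returns -10 - a rather than wrapping the whole sum in a
   NEGATE_EXPR.  */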

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
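
/* For example, splitting IN = x + 3 with CODE == PLUS_EXPR stores 3 in
   *LITP, leaves *CONP null, and returns x as the variable part; with
   NEGATE_P set, the literal moves to *MINUS_LITP instead and the
   returned variable part is negate_expr (x), i.e. -x.  */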

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
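
/* E.g. re-associating T1 = x - y with T2 = -z under PLUS_EXPR takes the
   NEGATE_EXPR path above and builds (x - y) - z directly with
   build2_loc, deliberately bypassing fold to avoid infinite
   recursion.  */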

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
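
/* For instance, int_const_binop (TRUNC_DIV_EXPR, 7, 2) on int
   constants yields 3, and int_const_binop (PLUS_EXPR, INT_MAX, 1)
   yields a wrapped INTEGER_CST with TREE_OVERFLOW set, since for
   signed types the overflow is recorded by force_fit_type above.  */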

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		  a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		      t = br*br + bi*bi  */
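	      /* For instance (1 + 2i) / (3 + 4i) gives t = 3*3 + 4*4 = 25,
		 so the real part is (1*3 + 2*4)/25 = 11/25 and the
		 imaginary part is (2*3 - 1*4)/25 = 2/25 (truncated toward
		 zero for the integer division codes).  */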
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		      ratio = br/bi;
		      div = (br * ratio) + bi;
		      tr = (ar * ratio) + ai;
		      ti = (ai * ratio) - ar;
		      tr = tr / div;
		      ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		      ratio = d/c;
		      div = (d * ratio) + c;
		      tr = (b * ratio) + a;
		      ti = b - (a * ratio);
		      tr = tr / div;
		      ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;
	  /* The direction of VEC_RSHIFT_EXPR is endian dependent.
	     For reductions, if !BYTES_BIG_ENDIAN the compiler picks the
	     first vector element, but the last element if
	     BYTES_BIG_ENDIAN.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = -offset;
	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in a signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
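
/* E.g. for the sizetype constants 4 and 12, ARG0 < ARG1, so the
   subtraction is carried out as 12 - 4 in the unsigned type and the
   ssizetype result is built as 0 - 8, giving -8 without any spurious
   unsigned wraparound.  */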

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
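
/* For example, truncating the REAL_CST 3.75 to a 32-bit int yields 3;
   truncating 1e30 saturates to INT_MAX and a NaN becomes 0, in both of
   the latter cases with TREE_OVERFLOW set on the result, matching the
   Java-style semantics described above.  */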

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     Check whether the fractional bits are nonzero, and if so add 1
     to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
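
/* E.g. an ENUMERAL_TYPE value is convertible to integer_type_node with
   a plain NOP_EXPR, so this returns true; a REAL_TYPE value converted
   to an INTEGER_TYPE needs a real conversion rather than a NOP_EXPR,
   so this returns false.  */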

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
2079 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2081 default:
2082 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2083 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2084 gcc_unreachable ();
2086 fold_convert_exit:
2087 protected_set_expr_location_unshare (tem, loc);
2088 return tem;
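/* Added illustration (not from the original file): two constant
   conversions as fold_convert_loc would perform them, assuming the
   usual global type nodes and loc a location_t:

     tree d = fold_convert_loc (loc, double_type_node,
                                build_int_cst (integer_type_node, 2));
     // FLOAT_EXPR path: d is the REAL_CST 2.0.

     tree c = fold_convert_loc (loc, complex_double_type_node, d);
     // COMPLEX_TYPE path: conceptually COMPLEX_EXPR <2.0, 0.0>; the
     // scalar becomes the real part, a converted zero the imaginary.  */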
2091 /* Return false if expr can be assumed not to be an lvalue, true
2092 otherwise. */
2094 static bool
2095 maybe_lvalue_p (const_tree x)
2097 /* We only need to wrap lvalue tree codes. */
2098 switch (TREE_CODE (x))
2100 case VAR_DECL:
2101 case PARM_DECL:
2102 case RESULT_DECL:
2103 case LABEL_DECL:
2104 case FUNCTION_DECL:
2105 case SSA_NAME:
2107 case COMPONENT_REF:
2108 case MEM_REF:
2109 case INDIRECT_REF:
2110 case ARRAY_REF:
2111 case ARRAY_RANGE_REF:
2112 case BIT_FIELD_REF:
2113 case OBJ_TYPE_REF:
2115 case REALPART_EXPR:
2116 case IMAGPART_EXPR:
2117 case PREINCREMENT_EXPR:
2118 case PREDECREMENT_EXPR:
2119 case SAVE_EXPR:
2120 case TRY_CATCH_EXPR:
2121 case WITH_CLEANUP_EXPR:
2122 case COMPOUND_EXPR:
2123 case MODIFY_EXPR:
2124 case TARGET_EXPR:
2125 case COND_EXPR:
2126 case BIND_EXPR:
2127 break;
2129 default:
2130 /* Assume the worst for front-end tree codes. */
2131 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2132 break;
2133 return false;
2136 return true;
2139 /* Return an expr equal to X but certainly not valid as an lvalue. */
2141 tree
2142 non_lvalue_loc (location_t loc, tree x)
2144 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2145 us. */
2146 if (in_gimple_form)
2147 return x;
2149 if (! maybe_lvalue_p (x))
2150 return x;
2151 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2154 /* When pedantic, return an expr equal to X but certainly not valid as a
2155 pedantic lvalue. Otherwise, return X. */
2157 static tree
2158 pedantic_non_lvalue_loc (location_t loc, tree x)
2160 return protected_set_expr_location_unshare (x, loc);
2163 /* Given a tree comparison code, return the code that is the logical inverse.
2164 It is generally not safe to do this for floating-point comparisons, except
2165 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2166 ERROR_MARK in this case. */
2168 enum tree_code
2169 invert_tree_comparison (enum tree_code code, bool honor_nans)
2171 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2172 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2173 return ERROR_MARK;
2175 switch (code)
2177 case EQ_EXPR:
2178 return NE_EXPR;
2179 case NE_EXPR:
2180 return EQ_EXPR;
2181 case GT_EXPR:
2182 return honor_nans ? UNLE_EXPR : LE_EXPR;
2183 case GE_EXPR:
2184 return honor_nans ? UNLT_EXPR : LT_EXPR;
2185 case LT_EXPR:
2186 return honor_nans ? UNGE_EXPR : GE_EXPR;
2187 case LE_EXPR:
2188 return honor_nans ? UNGT_EXPR : GT_EXPR;
2189 case LTGT_EXPR:
2190 return UNEQ_EXPR;
2191 case UNEQ_EXPR:
2192 return LTGT_EXPR;
2193 case UNGT_EXPR:
2194 return LE_EXPR;
2195 case UNGE_EXPR:
2196 return LT_EXPR;
2197 case UNLT_EXPR:
2198 return GE_EXPR;
2199 case UNLE_EXPR:
2200 return GT_EXPR;
2201 case ORDERED_EXPR:
2202 return UNORDERED_EXPR;
2203 case UNORDERED_EXPR:
2204 return ORDERED_EXPR;
2205 default:
2206 gcc_unreachable ();
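/* Added example: with NaNs honored (and trapping math disabled, so the
   early ERROR_MARK return above does not fire),

     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR
     invert_tree_comparison (LT_EXPR, false) == GE_EXPR

   UNGE is required in the first case because !(x < y) must also hold
   when x and y compare unordered.  */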
2210 /* Similar, but return the comparison that results if the operands are
2211 swapped. This is safe for floating-point. */
2213 enum tree_code
2214 swap_tree_comparison (enum tree_code code)
2216 switch (code)
2218 case EQ_EXPR:
2219 case NE_EXPR:
2220 case ORDERED_EXPR:
2221 case UNORDERED_EXPR:
2222 case LTGT_EXPR:
2223 case UNEQ_EXPR:
2224 return code;
2225 case GT_EXPR:
2226 return LT_EXPR;
2227 case GE_EXPR:
2228 return LE_EXPR;
2229 case LT_EXPR:
2230 return GT_EXPR;
2231 case LE_EXPR:
2232 return GE_EXPR;
2233 case UNGT_EXPR:
2234 return UNLT_EXPR;
2235 case UNGE_EXPR:
2236 return UNLE_EXPR;
2237 case UNLT_EXPR:
2238 return UNGT_EXPR;
2239 case UNLE_EXPR:
2240 return UNGE_EXPR;
2241 default:
2242 gcc_unreachable ();
2247 /* Convert a comparison tree code from an enum tree_code representation
2248 into a compcode bit-based encoding. This function is the inverse of
2249 compcode_to_comparison. */
2251 static enum comparison_code
2252 comparison_to_compcode (enum tree_code code)
2254 switch (code)
2256 case LT_EXPR:
2257 return COMPCODE_LT;
2258 case EQ_EXPR:
2259 return COMPCODE_EQ;
2260 case LE_EXPR:
2261 return COMPCODE_LE;
2262 case GT_EXPR:
2263 return COMPCODE_GT;
2264 case NE_EXPR:
2265 return COMPCODE_NE;
2266 case GE_EXPR:
2267 return COMPCODE_GE;
2268 case ORDERED_EXPR:
2269 return COMPCODE_ORD;
2270 case UNORDERED_EXPR:
2271 return COMPCODE_UNORD;
2272 case UNLT_EXPR:
2273 return COMPCODE_UNLT;
2274 case UNEQ_EXPR:
2275 return COMPCODE_UNEQ;
2276 case UNLE_EXPR:
2277 return COMPCODE_UNLE;
2278 case UNGT_EXPR:
2279 return COMPCODE_UNGT;
2280 case LTGT_EXPR:
2281 return COMPCODE_LTGT;
2282 case UNGE_EXPR:
2283 return COMPCODE_UNGE;
2284 default:
2285 gcc_unreachable ();
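/* Added illustration of the encoding: bit 0 means "less", bit 1
   "equal", bit 2 "greater" and bit 3 "unordered", so composite codes
   are bitwise ORs of primitive ones:

     COMPCODE_LE  == COMPCODE_LT | COMPCODE_EQ                (1|2 == 3)
     COMPCODE_ORD == COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT  (== 7)

   This is what lets combine_comparisons below turn a logical AND/OR of
   comparisons into a bitwise AND/OR of their compcodes.  */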
2289 /* Convert a compcode bit-based encoding of a comparison operator back
2290 to GCC's enum tree_code representation. This function is the
2291 inverse of comparison_to_compcode. */
2293 static enum tree_code
2294 compcode_to_comparison (enum comparison_code code)
2296 switch (code)
2298 case COMPCODE_LT:
2299 return LT_EXPR;
2300 case COMPCODE_EQ:
2301 return EQ_EXPR;
2302 case COMPCODE_LE:
2303 return LE_EXPR;
2304 case COMPCODE_GT:
2305 return GT_EXPR;
2306 case COMPCODE_NE:
2307 return NE_EXPR;
2308 case COMPCODE_GE:
2309 return GE_EXPR;
2310 case COMPCODE_ORD:
2311 return ORDERED_EXPR;
2312 case COMPCODE_UNORD:
2313 return UNORDERED_EXPR;
2314 case COMPCODE_UNLT:
2315 return UNLT_EXPR;
2316 case COMPCODE_UNEQ:
2317 return UNEQ_EXPR;
2318 case COMPCODE_UNLE:
2319 return UNLE_EXPR;
2320 case COMPCODE_UNGT:
2321 return UNGT_EXPR;
2322 case COMPCODE_LTGT:
2323 return LTGT_EXPR;
2324 case COMPCODE_UNGE:
2325 return UNGE_EXPR;
2326 default:
2327 gcc_unreachable ();
2331 /* Return a tree for the comparison which is the combination of
2332 doing the AND or OR (depending on CODE) of the two operations LCODE
2333 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2334 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2335 if this makes the transformation invalid. */
2337 tree
2338 combine_comparisons (location_t loc,
2339 enum tree_code code, enum tree_code lcode,
2340 enum tree_code rcode, tree truth_type,
2341 tree ll_arg, tree lr_arg)
2343 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2344 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2345 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2346 int compcode;
2348 switch (code)
2350 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2351 compcode = lcompcode & rcompcode;
2352 break;
2354 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2355 compcode = lcompcode | rcompcode;
2356 break;
2358 default:
2359 return NULL_TREE;
2362 if (!honor_nans)
2364 /* Eliminate unordered comparisons, as well as LTGT and ORD
2365 which are not used unless the mode has NaNs. */
2366 compcode &= ~COMPCODE_UNORD;
2367 if (compcode == COMPCODE_LTGT)
2368 compcode = COMPCODE_NE;
2369 else if (compcode == COMPCODE_ORD)
2370 compcode = COMPCODE_TRUE;
2372 else if (flag_trapping_math)
2374 /* Check that the original operation and the optimized ones will trap
2375 under the same condition. */
2376 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2377 && (lcompcode != COMPCODE_EQ)
2378 && (lcompcode != COMPCODE_ORD);
2379 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2380 && (rcompcode != COMPCODE_EQ)
2381 && (rcompcode != COMPCODE_ORD);
2382 bool trap = (compcode & COMPCODE_UNORD) == 0
2383 && (compcode != COMPCODE_EQ)
2384 && (compcode != COMPCODE_ORD);
2386 /* In a short-circuited boolean expression the LHS might be
2387 such that the RHS, if evaluated, will never trap. For
2388 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2389 if neither x nor y is NaN. (This is a mixed blessing: for
2390 example, the expression above will never trap, hence
2391 optimizing it to x < y would be invalid). */
2392 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2393 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2394 rtrap = false;
2396 /* If the comparison was short-circuited, and only the RHS
2397 trapped, we may now generate a spurious trap. */
2398 if (rtrap && !ltrap
2399 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2400 return NULL_TREE;
2402 /* If we changed the conditions that cause a trap, we lose. */
2403 if ((ltrap || rtrap) != trap)
2404 return NULL_TREE;
2407 if (compcode == COMPCODE_TRUE)
2408 return constant_boolean_node (true, truth_type);
2409 else if (compcode == COMPCODE_FALSE)
2410 return constant_boolean_node (false, truth_type);
2411 else
2413 enum tree_code tcode;
2415 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2416 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
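/* Added worked example, assuming integer operands (no NaNs): for
   (x < y) && (x == y) we get lcompcode == COMPCODE_LT (1) and
   rcompcode == COMPCODE_EQ (2); 1 & 2 == COMPCODE_FALSE, so the whole
   test folds to constant false.  For (x < y) || (x == y),
   1 | 2 == COMPCODE_LE and the result is the single comparison
   fold_build2_loc (loc, LE_EXPR, truth_type, ll_arg, lr_arg).  */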
2420 /* Return nonzero if two operands (typically of the same tree node)
2421 are necessarily equal. If either argument has side-effects this
2422 function returns zero. FLAGS modifies behavior as follows:
2424 If OEP_ONLY_CONST is set, only return nonzero for constants.
2425 This function tests whether the operands are indistinguishable;
2426 it does not test whether they are equal using C's == operation.
2427 The distinction is important for IEEE floating point, because
2428 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2429 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2431 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2432 even though it may hold multiple values during a function.
2433 This is because a GCC tree node guarantees that nothing else is
2434 executed between the evaluation of its "operands" (which may often
2435 be evaluated in arbitrary order). Hence if the operands themselves
2436 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2437 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2438 unset means assuming isochronic (or instantaneous) tree equivalence.
2439 Unless comparing arbitrary expression trees, such as from different
2440 statements, this flag can usually be left unset.
2442 If OEP_PURE_SAME is set, then pure functions with identical arguments
2443 are considered the same. It is used when the caller has other ways
2444 to ensure that global memory is unchanged in between. */
2446 int
2447 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2449 /* If either is ERROR_MARK, they aren't equal. */
2450 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2451 || TREE_TYPE (arg0) == error_mark_node
2452 || TREE_TYPE (arg1) == error_mark_node)
2453 return 0;
2455 /* Similar, if either does not have a type (like a released SSA name),
2456 they aren't equal. */
2457 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2458 return 0;
2460 /* Check equality of integer constants before bailing out due to
2461 precision differences. */
2462 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2463 return tree_int_cst_equal (arg0, arg1);
2465 /* If the two types don't have the same signedness, then we can't consider
2466 them equal. We must check this before the STRIP_NOPS calls
2467 because they may change the signedness of the arguments. As pointers
2468 strictly don't have a signedness, require either two pointers or
2469 two non-pointers as well. */
2470 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2471 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2472 return 0;
2474 /* We cannot consider pointers to different address space equal. */
2475 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2476 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2477 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2478 return 0;
2480 /* If the two types don't have the same precision, then it is not safe
2481 to strip NOPs. */
2482 if (element_precision (TREE_TYPE (arg0))
2483 != element_precision (TREE_TYPE (arg1)))
2484 return 0;
2486 STRIP_NOPS (arg0);
2487 STRIP_NOPS (arg1);
2489 /* In case both args are comparisons but with different comparison
2490 code, try to swap the comparison operands of one arg to produce
2491 a match and compare that variant. */
2492 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2493 && COMPARISON_CLASS_P (arg0)
2494 && COMPARISON_CLASS_P (arg1))
2496 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2498 if (TREE_CODE (arg0) == swap_code)
2499 return operand_equal_p (TREE_OPERAND (arg0, 0),
2500 TREE_OPERAND (arg1, 1), flags)
2501 && operand_equal_p (TREE_OPERAND (arg0, 1),
2502 TREE_OPERAND (arg1, 0), flags);
2505 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2506 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2507 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2508 return 0;
2510 /* This is needed for conversions and for COMPONENT_REF.
2511 Might as well play it safe and always test this. */
2512 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2513 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2514 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2515 return 0;
2517 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2518 We don't care about side effects in that case because the SAVE_EXPR
2519 takes care of that for us. In all other cases, two expressions are
2520 equal if they have no side effects. If we have two identical
2521 expressions with side effects that should be treated the same due
2522 to the only side effects being identical SAVE_EXPR's, that will
2523 be detected in the recursive calls below.
2524 If we are taking an invariant address of two identical objects
2525 they are necessarily equal as well. */
2526 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2527 && (TREE_CODE (arg0) == SAVE_EXPR
2528 || (flags & OEP_CONSTANT_ADDRESS_OF)
2529 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2530 return 1;
2532 /* Next handle constant cases, those for which we can return 1 even
2533 if ONLY_CONST is set. */
2534 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2535 switch (TREE_CODE (arg0))
2537 case INTEGER_CST:
2538 return tree_int_cst_equal (arg0, arg1);
2540 case FIXED_CST:
2541 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2542 TREE_FIXED_CST (arg1));
2544 case REAL_CST:
2545 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2546 TREE_REAL_CST (arg1)))
2547 return 1;
2550 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2552 /* If we do not distinguish between signed and unsigned zero,
2553 consider them equal. */
2554 if (real_zerop (arg0) && real_zerop (arg1))
2555 return 1;
2557 return 0;
2559 case VECTOR_CST:
2561 unsigned i;
2563 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2564 return 0;
2566 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2568 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2569 VECTOR_CST_ELT (arg1, i), flags))
2570 return 0;
2572 return 1;
2575 case COMPLEX_CST:
2576 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2577 flags)
2578 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2579 flags));
2581 case STRING_CST:
2582 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2583 && ! memcmp (TREE_STRING_POINTER (arg0),
2584 TREE_STRING_POINTER (arg1),
2585 TREE_STRING_LENGTH (arg0)));
2587 case ADDR_EXPR:
2588 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2589 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2590 ? OEP_CONSTANT_ADDRESS_OF : 0);
2591 default:
2592 break;
2595 if (flags & OEP_ONLY_CONST)
2596 return 0;
2598 /* Define macros to test an operand from arg0 and arg1 for equality and a
2599 variant that allows null and views null as being different from any
2600 non-null value. In the latter case, if either is null, they both
2601 must be; otherwise, do the normal comparison. */
2602 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2603 TREE_OPERAND (arg1, N), flags)
2605 #define OP_SAME_WITH_NULL(N) \
2606 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2607 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2609 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2611 case tcc_unary:
2612 /* Two conversions are equal only if signedness and modes match. */
2613 switch (TREE_CODE (arg0))
2615 CASE_CONVERT:
2616 case FIX_TRUNC_EXPR:
2617 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2618 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2619 return 0;
2620 break;
2621 default:
2622 break;
2625 return OP_SAME (0);
2628 case tcc_comparison:
2629 case tcc_binary:
2630 if (OP_SAME (0) && OP_SAME (1))
2631 return 1;
2633 /* For commutative ops, allow the other order. */
2634 return (commutative_tree_code (TREE_CODE (arg0))
2635 && operand_equal_p (TREE_OPERAND (arg0, 0),
2636 TREE_OPERAND (arg1, 1), flags)
2637 && operand_equal_p (TREE_OPERAND (arg0, 1),
2638 TREE_OPERAND (arg1, 0), flags));
2640 case tcc_reference:
2641 /* If either of the pointer (or reference) expressions we are
2642 dereferencing contain a side effect, these cannot be equal,
2643 but their addresses can be. */
2644 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2645 && (TREE_SIDE_EFFECTS (arg0)
2646 || TREE_SIDE_EFFECTS (arg1)))
2647 return 0;
2649 switch (TREE_CODE (arg0))
2651 case INDIRECT_REF:
2652 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2653 return OP_SAME (0);
2655 case REALPART_EXPR:
2656 case IMAGPART_EXPR:
2657 return OP_SAME (0);
2659 case TARGET_MEM_REF:
2660 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2661 /* Require equal extra operands and then fall through to MEM_REF
2662 handling of the two common operands. */
2663 if (!OP_SAME_WITH_NULL (2)
2664 || !OP_SAME_WITH_NULL (3)
2665 || !OP_SAME_WITH_NULL (4))
2666 return 0;
2667 /* Fallthru. */
2668 case MEM_REF:
2669 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2670 /* Require equal access sizes, and similar pointer types.
2671 We can have incomplete types for array references of
2672 variable-sized arrays from the Fortran frontend
2673 though. Also verify the types are compatible. */
2674 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2675 || (TYPE_SIZE (TREE_TYPE (arg0))
2676 && TYPE_SIZE (TREE_TYPE (arg1))
2677 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2678 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2679 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2680 && alias_ptr_types_compatible_p
2681 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2682 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2683 && OP_SAME (0) && OP_SAME (1));
2685 case ARRAY_REF:
2686 case ARRAY_RANGE_REF:
2687 /* Operands 2 and 3 may be null.
2688 Compare the array index by value first if it is constant, as the
2689 indexes may have different types but the same value here. */
2690 if (!OP_SAME (0))
2691 return 0;
2692 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2693 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2694 TREE_OPERAND (arg1, 1))
2695 || OP_SAME (1))
2696 && OP_SAME_WITH_NULL (2)
2697 && OP_SAME_WITH_NULL (3));
2699 case COMPONENT_REF:
2700 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2701 may be NULL when we're called to compare MEM_EXPRs. */
2702 if (!OP_SAME_WITH_NULL (0)
2703 || !OP_SAME (1))
2704 return 0;
2705 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2706 return OP_SAME_WITH_NULL (2);
2708 case BIT_FIELD_REF:
2709 if (!OP_SAME (0))
2710 return 0;
2711 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2712 return OP_SAME (1) && OP_SAME (2);
2714 default:
2715 return 0;
2718 case tcc_expression:
2719 switch (TREE_CODE (arg0))
2721 case ADDR_EXPR:
2722 case TRUTH_NOT_EXPR:
2723 return OP_SAME (0);
2725 case TRUTH_ANDIF_EXPR:
2726 case TRUTH_ORIF_EXPR:
2727 return OP_SAME (0) && OP_SAME (1);
2729 case FMA_EXPR:
2730 case WIDEN_MULT_PLUS_EXPR:
2731 case WIDEN_MULT_MINUS_EXPR:
2732 if (!OP_SAME (2))
2733 return 0;
2734 /* The multiplication operands are commutative. */
2735 /* FALLTHRU */
2737 case TRUTH_AND_EXPR:
2738 case TRUTH_OR_EXPR:
2739 case TRUTH_XOR_EXPR:
2740 if (OP_SAME (0) && OP_SAME (1))
2741 return 1;
2743 /* Otherwise take into account this is a commutative operation. */
2744 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2745 TREE_OPERAND (arg1, 1), flags)
2746 && operand_equal_p (TREE_OPERAND (arg0, 1),
2747 TREE_OPERAND (arg1, 0), flags));
2749 case COND_EXPR:
2750 case VEC_COND_EXPR:
2751 case DOT_PROD_EXPR:
2752 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2754 default:
2755 return 0;
2758 case tcc_vl_exp:
2759 switch (TREE_CODE (arg0))
2761 case CALL_EXPR:
2762 /* If the CALL_EXPRs call different functions, then they
2763 clearly cannot be equal. */
2764 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2765 flags))
2766 return 0;
2769 unsigned int cef = call_expr_flags (arg0);
2770 if (flags & OEP_PURE_SAME)
2771 cef &= ECF_CONST | ECF_PURE;
2772 else
2773 cef &= ECF_CONST;
2774 if (!cef)
2775 return 0;
2778 /* Now see if all the arguments are the same. */
2780 const_call_expr_arg_iterator iter0, iter1;
2781 const_tree a0, a1;
2782 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2783 a1 = first_const_call_expr_arg (arg1, &iter1);
2784 a0 && a1;
2785 a0 = next_const_call_expr_arg (&iter0),
2786 a1 = next_const_call_expr_arg (&iter1))
2787 if (! operand_equal_p (a0, a1, flags))
2788 return 0;
2790 /* If we get here and both argument lists are exhausted
2791 then the CALL_EXPRs are equal. */
2792 return ! (a0 || a1);
2794 default:
2795 return 0;
2798 case tcc_declaration:
2799 /* Consider __builtin_sqrt equal to sqrt. */
2800 return (TREE_CODE (arg0) == FUNCTION_DECL
2801 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2802 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2803 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2805 default:
2806 return 0;
2809 #undef OP_SAME
2810 #undef OP_SAME_WITH_NULL
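/* Added examples of the symmetry handling above (a and b stand for
   side-effect-free operands such as PARM_DECLs, and "a + b" for the
   PLUS_EXPR tree built over them):

     operand_equal_p (a + b, b + a, 0)  -> 1   commutative tcc_binary
     operand_equal_p (a < b, b > a, 0)  -> 1   swapped comparison
     operand_equal_p (a - b, b - a, 0)  -> 0   MINUS_EXPR is not commutative  */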
2813 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2814 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2816 When in doubt, return 0. */
2818 static int
2819 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2821 int unsignedp1, unsignedpo;
2822 tree primarg0, primarg1, primother;
2823 unsigned int correct_width;
2825 if (operand_equal_p (arg0, arg1, 0))
2826 return 1;
2828 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2829 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2830 return 0;
2832 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2833 and see if the inner values are the same. This removes any
2834 signedness comparison, which doesn't matter here. */
2835 primarg0 = arg0, primarg1 = arg1;
2836 STRIP_NOPS (primarg0);
2837 STRIP_NOPS (primarg1);
2838 if (operand_equal_p (primarg0, primarg1, 0))
2839 return 1;
2841 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2842 actual comparison operand, ARG0.
2844 First throw away any conversions to wider types
2845 already present in the operands. */
2847 primarg1 = get_narrower (arg1, &unsignedp1);
2848 primother = get_narrower (other, &unsignedpo);
2850 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2851 if (unsignedp1 == unsignedpo
2852 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2853 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2855 tree type = TREE_TYPE (arg0);
2857 /* Make sure shorter operand is extended the right way
2858 to match the longer operand. */
2859 primarg1 = fold_convert (signed_or_unsigned_type_for
2860 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2862 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2863 return 1;
2866 return 0;
2869 /* See if ARG is an expression that is either a comparison or is performing
2870 arithmetic on comparisons. The comparisons must only be comparing
2871 two different values, which will be stored in *CVAL1 and *CVAL2; if
2872 they are nonzero it means that some operands have already been found.
2873 No variables may be used anywhere else in the expression except in the
2874 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2875 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2877 If this is true, return 1. Otherwise, return zero. */
2879 static int
2880 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2882 enum tree_code code = TREE_CODE (arg);
2883 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2885 /* We can handle some of the tcc_expression cases here. */
2886 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2887 tclass = tcc_unary;
2888 else if (tclass == tcc_expression
2889 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2890 || code == COMPOUND_EXPR))
2891 tclass = tcc_binary;
2893 else if (tclass == tcc_expression && code == SAVE_EXPR
2894 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2896 /* If we've already found a CVAL1 or CVAL2, this expression is
2897 too complex to handle. */
2898 if (*cval1 || *cval2)
2899 return 0;
2901 tclass = tcc_unary;
2902 *save_p = 1;
2905 switch (tclass)
2907 case tcc_unary:
2908 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2910 case tcc_binary:
2911 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2912 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2913 cval1, cval2, save_p));
2915 case tcc_constant:
2916 return 1;
2918 case tcc_expression:
2919 if (code == COND_EXPR)
2920 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2921 cval1, cval2, save_p)
2922 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2923 cval1, cval2, save_p)
2924 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2925 cval1, cval2, save_p));
2926 return 0;
2928 case tcc_comparison:
2929 /* First see if we can handle the first operand, then the second. For
2930 the second operand, we know *CVAL1 can't be zero. It must be that
2931 one side of the comparison is each of the values; test for the
2932 case where this isn't true by failing if the two operands
2933 are the same. */
2935 if (operand_equal_p (TREE_OPERAND (arg, 0),
2936 TREE_OPERAND (arg, 1), 0))
2937 return 0;
2939 if (*cval1 == 0)
2940 *cval1 = TREE_OPERAND (arg, 0);
2941 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2942 ;
2943 else if (*cval2 == 0)
2944 *cval2 = TREE_OPERAND (arg, 0);
2945 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2946 ;
2947 else
2948 return 0;
2950 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2951 ;
2952 else if (*cval2 == 0)
2953 *cval2 = TREE_OPERAND (arg, 1);
2954 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2955 ;
2956 else
2957 return 0;
2959 return 1;
2961 default:
2962 return 0;
2966 /* ARG is a tree that is known to contain just arithmetic operations and
2967 comparisons. Evaluate the operations in the tree substituting NEW0 for
2968 any occurrence of OLD0 as an operand of a comparison and likewise for
2969 NEW1 and OLD1. */
2971 static tree
2972 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2973 tree old1, tree new1)
2975 tree type = TREE_TYPE (arg);
2976 enum tree_code code = TREE_CODE (arg);
2977 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2979 /* We can handle some of the tcc_expression cases here. */
2980 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2981 tclass = tcc_unary;
2982 else if (tclass == tcc_expression
2983 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2984 tclass = tcc_binary;
2986 switch (tclass)
2988 case tcc_unary:
2989 return fold_build1_loc (loc, code, type,
2990 eval_subst (loc, TREE_OPERAND (arg, 0),
2991 old0, new0, old1, new1));
2993 case tcc_binary:
2994 return fold_build2_loc (loc, code, type,
2995 eval_subst (loc, TREE_OPERAND (arg, 0),
2996 old0, new0, old1, new1),
2997 eval_subst (loc, TREE_OPERAND (arg, 1),
2998 old0, new0, old1, new1));
3000 case tcc_expression:
3001 switch (code)
3003 case SAVE_EXPR:
3004 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3005 old1, new1);
3007 case COMPOUND_EXPR:
3008 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3009 old1, new1);
3011 case COND_EXPR:
3012 return fold_build3_loc (loc, code, type,
3013 eval_subst (loc, TREE_OPERAND (arg, 0),
3014 old0, new0, old1, new1),
3015 eval_subst (loc, TREE_OPERAND (arg, 1),
3016 old0, new0, old1, new1),
3017 eval_subst (loc, TREE_OPERAND (arg, 2),
3018 old0, new0, old1, new1));
3019 default:
3020 break;
3022 /* Fall through - ??? */
3024 case tcc_comparison:
3026 tree arg0 = TREE_OPERAND (arg, 0);
3027 tree arg1 = TREE_OPERAND (arg, 1);
3029 /* We need to check both for exact equality and tree equality. The
3030 former will be true if the operand has a side-effect. In that
3031 case, we know the operand occurred exactly once. */
3033 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3034 arg0 = new0;
3035 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3036 arg0 = new1;
3038 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3039 arg1 = new0;
3040 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3041 arg1 = new1;
3043 return fold_build2_loc (loc, code, type, arg0, arg1);
3046 default:
3047 return arg;
3051 /* Return a tree for the case when the result of an expression is RESULT
3052 converted to TYPE and OMITTED was previously an operand of the expression
3053 but is now not needed (e.g., we folded OMITTED * 0).
3055 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3056 the conversion of RESULT to TYPE. */
3058 tree
3059 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3061 tree t = fold_convert_loc (loc, type, result);
3063 /* If the resulting operand is an empty statement, just return the omitted
3064 statement casted to void. */
3065 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3066 return build1_loc (loc, NOP_EXPR, void_type_node,
3067 fold_ignored_result (omitted));
3069 if (TREE_SIDE_EFFECTS (omitted))
3070 return build2_loc (loc, COMPOUND_EXPR, type,
3071 fold_ignored_result (omitted), t);
3073 return non_lvalue_loc (loc, t);
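/* Added example: if fold reduces f () * 0 to 0 but the call f () has
   side effects, omit_one_operand_loc (loc, type, integer_zero_node,
   call) yields the equivalent of the C expression (f (), 0), i.e. a
   COMPOUND_EXPR; with no side effects it is just the converted 0.  */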
3076 /* Return a tree for the case when the result of an expression is RESULT
3077 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3078 of the expression but are now not needed.
3080 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3081 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3082 evaluated before OMITTED2. Otherwise, if neither has side effects,
3083 just do the conversion of RESULT to TYPE. */
3085 tree
3086 omit_two_operands_loc (location_t loc, tree type, tree result,
3087 tree omitted1, tree omitted2)
3089 tree t = fold_convert_loc (loc, type, result);
3091 if (TREE_SIDE_EFFECTS (omitted2))
3092 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3093 if (TREE_SIDE_EFFECTS (omitted1))
3094 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3096 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3100 /* Return a simplified tree node for the truth-negation of ARG. This
3101 never alters ARG itself. We assume that ARG is an operation that
3102 returns a truth value (0 or 1).
3104 FIXME: one would think we would fold the result, but it causes
3105 problems with the dominator optimizer. */
3107 static tree
3108 fold_truth_not_expr (location_t loc, tree arg)
3110 tree type = TREE_TYPE (arg);
3111 enum tree_code code = TREE_CODE (arg);
3112 location_t loc1, loc2;
3114 /* If this is a comparison, we can simply invert it, except for
3115 floating-point non-equality comparisons, in which case we just
3116 enclose a TRUTH_NOT_EXPR around what we have. */
3118 if (TREE_CODE_CLASS (code) == tcc_comparison)
3120 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3121 if (FLOAT_TYPE_P (op_type)
3122 && flag_trapping_math
3123 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3124 && code != NE_EXPR && code != EQ_EXPR)
3125 return NULL_TREE;
3127 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3128 if (code == ERROR_MARK)
3129 return NULL_TREE;
3131 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3132 TREE_OPERAND (arg, 1));
3135 switch (code)
3137 case INTEGER_CST:
3138 return constant_boolean_node (integer_zerop (arg), type);
3140 case TRUTH_AND_EXPR:
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3142 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3143 return build2_loc (loc, TRUTH_OR_EXPR, type,
3144 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3145 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3147 case TRUTH_OR_EXPR:
3148 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3149 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3150 return build2_loc (loc, TRUTH_AND_EXPR, type,
3151 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3152 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3154 case TRUTH_XOR_EXPR:
3155 /* Here we can invert either operand. We invert the first operand
3156 unless the second operand is a TRUTH_NOT_EXPR in which case our
3157 result is the XOR of the first operand with the inside of the
3158 negation of the second operand. */
3160 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3161 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3162 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3163 else
3164 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3165 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3166 TREE_OPERAND (arg, 1));
3168 case TRUTH_ANDIF_EXPR:
3169 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3170 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3171 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3172 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3173 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3175 case TRUTH_ORIF_EXPR:
3176 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3177 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3178 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3179 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3180 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3182 case TRUTH_NOT_EXPR:
3183 return TREE_OPERAND (arg, 0);
3185 case COND_EXPR:
3187 tree arg1 = TREE_OPERAND (arg, 1);
3188 tree arg2 = TREE_OPERAND (arg, 2);
3190 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3191 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3193 /* A COND_EXPR may have a throw as one operand, which
3194 then has void type. Just leave void operands
3195 as they are. */
3196 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3197 VOID_TYPE_P (TREE_TYPE (arg1))
3198 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3199 VOID_TYPE_P (TREE_TYPE (arg2))
3200 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3203 case COMPOUND_EXPR:
3204 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3205 return build2_loc (loc, COMPOUND_EXPR, type,
3206 TREE_OPERAND (arg, 0),
3207 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3209 case NON_LVALUE_EXPR:
3210 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3211 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3213 CASE_CONVERT:
3214 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3215 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3217 /* ... fall through ... */
3219 case FLOAT_EXPR:
3220 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3221 return build1_loc (loc, TREE_CODE (arg), type,
3222 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3224 case BIT_AND_EXPR:
3225 if (!integer_onep (TREE_OPERAND (arg, 1)))
3226 return NULL_TREE;
3227 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3229 case SAVE_EXPR:
3230 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3232 case CLEANUP_POINT_EXPR:
3233 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3234 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3235 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3237 default:
3238 return NULL_TREE;
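/* Added illustration of the cases above, in C notation:

     !(a && b)     ->  !a || !b       (TRUTH_AND_EXPR)
     !(a || b)     ->  !a && !b       (TRUTH_OR_EXPR)
     !(c ? a : b)  ->  c ? !a : !b    (COND_EXPR; void arms kept as-is)  */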
3242 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3243 assume that ARG is an operation that returns a truth value (0 or 1
3244 for scalars, 0 or -1 for vectors). Return the folded expression if
3245 folding is successful. Otherwise, return NULL_TREE. */
3247 static tree
3248 fold_invert_truthvalue (location_t loc, tree arg)
3250 tree type = TREE_TYPE (arg);
3251 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3252 ? BIT_NOT_EXPR
3253 : TRUTH_NOT_EXPR,
3254 type, arg);
3257 /* Return a simplified tree node for the truth-negation of ARG. This
3258 never alters ARG itself. We assume that ARG is an operation that
3259 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3261 tree
3262 invert_truthvalue_loc (location_t loc, tree arg)
3264 if (TREE_CODE (arg) == ERROR_MARK)
3265 return arg;
3267 tree type = TREE_TYPE (arg);
3268 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3269 ? BIT_NOT_EXPR
3270 : TRUTH_NOT_EXPR,
3271 type, arg);
3274 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3275 operands are another bit-wise operation with a common input. If so,
3276 distribute the bit operations to save an operation and possibly two if
3277 constants are involved. For example, convert
3278 (A | B) & (A | C) into A | (B & C)
3279 Further simplification will occur if B and C are constants.
3281 If this optimization cannot be done, 0 will be returned. */
3283 static tree
3284 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3285 tree arg0, tree arg1)
3287 tree common;
3288 tree left, right;
3290 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3291 || TREE_CODE (arg0) == code
3292 || (TREE_CODE (arg0) != BIT_AND_EXPR
3293 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3294 return 0;
3296 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3298 common = TREE_OPERAND (arg0, 0);
3299 left = TREE_OPERAND (arg0, 1);
3300 right = TREE_OPERAND (arg1, 1);
3302 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3304 common = TREE_OPERAND (arg0, 0);
3305 left = TREE_OPERAND (arg0, 1);
3306 right = TREE_OPERAND (arg1, 0);
3308 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3310 common = TREE_OPERAND (arg0, 1);
3311 left = TREE_OPERAND (arg0, 0);
3312 right = TREE_OPERAND (arg1, 1);
3314 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3316 common = TREE_OPERAND (arg0, 1);
3317 left = TREE_OPERAND (arg0, 0);
3318 right = TREE_OPERAND (arg1, 0);
3320 else
3321 return 0;
3323 common = fold_convert_loc (loc, type, common);
3324 left = fold_convert_loc (loc, type, left);
3325 right = fold_convert_loc (loc, type, right);
3326 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3327 fold_build2_loc (loc, code, type, left, right));
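/* Added example: with a common operand X,

     (X | 3) & (X | 5)  ->  X | (3 & 5)  ->  X | 1

   so one OR plus a folded constant replaces two ORs and an AND.  */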
3330 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3331 with code CODE. This optimization is unsafe. */
3332 static tree
3333 distribute_real_division (location_t loc, enum tree_code code, tree type,
3334 tree arg0, tree arg1)
3336 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3337 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3339 /* (A / C) +- (B / C) -> (A +- B) / C. */
3340 if (mul0 == mul1
3341 && operand_equal_p (TREE_OPERAND (arg0, 1),
3342 TREE_OPERAND (arg1, 1), 0))
3343 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3344 fold_build2_loc (loc, code, type,
3345 TREE_OPERAND (arg0, 0),
3346 TREE_OPERAND (arg1, 0)),
3347 TREE_OPERAND (arg0, 1));
3349 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3350 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3351 TREE_OPERAND (arg1, 0), 0)
3352 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3353 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3355 REAL_VALUE_TYPE r0, r1;
3356 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3357 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3358 if (!mul0)
3359 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3360 if (!mul1)
3361 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3362 real_arithmetic (&r0, code, &r0, &r1);
3363 return fold_build2_loc (loc, MULT_EXPR, type,
3364 TREE_OPERAND (arg0, 0),
3365 build_real (type, r0));
3368 return NULL_TREE;
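/* Added examples of the two transformations (unsafe because they can
   change rounding and overflow behaviour; the caller is expected to
   guard them with the unsafe-math flags):

     a/10.0 + b/10.0  ->  (a + b) / 10.0
     a/2.0  + a/4.0   ->  a * (1/2.0 + 1/4.0)  ->  a * 0.75  */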
3371 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3372 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3374 static tree
3375 make_bit_field_ref (location_t loc, tree inner, tree type,
3376 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3378 tree result, bftype;
3380 if (bitpos == 0)
3382 tree size = TYPE_SIZE (TREE_TYPE (inner));
3383 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3384 || POINTER_TYPE_P (TREE_TYPE (inner)))
3385 && tree_fits_shwi_p (size)
3386 && tree_to_shwi (size) == bitsize)
3387 return fold_convert_loc (loc, type, inner);
3390 bftype = type;
3391 if (TYPE_PRECISION (bftype) != bitsize
3392 || TYPE_UNSIGNED (bftype) == !unsignedp)
3393 bftype = build_nonstandard_integer_type (bitsize, 0);
3395 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3396 size_int (bitsize), bitsize_int (bitpos));
3398 if (bftype != type)
3399 result = fold_convert_loc (loc, type, result);
3401 return result;
3404 /* Optimize a bit-field compare.
3406 There are two cases: First is a compare against a constant and the
3407 second is a comparison of two items where the fields are at the same
3408 bit position relative to the start of a chunk (byte, halfword, word)
3409 large enough to contain it. In these cases we can avoid the shift
3410 implicit in bitfield extractions.
3412 For constants, we emit a compare of the shifted constant with the
3413 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3414 compared. For two fields at the same position, we do the ANDs with the
3415 similar mask and compare the result of the ANDs.
3417 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3418 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3419 are the left and right operands of the comparison, respectively.
3421 If the optimization described above can be done, we return the resulting
3422 tree. Otherwise we return zero. */
3424 static tree
3425 optimize_bit_field_compare (location_t loc, enum tree_code code,
3426 tree compare_type, tree lhs, tree rhs)
3428 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3429 tree type = TREE_TYPE (lhs);
3430 tree unsigned_type;
3431 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3432 machine_mode lmode, rmode, nmode;
3433 int lunsignedp, runsignedp;
3434 int lvolatilep = 0, rvolatilep = 0;
3435 tree linner, rinner = NULL_TREE;
3436 tree mask;
3437 tree offset;
3439 /* Get all the information about the extractions being done. If the bit size
3440 is the same as the size of the underlying object, we aren't doing an
3441 extraction at all and so can do nothing. We also don't want to
3442 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3443 then will no longer be able to replace it. */
3444 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3445 &lunsignedp, &lvolatilep, false);
3446 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3447 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3448 return 0;
3450 if (!const_p)
3452 /* If this is not a constant, we can only do something if bit positions,
3453 sizes, and signedness are the same. */
3454 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3455 &runsignedp, &rvolatilep, false);
3457 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3458 || lunsignedp != runsignedp || offset != 0
3459 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3460 return 0;
3463 /* See if we can find a mode to refer to this field. We should be able to,
3464 but fail if we can't. */
3465 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3466 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3467 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3468 TYPE_ALIGN (TREE_TYPE (rinner))),
3469 word_mode, false);
3470 if (nmode == VOIDmode)
3471 return 0;
3473 /* Set signed and unsigned types of the precision of this mode for the
3474 shifts below. */
3475 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3477 /* Compute the bit position and size for the new reference and our offset
3478 within it. If the new reference is the same size as the original, we
3479 won't optimize anything, so return zero. */
3480 nbitsize = GET_MODE_BITSIZE (nmode);
3481 nbitpos = lbitpos & ~ (nbitsize - 1);
3482 lbitpos -= nbitpos;
3483 if (nbitsize == lbitsize)
3484 return 0;
3486 if (BYTES_BIG_ENDIAN)
3487 lbitpos = nbitsize - lbitsize - lbitpos;
3489 /* Make the mask to be used against the extracted field. */
3490 mask = build_int_cst_type (unsigned_type, -1);
3491 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3492 mask = const_binop (RSHIFT_EXPR, mask,
3493 size_int (nbitsize - lbitsize - lbitpos));
3495 if (! const_p)
3496 /* If not comparing with constant, just rework the comparison
3497 and return. */
3498 return fold_build2_loc (loc, code, compare_type,
3499 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3500 make_bit_field_ref (loc, linner,
3501 unsigned_type,
3502 nbitsize, nbitpos,
3503 1),
3504 mask),
3505 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3506 make_bit_field_ref (loc, rinner,
3507 unsigned_type,
3508 nbitsize, nbitpos,
3509 1),
3510 mask));
3512 /* Otherwise, we are handling the constant case. See if the constant is too
3513 big for the field. Warn and return a tree for 0 (false) if so. We do
3514 this not only for its own sake, but to avoid having to test for this
3515 error case below. If we didn't, we might generate wrong code.
3517 For unsigned fields, the constant shifted right by the field length should
3518 be all zero. For signed fields, the high-order bits should agree with
3519 the sign bit. */
3521 if (lunsignedp)
3523 if (wi::lrshift (rhs, lbitsize) != 0)
3525 warning (0, "comparison is always %d due to width of bit-field",
3526 code == NE_EXPR);
3527 return constant_boolean_node (code == NE_EXPR, compare_type);
3530 else
3532 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3533 if (tem != 0 && tem != -1)
3535 warning (0, "comparison is always %d due to width of bit-field",
3536 code == NE_EXPR);
3537 return constant_boolean_node (code == NE_EXPR, compare_type);
3541 /* Single-bit compares should always be against zero. */
3542 if (lbitsize == 1 && ! integer_zerop (rhs))
3544 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3545 rhs = build_int_cst (type, 0);
3548 /* Make a new bitfield reference, shift the constant over the
3549 appropriate number of bits and mask it with the computed mask
3550 (in case this was a signed field). If we changed it, make a new one. */
3551 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3553 rhs = const_binop (BIT_AND_EXPR,
3554 const_binop (LSHIFT_EXPR,
3555 fold_convert_loc (loc, unsigned_type, rhs),
3556 size_int (lbitpos)),
3557 mask);
3559 lhs = build2_loc (loc, code, compare_type,
3560 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3561 return lhs;
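/* Added sketch of the constant case, assuming a little-endian target
   where the whole field fits in one mode-sized unit:

     struct S { unsigned a : 3; unsigned b : 5; } s;
     s.b == 7

   becomes roughly (w & 0xf8) == (7 << 3), with w a mode-sized load
   (often a byte or word) covering the bit-field, so the shift implicit
   in extracting s.b is avoided.  */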
3564 /* Subroutine for fold_truth_andor_1: decode a field reference.
3566 If EXP is a comparison reference, we return the innermost reference.
3568 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3569 set to the starting bit number.
3571 If the innermost field can be completely contained in a mode-sized
3572 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3574 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3575 otherwise it is not changed.
3577 *PUNSIGNEDP is set to the signedness of the field.
3579 *PMASK is set to the mask used. This is either contained in a
3580 BIT_AND_EXPR or derived from the width of the field.
3582 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3584 Return 0 if this is not a component reference or is one that we can't
3585 do anything with. */
3587 static tree
3588 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3589 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3590 int *punsignedp, int *pvolatilep,
3591 tree *pmask, tree *pand_mask)
3593 tree outer_type = 0;
3594 tree and_mask = 0;
3595 tree mask, inner, offset;
3596 tree unsigned_type;
3597 unsigned int precision;
3599 /* All the optimizations using this function assume integer fields.
3600 There are problems with FP fields since the type_for_size call
3601 below can fail for, e.g., XFmode. */
3602 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3603 return 0;
3605 /* We are interested in the bare arrangement of bits, so strip everything
3606 that doesn't affect the machine mode. However, record the type of the
3607 outermost expression if it may matter below. */
3608 if (CONVERT_EXPR_P (exp)
3609 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3610 outer_type = TREE_TYPE (exp);
3611 STRIP_NOPS (exp);
3613 if (TREE_CODE (exp) == BIT_AND_EXPR)
3615 and_mask = TREE_OPERAND (exp, 1);
3616 exp = TREE_OPERAND (exp, 0);
3617 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3618 if (TREE_CODE (and_mask) != INTEGER_CST)
3619 return 0;
3622 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3623 punsignedp, pvolatilep, false);
3624 if ((inner == exp && and_mask == 0)
3625 || *pbitsize < 0 || offset != 0
3626 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3627 return 0;
3629 /* If the number of bits in the reference is the same as the bitsize of
3630 the outer type, then the outer type gives the signedness. Otherwise
3631 (in case of a small bitfield) the signedness is unchanged. */
3632 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3633 *punsignedp = TYPE_UNSIGNED (outer_type);
3635 /* Compute the mask to access the bitfield. */
3636 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3637 precision = TYPE_PRECISION (unsigned_type);
3639 mask = build_int_cst_type (unsigned_type, -1);
3641 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3642 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3644 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3645 if (and_mask != 0)
3646 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3647 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3649 *pmask = mask;
3650 *pand_mask = and_mask;
3651 return inner;
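/* Added example of the mask computation above: for a 3-bit field the
   type_for_size call yields (say) an 8-bit unsigned type, so

     mask == ((unsigned char) -1 << 5) >> 5 == 0x07

   and if the reference sat under "x & 0x5", *pmask becomes
   0x5 & 0x7 == 0x5, with *pand_mask set to the original 0x5.  */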
3654 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3655 bit positions and MASK's type is signed. */
3657 static int
3658 all_ones_mask_p (const_tree mask, unsigned int size)
3660 tree type = TREE_TYPE (mask);
3661 unsigned int precision = TYPE_PRECISION (type);
3663 /* If this function returns true when the type of the mask is
3664 UNSIGNED, then there will be errors. In particular see
3665 gcc.c-torture/execute/990326-1.c. There does not appear to be
3666 any documentation paper trail as to why this is so. But the
3667 pre-wide-int code worked with that restriction and it has been
3668 preserved here. */
3669 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3670 return false;
3672 return wi::mask (size, false, precision) == mask;
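/* Added example: with SIZE == 4 and a 32-bit signed mask type, the
   comparison is against wi::mask (4, false, 32) == 0xf, so only a
   MASK whose value is exactly 0xf (the low four bits set) passes.  */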
3675 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3676 represents the sign bit of EXP's type. If EXP represents a sign
3677 or zero extension, also test VAL against the unextended type.
3678 The return value is the (sub)expression whose sign bit is VAL,
3679 or NULL_TREE otherwise. */
3681 static tree
3682 sign_bit_p (tree exp, const_tree val)
3684 int width;
3685 tree t;
3687 /* Tree EXP must have an integral type. */
3688 t = TREE_TYPE (exp);
3689 if (! INTEGRAL_TYPE_P (t))
3690 return NULL_TREE;
3692 /* Tree VAL must be an integer constant. */
3693 if (TREE_CODE (val) != INTEGER_CST
3694 || TREE_OVERFLOW (val))
3695 return NULL_TREE;
3697 width = TYPE_PRECISION (t);
3698 if (wi::only_sign_bit_p (val, width))
3699 return exp;
3701 /* Handle extension from a narrower type. */
3702 if (TREE_CODE (exp) == NOP_EXPR
3703 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3704 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3706 return NULL_TREE;
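/* Added example: for a 32-bit int EXP, only VAL == 0x80000000 makes
   sign_bit_p return EXP; and for EXP == (int) c with c a signed char,
   VAL == 0x80 makes the recursive test on the narrower operand
   return c itself.  */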
3709 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3710 to be evaluated unconditionally. */
3712 static int
3713 simple_operand_p (const_tree exp)
3715 /* Strip any conversions that don't change the machine mode. */
3716 STRIP_NOPS (exp);
3718 return (CONSTANT_CLASS_P (exp)
3719 || TREE_CODE (exp) == SSA_NAME
3720 || (DECL_P (exp)
3721 && ! TREE_ADDRESSABLE (exp)
3722 && ! TREE_THIS_VOLATILE (exp)
3723 && ! DECL_NONLOCAL (exp)
3724 /* Don't regard global variables as simple. They may be
3725 allocated in ways unknown to the compiler (shared memory,
3726 #pragma weak, etc). */
3727 && ! TREE_PUBLIC (exp)
3728 && ! DECL_EXTERNAL (exp)
3729 /* Weakrefs are not safe to be read, since they can be NULL.
3730 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3731 have DECL_WEAK flag set. */
3732 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3733 /* Loading a static variable is unduly expensive, but global
3734 registers aren't expensive. */
3735 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3738 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3739 to be evaluated unconditionally.
3740 In addition to simple_operand_p, we assume that comparisons, conversions,
3741 and logic-not operations are simple if their operands are simple, too. */
3743 static bool
3744 simple_operand_p_2 (tree exp)
3746 enum tree_code code;
3748 if (TREE_SIDE_EFFECTS (exp)
3749 || tree_could_trap_p (exp))
3750 return false;
3752 while (CONVERT_EXPR_P (exp))
3753 exp = TREE_OPERAND (exp, 0);
3755 code = TREE_CODE (exp);
3757 if (TREE_CODE_CLASS (code) == tcc_comparison)
3758 return (simple_operand_p (TREE_OPERAND (exp, 0))
3759 && simple_operand_p (TREE_OPERAND (exp, 1)));
3761 if (code == TRUTH_NOT_EXPR)
3762 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3764 return simple_operand_p (exp);
3768 /* The following functions are subroutines to fold_range_test and allow it to
3769 try to change a logical combination of comparisons into a range test.
3771 For example, both
3772 X == 2 || X == 3 || X == 4 || X == 5
3773 and
3774 X >= 2 && X <= 5
3775 are converted to
3776 (unsigned) (X - 2) <= 3
3778 We describe each set of comparisons as being either inside or outside
3779 a range, using a variable named like IN_P, and then describe the
3780 range with a lower and upper bound. If one of the bounds is omitted,
3781 it represents either the highest or lowest value of the type.
3783 In the comments below, we represent a range by two numbers in brackets
3784 preceded by a "+" to designate being inside that range, or a "-" to
3785 designate being outside that range, so the condition can be inverted by
3786 flipping the prefix. An omitted bound is represented by a "-". For
3787 example, "- [-, 10]" means being outside the range starting at the lowest
3788 possible value and ending at 10, in other words, being greater than 10.
3789 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3790 always false.
3792 We set up things so that the missing bounds are handled in a consistent
3793 manner so neither a missing bound nor "true" and "false" need to be
3794 handled using a special case. */
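/* A worked example of the notation (illustrative): for unsigned char X,
     X >= 2 && X <= 5 is "+ [2, 5]",
     X < 2 || X > 5 is "- [2, 5]", and
     X > 10 is "- [-, 10]";
   the first of these is what ultimately becomes
     (unsigned) (X - 2) <= 3. */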
3796 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3797 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3798 and UPPER1_P are nonzero if the respective argument is an upper bound
3799 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3800 must be specified for a comparison. ARG1 will be converted to ARG0's
3801 type if both are specified. */
3803 static tree
3804 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3805 tree arg1, int upper1_p)
3807 tree tem;
3808 int result;
3809 int sgn0, sgn1;
3811 /* If neither arg represents infinity, do the normal operation.
3812 Else, if not a comparison, return infinity. Else handle the special
3813 comparison rules. Note that most of the cases below won't occur, but
3814 are handled for consistency. */
3816 if (arg0 != 0 && arg1 != 0)
3818 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3819 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3820 STRIP_NOPS (tem);
3821 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3824 if (TREE_CODE_CLASS (code) != tcc_comparison)
3825 return 0;
3827 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3828 for neither. In real maths, we cannot assume open-ended ranges are
3829 the same. But this is computer arithmetic, where numbers are finite.
3830 We can therefore identify any unbounded end of a range with a value Z
3831 greater than any representable number, which permits us to treat
3832 unbounded ranges as equal. */
3833 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3834 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3835 switch (code)
3837 case EQ_EXPR:
3838 result = sgn0 == sgn1;
3839 break;
3840 case NE_EXPR:
3841 result = sgn0 != sgn1;
3842 break;
3843 case LT_EXPR:
3844 result = sgn0 < sgn1;
3845 break;
3846 case LE_EXPR:
3847 result = sgn0 <= sgn1;
3848 break;
3849 case GT_EXPR:
3850 result = sgn0 > sgn1;
3851 break;
3852 case GE_EXPR:
3853 result = sgn0 >= sgn1;
3854 break;
3855 default:
3856 gcc_unreachable ();
3859 return constant_boolean_node (result, type);
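/* (Illustrative: comparing a missing upper bound (SGN0 == 1) against a
   missing lower bound (SGN1 == -1) makes LT_EXPR false and GT_EXPR
   true -- an absent upper bound compares above everything, including
   an absent lower bound.) */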
3862 /* Helper routine for make_range. Perform one step for it; return the
3863 new expression if the loop should continue or NULL_TREE if it should
3864 stop. */
3866 tree
3867 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3868 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3869 bool *strict_overflow_p)
3871 tree arg0_type = TREE_TYPE (arg0);
3872 tree n_low, n_high, low = *p_low, high = *p_high;
3873 int in_p = *p_in_p, n_in_p;
3875 switch (code)
3877 case TRUTH_NOT_EXPR:
3878 /* We can only do something if the range is testing for zero. */
3879 if (low == NULL_TREE || high == NULL_TREE
3880 || ! integer_zerop (low) || ! integer_zerop (high))
3881 return NULL_TREE;
3882 *p_in_p = ! in_p;
3883 return arg0;
3885 case EQ_EXPR: case NE_EXPR:
3886 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3887 /* We can only do something if the range is testing for zero
3888 and if the second operand is an integer constant. Note that
3889 saying something is "in" the range we make is done by
3890 complementing IN_P, since IN_P is set in the initial case of
3891 being not equal to zero; "out" means leaving it alone. */
3892 if (low == NULL_TREE || high == NULL_TREE
3893 || ! integer_zerop (low) || ! integer_zerop (high)
3894 || TREE_CODE (arg1) != INTEGER_CST)
3895 return NULL_TREE;
3897 switch (code)
3899 case NE_EXPR: /* - [c, c] */
3900 low = high = arg1;
3901 break;
3902 case EQ_EXPR: /* + [c, c] */
3903 in_p = ! in_p, low = high = arg1;
3904 break;
3905 case GT_EXPR: /* - [-, c] */
3906 low = 0, high = arg1;
3907 break;
3908 case GE_EXPR: /* + [c, -] */
3909 in_p = ! in_p, low = arg1, high = 0;
3910 break;
3911 case LT_EXPR: /* - [c, -] */
3912 low = arg1, high = 0;
3913 break;
3914 case LE_EXPR: /* + [-, c] */
3915 in_p = ! in_p, low = 0, high = arg1;
3916 break;
3917 default:
3918 gcc_unreachable ();
3921 /* If this is an unsigned comparison, we also know that EXP is
3922 greater than or equal to zero. We base the range tests we make
3923 on that fact, so we record it here so we can parse existing
3924 range tests. We test arg0_type since often the return type
3925 of, e.g. EQ_EXPR, is boolean. */
3926 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3928 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3929 in_p, low, high, 1,
3930 build_int_cst (arg0_type, 0),
3931 NULL_TREE))
3932 return NULL_TREE;
3934 in_p = n_in_p, low = n_low, high = n_high;
3936 /* If the high bound is missing, but we have a nonzero low
3937 bound, reverse the range so it goes from zero to the low bound
3938 minus 1. */
3939 if (high == 0 && low && ! integer_zerop (low))
3941 in_p = ! in_p;
3942 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3943 build_int_cst (TREE_TYPE (low), 1), 0);
3944 low = build_int_cst (arg0_type, 0);
3948 *p_low = low;
3949 *p_high = high;
3950 *p_in_p = in_p;
3951 return arg0;
3953 case NEGATE_EXPR:
3954 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3955 low and high are non-NULL, then normalize will do the right thing. */
3956 if (!TYPE_UNSIGNED (arg0_type)
3957 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3959 if (low == NULL_TREE)
3960 low = TYPE_MIN_VALUE (arg0_type);
3961 if (high == NULL_TREE)
3962 high = TYPE_MAX_VALUE (arg0_type);
3965 /* (-x) IN [a,b] -> x in [-b, -a] */
3966 n_low = range_binop (MINUS_EXPR, exp_type,
3967 build_int_cst (exp_type, 0),
3968 0, high, 1);
3969 n_high = range_binop (MINUS_EXPR, exp_type,
3970 build_int_cst (exp_type, 0),
3971 0, low, 0);
3972 if (n_high != 0 && TREE_OVERFLOW (n_high))
3973 return NULL_TREE;
3974 goto normalize;
3976 case BIT_NOT_EXPR:
3977 /* ~ X -> -X - 1 */
3978 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3979 build_int_cst (exp_type, 1));
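/* (Illustrative: for X == 5, ~X is -6 == -5 - 1, so BIT_NOT_EXPR is
   handled by re-expressing it as -X - 1, which the MINUS_EXPR and
   NEGATE_EXPR cases pick up on later iterations of the caller's
   loop.) */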
3981 case PLUS_EXPR:
3982 case MINUS_EXPR:
3983 if (TREE_CODE (arg1) != INTEGER_CST)
3984 return NULL_TREE;
3986 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3987 move a constant to the other side. */
3988 if (!TYPE_UNSIGNED (arg0_type)
3989 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3990 return NULL_TREE;
3992 /* If EXP is signed, any overflow in the computation is undefined,
3993 so we don't worry about it so long as our computations on
3994 the bounds don't overflow. For unsigned, overflow is defined
3995 and this is exactly the right thing. */
3996 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3997 arg0_type, low, 0, arg1, 0);
3998 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3999 arg0_type, high, 1, arg1, 0);
4000 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4001 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4002 return NULL_TREE;
4004 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4005 *strict_overflow_p = true;
4007 normalize:
4008 /* Check for an unsigned range which has wrapped around the maximum
4009 value thus making n_high < n_low, and normalize it. */
4010 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4012 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4013 build_int_cst (TREE_TYPE (n_high), 1), 0);
4014 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4015 build_int_cst (TREE_TYPE (n_low), 1), 0);
4017 /* If the range is of the form +/- [ x+1, x ], we won't
4018 be able to normalize it. But then, it represents the
4019 whole range or the empty set, so make it
4020 +/- [ -, - ]. */
4021 if (tree_int_cst_equal (n_low, low)
4022 && tree_int_cst_equal (n_high, high))
4023 low = high = 0;
4024 else
4025 in_p = ! in_p;
4027 else
4028 low = n_low, high = n_high;
4030 *p_low = low;
4031 *p_high = high;
4032 *p_in_p = in_p;
4033 return arg0;
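/* (Illustrative: in unsigned char, a computed range + [250, 4] wraps
   past the maximum value, so the normalization above rewrites it as
   the complementary range - [5, 249].) */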
4035 CASE_CONVERT:
4036 case NON_LVALUE_EXPR:
4037 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4038 return NULL_TREE;
4040 if (! INTEGRAL_TYPE_P (arg0_type)
4041 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4042 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4043 return NULL_TREE;
4045 n_low = low, n_high = high;
4047 if (n_low != 0)
4048 n_low = fold_convert_loc (loc, arg0_type, n_low);
4050 if (n_high != 0)
4051 n_high = fold_convert_loc (loc, arg0_type, n_high);
4053 /* If we're converting arg0 from an unsigned type to exp's
4054 signed type, we will be doing the comparison as unsigned.
4055 The tests above have already verified that LOW and HIGH
4056 are both positive.
4058 So we have to ensure that we will handle large unsigned
4059 values the same way that the current signed bounds treat
4060 negative values. */
4062 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4064 tree high_positive;
4065 tree equiv_type;
4066 /* For fixed-point modes, we need to pass the saturating flag
4067 as the 2nd parameter. */
4068 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4069 equiv_type
4070 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4071 TYPE_SATURATING (arg0_type));
4072 else
4073 equiv_type
4074 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4076 /* A range without an upper bound is, naturally, unbounded.
4077 Since convert would have cropped a very large value, use
4078 the max value for the destination type. */
4079 high_positive
4080 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4081 : TYPE_MAX_VALUE (arg0_type);
4083 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4084 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4085 fold_convert_loc (loc, arg0_type,
4086 high_positive),
4087 build_int_cst (arg0_type, 1));
4089 /* If the low bound is specified, "and" the range with the
4090 range for which the original unsigned value will be
4091 positive. */
4092 if (low != 0)
4094 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4095 1, fold_convert_loc (loc, arg0_type,
4096 integer_zero_node),
4097 high_positive))
4098 return NULL_TREE;
4100 in_p = (n_in_p == in_p);
4102 else
4104 /* Otherwise, "or" the range with the range of the input
4105 that will be interpreted as negative. */
4106 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4107 1, fold_convert_loc (loc, arg0_type,
4108 integer_zero_node),
4109 high_positive))
4110 return NULL_TREE;
4112 in_p = (in_p != n_in_p);
4116 *p_low = n_low;
4117 *p_high = n_high;
4118 *p_in_p = in_p;
4119 return arg0;
4121 default:
4122 return NULL_TREE;
4126 /* Given EXP, a logical expression, set the range it is testing into
4127 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4128 actually being tested. *PLOW and *PHIGH will be made of the same
4129 type as the returned expression. If EXP is not a comparison, we
4130 will most likely not be returning a useful value and range. Set
4131 *STRICT_OVERFLOW_P to true if the return value is only valid
4132 because signed overflow is undefined; otherwise, do not change
4133 *STRICT_OVERFLOW_P. */
4135 tree
4136 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4137 bool *strict_overflow_p)
4139 enum tree_code code;
4140 tree arg0, arg1 = NULL_TREE;
4141 tree exp_type, nexp;
4142 int in_p;
4143 tree low, high;
4144 location_t loc = EXPR_LOCATION (exp);
4146 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4147 and see if we can refine the range. Some of the cases below may not
4148 happen, but it doesn't seem worth worrying about this. We keep
4149 looping as long as make_range_step can refine the range, and stop
4150 when it returns NULL_TREE. */
4152 in_p = 0;
4153 low = high = build_int_cst (TREE_TYPE (exp), 0);
4155 while (1)
4157 code = TREE_CODE (exp);
4158 exp_type = TREE_TYPE (exp);
4159 arg0 = NULL_TREE;
4161 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4163 if (TREE_OPERAND_LENGTH (exp) > 0)
4164 arg0 = TREE_OPERAND (exp, 0);
4165 if (TREE_CODE_CLASS (code) == tcc_binary
4166 || TREE_CODE_CLASS (code) == tcc_comparison
4167 || (TREE_CODE_CLASS (code) == tcc_expression
4168 && TREE_OPERAND_LENGTH (exp) > 1))
4169 arg1 = TREE_OPERAND (exp, 1);
4171 if (arg0 == NULL_TREE)
4172 break;
4174 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4175 &high, &in_p, strict_overflow_p);
4176 if (nexp == NULL_TREE)
4177 break;
4178 exp = nexp;
4181 /* If EXP is a constant, we can evaluate whether this is true or false. */
4182 if (TREE_CODE (exp) == INTEGER_CST)
4184 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4185 exp, 0, low, 0))
4186 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4187 exp, 1, high, 1)));
4188 low = high = 0;
4189 exp = 0;
4192 *pin_p = in_p, *plow = low, *phigh = high;
4193 return exp;
4196 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4197 type, TYPE, return an expression to test if EXP is in (or out of, depending
4198 on IN_P) the range. Return 0 if the test couldn't be created. */
4200 tree
4201 build_range_check (location_t loc, tree type, tree exp, int in_p,
4202 tree low, tree high)
4204 tree etype = TREE_TYPE (exp), value;
4206 #ifdef HAVE_canonicalize_funcptr_for_compare
4207 /* Disable this optimization for function pointer expressions
4208 on targets that require function pointer canonicalization. */
4209 if (HAVE_canonicalize_funcptr_for_compare
4210 && TREE_CODE (etype) == POINTER_TYPE
4211 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4212 return NULL_TREE;
4213 #endif
4215 if (! in_p)
4217 value = build_range_check (loc, type, exp, 1, low, high);
4218 if (value != 0)
4219 return invert_truthvalue_loc (loc, value);
4221 return 0;
4224 if (low == 0 && high == 0)
4225 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4227 if (low == 0)
4228 return fold_build2_loc (loc, LE_EXPR, type, exp,
4229 fold_convert_loc (loc, etype, high));
4231 if (high == 0)
4232 return fold_build2_loc (loc, GE_EXPR, type, exp,
4233 fold_convert_loc (loc, etype, low));
4235 if (operand_equal_p (low, high, 0))
4236 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4237 fold_convert_loc (loc, etype, low));
4239 if (integer_zerop (low))
4241 if (! TYPE_UNSIGNED (etype))
4243 etype = unsigned_type_for (etype);
4244 high = fold_convert_loc (loc, etype, high);
4245 exp = fold_convert_loc (loc, etype, exp);
4247 return build_range_check (loc, type, exp, 1, 0, high);
4250 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4251 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4253 int prec = TYPE_PRECISION (etype);
4255 if (wi::mask (prec - 1, false, prec) == high)
4257 if (TYPE_UNSIGNED (etype))
4259 tree signed_etype = signed_type_for (etype);
4260 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4261 etype
4262 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4263 else
4264 etype = signed_etype;
4265 exp = fold_convert_loc (loc, etype, exp);
4267 return fold_build2_loc (loc, GT_EXPR, type, exp,
4268 build_int_cst (etype, 0));
4272 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4273 This requires wrap-around arithmetic for the type of the expression.
4274 First make sure that arithmetic in this type is valid, then make sure
4275 that it wraps around. */
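/* (Illustrative sketch, assuming the computation ends up in a wrapping
   unsigned char type: for signed char c, "c >= -10 && c <= 20" becomes
   "(unsigned char) (c + 10) <= 30", since c - -10 is computed modulo
   256 in the unsigned type.) */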
4276 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4277 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4278 TYPE_UNSIGNED (etype));
4280 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4282 tree utype, minv, maxv;
4284 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4285 for the type in question, as we rely on this here. */
4286 utype = unsigned_type_for (etype);
4287 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4288 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4289 build_int_cst (TREE_TYPE (maxv), 1), 1);
4290 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4292 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4293 minv, 1, maxv, 1)))
4294 etype = utype;
4295 else
4296 return 0;
4299 high = fold_convert_loc (loc, etype, high);
4300 low = fold_convert_loc (loc, etype, low);
4301 exp = fold_convert_loc (loc, etype, exp);
4303 value = const_binop (MINUS_EXPR, high, low);
4306 if (POINTER_TYPE_P (etype))
4308 if (value != 0 && !TREE_OVERFLOW (value))
4310 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4311 return build_range_check (loc, type,
4312 fold_build_pointer_plus_loc (loc, exp, low),
4313 1, build_int_cst (etype, 0), value);
4315 return 0;
4318 if (value != 0 && !TREE_OVERFLOW (value))
4319 return build_range_check (loc, type,
4320 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4321 1, build_int_cst (etype, 0), value);
4323 return 0;
4326 /* Return the predecessor of VAL in its type, handling the infinite case. */
4328 static tree
4329 range_predecessor (tree val)
4331 tree type = TREE_TYPE (val);
4333 if (INTEGRAL_TYPE_P (type)
4334 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4335 return 0;
4336 else
4337 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4338 build_int_cst (TREE_TYPE (val), 1), 0);
4341 /* Return the successor of VAL in its type, handling the infinite case. */
4343 static tree
4344 range_successor (tree val)
4346 tree type = TREE_TYPE (val);
4348 if (INTEGRAL_TYPE_P (type)
4349 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4350 return 0;
4351 else
4352 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4353 build_int_cst (TREE_TYPE (val), 1), 0);
4356 /* Given two ranges, see if we can merge them into one. Return 1 if we
4357 can, 0 if we can't. Set the output range into the specified parameters. */
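/* Illustrative examples: merging + [2, 10] with + [5, 20] gives
   + [5, 10]; merging the adjacent exclusions - [2, 10] and - [11, 20]
   gives - [2, 20]; and + [2, 5] with + [7, 9] do not overlap, so the
   result is the always-false range. */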
4359 bool
4360 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4361 tree high0, int in1_p, tree low1, tree high1)
4363 int no_overlap;
4364 int subset;
4365 int temp;
4366 tree tem;
4367 int in_p;
4368 tree low, high;
4369 int lowequal = ((low0 == 0 && low1 == 0)
4370 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4371 low0, 0, low1, 0)));
4372 int highequal = ((high0 == 0 && high1 == 0)
4373 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4374 high0, 1, high1, 1)));
4376 /* Make range 0 be the range that starts first, or ends last if they
4377 start at the same value. Swap them if that is not the case. */
4378 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4379 low0, 0, low1, 0))
4380 || (lowequal
4381 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4382 high1, 1, high0, 1))))
4384 temp = in0_p, in0_p = in1_p, in1_p = temp;
4385 tem = low0, low0 = low1, low1 = tem;
4386 tem = high0, high0 = high1, high1 = tem;
4389 /* Now flag two cases, whether the ranges are disjoint or whether the
4390 second range is totally subsumed in the first. Note that the tests
4391 below are simplified by the ones above. */
4392 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4393 high0, 1, low1, 0));
4394 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4395 high1, 1, high0, 1));
4397 /* We now have four cases, depending on whether we are including or
4398 excluding the two ranges. */
4399 if (in0_p && in1_p)
4401 /* If they don't overlap, the result is false. If the second range
4402 is a subset it is the result. Otherwise, the range is from the start
4403 of the second to the end of the first. */
4404 if (no_overlap)
4405 in_p = 0, low = high = 0;
4406 else if (subset)
4407 in_p = 1, low = low1, high = high1;
4408 else
4409 in_p = 1, low = low1, high = high0;
4412 else if (in0_p && ! in1_p)
4414 /* If they don't overlap, the result is the first range. If they are
4415 equal, the result is false. If the second range is a subset of the
4416 first, and the ranges begin at the same place, we go from just after
4417 the end of the second range to the end of the first. If the second
4418 range is not a subset of the first, or if it is a subset and both
4419 ranges end at the same place, the range starts at the start of the
4420 first range and ends just before the second range.
4421 Otherwise, we can't describe this as a single range. */
4422 if (no_overlap)
4423 in_p = 1, low = low0, high = high0;
4424 else if (lowequal && highequal)
4425 in_p = 0, low = high = 0;
4426 else if (subset && lowequal)
4428 low = range_successor (high1);
4429 high = high0;
4430 in_p = 1;
4431 if (low == 0)
4433 /* We are in the weird situation where high0 > high1 but
4434 high1 has no successor. Punt. */
4435 return 0;
4438 else if (! subset || highequal)
4440 low = low0;
4441 high = range_predecessor (low1);
4442 in_p = 1;
4443 if (high == 0)
4445 /* low0 < low1 but low1 has no predecessor. Punt. */
4446 return 0;
4449 else
4450 return 0;
4453 else if (! in0_p && in1_p)
4455 /* If they don't overlap, the result is the second range. If the second
4456 is a subset of the first, the result is false. Otherwise,
4457 the range starts just after the first range and ends at the
4458 end of the second. */
4459 if (no_overlap)
4460 in_p = 1, low = low1, high = high1;
4461 else if (subset || highequal)
4462 in_p = 0, low = high = 0;
4463 else
4465 low = range_successor (high0);
4466 high = high1;
4467 in_p = 1;
4468 if (low == 0)
4470 /* high1 > high0 but high0 has no successor. Punt. */
4471 return 0;
4476 else
4478 /* The case where we are excluding both ranges. Here the complex case
4479 is if they don't overlap. In that case, the only time we have a
4480 range is if they are adjacent. If the second is a subset of the
4481 first, the result is the first. Otherwise, the range to exclude
4482 starts at the beginning of the first range and ends at the end of the
4483 second. */
4484 if (no_overlap)
4486 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4487 range_successor (high0),
4488 1, low1, 0)))
4489 in_p = 0, low = low0, high = high1;
4490 else
4492 /* Canonicalize - [min, x] into - [-, x]. */
4493 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4494 switch (TREE_CODE (TREE_TYPE (low0)))
4496 case ENUMERAL_TYPE:
4497 if (TYPE_PRECISION (TREE_TYPE (low0))
4498 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4499 break;
4500 /* FALLTHROUGH */
4501 case INTEGER_TYPE:
4502 if (tree_int_cst_equal (low0,
4503 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4504 low0 = 0;
4505 break;
4506 case POINTER_TYPE:
4507 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4508 && integer_zerop (low0))
4509 low0 = 0;
4510 break;
4511 default:
4512 break;
4515 /* Canonicalize - [x, max] into - [x, -]. */
4516 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4517 switch (TREE_CODE (TREE_TYPE (high1)))
4519 case ENUMERAL_TYPE:
4520 if (TYPE_PRECISION (TREE_TYPE (high1))
4521 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4522 break;
4523 /* FALLTHROUGH */
4524 case INTEGER_TYPE:
4525 if (tree_int_cst_equal (high1,
4526 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4527 high1 = 0;
4528 break;
4529 case POINTER_TYPE:
4530 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4531 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4532 high1, 1,
4533 build_int_cst (TREE_TYPE (high1), 1),
4534 1)))
4535 high1 = 0;
4536 break;
4537 default:
4538 break;
4541 /* The ranges might also be adjacent between the maximum and
4542 minimum values of the given type. For
4543 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4544 return + [x + 1, y - 1]. */
4545 if (low0 == 0 && high1 == 0)
4547 low = range_successor (high0);
4548 high = range_predecessor (low1);
4549 if (low == 0 || high == 0)
4550 return 0;
4552 in_p = 1;
4554 else
4555 return 0;
4558 else if (subset)
4559 in_p = 0, low = low0, high = high0;
4560 else
4561 in_p = 0, low = low0, high = high1;
4564 *pin_p = in_p, *plow = low, *phigh = high;
4565 return 1;
4569 /* Subroutine of fold, looking inside expressions of the form
4570 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4571 of the COND_EXPR. This function is being used also to optimize
4572 A op B ? C : A, by reversing the comparison first.
4574 Return a folded expression whose code is not a COND_EXPR
4575 anymore, or NULL_TREE if no folding opportunity is found. */
4577 static tree
4578 fold_cond_expr_with_comparison (location_t loc, tree type,
4579 tree arg0, tree arg1, tree arg2)
4581 enum tree_code comp_code = TREE_CODE (arg0);
4582 tree arg00 = TREE_OPERAND (arg0, 0);
4583 tree arg01 = TREE_OPERAND (arg0, 1);
4584 tree arg1_type = TREE_TYPE (arg1);
4585 tree tem;
4587 STRIP_NOPS (arg1);
4588 STRIP_NOPS (arg2);
4590 /* If we have A op 0 ? A : -A, consider applying the following
4591 transformations:
4593 A == 0? A : -A same as -A
4594 A != 0? A : -A same as A
4595 A >= 0? A : -A same as abs (A)
4596 A > 0? A : -A same as abs (A)
4597 A <= 0? A : -A same as -abs (A)
4598 A < 0? A : -A same as -abs (A)
4600 None of these transformations work for modes with signed
4601 zeros. If A is +/-0, the first two transformations will
4602 change the sign of the result (from +0 to -0, or vice
4603 versa). The last four will fix the sign of the result,
4604 even though the original expressions could be positive or
4605 negative, depending on the sign of A.
4607 Note that all these transformations are correct if A is
4608 NaN, since the two alternatives (A and -A) are also NaNs. */
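/* (Illustrative: with A == -0.0, "A == 0 ? A : -A" evaluates to -0.0,
   but the folded form -A is +0.0, which is exactly why
   HONOR_SIGNED_ZEROS blocks the first two transformations.) */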
4609 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4610 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4611 ? real_zerop (arg01)
4612 : integer_zerop (arg01))
4613 && ((TREE_CODE (arg2) == NEGATE_EXPR
4614 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4615 /* In the case that A is of the form X-Y, '-A' (arg2) may
4616 have already been folded to Y-X, check for that. */
4617 || (TREE_CODE (arg1) == MINUS_EXPR
4618 && TREE_CODE (arg2) == MINUS_EXPR
4619 && operand_equal_p (TREE_OPERAND (arg1, 0),
4620 TREE_OPERAND (arg2, 1), 0)
4621 && operand_equal_p (TREE_OPERAND (arg1, 1),
4622 TREE_OPERAND (arg2, 0), 0))))
4623 switch (comp_code)
4625 case EQ_EXPR:
4626 case UNEQ_EXPR:
4627 tem = fold_convert_loc (loc, arg1_type, arg1);
4628 return pedantic_non_lvalue_loc (loc,
4629 fold_convert_loc (loc, type,
4630 negate_expr (tem)));
4631 case NE_EXPR:
4632 case LTGT_EXPR:
4633 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4634 case UNGE_EXPR:
4635 case UNGT_EXPR:
4636 if (flag_trapping_math)
4637 break;
4638 /* Fall through. */
4639 case GE_EXPR:
4640 case GT_EXPR:
4641 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4642 arg1 = fold_convert_loc (loc, signed_type_for
4643 (TREE_TYPE (arg1)), arg1);
4644 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4645 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4646 case UNLE_EXPR:
4647 case UNLT_EXPR:
4648 if (flag_trapping_math)
4649 break;
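/* Fall through. */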
4650 case LE_EXPR:
4651 case LT_EXPR:
4652 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4653 arg1 = fold_convert_loc (loc, signed_type_for
4654 (TREE_TYPE (arg1)), arg1);
4655 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4656 return negate_expr (fold_convert_loc (loc, type, tem));
4657 default:
4658 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4659 break;
4662 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4663 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4664 both transformations are correct when A is NaN: A != 0
4665 is then true, and A == 0 is false. */
4667 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4668 && integer_zerop (arg01) && integer_zerop (arg2))
4670 if (comp_code == NE_EXPR)
4671 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4672 else if (comp_code == EQ_EXPR)
4673 return build_zero_cst (type);
4676 /* Try some transformations of A op B ? A : B.
4678 A == B? A : B same as B
4679 A != B? A : B same as A
4680 A >= B? A : B same as max (A, B)
4681 A > B? A : B same as max (B, A)
4682 A <= B? A : B same as min (A, B)
4683 A < B? A : B same as min (B, A)
4685 As above, these transformations don't work in the presence
4686 of signed zeros. For example, if A and B are zeros of
4687 opposite sign, the first two transformations will change
4688 the sign of the result. In the last four, the original
4689 expressions give different results for (A=+0, B=-0) and
4690 (A=-0, B=+0), but the transformed expressions do not.
4692 The first two transformations are correct if either A or B
4693 is a NaN. In the first transformation, the condition will
4694 be false, and B will indeed be chosen. In the case of the
4695 second transformation, the condition A != B will be true,
4696 and A will be chosen.
4698 The conversions to max() and min() are not correct if B is
4699 a number and A is not. The conditions in the original
4700 expressions will be false, so all four give B. The min()
4701 and max() versions would give a NaN instead. */
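/* (Illustrative: in "A < B ? A : B", when A == B the COND_EXPR yields
   B, so the folded form below is MIN_EXPR (B, A) with B first; for
   "A <= B ? A : B" it is MIN_EXPR (A, B).) */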
4702 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4703 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4704 /* Avoid these transformations if the COND_EXPR may be used
4705 as an lvalue in the C++ front-end. PR c++/19199. */
4706 && (in_gimple_form
4707 || VECTOR_TYPE_P (type)
4708 || (strcmp (lang_hooks.name, "GNU C++") != 0
4709 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4710 || ! maybe_lvalue_p (arg1)
4711 || ! maybe_lvalue_p (arg2)))
4713 tree comp_op0 = arg00;
4714 tree comp_op1 = arg01;
4715 tree comp_type = TREE_TYPE (comp_op0);
4717 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4718 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4720 comp_type = type;
4721 comp_op0 = arg1;
4722 comp_op1 = arg2;
4725 switch (comp_code)
4727 case EQ_EXPR:
4728 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4729 case NE_EXPR:
4730 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4731 case LE_EXPR:
4732 case LT_EXPR:
4733 case UNLE_EXPR:
4734 case UNLT_EXPR:
4735 /* In C++ a ?: expression can be an lvalue, so put the
4736 operand which will be used if they are equal first
4737 so that we can convert this back to the
4738 corresponding COND_EXPR. */
4739 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4741 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4742 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4743 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4744 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4745 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4746 comp_op1, comp_op0);
4747 return pedantic_non_lvalue_loc (loc,
4748 fold_convert_loc (loc, type, tem));
4750 break;
4751 case GE_EXPR:
4752 case GT_EXPR:
4753 case UNGE_EXPR:
4754 case UNGT_EXPR:
4755 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4757 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4758 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4759 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4760 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4761 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4762 comp_op1, comp_op0);
4763 return pedantic_non_lvalue_loc (loc,
4764 fold_convert_loc (loc, type, tem));
4766 break;
4767 case UNEQ_EXPR:
4768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4769 return pedantic_non_lvalue_loc (loc,
4770 fold_convert_loc (loc, type, arg2));
4771 break;
4772 case LTGT_EXPR:
4773 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4774 return pedantic_non_lvalue_loc (loc,
4775 fold_convert_loc (loc, type, arg1));
4776 break;
4777 default:
4778 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4779 break;
4783 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4784 we might still be able to simplify this. For example,
4785 if C1 is one less or one more than C2, this might have started
4786 out as a MIN or MAX and been transformed by this function.
4787 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4789 if (INTEGRAL_TYPE_P (type)
4790 && TREE_CODE (arg01) == INTEGER_CST
4791 && TREE_CODE (arg2) == INTEGER_CST)
4792 switch (comp_code)
4794 case EQ_EXPR:
4795 if (TREE_CODE (arg1) == INTEGER_CST)
4796 break;
4797 /* We can replace A with C1 in this case. */
4798 arg1 = fold_convert_loc (loc, type, arg01);
4799 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4801 case LT_EXPR:
4802 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4803 MIN_EXPR, to preserve the signedness of the comparison. */
4804 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4805 OEP_ONLY_CONST)
4806 && operand_equal_p (arg01,
4807 const_binop (PLUS_EXPR, arg2,
4808 build_int_cst (type, 1)),
4809 OEP_ONLY_CONST))
4811 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4812 fold_convert_loc (loc, TREE_TYPE (arg00),
4813 arg2));
4814 return pedantic_non_lvalue_loc (loc,
4815 fold_convert_loc (loc, type, tem));
4817 break;
4819 case LE_EXPR:
4820 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4821 as above. */
4822 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4823 OEP_ONLY_CONST)
4824 && operand_equal_p (arg01,
4825 const_binop (MINUS_EXPR, arg2,
4826 build_int_cst (type, 1)),
4827 OEP_ONLY_CONST))
4829 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4830 fold_convert_loc (loc, TREE_TYPE (arg00),
4831 arg2));
4832 return pedantic_non_lvalue_loc (loc,
4833 fold_convert_loc (loc, type, tem));
4835 break;
4837 case GT_EXPR:
4838 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4839 MAX_EXPR, to preserve the signedness of the comparison. */
4840 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4841 OEP_ONLY_CONST)
4842 && operand_equal_p (arg01,
4843 const_binop (MINUS_EXPR, arg2,
4844 build_int_cst (type, 1)),
4845 OEP_ONLY_CONST))
4847 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4848 fold_convert_loc (loc, TREE_TYPE (arg00),
4849 arg2));
4850 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4852 break;
4854 case GE_EXPR:
4855 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4856 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4857 OEP_ONLY_CONST)
4858 && operand_equal_p (arg01,
4859 const_binop (PLUS_EXPR, arg2,
4860 build_int_cst (type, 1)),
4861 OEP_ONLY_CONST))
4863 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4864 fold_convert_loc (loc, TREE_TYPE (arg00),
4865 arg2));
4866 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4868 break;
4869 case NE_EXPR:
4870 break;
4871 default:
4872 gcc_unreachable ();
4875 return NULL_TREE;
4880 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4881 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4882 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4883 false) >= 2)
4884 #endif
4886 /* EXP is some logical combination of boolean tests. See if we can
4887 merge it into some range test. Return the new tree if so. */
4889 static tree
4890 fold_range_test (location_t loc, enum tree_code code, tree type,
4891 tree op0, tree op1)
4893 int or_op = (code == TRUTH_ORIF_EXPR
4894 || code == TRUTH_OR_EXPR);
4895 int in0_p, in1_p, in_p;
4896 tree low0, low1, low, high0, high1, high;
4897 bool strict_overflow_p = false;
4898 tree tem, lhs, rhs;
4899 const char * const warnmsg = G_("assuming signed overflow does not occur "
4900 "when simplifying range test");
4902 if (!INTEGRAL_TYPE_P (type))
4903 return 0;
4905 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4906 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4908 /* If this is an OR operation, invert both sides; we will invert
4909 again at the end. */
4910 if (or_op)
4911 in0_p = ! in0_p, in1_p = ! in1_p;
4913 /* If both expressions are the same, if we can merge the ranges, and we
4914 can build the range test, return it or it inverted. If one of the
4915 ranges is always true or always false, consider it to be the same
4916 expression as the other. */
4917 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4918 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4919 in1_p, low1, high1)
4920 && 0 != (tem = (build_range_check (loc, type,
4921 lhs != 0 ? lhs
4922 : rhs != 0 ? rhs : integer_zero_node,
4923 in_p, low, high))))
4925 if (strict_overflow_p)
4926 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4927 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4930 /* On machines where the branch cost is expensive, if this is a
4931 short-circuited branch and the underlying object on both sides
4932 is the same, make a non-short-circuit operation. */
4933 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4934 && lhs != 0 && rhs != 0
4935 && (code == TRUTH_ANDIF_EXPR
4936 || code == TRUTH_ORIF_EXPR)
4937 && operand_equal_p (lhs, rhs, 0))
4939 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4940 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4941 which cases we can't do this. */
4942 if (simple_operand_p (lhs))
4943 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4944 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4945 type, op0, op1);
4947 else if (!lang_hooks.decls.global_bindings_p ()
4948 && !CONTAINS_PLACEHOLDER_P (lhs))
4950 tree common = save_expr (lhs);
4952 if (0 != (lhs = build_range_check (loc, type, common,
4953 or_op ? ! in0_p : in0_p,
4954 low0, high0))
4955 && (0 != (rhs = build_range_check (loc, type, common,
4956 or_op ? ! in1_p : in1_p,
4957 low1, high1))))
4959 if (strict_overflow_p)
4960 fold_overflow_warning (warnmsg,
4961 WARN_STRICT_OVERFLOW_COMPARISON);
4962 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4963 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4964 type, lhs, rhs);
4969 return 0;
4972 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
4973 P-bit value. Arrange things so the extra bits will be set to zero if and
4974 only if C is sign-extended to its full width. If MASK is nonzero,
4975 it is an INTEGER_CST that should be AND'ed with the extra bits. */
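/* Worked example (illustrative, assuming MASK is zero): for P == 4 in
   an 8-bit mode with C == 0x0c, the sign bit of the field (bit 3) is
   set, TEMP becomes 0xf0 after the two shifts, and C ^ TEMP == 0xfc --
   the sign-extension of the 4-bit value -4 to 8 bits. A C whose field
   sign bit is clear comes back unchanged. */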
4977 static tree
4978 unextend (tree c, int p, int unsignedp, tree mask)
4980 tree type = TREE_TYPE (c);
4981 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4982 tree temp;
4984 if (p == modesize || unsignedp)
4985 return c;
4987 /* We work by getting just the sign bit into the low-order bit, then
4988 into the high-order bit, then sign-extend. We then XOR that value
4989 with C. */
4990 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
4992 /* We must use a signed type in order to get an arithmetic right shift.
4993 However, we must also avoid introducing accidental overflows, so that
4994 a subsequent call to integer_zerop will work. Hence we must
4995 do the type conversion here. At this point, the constant is either
4996 zero or one, and the conversion to a signed type can never overflow.
4997 We could get an overflow if this conversion is done anywhere else. */
4998 if (TYPE_UNSIGNED (type))
4999 temp = fold_convert (signed_type_for (type), temp);
5001 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5002 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5003 if (mask != 0)
5004 temp = const_binop (BIT_AND_EXPR, temp,
5005 fold_convert (TREE_TYPE (c), mask));
5006 /* If necessary, convert the type back to match the type of C. */
5007 if (TYPE_UNSIGNED (type))
5008 temp = fold_convert (type, temp);
5010 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5013 /* For an expression that has the form
5014 (A && B) || ~B
5015 or
5016 (A || B) && ~B,
5017 we can drop one of the inner expressions and simplify to
5018 A || ~B
5019 or
5020 A && ~B
5021 LOC is the location of the resulting expression. OP is the inner
5022 logical operation; the left-hand side in the examples above, while CMPOP
5023 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5024 removing a condition that guards another, as in
5025 (A != NULL && A->...) || A == NULL
5026 which we must not transform. If RHS_ONLY is true, only eliminate the
5027 right-most operand of the inner logical operation. */
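/* Illustrative: in "(a < 10 && b != 0) || a >= 10", CMPOP is
   "a >= 10" and its inversion "a < 10" matches the left arm of the
   inner operation, so (with RHS_ONLY false) that arm is dropped and
   the whole expression simplifies to "b != 0 || a >= 10". */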
5029 static tree
5030 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5031 bool rhs_only)
5033 tree type = TREE_TYPE (cmpop);
5034 enum tree_code code = TREE_CODE (cmpop);
5035 enum tree_code truthop_code = TREE_CODE (op);
5036 tree lhs = TREE_OPERAND (op, 0);
5037 tree rhs = TREE_OPERAND (op, 1);
5038 tree orig_lhs = lhs, orig_rhs = rhs;
5039 enum tree_code rhs_code = TREE_CODE (rhs);
5040 enum tree_code lhs_code = TREE_CODE (lhs);
5041 enum tree_code inv_code;
5043 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5044 return NULL_TREE;
5046 if (TREE_CODE_CLASS (code) != tcc_comparison)
5047 return NULL_TREE;
5049 if (rhs_code == truthop_code)
5051 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5052 if (newrhs != NULL_TREE)
5054 rhs = newrhs;
5055 rhs_code = TREE_CODE (rhs);
5058 if (lhs_code == truthop_code && !rhs_only)
5060 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5061 if (newlhs != NULL_TREE)
5063 lhs = newlhs;
5064 lhs_code = TREE_CODE (lhs);
5068 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5069 if (inv_code == rhs_code
5070 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5071 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5072 return lhs;
5073 if (!rhs_only && inv_code == lhs_code
5074 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5075 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5076 return rhs;
5077 if (rhs != orig_rhs || lhs != orig_lhs)
5078 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5079 lhs, rhs);
5080 return NULL_TREE;
5083 /* Find ways of folding logical expressions of LHS and RHS:
5084 Try to merge two comparisons to the same innermost item.
5085 Look for range tests like "ch >= '0' && ch <= '9'".
5086 Look for combinations of simple terms on machines with expensive branches
5087 and evaluate the RHS unconditionally.
5089 For example, if we have p->a == 2 && p->b == 4 and we can make an
5090 object large enough to span both A and B, we can do this with a comparison
5091 against the object ANDed with the a mask.
5093 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5094 operations to do this with one comparison.
5096 We check for both normal comparisons and the BIT_AND_EXPRs made by
5097 this function and the one above.
5099 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5100 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5102 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5103 two operands.
5105 We return the simplified tree or 0 if no optimization is possible. */
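/* Illustrative sketch: for "p->a == 2 && p->b == 4" with A in bits 0-7
   and B in bits 8-15 of one word, the two comparisons merge into a
   single load of the 16-bit field compared against
   (4 << 8) | 2 == 0x0402, with a BIT_AND_EXPR mask applied first
   unless the combined mask is all ones. */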
5107 static tree
5108 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5109 tree lhs, tree rhs)
5111 /* If this is the "or" of two comparisons, we can do something if
5112 the comparisons are NE_EXPR. If this is the "and", we can do something
5113 if the comparisons are EQ_EXPR. I.e.,
5114 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5116 WANTED_CODE is this operation code. For single bit fields, we can
5117 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5118 comparison for one-bit fields. */
5120 enum tree_code wanted_code;
5121 enum tree_code lcode, rcode;
5122 tree ll_arg, lr_arg, rl_arg, rr_arg;
5123 tree ll_inner, lr_inner, rl_inner, rr_inner;
5124 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5125 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5126 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5127 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5128 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5129 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5130 machine_mode lnmode, rnmode;
5131 tree ll_mask, lr_mask, rl_mask, rr_mask;
5132 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5133 tree l_const, r_const;
5134 tree lntype, rntype, result;
5135 HOST_WIDE_INT first_bit, end_bit;
5136 int volatilep;
5138 /* Start by getting the comparison codes. Fail if anything is volatile.
5139 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5140 it were surrounded with a NE_EXPR. */
5142 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5143 return 0;
5145 lcode = TREE_CODE (lhs);
5146 rcode = TREE_CODE (rhs);
5148 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5150 lhs = build2 (NE_EXPR, truth_type, lhs,
5151 build_int_cst (TREE_TYPE (lhs), 0));
5152 lcode = NE_EXPR;
5155 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5157 rhs = build2 (NE_EXPR, truth_type, rhs,
5158 build_int_cst (TREE_TYPE (rhs), 0));
5159 rcode = NE_EXPR;
5162 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5163 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5164 return 0;
5166 ll_arg = TREE_OPERAND (lhs, 0);
5167 lr_arg = TREE_OPERAND (lhs, 1);
5168 rl_arg = TREE_OPERAND (rhs, 0);
5169 rr_arg = TREE_OPERAND (rhs, 1);
5171 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5172 if (simple_operand_p (ll_arg)
5173 && simple_operand_p (lr_arg))
5175 if (operand_equal_p (ll_arg, rl_arg, 0)
5176 && operand_equal_p (lr_arg, rr_arg, 0))
5178 result = combine_comparisons (loc, code, lcode, rcode,
5179 truth_type, ll_arg, lr_arg);
5180 if (result)
5181 return result;
5183 else if (operand_equal_p (ll_arg, rr_arg, 0)
5184 && operand_equal_p (lr_arg, rl_arg, 0))
5186 result = combine_comparisons (loc, code, lcode,
5187 swap_tree_comparison (rcode),
5188 truth_type, ll_arg, lr_arg);
5189 if (result)
5190 return result;
5194 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5195 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5197 /* If the RHS can be evaluated unconditionally and its operands are
5198 simple, it wins to evaluate the RHS unconditionally on machines
5199 with expensive branches. In this case, this isn't a comparison
5200 that can be merged. */
5202 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5203 false) >= 2
5204 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5205 && simple_operand_p (rl_arg)
5206 && simple_operand_p (rr_arg))
5208 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5209 if (code == TRUTH_OR_EXPR
5210 && lcode == NE_EXPR && integer_zerop (lr_arg)
5211 && rcode == NE_EXPR && integer_zerop (rr_arg)
5212 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5213 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5214 return build2_loc (loc, NE_EXPR, truth_type,
5215 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5216 ll_arg, rl_arg),
5217 build_int_cst (TREE_TYPE (ll_arg), 0));
5219 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5220 if (code == TRUTH_AND_EXPR
5221 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5222 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5223 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5224 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5225 return build2_loc (loc, EQ_EXPR, truth_type,
5226 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5227 ll_arg, rl_arg),
5228 build_int_cst (TREE_TYPE (ll_arg), 0));
5231 /* See if the comparisons can be merged. Then get all the parameters for
5232 each side. */
5234 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5235 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5236 return 0;
5238 volatilep = 0;
5239 ll_inner = decode_field_reference (loc, ll_arg,
5240 &ll_bitsize, &ll_bitpos, &ll_mode,
5241 &ll_unsignedp, &volatilep, &ll_mask,
5242 &ll_and_mask);
5243 lr_inner = decode_field_reference (loc, lr_arg,
5244 &lr_bitsize, &lr_bitpos, &lr_mode,
5245 &lr_unsignedp, &volatilep, &lr_mask,
5246 &lr_and_mask);
5247 rl_inner = decode_field_reference (loc, rl_arg,
5248 &rl_bitsize, &rl_bitpos, &rl_mode,
5249 &rl_unsignedp, &volatilep, &rl_mask,
5250 &rl_and_mask);
5251 rr_inner = decode_field_reference (loc, rr_arg,
5252 &rr_bitsize, &rr_bitpos, &rr_mode,
5253 &rr_unsignedp, &volatilep, &rr_mask,
5254 &rr_and_mask);
5256 /* The inner operation on the lhs of each comparison must be the same
5257 if we are to be able to do anything.
5258 Then see if we have constants. If not, the same must be true for
5259 the rhs's. */
5260 if (volatilep || ll_inner == 0 || rl_inner == 0
5261 || ! operand_equal_p (ll_inner, rl_inner, 0))
5262 return 0;
5264 if (TREE_CODE (lr_arg) == INTEGER_CST
5265 && TREE_CODE (rr_arg) == INTEGER_CST)
5266 l_const = lr_arg, r_const = rr_arg;
5267 else if (lr_inner == 0 || rr_inner == 0
5268 || ! operand_equal_p (lr_inner, rr_inner, 0))
5269 return 0;
5270 else
5271 l_const = r_const = 0;
5273 /* If either comparison code is not correct for our logical operation,
5274 fail. However, we can convert a one-bit comparison against zero into
5275 the opposite comparison against that bit being set in the field. */
5277 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5278 if (lcode != wanted_code)
5280 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5282 /* Make the left operand unsigned, since we are only interested
5283 in the value of one bit. Otherwise we are doing the wrong
5284 thing below. */
5285 ll_unsignedp = 1;
5286 l_const = ll_mask;
5288 else
5289 return 0;
5292 /* This is analogous to the code for l_const above. */
5293 if (rcode != wanted_code)
5295 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5297 rl_unsignedp = 1;
5298 r_const = rl_mask;
5300 else
5301 return 0;
5304 /* See if we can find a mode that contains both fields being compared on
5305 the left. If we can't, fail. Otherwise, update all constants and masks
5306 to be relative to a field of that size. */
5307 first_bit = MIN (ll_bitpos, rl_bitpos);
5308 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5309 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5310 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5311 volatilep);
5312 if (lnmode == VOIDmode)
5313 return 0;
5315 lnbitsize = GET_MODE_BITSIZE (lnmode);
5316 lnbitpos = first_bit & ~ (lnbitsize - 1);
5317 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5318 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5320 if (BYTES_BIG_ENDIAN)
5322 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5323 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5326 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5327 size_int (xll_bitpos));
5328 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5329 size_int (xrl_bitpos));
5331 if (l_const)
5333 l_const = fold_convert_loc (loc, lntype, l_const);
5334 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5335 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5336 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5337 fold_build1_loc (loc, BIT_NOT_EXPR,
5338 lntype, ll_mask))))
5340 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5342 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5345 if (r_const)
5347 r_const = fold_convert_loc (loc, lntype, r_const);
5348 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5349 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5350 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5351 fold_build1_loc (loc, BIT_NOT_EXPR,
5352 lntype, rl_mask))))
5354 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5356 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5360 /* If the right sides are not constant, do the same for them. Also,
5361 disallow this optimization if a size or signedness mismatch occurs
5362 between the left and right sides. */
5363 if (l_const == 0)
5365 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5366 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5367 /* Make sure the two fields on the right
5368 correspond to the left without being swapped. */
5369 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5370 return 0;
5372 first_bit = MIN (lr_bitpos, rr_bitpos);
5373 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5374 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5375 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5376 volatilep);
5377 if (rnmode == VOIDmode)
5378 return 0;
5380 rnbitsize = GET_MODE_BITSIZE (rnmode);
5381 rnbitpos = first_bit & ~ (rnbitsize - 1);
5382 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5383 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5385 if (BYTES_BIG_ENDIAN)
5387 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5388 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5391 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5392 rntype, lr_mask),
5393 size_int (xlr_bitpos));
5394 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5395 rntype, rr_mask),
5396 size_int (xrr_bitpos));
5398 /* Make a mask that corresponds to both fields being compared.
5399 Do this for both items being compared. If the operands are the
5400 same size and the bits being compared are in the same position
5401 then we can do this by masking both and comparing the masked
5402 results. */
5403 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5404 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5405 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5407 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5408 ll_unsignedp || rl_unsignedp);
5409 if (! all_ones_mask_p (ll_mask, lnbitsize))
5410 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5412 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5413 lr_unsignedp || rr_unsignedp);
5414 if (! all_ones_mask_p (lr_mask, rnbitsize))
5415 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5417 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5420 /* There is still another way we can do something: If both pairs of
5421 fields being compared are adjacent, we may be able to make a wider
5422 field containing them both.
5424 Note that we still must mask the lhs/rhs expressions. Furthermore,
5425 the mask must be shifted to account for the shift done by
5426 make_bit_field_ref. */
5427 if ((ll_bitsize + ll_bitpos == rl_bitpos
5428 && lr_bitsize + lr_bitpos == rr_bitpos)
5429 || (ll_bitpos == rl_bitpos + rl_bitsize
5430 && lr_bitpos == rr_bitpos + rr_bitsize))
5432 tree type;
5434 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5435 ll_bitsize + rl_bitsize,
5436 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5437 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5438 lr_bitsize + rr_bitsize,
5439 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5441 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5442 size_int (MIN (xll_bitpos, xrl_bitpos)));
5443 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5444 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5446 /* Convert to the smaller type before masking out unwanted bits. */
5447 type = lntype;
5448 if (lntype != rntype)
5450 if (lnbitsize > rnbitsize)
5452 lhs = fold_convert_loc (loc, rntype, lhs);
5453 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5454 type = rntype;
5456 else if (lnbitsize < rnbitsize)
5458 rhs = fold_convert_loc (loc, lntype, rhs);
5459 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5460 type = lntype;
5464 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5465 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5467 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5468 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5470 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5473 return 0;
5476 /* Handle the case of comparisons with constants. If there is something in
5477 common between the masks, those bits of the constants must be the same.
5478 If not, the condition is always false. Test for this to avoid generating
5479 incorrect code below. */
5480 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5481 if (! integer_zerop (result)
5482 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5483 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5485 if (wanted_code == NE_EXPR)
5487 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5488 return constant_boolean_node (true, truth_type);
5490 else
5492 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5493 return constant_boolean_node (false, truth_type);
5497 /* Construct the expression we will return. First get the component
5498 reference we will make. Unless the mask is all ones the width of
5499 that field, perform the mask operation. Then compare with the
5500 merged constant. */
5501 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5502 ll_unsignedp || rl_unsignedp);
5504 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5505 if (! all_ones_mask_p (ll_mask, lnbitsize))
5506 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5508 return build2_loc (loc, wanted_code, truth_type, result,
5509 const_binop (BIT_IOR_EXPR, l_const, r_const));
5512 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5513 constant. */
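/* Editorial sketch (not from the original sources): for C input such as

     MAX (x, 0) > 5

   consts_lt holds (0 < 5), so the GT_EXPR case below folds the test to
   x > 5; likewise MAX (x, 0) == -1 folds to constant false.  NE/LT/LE
   and GE are reduced to the EQ/GT cases by inverting or OR-ing
   comparisons.  */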
5515 static tree
5516 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5517 tree op0, tree op1)
5519 tree arg0 = op0;
5520 enum tree_code op_code;
5521 tree comp_const;
5522 tree minmax_const;
5523 int consts_equal, consts_lt;
5524 tree inner;
5526 STRIP_SIGN_NOPS (arg0);
5528 op_code = TREE_CODE (arg0);
5529 minmax_const = TREE_OPERAND (arg0, 1);
5530 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5531 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5532 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5533 inner = TREE_OPERAND (arg0, 0);
5535 /* If something does not permit us to optimize, return NULL_TREE. */
5536 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5537 || TREE_CODE (comp_const) != INTEGER_CST
5538 || TREE_OVERFLOW (comp_const)
5539 || TREE_CODE (minmax_const) != INTEGER_CST
5540 || TREE_OVERFLOW (minmax_const))
5541 return NULL_TREE;
5543 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5544 and GT_EXPR, doing the rest with recursive calls using logical
5545 simplifications. */
5546 switch (code)
5548 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5550 tree tem
5551 = optimize_minmax_comparison (loc,
5552 invert_tree_comparison (code, false),
5553 type, op0, op1);
5554 if (tem)
5555 return invert_truthvalue_loc (loc, tem);
5556 return NULL_TREE;
5559 case GE_EXPR:
5560 return
5561 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5562 optimize_minmax_comparison
5563 (loc, EQ_EXPR, type, arg0, comp_const),
5564 optimize_minmax_comparison
5565 (loc, GT_EXPR, type, arg0, comp_const));
5567 case EQ_EXPR:
5568 if (op_code == MAX_EXPR && consts_equal)
5569 /* MAX (X, 0) == 0 -> X <= 0 */
5570 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5572 else if (op_code == MAX_EXPR && consts_lt)
5573 /* MAX (X, 0) == 5 -> X == 5 */
5574 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5576 else if (op_code == MAX_EXPR)
5577 /* MAX (X, 0) == -1 -> false */
5578 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5580 else if (consts_equal)
5581 /* MIN (X, 0) == 0 -> X >= 0 */
5582 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5584 else if (consts_lt)
5585 /* MIN (X, 0) == 5 -> false */
5586 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5588 else
5589 /* MIN (X, 0) == -1 -> X == -1 */
5590 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5592 case GT_EXPR:
5593 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5594 /* MAX (X, 0) > 0 -> X > 0
5595 MAX (X, 0) > 5 -> X > 5 */
5596 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5598 else if (op_code == MAX_EXPR)
5599 /* MAX (X, 0) > -1 -> true */
5600 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5602 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5603 /* MIN (X, 0) > 0 -> false
5604 MIN (X, 0) > 5 -> false */
5605 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5607 else
5608 /* MIN (X, 0) > -1 -> X > -1 */
5609 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5611 default:
5612 return NULL_TREE;
5616 /* T is an integer expression that is being multiplied, divided, or taken a
5617 modulus (CODE says which and what kind of divide or modulus) by a
5618 constant C. See if we can eliminate that operation by folding it with
5619 other operations already in T. WIDE_TYPE, if non-null, is a type that
5620 should be used for the computation if wider than our type.
5622 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5623 (X * 2) + (Y * 4). We must, however, be assured that either the original
5624 expression would not overflow or that overflow is undefined for the type
5625 in the language in question.
5627 If we return a non-null expression, it is an equivalent form of the
5628 original computation, but need not be in the original type.
5630 We set *STRICT_OVERFLOW_P to true if the return value depends on
5631 signed overflow being undefined. Otherwise we do not change
5632 *STRICT_OVERFLOW_P. */
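/* Editorial worked example: for signed x and y, dividing

     (x * 8) + (y * 16)   by 4

   recurses into the PLUS_EXPR, folds each multiplier, and yields
   (x * 2) + (y * 4).  The rewrite relies on the original sum not
   wrapping, so *STRICT_OVERFLOW_P is set and callers may warn under
   -Wstrict-overflow.  */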
5634 static tree
5635 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5636 bool *strict_overflow_p)
5638 /* To avoid exponential search depth, refuse to allow recursion past
5639 three levels. Beyond that (1) it's highly unlikely that we'll find
5640 something interesting and (2) we've probably processed it before
5641 when we built the inner expression. */
5643 static int depth;
5644 tree ret;
5646 if (depth > 3)
5647 return NULL;
5649 depth++;
5650 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5651 depth--;
5653 return ret;
5656 static tree
5657 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5658 bool *strict_overflow_p)
5660 tree type = TREE_TYPE (t);
5661 enum tree_code tcode = TREE_CODE (t);
5662 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5663 > GET_MODE_SIZE (TYPE_MODE (type)))
5664 ? wide_type : type);
5665 tree t1, t2;
5666 int same_p = tcode == code;
5667 tree op0 = NULL_TREE, op1 = NULL_TREE;
5668 bool sub_strict_overflow_p;
5670 /* Don't deal with constants of zero here; they confuse the code below. */
5671 if (integer_zerop (c))
5672 return NULL_TREE;
5674 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5675 op0 = TREE_OPERAND (t, 0);
5677 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5678 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5680 /* Note that we need not handle conditional operations here since fold
5681 already handles those cases. So just do arithmetic here. */
5682 switch (tcode)
5684 case INTEGER_CST:
5685 /* For a constant, we can always simplify if we are a multiply
5686 or (for divide and modulus) if it is a multiple of our constant. */
5687 if (code == MULT_EXPR
5688 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5689 return const_binop (code, fold_convert (ctype, t),
5690 fold_convert (ctype, c));
5691 break;
5693 CASE_CONVERT: case NON_LVALUE_EXPR:
5694 /* If op0 is an expression ... */
5695 if ((COMPARISON_CLASS_P (op0)
5696 || UNARY_CLASS_P (op0)
5697 || BINARY_CLASS_P (op0)
5698 || VL_EXP_CLASS_P (op0)
5699 || EXPRESSION_CLASS_P (op0))
5700 /* ... and has wrapping overflow, and its type is smaller
5701 than ctype, then we cannot pass through as widening. */
5702 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5703 && (TYPE_PRECISION (ctype)
5704 > TYPE_PRECISION (TREE_TYPE (op0))))
5705 /* ... or this is a truncation (t is narrower than op0),
5706 then we cannot pass through this narrowing. */
5707 || (TYPE_PRECISION (type)
5708 < TYPE_PRECISION (TREE_TYPE (op0)))
5709 /* ... or signedness changes for division or modulus,
5710 then we cannot pass through this conversion. */
5711 || (code != MULT_EXPR
5712 && (TYPE_UNSIGNED (ctype)
5713 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5714 /* ... or the inner type has undefined overflow while the
5715 type being converted to has not, so we cannot do the operation
5716 in the inner type as that would introduce undefined overflow. */
5717 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5718 && !TYPE_OVERFLOW_UNDEFINED (type))))
5719 break;
5721 /* Pass the constant down and see if we can make a simplification. If
5722 we can, replace this expression with the inner simplification for
5723 possible later conversion to our type or some other one. */
5724 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5725 && TREE_CODE (t2) == INTEGER_CST
5726 && !TREE_OVERFLOW (t2)
5727 && (0 != (t1 = extract_muldiv (op0, t2, code,
5728 code == MULT_EXPR
5729 ? ctype : NULL_TREE,
5730 strict_overflow_p))))
5731 return t1;
5732 break;
5734 case ABS_EXPR:
5735 /* If widening the type changes it from signed to unsigned, then we
5736 must avoid building ABS_EXPR itself as unsigned. */
5737 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5739 tree cstype = (*signed_type_for) (ctype);
5740 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5741 != 0)
5743 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5744 return fold_convert (ctype, t1);
5746 break;
5748 /* If the constant is negative, we cannot simplify this. */
5749 if (tree_int_cst_sgn (c) == -1)
5750 break;
5751 /* FALLTHROUGH */
5752 case NEGATE_EXPR:
5753 /* For division and modulus, type can't be unsigned, as e.g.
5754 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5755 For signed types, even with wrapping overflow, this is fine. */
5756 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5757 break;
5758 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5759 != 0)
5760 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5761 break;
5763 case MIN_EXPR: case MAX_EXPR:
5764 /* If widening the type changes the signedness, then we can't perform
5765 this optimization as that changes the result. */
5766 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5767 break;
5769 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5770 sub_strict_overflow_p = false;
5771 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5772 &sub_strict_overflow_p)) != 0
5773 && (t2 = extract_muldiv (op1, c, code, wide_type,
5774 &sub_strict_overflow_p)) != 0)
5776 if (tree_int_cst_sgn (c) < 0)
5777 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5778 if (sub_strict_overflow_p)
5779 *strict_overflow_p = true;
5780 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5781 fold_convert (ctype, t2));
5783 break;
5785 case LSHIFT_EXPR: case RSHIFT_EXPR:
5786 /* If the second operand is constant, this is a multiplication
5787 or floor division by a power of two, so we can treat it that
5788 way unless the multiplier or divisor overflows. Signed
5789 left-shift overflow is implementation-defined rather than
5790 undefined in C90, so do not convert signed left shift into
5791 multiplication. */
5792 if (TREE_CODE (op1) == INTEGER_CST
5793 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5794 /* const_binop may not detect overflow correctly,
5795 so check for it explicitly here. */
5796 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5797 && 0 != (t1 = fold_convert (ctype,
5798 const_binop (LSHIFT_EXPR,
5799 size_one_node,
5800 op1)))
5801 && !TREE_OVERFLOW (t1))
5802 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5803 ? MULT_EXPR : FLOOR_DIV_EXPR,
5804 ctype,
5805 fold_convert (ctype, op0),
5806 t1),
5807 c, code, wide_type, strict_overflow_p);
5808 break;
5810 case PLUS_EXPR: case MINUS_EXPR:
5811 /* See if we can eliminate the operation on both sides. If we can, we
5812 can return a new PLUS or MINUS. If we can't, the only remaining
5813 cases where we can do anything are if the second operand is a
5814 constant. */
5815 sub_strict_overflow_p = false;
5816 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5817 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5818 if (t1 != 0 && t2 != 0
5819 && (code == MULT_EXPR
5820 /* If not multiplication, we can only do this if both operands
5821 are divisible by c. */
5822 || (multiple_of_p (ctype, op0, c)
5823 && multiple_of_p (ctype, op1, c))))
5825 if (sub_strict_overflow_p)
5826 *strict_overflow_p = true;
5827 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5828 fold_convert (ctype, t2));
5831 /* If this was a subtraction, negate OP1 and set it to be an addition.
5832 This simplifies the logic below. */
5833 if (tcode == MINUS_EXPR)
5835 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5836 /* If OP1 was not easily negatable, the constant may be OP0. */
5837 if (TREE_CODE (op0) == INTEGER_CST)
5839 tree tem = op0;
5840 op0 = op1;
5841 op1 = tem;
5842 tem = t1;
5843 t1 = t2;
5844 t2 = tem;
5848 if (TREE_CODE (op1) != INTEGER_CST)
5849 break;
5851 /* If either OP1 or C are negative, this optimization is not safe for
5852 some of the division and remainder types while for others we need
5853 to change the code. */
5854 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5856 if (code == CEIL_DIV_EXPR)
5857 code = FLOOR_DIV_EXPR;
5858 else if (code == FLOOR_DIV_EXPR)
5859 code = CEIL_DIV_EXPR;
5860 else if (code != MULT_EXPR
5861 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5862 break;
5865 /* If it's a multiply or a division/modulus operation of a multiple
5866 of our constant, do the operation and verify it doesn't overflow. */
5867 if (code == MULT_EXPR
5868 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5870 op1 = const_binop (code, fold_convert (ctype, op1),
5871 fold_convert (ctype, c));
5872 /* We allow the constant to overflow with wrapping semantics. */
5873 if (op1 == 0
5874 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5875 break;
5877 else
5878 break;
5880 /* If we have an unsigned type, we cannot widen the operation since it
5881 will change the result if the original computation overflowed. */
5882 if (TYPE_UNSIGNED (ctype) && ctype != type)
5883 break;
5885 /* If we were able to eliminate our operation from the first side,
5886 apply our operation to the second side and reform the PLUS. */
5887 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5888 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5890 /* The last case is if we are a multiply. In that case, we can
5891 apply the distributive law to commute the multiply and addition
5892 if the multiplication of the constants doesn't overflow
5893 and overflow is defined. With undefined overflow
5894 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5895 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5896 return fold_build2 (tcode, ctype,
5897 fold_build2 (code, ctype,
5898 fold_convert (ctype, op0),
5899 fold_convert (ctype, c)),
5900 op1);
5902 break;
5904 case MULT_EXPR:
5905 /* We have a special case here if we are doing something like
5906 (C * 8) % 4 since we know that's zero. */
5907 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5908 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5909 /* If the multiplication can overflow we cannot optimize this. */
5910 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5911 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5912 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5914 *strict_overflow_p = true;
5915 return omit_one_operand (type, integer_zero_node, op0);
5918 /* ... fall through ... */
5920 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5921 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5922 /* If we can extract our operation from the LHS, do so and return a
5923 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5924 do something only if the second operand is a constant. */
5925 if (same_p
5926 && (t1 = extract_muldiv (op0, c, code, wide_type,
5927 strict_overflow_p)) != 0)
5928 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5929 fold_convert (ctype, op1));
5930 else if (tcode == MULT_EXPR && code == MULT_EXPR
5931 && (t1 = extract_muldiv (op1, c, code, wide_type,
5932 strict_overflow_p)) != 0)
5933 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5934 fold_convert (ctype, t1));
5935 else if (TREE_CODE (op1) != INTEGER_CST)
5936 return 0;
5938 /* If these are the same operation types, we can associate them
5939 assuming no overflow. */
5940 if (tcode == code)
5942 bool overflow_p = false;
5943 bool overflow_mul_p;
5944 signop sign = TYPE_SIGN (ctype);
5945 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5946 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5947 if (overflow_mul_p
5948 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5949 overflow_p = true;
5950 if (!overflow_p)
5951 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5952 wide_int_to_tree (ctype, mul));
5955 /* If these operations "cancel" each other, we have the main
5956 optimizations of this pass, which occur when either constant is a
5957 multiple of the other, in which case we replace this with an
5958 operation of either CODE or TCODE.
5960 If we have an unsigned type, we cannot do this since it will change
5961 the result if the original computation overflowed. */
5962 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5963 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5964 || (tcode == MULT_EXPR
5965 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5966 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5967 && code != MULT_EXPR)))
5969 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5971 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5972 *strict_overflow_p = true;
5973 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5974 fold_convert (ctype,
5975 const_binop (TRUNC_DIV_EXPR,
5976 op1, c)));
5978 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
5980 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5981 *strict_overflow_p = true;
5982 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5983 fold_convert (ctype,
5984 const_binop (TRUNC_DIV_EXPR,
5985 c, op1)));
5988 break;
5990 default:
5991 break;
5994 return 0;
5997 /* Return a node which has the indicated constant VALUE (either 0 or
5998 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5999 and is of the indicated TYPE. */
6001 tree
6002 constant_boolean_node (bool value, tree type)
6004 if (type == integer_type_node)
6005 return value ? integer_one_node : integer_zero_node;
6006 else if (type == boolean_type_node)
6007 return value ? boolean_true_node : boolean_false_node;
6008 else if (TREE_CODE (type) == VECTOR_TYPE)
6009 return build_vector_from_val (type,
6010 build_int_cst (TREE_TYPE (type),
6011 value ? -1 : 0));
6012 else
6013 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6017 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6018 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6019 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6020 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6021 COND is the first argument to CODE; otherwise (as in the example
6022 given here), it is the second argument. TYPE is the type of the
6023 original expression. Return NULL_TREE if no simplification is
6024 possible. */
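/* Editorial example: with a constant first operand the transformation
   always applies, e.g.

     5 + (b ? x : y)   -->   b ? (5 + x) : (5 + y)

   while for a non-constant ARG it is attempted only when one arm
   folds to a constant, e.g. x - (b ? x : y) --> b ? 0 : x - y.  */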
6026 static tree
6027 fold_binary_op_with_conditional_arg (location_t loc,
6028 enum tree_code code,
6029 tree type, tree op0, tree op1,
6030 tree cond, tree arg, int cond_first_p)
6032 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6033 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6034 tree test, true_value, false_value;
6035 tree lhs = NULL_TREE;
6036 tree rhs = NULL_TREE;
6037 enum tree_code cond_code = COND_EXPR;
6039 if (TREE_CODE (cond) == COND_EXPR
6040 || TREE_CODE (cond) == VEC_COND_EXPR)
6042 test = TREE_OPERAND (cond, 0);
6043 true_value = TREE_OPERAND (cond, 1);
6044 false_value = TREE_OPERAND (cond, 2);
6045 /* If this operand throws an exception, then it does not make
6046 sense to try to perform a logical or arithmetic operation
6047 involving it. */
6048 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6049 lhs = true_value;
6050 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6051 rhs = false_value;
6053 else
6055 tree testtype = TREE_TYPE (cond);
6056 test = cond;
6057 true_value = constant_boolean_node (true, testtype);
6058 false_value = constant_boolean_node (false, testtype);
6061 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6062 cond_code = VEC_COND_EXPR;
6064 /* This transformation is only worthwhile if we don't have to wrap ARG
6065 in a SAVE_EXPR and the operation can be simplified without recursing
6066 on at least one of the branches once it's pushed inside the COND_EXPR. */
6067 if (!TREE_CONSTANT (arg)
6068 && (TREE_SIDE_EFFECTS (arg)
6069 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6070 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6071 return NULL_TREE;
6073 arg = fold_convert_loc (loc, arg_type, arg);
6074 if (lhs == 0)
6076 true_value = fold_convert_loc (loc, cond_type, true_value);
6077 if (cond_first_p)
6078 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6079 else
6080 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6082 if (rhs == 0)
6084 false_value = fold_convert_loc (loc, cond_type, false_value);
6085 if (cond_first_p)
6086 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6087 else
6088 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6091 /* Check that we have simplified at least one of the branches. */
6092 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6093 return NULL_TREE;
6095 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6099 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6101 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6102 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6103 ADDEND is the same as X.
6105 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6106 and finite. The problematic cases are when X is zero, and its mode
6107 has signed zeros. In the case of rounding towards -infinity,
6108 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6109 modes, X + 0 is not the same as X because -0 + 0 is 0. */
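/* Editorial illustration: under round-towards-minus-infinity
   (e.g. fesetround (FE_DOWNWARD)) we have 0.0 - 0.0 == -0.0, so
   x - 0.0 is not x when x is +0.0; under the default rounding mode
   -0.0 + 0.0 == +0.0, so x + 0.0 is not x when x is -0.0.  Those are
   exactly the cases filtered out below.  */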
6111 bool
6112 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6114 if (!real_zerop (addend))
6115 return false;
6117 /* Don't allow the fold with -fsignaling-nans. */
6118 if (HONOR_SNANS (TYPE_MODE (type)))
6119 return false;
6121 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6122 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6123 return true;
6125 /* In a vector or complex, we would need to check the sign of all zeros. */
6126 if (TREE_CODE (addend) != REAL_CST)
6127 return false;
6129 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6130 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6131 negate = !negate;
6133 /* The mode has signed zeros, and we have to honor their sign.
6134 In this situation, there is only one case we can return true for.
6135 X - 0 is the same as X unless rounding towards -infinity is
6136 supported. */
6137 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6140 /* Subroutine of fold() that checks comparisons of built-in math
6141 functions against real constants.
6143 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6144 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6145 is the type of the result and ARG0 and ARG1 are the operands of the
6146 comparison. ARG1 must be a TREE_REAL_CST.
6148 The function returns the constant folded tree if a simplification
6149 can be made, and NULL_TREE otherwise. */
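/* Editorial sketch of the sqrt(x) CMP c folds below, writing c2 for
   c*c rounded to the operand's mode and ignoring the overflow corner
   cases that the code handles explicitly:

     c < 0:   sqrt(x) ==/</<= c  -->  false
              sqrt(x) >/>= c     -->  x >= 0   (true if NaNs ignored)
     c >= 0:  sqrt(x) >/>= c     -->  x >/>= c2
              sqrt(x) </<= c     -->  x >= 0 && x </<= c2, where the
                                      x >= 0 guard is dropped when
                                      NaNs can be ignored.  */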
6151 static tree
6152 fold_mathfn_compare (location_t loc,
6153 enum built_in_function fcode, enum tree_code code,
6154 tree type, tree arg0, tree arg1)
6156 REAL_VALUE_TYPE c;
6158 if (BUILTIN_SQRT_P (fcode))
6160 tree arg = CALL_EXPR_ARG (arg0, 0);
6161 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6163 c = TREE_REAL_CST (arg1);
6164 if (REAL_VALUE_NEGATIVE (c))
6166 /* sqrt(x) = y, sqrt(x) < y and sqrt(x) <= y are all false, if y is negative. */
6167 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6168 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6170 /* sqrt(x) > y is always true, if y is negative and we
6171 don't care about NaNs, i.e. negative values of x. */
6172 if (code == NE_EXPR || !HONOR_NANS (mode))
6173 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6175 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6176 return fold_build2_loc (loc, GE_EXPR, type, arg,
6177 build_real (TREE_TYPE (arg), dconst0));
6179 else if (code == GT_EXPR || code == GE_EXPR)
6181 REAL_VALUE_TYPE c2;
6183 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6184 real_convert (&c2, mode, &c2);
6186 if (REAL_VALUE_ISINF (c2))
6188 /* sqrt(x) > y is x == +Inf, when y is very large. */
6189 if (HONOR_INFINITIES (mode))
6190 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6191 build_real (TREE_TYPE (arg), c2));
6193 /* sqrt(x) > y is always false, when y is very large
6194 and we don't care about infinities. */
6195 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6198 /* sqrt(x) > c is the same as x > c*c. */
6199 return fold_build2_loc (loc, code, type, arg,
6200 build_real (TREE_TYPE (arg), c2));
6202 else if (code == LT_EXPR || code == LE_EXPR)
6204 REAL_VALUE_TYPE c2;
6206 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6207 real_convert (&c2, mode, &c2);
6209 if (REAL_VALUE_ISINF (c2))
6211 /* sqrt(x) < y is always true, when y is a very large
6212 value and we don't care about NaNs or Infinities. */
6213 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6214 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6216 /* sqrt(x) < y is x != +Inf when y is very large and we
6217 don't care about NaNs. */
6218 if (! HONOR_NANS (mode))
6219 return fold_build2_loc (loc, NE_EXPR, type, arg,
6220 build_real (TREE_TYPE (arg), c2));
6222 /* sqrt(x) < y is x >= 0 when y is very large and we
6223 don't care about Infinities. */
6224 if (! HONOR_INFINITIES (mode))
6225 return fold_build2_loc (loc, GE_EXPR, type, arg,
6226 build_real (TREE_TYPE (arg), dconst0));
6228 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6229 arg = save_expr (arg);
6230 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6231 fold_build2_loc (loc, GE_EXPR, type, arg,
6232 build_real (TREE_TYPE (arg),
6233 dconst0)),
6234 fold_build2_loc (loc, NE_EXPR, type, arg,
6235 build_real (TREE_TYPE (arg),
6236 c2)));
6239 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return fold_build2_loc (loc, code, type, arg,
6242 build_real (TREE_TYPE (arg), c2));
6244 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6245 arg = save_expr (arg);
6246 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6247 fold_build2_loc (loc, GE_EXPR, type, arg,
6248 build_real (TREE_TYPE (arg),
6249 dconst0)),
6250 fold_build2_loc (loc, code, type, arg,
6251 build_real (TREE_TYPE (arg),
6252 c2)));
6256 return NULL_TREE;
6259 /* Subroutine of fold() that optimizes comparisons against Infinities,
6260 either +Inf or -Inf.
6262 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6263 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6264 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6266 The function returns the constant folded tree if a simplification
6267 can be made, and NULL_TREE otherwise. */
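/* Editorial summary, writing MAX for the largest finite value of the
   mode (e.g. DBL_MAX):

     x >  +Inf    -->  false              (unless sNaNs are honored)
     x <= +Inf    -->  x == x             (i.e. !isnan (x))
     x ==/>= +Inf -->  x > MAX
     x <  +Inf    -->  x <= MAX
     x != +Inf    -->  !(x > MAX)         (x <= MAX if NaNs ignored)

   Comparisons against -Inf are handled by first swapping the sense
   of the comparison.  */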
6269 static tree
6270 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6271 tree arg0, tree arg1)
6273 machine_mode mode;
6274 REAL_VALUE_TYPE max;
6275 tree temp;
6276 bool neg;
6278 mode = TYPE_MODE (TREE_TYPE (arg0));
6280 /* For negative infinity swap the sense of the comparison. */
6281 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6282 if (neg)
6283 code = swap_tree_comparison (code);
6285 switch (code)
6287 case GT_EXPR:
6288 /* x > +Inf is always false, if we ignore sNaNs. */
6289 if (HONOR_SNANS (mode))
6290 return NULL_TREE;
6291 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6293 case LE_EXPR:
6294 /* x <= +Inf is always true, if we don't care about NaNs. */
6295 if (! HONOR_NANS (mode))
6296 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6298 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6299 arg0 = save_expr (arg0);
6300 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6302 case EQ_EXPR:
6303 case GE_EXPR:
6304 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6305 real_maxval (&max, neg, mode);
6306 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6307 arg0, build_real (TREE_TYPE (arg0), max));
6309 case LT_EXPR:
6310 /* x < +Inf is always equal to x <= DBL_MAX. */
6311 real_maxval (&max, neg, mode);
6312 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6313 arg0, build_real (TREE_TYPE (arg0), max));
6315 case NE_EXPR:
6316 /* x != +Inf is always equal to !(x > DBL_MAX). */
6317 real_maxval (&max, neg, mode);
6318 if (! HONOR_NANS (mode))
6319 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6320 arg0, build_real (TREE_TYPE (arg0), max));
6322 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6323 arg0, build_real (TREE_TYPE (arg0), max));
6324 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6326 default:
6327 break;
6330 return NULL_TREE;
6333 /* Subroutine of fold() that optimizes comparisons of a division by
6334 a nonzero integer constant against an integer constant, i.e.
6335 X/C1 op C2.
6337 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6338 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6339 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6341 The function returns the constant folded tree if a simplification
6342 can be made, and NULL_TREE otherwise. */
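/* Editorial worked example: for signed x, x / 3 == 2 holds exactly
   when x is in [6, 8], so the comparison becomes the range check
   6 <= x && x <= 8; likewise x / 3 > 2 becomes x > 8.  The LO/HI
   overflow checks below handle the band falling partly outside the
   type's range.  */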
6344 static tree
6345 fold_div_compare (location_t loc,
6346 enum tree_code code, tree type, tree arg0, tree arg1)
6348 tree prod, tmp, hi, lo;
6349 tree arg00 = TREE_OPERAND (arg0, 0);
6350 tree arg01 = TREE_OPERAND (arg0, 1);
6351 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6352 bool neg_overflow = false;
6353 bool overflow;
6355 /* We have to do this the hard way to detect unsigned overflow.
6356 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6357 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6358 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6359 neg_overflow = false;
6361 if (sign == UNSIGNED)
6363 tmp = int_const_binop (MINUS_EXPR, arg01,
6364 build_int_cst (TREE_TYPE (arg01), 1));
6365 lo = prod;
6367 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6368 val = wi::add (prod, tmp, sign, &overflow);
6369 hi = force_fit_type (TREE_TYPE (arg00), val,
6370 -1, overflow | TREE_OVERFLOW (prod));
6372 else if (tree_int_cst_sgn (arg01) >= 0)
6374 tmp = int_const_binop (MINUS_EXPR, arg01,
6375 build_int_cst (TREE_TYPE (arg01), 1));
6376 switch (tree_int_cst_sgn (arg1))
6378 case -1:
6379 neg_overflow = true;
6380 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6381 hi = prod;
6382 break;
6384 case 0:
6385 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6386 hi = tmp;
6387 break;
6389 case 1:
6390 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6391 lo = prod;
6392 break;
6394 default:
6395 gcc_unreachable ();
6398 else
6400 /* A negative divisor reverses the relational operators. */
6401 code = swap_tree_comparison (code);
6403 tmp = int_const_binop (PLUS_EXPR, arg01,
6404 build_int_cst (TREE_TYPE (arg01), 1));
6405 switch (tree_int_cst_sgn (arg1))
6407 case -1:
6408 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6409 lo = prod;
6410 break;
6412 case 0:
6413 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6414 lo = tmp;
6415 break;
6417 case 1:
6418 neg_overflow = true;
6419 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6420 hi = prod;
6421 break;
6423 default:
6424 gcc_unreachable ();
6428 switch (code)
6430 case EQ_EXPR:
6431 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6432 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6433 if (TREE_OVERFLOW (hi))
6434 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6435 if (TREE_OVERFLOW (lo))
6436 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6437 return build_range_check (loc, type, arg00, 1, lo, hi);
6439 case NE_EXPR:
6440 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6441 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6442 if (TREE_OVERFLOW (hi))
6443 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6444 if (TREE_OVERFLOW (lo))
6445 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6446 return build_range_check (loc, type, arg00, 0, lo, hi);
6448 case LT_EXPR:
6449 if (TREE_OVERFLOW (lo))
6451 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6452 return omit_one_operand_loc (loc, type, tmp, arg00);
6454 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6456 case LE_EXPR:
6457 if (TREE_OVERFLOW (hi))
6459 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6460 return omit_one_operand_loc (loc, type, tmp, arg00);
6462 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6464 case GT_EXPR:
6465 if (TREE_OVERFLOW (hi))
6467 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6468 return omit_one_operand_loc (loc, type, tmp, arg00);
6470 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6472 case GE_EXPR:
6473 if (TREE_OVERFLOW (lo))
6475 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6476 return omit_one_operand_loc (loc, type, tmp, arg00);
6478 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6480 default:
6481 break;
6484 return NULL_TREE;
6488 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6489 equality/inequality test, then return a simplified form of the test
6490 using a sign test. Otherwise return NULL. TYPE is the desired
6491 result type. */
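/* Editorial example: for a 32-bit int x,

     (x & 0x80000000) != 0   -->   x < 0
     (x & 0x80000000) == 0   -->   x >= 0

   since the tested bit is the sign bit of the signed view of x.  */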
6493 static tree
6494 fold_single_bit_test_into_sign_test (location_t loc,
6495 enum tree_code code, tree arg0, tree arg1,
6496 tree result_type)
6498 /* If this is testing a single bit, we can optimize the test. */
6499 if ((code == NE_EXPR || code == EQ_EXPR)
6500 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6501 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6503 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6504 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6505 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6507 if (arg00 != NULL_TREE
6508 /* This is only a win if casting to a signed type is cheap,
6509 i.e. when arg00's type is not a partial mode. */
6510 && TYPE_PRECISION (TREE_TYPE (arg00))
6511 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6513 tree stype = signed_type_for (TREE_TYPE (arg00));
6514 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6515 result_type,
6516 fold_convert_loc (loc, stype, arg00),
6517 build_int_cst (stype, 0));
6521 return NULL_TREE;
6524 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6525 equality/inequality test, then return a simplified form of
6526 the test using shifts and logical operations. Otherwise return
6527 NULL. TYPE is the desired result type. */
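/* Editorial example of the shift form produced when the sign test
   above does not apply:

     (x & 8) != 0   -->   (x >> 3) & 1
     (x & 8) == 0   -->   ((x >> 3) ^ 1) & 1

   with 3 == log2 (8); the XOR with 1 below implements the == case.  */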
6529 tree
6530 fold_single_bit_test (location_t loc, enum tree_code code,
6531 tree arg0, tree arg1, tree result_type)
6533 /* If this is testing a single bit, we can optimize the test. */
6534 if ((code == NE_EXPR || code == EQ_EXPR)
6535 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6536 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6538 tree inner = TREE_OPERAND (arg0, 0);
6539 tree type = TREE_TYPE (arg0);
6540 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6541 machine_mode operand_mode = TYPE_MODE (type);
6542 int ops_unsigned;
6543 tree signed_type, unsigned_type, intermediate_type;
6544 tree tem, one;
6546 /* First, see if we can fold the single bit test into a sign-bit
6547 test. */
6548 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6549 result_type);
6550 if (tem)
6551 return tem;
6553 /* Otherwise we have (A & C) != 0 where C is a single bit,
6554 convert that into ((A >> C2) & 1), where C2 = log2(C).
6555 Similarly for (A & C) == 0. */
6557 /* If INNER is a right shift of a constant and it plus BITNUM does
6558 not overflow, adjust BITNUM and INNER. */
6559 if (TREE_CODE (inner) == RSHIFT_EXPR
6560 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6561 && bitnum < TYPE_PRECISION (type)
6562 && wi::ltu_p (TREE_OPERAND (inner, 1),
6563 TYPE_PRECISION (type) - bitnum))
6565 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6566 inner = TREE_OPERAND (inner, 0);
6569 /* If we are going to be able to omit the AND below, we must do our
6570 operations as unsigned. If we must use the AND, we have a choice.
6571 Normally unsigned is faster, but for some machines signed is. */
6572 #ifdef LOAD_EXTEND_OP
6573 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6574 && !flag_syntax_only) ? 0 : 1;
6575 #else
6576 ops_unsigned = 1;
6577 #endif
6579 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6580 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6581 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6582 inner = fold_convert_loc (loc, intermediate_type, inner);
6584 if (bitnum != 0)
6585 inner = build2 (RSHIFT_EXPR, intermediate_type,
6586 inner, size_int (bitnum));
6588 one = build_int_cst (intermediate_type, 1);
6590 if (code == EQ_EXPR)
6591 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6593 /* Put the AND last so it can combine with more things. */
6594 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6596 /* Make sure to return the proper type. */
6597 inner = fold_convert_loc (loc, result_type, inner);
6599 return inner;
6601 return NULL_TREE;
6604 /* Check whether we are allowed to reorder operands arg0 and arg1,
6605 such that the evaluation of arg1 occurs before arg0. */
6607 static bool
6608 reorder_operands_p (const_tree arg0, const_tree arg1)
6610 if (! flag_evaluation_order)
6611 return true;
6612 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6613 return true;
6614 return ! TREE_SIDE_EFFECTS (arg0)
6615 && ! TREE_SIDE_EFFECTS (arg1);
6618 /* Test whether it is preferable to swap two operands, ARG0 and
6619 ARG1, for example because ARG0 is an integer constant and ARG1
6620 isn't. If REORDER is true, only recommend swapping if we can
6621 evaluate the operands in reverse order. */
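/* Editorial note on the resulting canonical order: general expressions
   come first, then DECLs, then SSA_NAMEs (ordered between themselves
   by version number), with constants always last; e.g. callers use
   this to canonicalize 5 < x into x > 5.  */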
6623 bool
6624 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6626 if (CONSTANT_CLASS_P (arg1))
6627 return 0;
6628 if (CONSTANT_CLASS_P (arg0))
6629 return 1;
6631 STRIP_SIGN_NOPS (arg0);
6632 STRIP_SIGN_NOPS (arg1);
6634 if (TREE_CONSTANT (arg1))
6635 return 0;
6636 if (TREE_CONSTANT (arg0))
6637 return 1;
6639 if (reorder && flag_evaluation_order
6640 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6641 return 0;
6643 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6644 for commutative and comparison operators. Ensuring a canonical
6645 form allows the optimizers to find additional redundancies without
6646 having to explicitly check for both orderings. */
6647 if (TREE_CODE (arg0) == SSA_NAME
6648 && TREE_CODE (arg1) == SSA_NAME
6649 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6650 return 1;
6652 /* Put SSA_NAMEs last. */
6653 if (TREE_CODE (arg1) == SSA_NAME)
6654 return 0;
6655 if (TREE_CODE (arg0) == SSA_NAME)
6656 return 1;
6658 /* Put variables last. */
6659 if (DECL_P (arg1))
6660 return 0;
6661 if (DECL_P (arg0))
6662 return 1;
6664 return 0;
6667 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6668 ARG0 is extended to a wider type. */
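/* Editorial example: for short s, the comparison (int) s < 70000
   always holds because every value of s is below 70000, so it folds
   to constant true while keeping s for its side effects.  */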
6670 static tree
6671 fold_widened_comparison (location_t loc, enum tree_code code,
6672 tree type, tree arg0, tree arg1)
6674 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6675 tree arg1_unw;
6676 tree shorter_type, outer_type;
6677 tree min, max;
6678 bool above, below;
6680 if (arg0_unw == arg0)
6681 return NULL_TREE;
6682 shorter_type = TREE_TYPE (arg0_unw);
6684 #ifdef HAVE_canonicalize_funcptr_for_compare
6685 /* Disable this optimization if we're casting a function pointer
6686 type on targets that require function pointer canonicalization. */
6687 if (HAVE_canonicalize_funcptr_for_compare
6688 && TREE_CODE (shorter_type) == POINTER_TYPE
6689 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6690 return NULL_TREE;
6691 #endif
6693 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6694 return NULL_TREE;
6696 arg1_unw = get_unwidened (arg1, NULL_TREE);
6698 /* If possible, express the comparison in the shorter mode. */
6699 if ((code == EQ_EXPR || code == NE_EXPR
6700 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6701 && (TREE_TYPE (arg1_unw) == shorter_type
6702 || ((TYPE_PRECISION (shorter_type)
6703 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6704 && (TYPE_UNSIGNED (shorter_type)
6705 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6706 || (TREE_CODE (arg1_unw) == INTEGER_CST
6707 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6708 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6709 && int_fits_type_p (arg1_unw, shorter_type))))
6710 return fold_build2_loc (loc, code, type, arg0_unw,
6711 fold_convert_loc (loc, shorter_type, arg1_unw));
6713 if (TREE_CODE (arg1_unw) != INTEGER_CST
6714 || TREE_CODE (shorter_type) != INTEGER_TYPE
6715 || !int_fits_type_p (arg1_unw, shorter_type))
6716 return NULL_TREE;
6718 /* If we are comparing with an integer that does not fit into the range
6719 of the shorter type, the result is known. */
6720 outer_type = TREE_TYPE (arg1_unw);
6721 min = lower_bound_in_type (outer_type, shorter_type);
6722 max = upper_bound_in_type (outer_type, shorter_type);
6724 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6725 max, arg1_unw));
6726 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6727 arg1_unw, min));
6729 switch (code)
6731 case EQ_EXPR:
6732 if (above || below)
6733 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6734 break;
6736 case NE_EXPR:
6737 if (above || below)
6738 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6739 break;
6741 case LT_EXPR:
6742 case LE_EXPR:
6743 if (above)
6744 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6745 else if (below)
6746 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
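/* FALLTHROUGH - neither bound determined the result, so the tests
below fail the same way and we reach the default case. */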
6748 case GT_EXPR:
6749 case GE_EXPR:
6750 if (above)
6751 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6752 else if (below)
6753 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
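/* FALLTHROUGH */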
6755 default:
6756 break;
6759 return NULL_TREE;
6762 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6763 ARG0 just the signedness is changed. */
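/* Editorial example: for int i, (unsigned int) i == 5U becomes
   i == 5, since equality is insensitive to the sign change; ordered
   comparisons are rewritten only when the signedness is unchanged.  */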
6765 static tree
6766 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6767 tree arg0, tree arg1)
6769 tree arg0_inner;
6770 tree inner_type, outer_type;
6772 if (!CONVERT_EXPR_P (arg0))
6773 return NULL_TREE;
6775 outer_type = TREE_TYPE (arg0);
6776 arg0_inner = TREE_OPERAND (arg0, 0);
6777 inner_type = TREE_TYPE (arg0_inner);
6779 #ifdef HAVE_canonicalize_funcptr_for_compare
6780 /* Disable this optimization if we're casting a function pointer
6781 type on targets that require function pointer canonicalization. */
6782 if (HAVE_canonicalize_funcptr_for_compare
6783 && TREE_CODE (inner_type) == POINTER_TYPE
6784 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6785 return NULL_TREE;
6786 #endif
6788 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6789 return NULL_TREE;
6791 if (TREE_CODE (arg1) != INTEGER_CST
6792 && !(CONVERT_EXPR_P (arg1)
6793 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6794 return NULL_TREE;
6796 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6797 && code != NE_EXPR
6798 && code != EQ_EXPR)
6799 return NULL_TREE;
6801 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6802 return NULL_TREE;
6804 if (TREE_CODE (arg1) == INTEGER_CST)
6805 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6806 TREE_OVERFLOW (arg1));
6807 else
6808 arg1 = fold_convert_loc (loc, inner_type, arg1);
6810 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6814 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6815 means A >= Y && A != MAX, but in this case we know that
6816 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
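/* Editorial example: with BOUND a < x and INEQ a + 1 > y, the bound
   implies a < x <= MAX, so a + 1 cannot wrap and the result is
   a >= y.  */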
6818 static tree
6819 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6821 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6823 if (TREE_CODE (bound) == LT_EXPR)
6824 a = TREE_OPERAND (bound, 0);
6825 else if (TREE_CODE (bound) == GT_EXPR)
6826 a = TREE_OPERAND (bound, 1);
6827 else
6828 return NULL_TREE;
6830 typea = TREE_TYPE (a);
6831 if (!INTEGRAL_TYPE_P (typea)
6832 && !POINTER_TYPE_P (typea))
6833 return NULL_TREE;
6835 if (TREE_CODE (ineq) == LT_EXPR)
6837 a1 = TREE_OPERAND (ineq, 1);
6838 y = TREE_OPERAND (ineq, 0);
6840 else if (TREE_CODE (ineq) == GT_EXPR)
6842 a1 = TREE_OPERAND (ineq, 0);
6843 y = TREE_OPERAND (ineq, 1);
6845 else
6846 return NULL_TREE;
6848 if (TREE_TYPE (a1) != typea)
6849 return NULL_TREE;
6851 if (POINTER_TYPE_P (typea))
6853 /* Convert the pointer types into integer before taking the difference. */
6854 tree ta = fold_convert_loc (loc, ssizetype, a);
6855 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6856 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6858 else
6859 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6861 if (!diff || !integer_onep (diff))
6862 return NULL_TREE;
6864 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6867 /* Fold a sum or difference of at least one multiplication.
6868 Returns the folded tree or NULL if no simplification could be made. */
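/* Editorial examples: x*4 + y*4 --> (x + y)*4 and x*4 + x --> x*5;
   the power-of-two case below further folds x*12 + y*4 into
   (x*3 + y)*4, which helps multi-dimensional array indexing.  */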
6870 static tree
6871 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6872 tree arg0, tree arg1)
6874 tree arg00, arg01, arg10, arg11;
6875 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6877 /* (A * C) +- (B * C) -> (A+-B) * C.
6878 (A * C) +- A -> A * (C+-1).
6879 We are most concerned about the case where C is a constant,
6880 but other combinations show up during loop reduction. Since
6881 it is not difficult, try all four possibilities. */
6883 if (TREE_CODE (arg0) == MULT_EXPR)
6885 arg00 = TREE_OPERAND (arg0, 0);
6886 arg01 = TREE_OPERAND (arg0, 1);
6888 else if (TREE_CODE (arg0) == INTEGER_CST)
6890 arg00 = build_one_cst (type);
6891 arg01 = arg0;
6893 else
6895 /* We cannot generate constant 1 for fract. */
6896 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6897 return NULL_TREE;
6898 arg00 = arg0;
6899 arg01 = build_one_cst (type);
6901 if (TREE_CODE (arg1) == MULT_EXPR)
6903 arg10 = TREE_OPERAND (arg1, 0);
6904 arg11 = TREE_OPERAND (arg1, 1);
6906 else if (TREE_CODE (arg1) == INTEGER_CST)
6908 arg10 = build_one_cst (type);
6909 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6910 the purpose of this canonicalization. */
6911 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6912 && negate_expr_p (arg1)
6913 && code == PLUS_EXPR)
6915 arg11 = negate_expr (arg1);
6916 code = MINUS_EXPR;
6918 else
6919 arg11 = arg1;
6921 else
6923 /* We cannot generate constant 1 for fract. */
6924 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6925 return NULL_TREE;
6926 arg10 = arg1;
6927 arg11 = build_one_cst (type);
6929 same = NULL_TREE;
6931 if (operand_equal_p (arg01, arg11, 0))
6932 same = arg01, alt0 = arg00, alt1 = arg10;
6933 else if (operand_equal_p (arg00, arg10, 0))
6934 same = arg00, alt0 = arg01, alt1 = arg11;
6935 else if (operand_equal_p (arg00, arg11, 0))
6936 same = arg00, alt0 = arg01, alt1 = arg10;
6937 else if (operand_equal_p (arg01, arg10, 0))
6938 same = arg01, alt0 = arg00, alt1 = arg11;
6940 /* No identical multiplicands; see if we can find a common
6941 power-of-two factor in non-power-of-two multiplies. This
6942 can help in multi-dimensional array access. */
6943 else if (tree_fits_shwi_p (arg01)
6944 && tree_fits_shwi_p (arg11))
6946 HOST_WIDE_INT int01, int11, tmp;
6947 bool swap = false;
6948 tree maybe_same;
6949 int01 = tree_to_shwi (arg01);
6950 int11 = tree_to_shwi (arg11);
6952 /* Move min of absolute values to int11. */
6953 if (absu_hwi (int01) < absu_hwi (int11))
6955 tmp = int01, int01 = int11, int11 = tmp;
6956 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6957 maybe_same = arg01;
6958 swap = true;
6960 else
6961 maybe_same = arg11;
6963 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6964 /* The remainder should not be a constant, otherwise we
6965 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6966 increased the number of multiplications necessary. */
6967 && TREE_CODE (arg10) != INTEGER_CST)
6969 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6970 build_int_cst (TREE_TYPE (arg00),
6971 int01 / int11));
6972 alt1 = arg10;
6973 same = maybe_same;
6974 if (swap)
6975 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6979 if (same)
6980 return fold_build2_loc (loc, MULT_EXPR, type,
6981 fold_build2_loc (loc, code, type,
6982 fold_convert_loc (loc, type, alt0),
6983 fold_convert_loc (loc, type, alt1)),
6984 fold_convert_loc (loc, type, same));
6986 return NULL_TREE;
6989 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6990 specified by EXPR into the buffer PTR of length LEN bytes.
6991 Return the number of bytes placed in the buffer, or zero
6992 upon failure. */
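/* Editorial example: assuming a 4-byte integer mode and
   UNITS_PER_WORD >= 4, encoding the INTEGER_CST 0x11223344 stores
   the bytes 44 33 22 11 for a little-endian target and 11 22 33 44
   for a big-endian one; the word/byte arithmetic below implements
   this for all WORDS_BIG_ENDIAN / BYTES_BIG_ENDIAN combinations.  */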
6994 static int
6995 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6997 tree type = TREE_TYPE (expr);
6998 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6999 int byte, offset, word, words;
7000 unsigned char value;
7002 if ((off == -1 && total_bytes > len)
7003 || off >= total_bytes)
7004 return 0;
7005 if (off == -1)
7006 off = 0;
7007 words = total_bytes / UNITS_PER_WORD;
7009 for (byte = 0; byte < total_bytes; byte++)
7011 int bitpos = byte * BITS_PER_UNIT;
7012 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7013 number of bytes. */
7014 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7016 if (total_bytes > UNITS_PER_WORD)
7018 word = byte / UNITS_PER_WORD;
7019 if (WORDS_BIG_ENDIAN)
7020 word = (words - 1) - word;
7021 offset = word * UNITS_PER_WORD;
7022 if (BYTES_BIG_ENDIAN)
7023 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7024 else
7025 offset += byte % UNITS_PER_WORD;
7027 else
7028 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7029 if (offset >= off
7030 && offset - off < len)
7031 ptr[offset - off] = value;
7033 return MIN (len, total_bytes - off);
7037 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7038 specified by EXPR into the buffer PTR of length LEN bytes.
7039 Return the number of bytes placed in the buffer, or zero
7040 upon failure. */
7042 static int
7043 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7045 tree type = TREE_TYPE (expr);
7046 machine_mode mode = TYPE_MODE (type);
7047 int total_bytes = GET_MODE_SIZE (mode);
7048 FIXED_VALUE_TYPE value;
7049 tree i_value, i_type;
7051 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7052 return 0;
7054 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7056 if (NULL_TREE == i_type
7057 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7058 return 0;
7060 value = TREE_FIXED_CST (expr);
7061 i_value = double_int_to_tree (i_type, value.data);
7063 return native_encode_int (i_value, ptr, len, off);
7067 /* Subroutine of native_encode_expr. Encode the REAL_CST
7068 specified by EXPR into the buffer PTR of length LEN bytes.
7069 Return the number of bytes placed in the buffer, or zero
7070 upon failure. */
7072 static int
7073 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7075 tree type = TREE_TYPE (expr);
7076 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7077 int byte, offset, word, words, bitpos;
7078 unsigned char value;
7080 /* There are always 32 bits in each long, no matter the size of
7081 the host's long. We handle floating point representations with
7082 up to 192 bits. */
7083 long tmp[6];
7085 if ((off == -1 && total_bytes > len)
7086 || off >= total_bytes)
7087 return 0;
7088 if (off == -1)
7089 off = 0;
7090 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7092 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7094 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7095 bitpos += BITS_PER_UNIT)
7097 byte = (bitpos / BITS_PER_UNIT) & 3;
7098 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7100 if (UNITS_PER_WORD < 4)
7102 word = byte / UNITS_PER_WORD;
7103 if (WORDS_BIG_ENDIAN)
7104 word = (words - 1) - word;
7105 offset = word * UNITS_PER_WORD;
7106 if (BYTES_BIG_ENDIAN)
7107 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7108 else
7109 offset += byte % UNITS_PER_WORD;
7111 else
7112 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7113 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7114 if (offset >= off
7115 && offset - off < len)
7116 ptr[offset - off] = value;
7118 return MIN (len, total_bytes - off);
7121 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7122 specified by EXPR into the buffer PTR of length LEN bytes.
7123 Return the number of bytes placed in the buffer, or zero
7124 upon failure. */
7126 static int
7127 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7129 int rsize, isize;
7130 tree part;
7132 part = TREE_REALPART (expr);
7133 rsize = native_encode_expr (part, ptr, len, off);
7134 if (off == -1
7135 && rsize == 0)
7136 return 0;
7137 part = TREE_IMAGPART (expr);
7138 if (off != -1)
7139 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7140 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7141 if (off == -1
7142 && isize != rsize)
7143 return 0;
7144 return rsize + isize;
7148 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7149 specified by EXPR into the buffer PTR of length LEN bytes.
7150 Return the number of bytes placed in the buffer, or zero
7151 upon failure. */
7153 static int
7154 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7156 unsigned i, count;
7157 int size, offset;
7158 tree itype, elem;
7160 offset = 0;
7161 count = VECTOR_CST_NELTS (expr);
7162 itype = TREE_TYPE (TREE_TYPE (expr));
7163 size = GET_MODE_SIZE (TYPE_MODE (itype));
7164 for (i = 0; i < count; i++)
7166 if (off >= size)
7168 off -= size;
7169 continue;
7171 elem = VECTOR_CST_ELT (expr, i);
7172 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7173 if ((off == -1 && res != size)
7174 || res == 0)
7175 return 0;
7176 offset += res;
7177 if (offset >= len)
7178 return offset;
7179 if (off != -1)
7180 off = 0;
7182 return offset;
7186 /* Subroutine of native_encode_expr. Encode the STRING_CST
7187 specified by EXPR into the buffer PTR of length LEN bytes.
7188 Return the number of bytes placed in the buffer, or zero
7189 upon failure. */
7191 static int
7192 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7194 tree type = TREE_TYPE (expr);
7195 HOST_WIDE_INT total_bytes;
7197 if (TREE_CODE (type) != ARRAY_TYPE
7198 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7199 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7200 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7201 return 0;
7202 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7203 if ((off == -1 && total_bytes > len)
7204 || off >= total_bytes)
7205 return 0;
7206 if (off == -1)
7207 off = 0;
7208 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7210 int written = 0;
7211 if (off < TREE_STRING_LENGTH (expr))
7213 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7214 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7216 memset (ptr + written, 0,
7217 MIN (total_bytes - written, len - written));
7219 else
7220 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7221 return MIN (total_bytes - off, len);
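/* Illustrative sketch (disabled): when the array type is longer than
   the string constant, the tail of the encoding is zero-filled, as for
   char a[8] = "hi";.  The sizes below are illustrative assumptions.  */
#if 0
#include <string.h>

static int
string_pad_sketch (void)
{
  const char *str = "hi";	/* TREE_STRING_LENGTH would be 3 here */
  unsigned char buf[8];
  int written = 3;		/* the two characters plus the NUL */
  memcpy (buf, str, written);
  memset (buf + written, 0, sizeof buf - written);	/* zero the tail */
  return buf[7] == 0;
}
#endif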
7225 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7226 FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7227 buffer PTR of length LEN bytes. If OFF is not -1 then start
7228 the encoding at byte offset OFF and encode at most LEN bytes.
7229 Return the number of bytes placed in the buffer, or zero upon failure. */
7231 int
7232 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7234 switch (TREE_CODE (expr))
7236 case INTEGER_CST:
7237 return native_encode_int (expr, ptr, len, off);
7239 case REAL_CST:
7240 return native_encode_real (expr, ptr, len, off);
7242 case FIXED_CST:
7243 return native_encode_fixed (expr, ptr, len, off);
7245 case COMPLEX_CST:
7246 return native_encode_complex (expr, ptr, len, off);
7248 case VECTOR_CST:
7249 return native_encode_vector (expr, ptr, len, off);
7251 case STRING_CST:
7252 return native_encode_string (expr, ptr, len, off);
7254 default:
7255 return 0;
7260 /* Subroutine of native_interpret_expr. Interpret the contents of
7261 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7262 If the buffer cannot be interpreted, return NULL_TREE. */
7264 static tree
7265 native_interpret_int (tree type, const unsigned char *ptr, int len)
7267 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7269 if (total_bytes > len
7270 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7271 return NULL_TREE;
7273 wide_int result = wi::from_buffer (ptr, total_bytes);
7275 return wide_int_to_tree (type, result);
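/* Illustrative sketch (disabled): wi::from_buffer consumes the bytes
   in target byte order.  For a little-endian target the equivalent
   host-side reassembly of a 32-bit value is the usual shift-and-or,
   assumed below.  */
#if 0
static unsigned int
le32_sketch (const unsigned char *p)
{
  return (unsigned int) p[0]
	 | ((unsigned int) p[1] << 8)
	 | ((unsigned int) p[2] << 16)
	 | ((unsigned int) p[3] << 24);
}
#endif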
7279 /* Subroutine of native_interpret_expr. Interpret the contents of
7280 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7281 If the buffer cannot be interpreted, return NULL_TREE. */
7283 static tree
7284 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7286 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7287 double_int result;
7288 FIXED_VALUE_TYPE fixed_value;
7290 if (total_bytes > len
7291 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7292 return NULL_TREE;
7294 result = double_int::from_buffer (ptr, total_bytes);
7295 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7297 return build_fixed (type, fixed_value);
7301 /* Subroutine of native_interpret_expr. Interpret the contents of
7302 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7303 If the buffer cannot be interpreted, return NULL_TREE. */
7305 static tree
7306 native_interpret_real (tree type, const unsigned char *ptr, int len)
7308 machine_mode mode = TYPE_MODE (type);
7309 int total_bytes = GET_MODE_SIZE (mode);
7310 int byte, offset, word, words, bitpos;
7311 unsigned char value;
7312 /* There are always 32 bits in each long, no matter the size of
7313 the host's long. We handle floating-point representations with
7314 up to 192 bits. */
7315 REAL_VALUE_TYPE r;
7316 long tmp[6];
7318 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7319 if (total_bytes > len || total_bytes > 24)
7320 return NULL_TREE;
7321 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7323 memset (tmp, 0, sizeof (tmp));
7324 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7325 bitpos += BITS_PER_UNIT)
7327 byte = (bitpos / BITS_PER_UNIT) & 3;
7328 if (UNITS_PER_WORD < 4)
7330 word = byte / UNITS_PER_WORD;
7331 if (WORDS_BIG_ENDIAN)
7332 word = (words - 1) - word;
7333 offset = word * UNITS_PER_WORD;
7334 if (BYTES_BIG_ENDIAN)
7335 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7336 else
7337 offset += byte % UNITS_PER_WORD;
7339 else
7340 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7341 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7343 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7346 real_from_target (&r, tmp, mode);
7347 return build_real (type, r);
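/* Illustrative sketch (disabled): the loop above accumulates the image
   into host longs 32 bits at a time, independent of the host long's
   actual width.  The helper below mirrors just that accumulation
   step.  */
#if 0
static void
pack32_sketch (long *tmp, int bitpos, unsigned char value)
{
  tmp[bitpos / 32] |= (unsigned long) value << (bitpos & 31);
}
#endif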
7351 /* Subroutine of native_interpret_expr. Interpret the contents of
7352 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7353 If the buffer cannot be interpreted, return NULL_TREE. */
7355 static tree
7356 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7358 tree etype, rpart, ipart;
7359 int size;
7361 etype = TREE_TYPE (type);
7362 size = GET_MODE_SIZE (TYPE_MODE (etype));
7363 if (size * 2 > len)
7364 return NULL_TREE;
7365 rpart = native_interpret_expr (etype, ptr, size);
7366 if (!rpart)
7367 return NULL_TREE;
7368 ipart = native_interpret_expr (etype, ptr+size, size);
7369 if (!ipart)
7370 return NULL_TREE;
7371 return build_complex (type, rpart, ipart);
7375 /* Subroutine of native_interpret_expr. Interpret the contents of
7376 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7377 If the buffer cannot be interpreted, return NULL_TREE. */
7379 static tree
7380 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7382 tree etype, elem;
7383 int i, size, count;
7384 tree *elements;
7386 etype = TREE_TYPE (type);
7387 size = GET_MODE_SIZE (TYPE_MODE (etype));
7388 count = TYPE_VECTOR_SUBPARTS (type);
7389 if (size * count > len)
7390 return NULL_TREE;
7392 elements = XALLOCAVEC (tree, count);
7393 for (i = count - 1; i >= 0; i--)
7395 elem = native_interpret_expr (etype, ptr+(i*size), size);
7396 if (!elem)
7397 return NULL_TREE;
7398 elements[i] = elem;
7400 return build_vector (type, elements);
7404 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7405 the buffer PTR of length LEN as a constant of type TYPE. For
7406 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7407 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7408 return NULL_TREE. */
7410 tree
7411 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7413 switch (TREE_CODE (type))
7415 case INTEGER_TYPE:
7416 case ENUMERAL_TYPE:
7417 case BOOLEAN_TYPE:
7418 case POINTER_TYPE:
7419 case REFERENCE_TYPE:
7420 return native_interpret_int (type, ptr, len);
7422 case REAL_TYPE:
7423 return native_interpret_real (type, ptr, len);
7425 case FIXED_POINT_TYPE:
7426 return native_interpret_fixed (type, ptr, len);
7428 case COMPLEX_TYPE:
7429 return native_interpret_complex (type, ptr, len);
7431 case VECTOR_TYPE:
7432 return native_interpret_vector (type, ptr, len);
7434 default:
7435 return NULL_TREE;
7439 /* Returns true if we can interpret the contents of a native encoding
7440 as TYPE. */
7442 static bool
7443 can_native_interpret_type_p (tree type)
7445 switch (TREE_CODE (type))
7447 case INTEGER_TYPE:
7448 case ENUMERAL_TYPE:
7449 case BOOLEAN_TYPE:
7450 case POINTER_TYPE:
7451 case REFERENCE_TYPE:
7452 case FIXED_POINT_TYPE:
7453 case REAL_TYPE:
7454 case COMPLEX_TYPE:
7455 case VECTOR_TYPE:
7456 return true;
7457 default:
7458 return false;
7462 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7463 TYPE at compile-time. If we're unable to perform the conversion
7464 return NULL_TREE. */
7466 static tree
7467 fold_view_convert_expr (tree type, tree expr)
7469 /* We support up to 512-bit values (for V8DFmode). */
7470 unsigned char buffer[64];
7471 int len;
7473 /* Check that the host and target are sane. */
7474 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7475 return NULL_TREE;
7477 len = native_encode_expr (expr, buffer, sizeof (buffer));
7478 if (len == 0)
7479 return NULL_TREE;
7481 return native_interpret_expr (type, buffer, len);
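/* Illustrative sketch (disabled): the net effect of
   fold_view_convert_expr on VIEW_CONVERT_EXPR <int> (1.0f) is the
   bit-level reinterpretation a host memcpy performs.  The sketch
   assumes a 32-bit int and IEEE single precision, in which 1.0f is
   0x3f800000.  */
#if 0
#include <string.h>

static int
view_convert_sketch (void)
{
  float f = 1.0f;
  int i;
  memcpy (&i, &f, sizeof i);	/* encode, then reinterpret the bytes */
  return i == 0x3f800000;
}
#endif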
7484 /* Build an expression for the address of T. Folds away INDIRECT_REF
7485 to avoid confusing the gimplify process. */
7487 tree
7488 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7490 /* The size of the object is not relevant when talking about its address. */
7491 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7492 t = TREE_OPERAND (t, 0);
7494 if (TREE_CODE (t) == INDIRECT_REF)
7496 t = TREE_OPERAND (t, 0);
7498 if (TREE_TYPE (t) != ptrtype)
7499 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7501 else if (TREE_CODE (t) == MEM_REF
7502 && integer_zerop (TREE_OPERAND (t, 1)))
7503 return TREE_OPERAND (t, 0);
7504 else if (TREE_CODE (t) == MEM_REF
7505 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7506 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7507 TREE_OPERAND (t, 0),
7508 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7509 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7511 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7513 if (TREE_TYPE (t) != ptrtype)
7514 t = fold_convert_loc (loc, ptrtype, t);
7516 else
7517 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7519 return t;
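/* Illustrative sketch (disabled): the INDIRECT_REF case above means an
   address like &*p folds straight back to p (with a conversion if the
   pointer types differ), so the gimplifier never sees the
   dereference.  */
#if 0
static int *
addr_of_deref_sketch (int *p)
{
  return &*p;			/* folds to plain P */
}
#endif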
7522 /* Build an expression for the address of T. */
7524 tree
7525 build_fold_addr_expr_loc (location_t loc, tree t)
7527 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7529 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7532 static bool vec_cst_ctor_to_array (tree, tree *);
7534 /* Fold a unary expression of code CODE and type TYPE with operand
7535 OP0. Return the folded expression if folding is successful.
7536 Otherwise, return NULL_TREE. */
7538 tree
7539 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7541 tree tem;
7542 tree arg0;
7543 enum tree_code_class kind = TREE_CODE_CLASS (code);
7545 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7546 && TREE_CODE_LENGTH (code) == 1);
7548 tem = generic_simplify (loc, code, type, op0);
7549 if (tem)
7550 return tem;
7552 arg0 = op0;
7553 if (arg0)
7555 if (CONVERT_EXPR_CODE_P (code)
7556 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7558 /* Don't use STRIP_NOPS, because signedness of argument type
7559 matters. */
7560 STRIP_SIGN_NOPS (arg0);
7562 else
7564 /* Strip any conversions that don't change the mode. This
7565 is safe for every expression, except for a comparison
7566 expression because its signedness is derived from its
7567 operands.
7569 Note that this is done as an internal manipulation within
7570 the constant folder, in order to find the simplest
7571 representation of the arguments so that their form can be
7572 studied. In any case, the appropriate type conversions
7573 should be put back in the tree that will get out of the
7574 constant folder. */
7575 STRIP_NOPS (arg0);
7579 if (TREE_CODE_CLASS (code) == tcc_unary)
7581 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7582 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7583 fold_build1_loc (loc, code, type,
7584 fold_convert_loc (loc, TREE_TYPE (op0),
7585 TREE_OPERAND (arg0, 1))));
7586 else if (TREE_CODE (arg0) == COND_EXPR)
7588 tree arg01 = TREE_OPERAND (arg0, 1);
7589 tree arg02 = TREE_OPERAND (arg0, 2);
7590 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7591 arg01 = fold_build1_loc (loc, code, type,
7592 fold_convert_loc (loc,
7593 TREE_TYPE (op0), arg01));
7594 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7595 arg02 = fold_build1_loc (loc, code, type,
7596 fold_convert_loc (loc,
7597 TREE_TYPE (op0), arg02));
7598 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7599 arg01, arg02);
7601 /* If this was a conversion, and all we did was to move it
7602 inside the COND_EXPR, bring it back out. But leave it if
7603 it is a conversion from integer to integer and the
7604 result precision is no wider than a word since such a
7605 conversion is cheap and may be optimized away by combine,
7606 while it couldn't if it were outside the COND_EXPR. Then return
7607 so we don't get into an infinite recursion loop taking the
7608 conversion out and then back in. */
7610 if ((CONVERT_EXPR_CODE_P (code)
7611 || code == NON_LVALUE_EXPR)
7612 && TREE_CODE (tem) == COND_EXPR
7613 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7614 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7615 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7616 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7617 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7618 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7619 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7620 && (INTEGRAL_TYPE_P
7621 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7622 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7623 || flag_syntax_only))
7624 tem = build1_loc (loc, code, type,
7625 build3 (COND_EXPR,
7626 TREE_TYPE (TREE_OPERAND
7627 (TREE_OPERAND (tem, 1), 0)),
7628 TREE_OPERAND (tem, 0),
7629 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7630 TREE_OPERAND (TREE_OPERAND (tem, 2),
7631 0)));
7632 return tem;
7636 switch (code)
7638 case NON_LVALUE_EXPR:
7639 if (!maybe_lvalue_p (op0))
7640 return fold_convert_loc (loc, type, op0);
7641 return NULL_TREE;
7643 CASE_CONVERT:
7644 case FLOAT_EXPR:
7645 case FIX_TRUNC_EXPR:
7646 if (COMPARISON_CLASS_P (op0))
7648 /* If we have (type) (a CMP b) and type is an integral type, return
7649 new expression involving the new type. Canonicalize
7650 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7651 non-integral type.
7652 Do not fold the result as that would not simplify further; also,
7653 folding again would result in infinite recursion. */
7654 if (TREE_CODE (type) == BOOLEAN_TYPE)
7655 return build2_loc (loc, TREE_CODE (op0), type,
7656 TREE_OPERAND (op0, 0),
7657 TREE_OPERAND (op0, 1));
7658 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7659 && TREE_CODE (type) != VECTOR_TYPE)
7660 return build3_loc (loc, COND_EXPR, type, op0,
7661 constant_boolean_node (true, type),
7662 constant_boolean_node (false, type));
7665 /* Handle (T *)&A.B.C for A being of type T and B and C
7666 living at offset zero. This occurs frequently in
7667 C++ upcasting and then accessing the base. */
7668 if (TREE_CODE (op0) == ADDR_EXPR
7669 && POINTER_TYPE_P (type)
7670 && handled_component_p (TREE_OPERAND (op0, 0)))
7672 HOST_WIDE_INT bitsize, bitpos;
7673 tree offset;
7674 machine_mode mode;
7675 int unsignedp, volatilep;
7676 tree base = TREE_OPERAND (op0, 0);
7677 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7678 &mode, &unsignedp, &volatilep, false);
7679 /* If the reference was to a (constant) zero offset, we can use
7680 the address of the base if it has the same base type
7681 as the result type and the pointer type is unqualified. */
7682 if (! offset && bitpos == 0
7683 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7684 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7685 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7686 return fold_convert_loc (loc, type,
7687 build_fold_addr_expr_loc (loc, base));
7690 if (TREE_CODE (op0) == MODIFY_EXPR
7691 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7692 /* Detect assigning a bitfield. */
7693 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7694 && DECL_BIT_FIELD
7695 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7697 /* Don't leave an assignment inside a conversion
7698 unless assigning a bitfield. */
7699 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7700 /* First do the assignment, then return converted constant. */
7701 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7702 TREE_NO_WARNING (tem) = 1;
7703 TREE_USED (tem) = 1;
7704 return tem;
7707 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7708 constant (if x has signed type, the sign bit cannot be set
7709 in c). This folds extension into the BIT_AND_EXPR.
7710 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7711 very likely don't have maximal range for their precision and this
7712 transformation effectively doesn't preserve non-maximal ranges. */
7713 if (TREE_CODE (type) == INTEGER_TYPE
7714 && TREE_CODE (op0) == BIT_AND_EXPR
7715 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7717 tree and_expr = op0;
7718 tree and0 = TREE_OPERAND (and_expr, 0);
7719 tree and1 = TREE_OPERAND (and_expr, 1);
7720 int change = 0;
7722 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7723 || (TYPE_PRECISION (type)
7724 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7725 change = 1;
7726 else if (TYPE_PRECISION (TREE_TYPE (and1))
7727 <= HOST_BITS_PER_WIDE_INT
7728 && tree_fits_uhwi_p (and1))
7730 unsigned HOST_WIDE_INT cst;
7732 cst = tree_to_uhwi (and1);
7733 cst &= HOST_WIDE_INT_M1U
7734 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7735 change = (cst == 0);
7736 #ifdef LOAD_EXTEND_OP
7737 if (change
7738 && !flag_syntax_only
7739 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7740 == ZERO_EXTEND))
7742 tree uns = unsigned_type_for (TREE_TYPE (and0));
7743 and0 = fold_convert_loc (loc, uns, and0);
7744 and1 = fold_convert_loc (loc, uns, and1);
7746 #endif
7748 if (change)
7750 tem = force_fit_type (type, wi::to_widest (and1), 0,
7751 TREE_OVERFLOW (and1));
7752 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7753 fold_convert_loc (loc, type, and0), tem);
7757 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7758 when one of the new casts will fold away. Conservatively we assume
7759 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7760 if (POINTER_TYPE_P (type)
7761 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7762 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7763 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7764 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7765 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7767 tree arg00 = TREE_OPERAND (arg0, 0);
7768 tree arg01 = TREE_OPERAND (arg0, 1);
7770 return fold_build_pointer_plus_loc
7771 (loc, fold_convert_loc (loc, type, arg00), arg01);
7774 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7775 of the same precision, and X is an integer type not narrower than
7776 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7777 if (INTEGRAL_TYPE_P (type)
7778 && TREE_CODE (op0) == BIT_NOT_EXPR
7779 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7780 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7781 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7783 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7784 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7785 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7786 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7787 fold_convert_loc (loc, type, tem));
7790 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7791 type of X and Y (integer types only). */
7792 if (INTEGRAL_TYPE_P (type)
7793 && TREE_CODE (op0) == MULT_EXPR
7794 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7795 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7797 /* Be careful not to introduce new overflows. */
7798 tree mult_type;
7799 if (TYPE_OVERFLOW_WRAPS (type))
7800 mult_type = type;
7801 else
7802 mult_type = unsigned_type_for (type);
7804 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7806 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7807 fold_convert_loc (loc, mult_type,
7808 TREE_OPERAND (op0, 0)),
7809 fold_convert_loc (loc, mult_type,
7810 TREE_OPERAND (op0, 1)));
7811 return fold_convert_loc (loc, type, tem);
7815 tem = fold_convert_const (code, type, arg0);
7816 return tem ? tem : NULL_TREE;
7818 case ADDR_SPACE_CONVERT_EXPR:
7819 if (integer_zerop (arg0))
7820 return fold_convert_const (code, type, arg0);
7821 return NULL_TREE;
7823 case FIXED_CONVERT_EXPR:
7824 tem = fold_convert_const (code, type, arg0);
7825 return tem ? tem : NULL_TREE;
7827 case VIEW_CONVERT_EXPR:
7828 if (TREE_CODE (op0) == MEM_REF)
7829 return fold_build2_loc (loc, MEM_REF, type,
7830 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7832 return fold_view_convert_expr (type, op0);
7834 case NEGATE_EXPR:
7835 tem = fold_negate_expr (loc, arg0);
7836 if (tem)
7837 return fold_convert_loc (loc, type, tem);
7838 return NULL_TREE;
7840 case ABS_EXPR:
7841 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7842 return fold_abs_const (arg0, type);
7843 /* Convert fabs((double)float) into (double)fabsf(float). */
7844 else if (TREE_CODE (arg0) == NOP_EXPR
7845 && TREE_CODE (type) == REAL_TYPE)
7847 tree targ0 = strip_float_extensions (arg0);
7848 if (targ0 != arg0)
7849 return fold_convert_loc (loc, type,
7850 fold_build1_loc (loc, ABS_EXPR,
7851 TREE_TYPE (targ0),
7852 targ0));
7854 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7855 else if (TREE_CODE (arg0) == ABS_EXPR)
7856 return arg0;
7858 /* Strip sign ops from argument. */
7859 if (TREE_CODE (type) == REAL_TYPE)
7861 tem = fold_strip_sign_ops (arg0);
7862 if (tem)
7863 return fold_build1_loc (loc, ABS_EXPR, type,
7864 fold_convert_loc (loc, type, tem));
7866 return NULL_TREE;
7868 case CONJ_EXPR:
7869 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7870 return fold_convert_loc (loc, type, arg0);
7871 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7873 tree itype = TREE_TYPE (type);
7874 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7875 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7876 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7877 negate_expr (ipart));
7879 if (TREE_CODE (arg0) == COMPLEX_CST)
7881 tree itype = TREE_TYPE (type);
7882 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
7883 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
7884 return build_complex (type, rpart, negate_expr (ipart));
7886 if (TREE_CODE (arg0) == CONJ_EXPR)
7887 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7888 return NULL_TREE;
7890 case BIT_NOT_EXPR:
7891 if (TREE_CODE (arg0) == INTEGER_CST)
7892 return fold_not_const (arg0, type);
7893 /* Convert ~ (-A) to A - 1. */
7894 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7895 return fold_build2_loc (loc, MINUS_EXPR, type,
7896 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
7897 build_int_cst (type, 1));
7898 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7899 else if (INTEGRAL_TYPE_P (type)
7900 && ((TREE_CODE (arg0) == MINUS_EXPR
7901 && integer_onep (TREE_OPERAND (arg0, 1)))
7902 || (TREE_CODE (arg0) == PLUS_EXPR
7903 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7904 return fold_build1_loc (loc, NEGATE_EXPR, type,
7905 fold_convert_loc (loc, type,
7906 TREE_OPERAND (arg0, 0)));
7907 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7908 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7909 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7910 fold_convert_loc (loc, type,
7911 TREE_OPERAND (arg0, 0)))))
7912 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7913 fold_convert_loc (loc, type,
7914 TREE_OPERAND (arg0, 1)));
7915 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7916 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7917 fold_convert_loc (loc, type,
7918 TREE_OPERAND (arg0, 1)))))
7919 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7920 fold_convert_loc (loc, type,
7921 TREE_OPERAND (arg0, 0)), tem);
7922 /* Perform BIT_NOT_EXPR on each element individually. */
7923 else if (TREE_CODE (arg0) == VECTOR_CST)
7925 tree *elements;
7926 tree elem;
7927 unsigned count = VECTOR_CST_NELTS (arg0), i;
7929 elements = XALLOCAVEC (tree, count);
7930 for (i = 0; i < count; i++)
7932 elem = VECTOR_CST_ELT (arg0, i);
7933 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
7934 if (elem == NULL_TREE)
7935 break;
7936 elements[i] = elem;
7938 if (i == count)
7939 return build_vector (type, elements);
7941 else if (COMPARISON_CLASS_P (arg0)
7942 && (VECTOR_TYPE_P (type)
7943 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
7945 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
7946 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
7947 HONOR_NANS (TYPE_MODE (op_type)));
7948 if (subcode != ERROR_MARK)
7949 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
7950 TREE_OPERAND (arg0, 1));
7954 return NULL_TREE;
7956 case TRUTH_NOT_EXPR:
7957 /* Note that the operand of this must be an int
7958 and its values must be 0 or 1.
7959 ("true" is a fixed value perhaps depending on the language,
7960 but we don't handle values other than 1 correctly yet.) */
7961 tem = fold_truth_not_expr (loc, arg0);
7962 if (!tem)
7963 return NULL_TREE;
7964 return fold_convert_loc (loc, type, tem);
7966 case REALPART_EXPR:
7967 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7968 return fold_convert_loc (loc, type, arg0);
7969 if (TREE_CODE (arg0) == COMPLEX_CST)
7970 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
7971 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7973 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7974 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7975 fold_build1_loc (loc, REALPART_EXPR, itype,
7976 TREE_OPERAND (arg0, 0)),
7977 fold_build1_loc (loc, REALPART_EXPR, itype,
7978 TREE_OPERAND (arg0, 1)));
7979 return fold_convert_loc (loc, type, tem);
7981 if (TREE_CODE (arg0) == CONJ_EXPR)
7983 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7984 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
7985 TREE_OPERAND (arg0, 0));
7986 return fold_convert_loc (loc, type, tem);
7988 if (TREE_CODE (arg0) == CALL_EXPR)
7990 tree fn = get_callee_fndecl (arg0);
7991 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7992 switch (DECL_FUNCTION_CODE (fn))
7994 CASE_FLT_FN (BUILT_IN_CEXPI):
7995 fn = mathfn_built_in (type, BUILT_IN_COS);
7996 if (fn)
7997 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
7998 break;
8000 default:
8001 break;
8004 return NULL_TREE;
8006 case IMAGPART_EXPR:
8007 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8008 return build_zero_cst (type);
8009 if (TREE_CODE (arg0) == COMPLEX_CST)
8010 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8011 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8013 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8014 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8015 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8016 TREE_OPERAND (arg0, 0)),
8017 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8018 TREE_OPERAND (arg0, 1)));
8019 return fold_convert_loc (loc, type, tem);
8021 if (TREE_CODE (arg0) == CONJ_EXPR)
8023 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8024 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8025 return fold_convert_loc (loc, type, negate_expr (tem));
8027 if (TREE_CODE (arg0) == CALL_EXPR)
8029 tree fn = get_callee_fndecl (arg0);
8030 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8031 switch (DECL_FUNCTION_CODE (fn))
8033 CASE_FLT_FN (BUILT_IN_CEXPI):
8034 fn = mathfn_built_in (type, BUILT_IN_SIN);
8035 if (fn)
8036 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8037 break;
8039 default:
8040 break;
8043 return NULL_TREE;
8045 case INDIRECT_REF:
8046 /* Fold *&X to X if X is an lvalue. */
8047 if (TREE_CODE (op0) == ADDR_EXPR)
8049 tree op00 = TREE_OPERAND (op0, 0);
8050 if ((TREE_CODE (op00) == VAR_DECL
8051 || TREE_CODE (op00) == PARM_DECL
8052 || TREE_CODE (op00) == RESULT_DECL)
8053 && !TREE_READONLY (op00))
8054 return op00;
8056 return NULL_TREE;
8058 case VEC_UNPACK_LO_EXPR:
8059 case VEC_UNPACK_HI_EXPR:
8060 case VEC_UNPACK_FLOAT_LO_EXPR:
8061 case VEC_UNPACK_FLOAT_HI_EXPR:
8063 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8064 tree *elts;
8065 enum tree_code subcode;
8067 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8068 if (TREE_CODE (arg0) != VECTOR_CST)
8069 return NULL_TREE;
8071 elts = XALLOCAVEC (tree, nelts * 2);
8072 if (!vec_cst_ctor_to_array (arg0, elts))
8073 return NULL_TREE;
8075 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8076 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8077 elts += nelts;
8079 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8080 subcode = NOP_EXPR;
8081 else
8082 subcode = FLOAT_EXPR;
8084 for (i = 0; i < nelts; i++)
8086 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8087 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8088 return NULL_TREE;
8091 return build_vector (type, elts);
8094 case REDUC_MIN_EXPR:
8095 case REDUC_MAX_EXPR:
8096 case REDUC_PLUS_EXPR:
8098 unsigned int nelts, i;
8099 tree *elts;
8100 enum tree_code subcode;
8102 if (TREE_CODE (op0) != VECTOR_CST)
8103 return NULL_TREE;
8104 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8106 elts = XALLOCAVEC (tree, nelts);
8107 if (!vec_cst_ctor_to_array (op0, elts))
8108 return NULL_TREE;
8110 switch (code)
8112 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8113 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8114 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8115 default: gcc_unreachable ();
8118 for (i = 1; i < nelts; i++)
8120 elts[0] = const_binop (subcode, elts[0], elts[i]);
8121 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8122 return NULL_TREE;
8125 return elts[0];
8128 default:
8129 return NULL_TREE;
8130 } /* switch (code) */
8134 /* If the operation was a conversion do _not_ mark a resulting constant
8135 with TREE_OVERFLOW if the original constant was not. These conversions
8136 have implementation defined behavior and retaining the TREE_OVERFLOW
8137 flag here would confuse later passes such as VRP. */
8138 tree
8139 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8140 tree type, tree op0)
8142 tree res = fold_unary_loc (loc, code, type, op0);
8143 if (res
8144 && TREE_CODE (res) == INTEGER_CST
8145 && TREE_CODE (op0) == INTEGER_CST
8146 && CONVERT_EXPR_CODE_P (code))
8147 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8149 return res;
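/* Illustrative sketch (disabled): the kind of conversion meant above.
   Narrowing an out-of-range unsigned constant to int is
   implementation-defined rather than undefined, so the folded result
   must not carry TREE_OVERFLOW.  The values below are illustrative
   assumptions.  */
#if 0
static int
narrowing_sketch (void)
{
  unsigned int u = 0xffffffffu;
  /* Implementation-defined; -1 on the usual two's-complement targets.  */
  return (int) u;
}
#endif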
8152 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8153 operands OP0 and OP1. LOC is the location of the resulting expression.
8154 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8155 Return the folded expression if folding is successful. Otherwise,
8156 return NULL_TREE. */
8157 static tree
8158 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8159 tree arg0, tree arg1, tree op0, tree op1)
8161 tree tem;
8163 /* We only do these simplifications if we are optimizing. */
8164 if (!optimize)
8165 return NULL_TREE;
8167 /* Check for things like (A || B) && (A || C). We can convert this
8168 to A || (B && C). Note that either operator can be any of the four
8169 truth and/or operations and the transformation will still be
8170 valid. Also note that we only care about order for the
8171 ANDIF and ORIF operators. If B contains side effects, this
8172 might change the truth-value of A. */
8173 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8174 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8175 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8176 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8177 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8178 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8180 tree a00 = TREE_OPERAND (arg0, 0);
8181 tree a01 = TREE_OPERAND (arg0, 1);
8182 tree a10 = TREE_OPERAND (arg1, 0);
8183 tree a11 = TREE_OPERAND (arg1, 1);
8184 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8185 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8186 && (code == TRUTH_AND_EXPR
8187 || code == TRUTH_OR_EXPR));
8189 if (operand_equal_p (a00, a10, 0))
8190 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8191 fold_build2_loc (loc, code, type, a01, a11));
8192 else if (commutative && operand_equal_p (a00, a11, 0))
8193 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8194 fold_build2_loc (loc, code, type, a01, a10));
8195 else if (commutative && operand_equal_p (a01, a10, 0))
8196 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8197 fold_build2_loc (loc, code, type, a00, a11));
8199 /* This case is tricky because we must either have commutative
8200 operators or else A10 must not have side-effects. */
8202 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8203 && operand_equal_p (a01, a11, 0))
8204 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8205 fold_build2_loc (loc, code, type, a00, a10),
8206 a01);
8209 /* See if we can build a range comparison. */
8210 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8211 return tem;
8213 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8214 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8216 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8217 if (tem)
8218 return fold_build2_loc (loc, code, type, tem, arg1);
8221 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8222 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8224 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8225 if (tem)
8226 return fold_build2_loc (loc, code, type, arg0, tem);
8229 /* Check for the possibility of merging component references. If our
8230 lhs is another similar operation, try to merge its rhs with our
8231 rhs. Then try to merge our lhs and rhs. */
8232 if (TREE_CODE (arg0) == code
8233 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8234 TREE_OPERAND (arg0, 1), arg1)))
8235 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8237 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8238 return tem;
8240 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8241 && (code == TRUTH_AND_EXPR
8242 || code == TRUTH_ANDIF_EXPR
8243 || code == TRUTH_OR_EXPR
8244 || code == TRUTH_ORIF_EXPR))
8246 enum tree_code ncode, icode;
8248 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8249 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8250 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8252 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8253 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8254 We don't want to pack more than two leaves into a non-IF AND/OR
8255 expression.
8256 If the tree code of the left-hand operand isn't an AND/OR-IF code
8257 and isn't equal to IF-CODE, then we don't want to add the right-hand
8258 operand. If the inner right-hand side of the left-hand operand has
8259 side-effects, or isn't simple, then we can't add to it,
8260 as otherwise we might destroy the if-sequence. */
8261 if (TREE_CODE (arg0) == icode
8262 && simple_operand_p_2 (arg1)
8263 /* Needed for sequence points to handle trappings, and
8264 side-effects. */
8265 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8267 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8268 arg1);
8269 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8270 tem);
8272 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8273 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8274 else if (TREE_CODE (arg1) == icode
8275 && simple_operand_p_2 (arg0)
8276 /* Needed for sequence points to handle trappings, and
8277 side-effects. */
8278 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8280 tem = fold_build2_loc (loc, ncode, type,
8281 arg0, TREE_OPERAND (arg1, 0));
8282 return fold_build2_loc (loc, icode, type, tem,
8283 TREE_OPERAND (arg1, 1));
8285 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8286 into (A OR B).
8287 For sequence point consistency, we need to check for trapping,
8288 and side-effects. */
8289 else if (code == icode && simple_operand_p_2 (arg0)
8290 && simple_operand_p_2 (arg1))
8291 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8294 return NULL_TREE;
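/* Illustrative sketch (disabled): an exhaustive check of the first
   rewrite in fold_truth_andor, (A || B) && (A || C) == A || (B && C),
   over all boolean inputs.  Side-effect-free operands are assumed,
   exactly as the code requires.  */
#if 0
static int
andor_distribute_sketch (void)
{
  int a, b, c;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      for (c = 0; c <= 1; c++)
	if (((a || b) && (a || c)) != (a || (b && c)))
	  return 0;
  return 1;			/* the identity holds for all inputs */
}
#endif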
8297 /* Fold a binary expression of code CODE and type TYPE with operands
8298 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8299 Return the folded expression if folding is successful. Otherwise,
8300 return NULL_TREE. */
8302 static tree
8303 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8305 enum tree_code compl_code;
8307 if (code == MIN_EXPR)
8308 compl_code = MAX_EXPR;
8309 else if (code == MAX_EXPR)
8310 compl_code = MIN_EXPR;
8311 else
8312 gcc_unreachable ();
8314 /* MIN (MAX (a, b), b) == b. */
8315 if (TREE_CODE (op0) == compl_code
8316 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8317 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8319 /* MIN (MAX (b, a), b) == b. */
8320 if (TREE_CODE (op0) == compl_code
8321 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8322 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8323 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8325 /* MIN (a, MAX (a, b)) == a. */
8326 if (TREE_CODE (op1) == compl_code
8327 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8328 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8329 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8331 /* MIN (a, MAX (b, a)) == a. */
8332 if (TREE_CODE (op1) == compl_code
8333 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8334 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8335 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8337 return NULL_TREE;
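/* Illustrative sketch (disabled): the identities above written with
   ordinary comparisons, e.g. MIN (MAX (a, b), b) == b.  MINS and MAXS
   are local helper macros assumed only for this sketch.  */
#if 0
#define MINS(x, y) ((x) < (y) ? (x) : (y))
#define MAXS(x, y) ((x) > (y) ? (x) : (y))

static int
minmax_sketch (int a, int b)
{
  return MINS (MAXS (a, b), b) == b;	/* always 1 */
}
#endif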
8340 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8341 by changing CODE to reduce the magnitude of constants involved in
8342 ARG0 of the comparison.
8343 Returns a canonicalized comparison tree if a simplification was
8344 possible, otherwise returns NULL_TREE.
8345 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8346 valid if signed overflow is undefined. */
8348 static tree
8349 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8350 tree arg0, tree arg1,
8351 bool *strict_overflow_p)
8353 enum tree_code code0 = TREE_CODE (arg0);
8354 tree t, cst0 = NULL_TREE;
8355 int sgn0;
8356 bool swap = false;
8358 /* Match A +- CST code arg1 and CST code arg1. We can change the
8359 first form only if overflow is undefined. */
8360 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8361 /* In principle pointers also have undefined overflow behavior,
8362 but that causes problems elsewhere. */
8363 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8364 && (code0 == MINUS_EXPR
8365 || code0 == PLUS_EXPR)
8366 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8367 || code0 == INTEGER_CST))
8368 return NULL_TREE;
8370 /* Identify the constant in arg0 and its sign. */
8371 if (code0 == INTEGER_CST)
8372 cst0 = arg0;
8373 else
8374 cst0 = TREE_OPERAND (arg0, 1);
8375 sgn0 = tree_int_cst_sgn (cst0);
8377 /* Overflowed constants and zero will cause problems. */
8378 if (integer_zerop (cst0)
8379 || TREE_OVERFLOW (cst0))
8380 return NULL_TREE;
8382 /* See if we can reduce the magnitude of the constant in
8383 arg0 by changing the comparison code. */
8384 if (code0 == INTEGER_CST)
8386 /* CST <= arg1 -> CST-1 < arg1. */
8387 if (code == LE_EXPR && sgn0 == 1)
8388 code = LT_EXPR;
8389 /* -CST < arg1 -> -CST-1 <= arg1. */
8390 else if (code == LT_EXPR && sgn0 == -1)
8391 code = LE_EXPR;
8392 /* CST > arg1 -> CST-1 >= arg1. */
8393 else if (code == GT_EXPR && sgn0 == 1)
8394 code = GE_EXPR;
8395 /* -CST >= arg1 -> -CST-1 > arg1. */
8396 else if (code == GE_EXPR && sgn0 == -1)
8397 code = GT_EXPR;
8398 else
8399 return NULL_TREE;
8400 /* arg1 code' CST' might be more canonical. */
8401 swap = true;
8403 else
8405 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8406 if (code == LT_EXPR
8407 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8408 code = LE_EXPR;
8409 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8410 else if (code == GT_EXPR
8411 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8412 code = GE_EXPR;
8413 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8414 else if (code == LE_EXPR
8415 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8416 code = LT_EXPR;
8417 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8418 else if (code == GE_EXPR
8419 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8420 code = GT_EXPR;
8421 else
8422 return NULL_TREE;
8423 *strict_overflow_p = true;
8426 /* Now build the constant reduced in magnitude. But not if that
8427 would produce one outside of its type's range. */
8428 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8429 && ((sgn0 == 1
8430 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8431 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8432 || (sgn0 == -1
8433 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8434 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8435 /* We cannot swap the comparison here as that would cause us to
8436 endlessly recurse. */
8437 return NULL_TREE;
8439 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8440 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8441 if (code0 != INTEGER_CST)
8442 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8443 t = fold_convert (TREE_TYPE (arg1), t);
8445 /* If swapping might yield a more canonical form, do so. */
8446 if (swap)
8447 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8448 else
8449 return fold_build2_loc (loc, code, type, t, arg1);
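/* Illustrative sketch (disabled): one instance of the rewrites above
   is x + 3 <= y becoming x + 2 < y, which the undefined-overflow
   assumption licenses.  The check below assumes neither addition
   overflows, which is exactly the TYPE_OVERFLOW_UNDEFINED
   precondition tested above.  */
#if 0
static int
reduce_magnitude_sketch (int x, int y)
{
  return (x + 3 <= y) == (x + 2 < y);	/* always 1 absent overflow */
}
#endif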
8452 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8453 overflow further. Try to decrease the magnitude of constants involved
8454 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8455 and put sole constants at the second argument position.
8456 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8458 static tree
8459 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8460 tree arg0, tree arg1)
8462 tree t;
8463 bool strict_overflow_p;
8464 const char * const warnmsg = G_("assuming signed overflow does not occur "
8465 "when reducing constant in comparison");
8467 /* Try canonicalization by simplifying arg0. */
8468 strict_overflow_p = false;
8469 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8470 &strict_overflow_p);
8471 if (t)
8473 if (strict_overflow_p)
8474 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8475 return t;
8478 /* Try canonicalization by simplifying arg1 using the swapped
8479 comparison. */
8480 code = swap_tree_comparison (code);
8481 strict_overflow_p = false;
8482 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8483 &strict_overflow_p);
8484 if (t && strict_overflow_p)
8485 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8486 return t;
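/* Illustrative sketch (disabled): the second attempt above moves a
   sole constant to the right-hand side via the swapped comparison,
   e.g. 3 < x is handled as x > 3, after which the constant-reduction
   rewrites can fire.  */
#if 0
static int
swap_comparison_sketch (int x)
{
  return (3 < x) == (x > 3);	/* always 1 */
}
#endif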
8489 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8490 space. This is used to avoid issuing overflow warnings for
8491 expressions like &p->x which cannot wrap. */
8493 static bool
8494 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8496 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8497 return true;
8499 if (bitpos < 0)
8500 return true;
8502 wide_int wi_offset;
8503 int precision = TYPE_PRECISION (TREE_TYPE (base));
8504 if (offset == NULL_TREE)
8505 wi_offset = wi::zero (precision);
8506 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8507 return true;
8508 else
8509 wi_offset = offset;
8511 bool overflow;
8512 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8513 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8514 if (overflow)
8515 return true;
8517 if (!wi::fits_uhwi_p (total))
8518 return true;
8520 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8521 if (size <= 0)
8522 return true;
8524 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8525 array. */
8526 if (TREE_CODE (base) == ADDR_EXPR)
8528 HOST_WIDE_INT base_size;
8530 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8531 if (base_size > 0 && size < base_size)
8532 size = base_size;
8535 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8538 /* Return the least significant HOST_WIDE_INT bits of T, an INTEGER_CST
8539 of sizetype kind. This makes sure to properly sign-extend the
8540 constant. */
8542 static HOST_WIDE_INT
8543 size_low_cst (const_tree t)
8545 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8546 int prec = TYPE_PRECISION (TREE_TYPE (t));
8547 if (prec < HOST_BITS_PER_WIDE_INT)
8548 return sext_hwi (w, prec);
8549 return w;
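/* Illustrative sketch (disabled): the sign extension performed above,
   using the classic xor-and-subtract trick instead of sext_hwi.  A
   two's-complement host is assumed, and W is assumed to hold only
   PREC significant bits; sext_sketch (0xff, 8) yields -1.  */
#if 0
static long long
sext_sketch (unsigned long long w, int prec)
{
  unsigned long long sign = 1ull << (prec - 1);
  /* Flip the sign bit, then subtract it back out to replicate it.  */
  return (long long) ((w ^ sign) - sign);
}
#endif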
8552 /* Subroutine of fold_binary. This routine performs all of the
8553 transformations that are common to the equality/inequality
8554 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8555 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8556 fold_binary should call fold_binary. Fold a comparison with
8557 tree code CODE and type TYPE with operands OP0 and OP1. Return
8558 the folded comparison or NULL_TREE. */
8560 static tree
8561 fold_comparison (location_t loc, enum tree_code code, tree type,
8562 tree op0, tree op1)
8564 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8565 tree arg0, arg1, tem;
8567 arg0 = op0;
8568 arg1 = op1;
8570 STRIP_SIGN_NOPS (arg0);
8571 STRIP_SIGN_NOPS (arg1);
8573 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8574 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8575 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8576 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8577 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8578 && TREE_CODE (arg1) == INTEGER_CST
8579 && !TREE_OVERFLOW (arg1))
8581 const enum tree_code
8582 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8583 tree const1 = TREE_OPERAND (arg0, 1);
8584 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8585 tree variable = TREE_OPERAND (arg0, 0);
8586 tree new_const = int_const_binop (reverse_op, const2, const1);
8588 /* If the constant operation overflowed this can be
8589 simplified as a comparison against INT_MAX/INT_MIN. */
8590 if (TREE_OVERFLOW (new_const)
8591 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8593 int const1_sgn = tree_int_cst_sgn (const1);
8594 enum tree_code code2 = code;
8596 /* Get the sign of the constant on the lhs if the
8597 operation were VARIABLE + CONST1. */
8598 if (TREE_CODE (arg0) == MINUS_EXPR)
8599 const1_sgn = -const1_sgn;
8601 /* The sign of the constant determines if we overflowed
8602 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8603 Canonicalize to the INT_MIN overflow by swapping the comparison
8604 if necessary. */
8605 if (const1_sgn == -1)
8606 code2 = swap_tree_comparison (code);
8608 /* We can now look at the canonicalized case
8609 VARIABLE + 1 CODE2 INT_MIN
8610 and decide on the result. */
8611 switch (code2)
8613 case EQ_EXPR:
8614 case LT_EXPR:
8615 case LE_EXPR:
8616 return
8617 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8619 case NE_EXPR:
8620 case GE_EXPR:
8621 case GT_EXPR:
8622 return
8623 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8625 default:
8626 gcc_unreachable ();
8629 else
8631 if (!equality_code)
8632 fold_overflow_warning ("assuming signed overflow does not occur "
8633 "when changing X +- C1 cmp C2 to "
8634 "X cmp C2 -+ C1",
8635 WARN_STRICT_OVERFLOW_COMPARISON);
8636 return fold_build2_loc (loc, code, type, variable, new_const);
8640 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8641 if (TREE_CODE (arg0) == MINUS_EXPR
8642 && equality_code
8643 && integer_zerop (arg1))
8645 /* ??? The transformation is valid for the other operators if overflow
8646 is undefined for the type, but performing it here badly interacts
8647 with the transformation in fold_cond_expr_with_comparison which
8648 attempts to synthesize ABS_EXPR. */
8649 if (!equality_code)
8650 fold_overflow_warning ("assuming signed overflow does not occur "
8651 "when changing X - Y cmp 0 to X cmp Y",
8652 WARN_STRICT_OVERFLOW_COMPARISON);
8653 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8654 TREE_OPERAND (arg0, 1));
8657 /* For comparisons of pointers we can decompose it to a compile time
8658 comparison of the base objects and the offsets into the object.
8659 This requires at least one operand being an ADDR_EXPR or a
8660 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8661 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8662 && (TREE_CODE (arg0) == ADDR_EXPR
8663 || TREE_CODE (arg1) == ADDR_EXPR
8664 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8665 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8667 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8668 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8669 machine_mode mode;
8670 int volatilep, unsignedp;
8671 bool indirect_base0 = false, indirect_base1 = false;
8673 /* Get base and offset for the access. Strip ADDR_EXPR for
8674 get_inner_reference, but put it back by stripping INDIRECT_REF
8675 off the base object if possible. indirect_baseN will be true
8676 if baseN is not an address but refers to the object itself. */
8677 base0 = arg0;
8678 if (TREE_CODE (arg0) == ADDR_EXPR)
8680 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8681 &bitsize, &bitpos0, &offset0, &mode,
8682 &unsignedp, &volatilep, false);
8683 if (TREE_CODE (base0) == INDIRECT_REF)
8684 base0 = TREE_OPERAND (base0, 0);
8685 else
8686 indirect_base0 = true;
8688 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8690 base0 = TREE_OPERAND (arg0, 0);
8691 STRIP_SIGN_NOPS (base0);
8692 if (TREE_CODE (base0) == ADDR_EXPR)
8694 base0 = TREE_OPERAND (base0, 0);
8695 indirect_base0 = true;
8697 offset0 = TREE_OPERAND (arg0, 1);
8698 if (tree_fits_shwi_p (offset0))
8700 HOST_WIDE_INT off = size_low_cst (offset0);
8701 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8702 * BITS_PER_UNIT)
8703 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8705 bitpos0 = off * BITS_PER_UNIT;
8706 offset0 = NULL_TREE;
8711 base1 = arg1;
8712 if (TREE_CODE (arg1) == ADDR_EXPR)
8714 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8715 &bitsize, &bitpos1, &offset1, &mode,
8716 &unsignedp, &volatilep, false);
8717 if (TREE_CODE (base1) == INDIRECT_REF)
8718 base1 = TREE_OPERAND (base1, 0);
8719 else
8720 indirect_base1 = true;
8722 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8724 base1 = TREE_OPERAND (arg1, 0);
8725 STRIP_SIGN_NOPS (base1);
8726 if (TREE_CODE (base1) == ADDR_EXPR)
8728 base1 = TREE_OPERAND (base1, 0);
8729 indirect_base1 = true;
8731 offset1 = TREE_OPERAND (arg1, 1);
8732 if (tree_fits_shwi_p (offset1))
8734 HOST_WIDE_INT off = size_low_cst (offset1);
8735 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8736 * BITS_PER_UNIT)
8737 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8739 bitpos1 = off * BITS_PER_UNIT;
8740 offset1 = NULL_TREE;
8745 /* A local variable can never be pointed to by
8746 the default SSA name of an incoming parameter. */
8747 if ((TREE_CODE (arg0) == ADDR_EXPR
8748 && indirect_base0
8749 && TREE_CODE (base0) == VAR_DECL
8750 && auto_var_in_fn_p (base0, current_function_decl)
8751 && !indirect_base1
8752 && TREE_CODE (base1) == SSA_NAME
8753 && SSA_NAME_IS_DEFAULT_DEF (base1)
8754 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8755 || (TREE_CODE (arg1) == ADDR_EXPR
8756 && indirect_base1
8757 && TREE_CODE (base1) == VAR_DECL
8758 && auto_var_in_fn_p (base1, current_function_decl)
8759 && !indirect_base0
8760 && TREE_CODE (base0) == SSA_NAME
8761 && SSA_NAME_IS_DEFAULT_DEF (base0)
8762 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8764 if (code == NE_EXPR)
8765 return constant_boolean_node (1, type);
8766 else if (code == EQ_EXPR)
8767 return constant_boolean_node (0, type);
8769 /* If we have equivalent bases we might be able to simplify. */
8770 else if (indirect_base0 == indirect_base1
8771 && operand_equal_p (base0, base1, 0))
8773 /* We can fold this expression to a constant if the non-constant
8774 offset parts are equal. */
8775 if ((offset0 == offset1
8776 || (offset0 && offset1
8777 && operand_equal_p (offset0, offset1, 0)))
8778 && (code == EQ_EXPR
8779 || code == NE_EXPR
8780 || (indirect_base0 && DECL_P (base0))
8781 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8784 if (!equality_code
8785 && bitpos0 != bitpos1
8786 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8787 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8788 fold_overflow_warning (("assuming pointer wraparound does not "
8789 "occur when comparing P +- C1 with "
8790 "P +- C2"),
8791 WARN_STRICT_OVERFLOW_CONDITIONAL);
8793 switch (code)
8795 case EQ_EXPR:
8796 return constant_boolean_node (bitpos0 == bitpos1, type);
8797 case NE_EXPR:
8798 return constant_boolean_node (bitpos0 != bitpos1, type);
8799 case LT_EXPR:
8800 return constant_boolean_node (bitpos0 < bitpos1, type);
8801 case LE_EXPR:
8802 return constant_boolean_node (bitpos0 <= bitpos1, type);
8803 case GE_EXPR:
8804 return constant_boolean_node (bitpos0 >= bitpos1, type);
8805 case GT_EXPR:
8806 return constant_boolean_node (bitpos0 > bitpos1, type);
8807 default:;
8810 /* We can simplify the comparison to a comparison of the variable
8811 offset parts if the constant offset parts are equal.
8812 Be careful to use signed sizetype here because otherwise we
8813 mess with array offsets in the wrong way. This is possible
8814 because pointer arithmetic is restricted to remain within an
8815 object and overflow on pointer differences is undefined as of
8816 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8817 else if (bitpos0 == bitpos1
8818 && (equality_code
8819 || (indirect_base0 && DECL_P (base0))
8820 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8822 /* By converting to signed sizetype we cover middle-end pointer
8823 arithmetic, which operates on unsigned pointer types of sizetype
8824 width, and ARRAY_REF offsets, which are properly sign- or
8825 zero-extended from their type in case it is narrower than
8826 sizetype. */
8827 if (offset0 == NULL_TREE)
8828 offset0 = build_int_cst (ssizetype, 0);
8829 else
8830 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8831 if (offset1 == NULL_TREE)
8832 offset1 = build_int_cst (ssizetype, 0);
8833 else
8834 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8836 if (!equality_code
8837 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8838 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8839 fold_overflow_warning (("assuming pointer wraparound does not "
8840 "occur when comparing P +- C1 with "
8841 "P +- C2"),
8842 WARN_STRICT_OVERFLOW_COMPARISON);
8844 return fold_build2_loc (loc, code, type, offset0, offset1);
8847 /* For non-equal bases we can simplify if they are addresses
8848 of local binding decls or constants. */
8849 else if (indirect_base0 && indirect_base1
8850 /* We know that !operand_equal_p (base0, base1, 0)
8851 because the if condition was false. But make
8852 sure two decls are not the same. */
8853 && base0 != base1
8854 && TREE_CODE (arg0) == ADDR_EXPR
8855 && TREE_CODE (arg1) == ADDR_EXPR
8856 && (((TREE_CODE (base0) == VAR_DECL
8857 || TREE_CODE (base0) == PARM_DECL)
8858 && (targetm.binds_local_p (base0)
8859 || CONSTANT_CLASS_P (base1)))
8860 || CONSTANT_CLASS_P (base0))
8861 && (((TREE_CODE (base1) == VAR_DECL
8862 || TREE_CODE (base1) == PARM_DECL)
8863 && (targetm.binds_local_p (base1)
8864 || CONSTANT_CLASS_P (base0)))
8865 || CONSTANT_CLASS_P (base1)))
8867 if (code == EQ_EXPR)
8868 return omit_two_operands_loc (loc, type, boolean_false_node,
8869 arg0, arg1);
8870 else if (code == NE_EXPR)
8871 return omit_two_operands_loc (loc, type, boolean_true_node,
8872 arg0, arg1);
8874 /* For equal offsets we can simplify to a comparison of the
8875 base addresses. */
8876 else if (bitpos0 == bitpos1
8877 && (indirect_base0
8878 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8879 && (indirect_base1
8880 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8881 && ((offset0 == offset1)
8882 || (offset0 && offset1
8883 && operand_equal_p (offset0, offset1, 0))))
8885 if (indirect_base0)
8886 base0 = build_fold_addr_expr_loc (loc, base0);
8887 if (indirect_base1)
8888 base1 = build_fold_addr_expr_loc (loc, base1);
8889 return fold_build2_loc (loc, code, type, base0, base1);
8893 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8894 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8895 the resulting offset is smaller in absolute value than the
8896 original one and has the same sign. */
8897 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8898 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8899 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8900 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8901 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8902 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8903 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8905 tree const1 = TREE_OPERAND (arg0, 1);
8906 tree const2 = TREE_OPERAND (arg1, 1);
8907 tree variable1 = TREE_OPERAND (arg0, 0);
8908 tree variable2 = TREE_OPERAND (arg1, 0);
8909 tree cst;
8910 const char * const warnmsg = G_("assuming signed overflow does not "
8911 "occur when combining constants around "
8912 "a comparison");
8914 /* Put the constant on the side where it doesn't overflow and is
8915 of lower absolute value and of the same sign as before. */
8916 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8917 ? MINUS_EXPR : PLUS_EXPR,
8918 const2, const1);
8919 if (!TREE_OVERFLOW (cst)
8920 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8921 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8923 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8924 return fold_build2_loc (loc, code, type,
8925 variable1,
8926 fold_build2_loc (loc, TREE_CODE (arg1),
8927 TREE_TYPE (arg1),
8928 variable2, cst));
8931 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8932 ? MINUS_EXPR : PLUS_EXPR,
8933 const1, const2);
8934 if (!TREE_OVERFLOW (cst)
8935 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8936 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8938 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8939 return fold_build2_loc (loc, code, type,
8940 fold_build2_loc (loc, TREE_CODE (arg0),
8941 TREE_TYPE (arg0),
8942 variable1, cst),
8943 variable2);
8947 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8948 signed arithmetic case. That form is created by the compiler
8949 often enough for folding it to be of value. One example is in
8950 computing loop trip counts after Operator Strength Reduction. */
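/* For instance, with signed x, "x * 4 > 0" folds to "x > 0" and
   "x * -4 > 0" folds to "x < 0" (the sense is swapped for a negative
   multiplier).  This is valid only because signed overflow is
   undefined; with -fwrapv the product could wrap and change sign.  */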
8951 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8952 && TREE_CODE (arg0) == MULT_EXPR
8953 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8954 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8955 && integer_zerop (arg1))
8957 tree const1 = TREE_OPERAND (arg0, 1);
8958 tree const2 = arg1; /* zero */
8959 tree variable1 = TREE_OPERAND (arg0, 0);
8960 enum tree_code cmp_code = code;
8962 /* Handle unfolded multiplication by zero. */
8963 if (integer_zerop (const1))
8964 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8966 fold_overflow_warning (("assuming signed overflow does not occur when "
8967 "eliminating multiplication in comparison "
8968 "with zero"),
8969 WARN_STRICT_OVERFLOW_COMPARISON);
8971 /* If const1 is negative we swap the sense of the comparison. */
8972 if (tree_int_cst_sgn (const1) < 0)
8973 cmp_code = swap_tree_comparison (cmp_code);
8975 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8978 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8979 if (tem)
8980 return tem;
8982 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8984 tree targ0 = strip_float_extensions (arg0);
8985 tree targ1 = strip_float_extensions (arg1);
8986 tree newtype = TREE_TYPE (targ0);
8988 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8989 newtype = TREE_TYPE (targ1);
8991 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8992 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8993 return fold_build2_loc (loc, code, type,
8994 fold_convert_loc (loc, newtype, targ0),
8995 fold_convert_loc (loc, newtype, targ1));
8997 /* (-a) CMP (-b) -> b CMP a */
8998 if (TREE_CODE (arg0) == NEGATE_EXPR
8999 && TREE_CODE (arg1) == NEGATE_EXPR)
9000 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9001 TREE_OPERAND (arg0, 0));
9003 if (TREE_CODE (arg1) == REAL_CST)
9005 REAL_VALUE_TYPE cst;
9006 cst = TREE_REAL_CST (arg1);
9008 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9009 if (TREE_CODE (arg0) == NEGATE_EXPR)
9010 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9011 TREE_OPERAND (arg0, 0),
9012 build_real (TREE_TYPE (arg1),
9013 real_value_negate (&cst)));
9015 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9016 /* a CMP (-0) -> a CMP 0 */
9017 if (REAL_VALUE_MINUS_ZERO (cst))
9018 return fold_build2_loc (loc, code, type, arg0,
9019 build_real (TREE_TYPE (arg1), dconst0));
9021 /* x != NaN is always true, other ops are always false. */
9022 if (REAL_VALUE_ISNAN (cst)
9023 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9025 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9026 return omit_one_operand_loc (loc, type, tem, arg0);
9029 /* Fold comparisons against infinity. */
9030 if (REAL_VALUE_ISINF (cst)
9031 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9033 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9034 if (tem != NULL_TREE)
9035 return tem;
9039 /* If this is a comparison of a real constant with a PLUS_EXPR
9040 or a MINUS_EXPR of a real constant, we can convert it into a
9041 comparison with a revised real constant, provided unsafe math
9042 optimizations are enabled and the new constant does not overflow. */
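/* E.g. under -funsafe-math-optimizations, "x + 1.5 < 3.5" becomes
   "x < 2.0", with the constant 3.5 - 1.5 computed at compile time.  */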
9043 if (flag_unsafe_math_optimizations
9044 && TREE_CODE (arg1) == REAL_CST
9045 && (TREE_CODE (arg0) == PLUS_EXPR
9046 || TREE_CODE (arg0) == MINUS_EXPR)
9047 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9048 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9049 ? MINUS_EXPR : PLUS_EXPR,
9050 arg1, TREE_OPERAND (arg0, 1)))
9051 && !TREE_OVERFLOW (tem))
9052 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9054 /* Likewise, we can simplify a comparison of a real constant with
9055 a MINUS_EXPR whose first operand is also a real constant, i.e.
9056 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9057 floating-point types only if -fassociative-math is set. */
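/* Concretely, "(10.0 - x) < 4.0" is rewritten as "x > 6.0": the
   constant 10.0 - 4.0 folds exactly, and the comparison is swapped
   because x appears negated on the left.  */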
9058 if (flag_associative_math
9059 && TREE_CODE (arg1) == REAL_CST
9060 && TREE_CODE (arg0) == MINUS_EXPR
9061 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9062 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9063 arg1))
9064 && !TREE_OVERFLOW (tem))
9065 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9066 TREE_OPERAND (arg0, 1), tem);
9068 /* Fold comparisons against built-in math functions. */
9069 if (TREE_CODE (arg1) == REAL_CST
9070 && flag_unsafe_math_optimizations
9071 && ! flag_errno_math)
9073 enum built_in_function fcode = builtin_mathfn_code (arg0);
9075 if (fcode != END_BUILTINS)
9077 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9078 if (tem != NULL_TREE)
9079 return tem;
9084 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9085 && CONVERT_EXPR_P (arg0))
9087 /* If we are widening one operand of an integer comparison,
9088 see if the other operand is similarly being widened. Perhaps we
9089 can do the comparison in the narrower type. */
9090 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9091 if (tem)
9092 return tem;
9094 /* Or if we are changing signedness. */
9095 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9096 if (tem)
9097 return tem;
9100 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9101 constant, we can simplify it. */
9102 if (TREE_CODE (arg1) == INTEGER_CST
9103 && (TREE_CODE (arg0) == MIN_EXPR
9104 || TREE_CODE (arg0) == MAX_EXPR)
9105 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9107 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9108 if (tem)
9109 return tem;
9112 /* Simplify comparison of something with itself. (For IEEE
9113 floating-point, we can only do some of these simplifications.) */
9114 if (operand_equal_p (arg0, arg1, 0))
9116 switch (code)
9118 case EQ_EXPR:
9119 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9120 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9121 return constant_boolean_node (1, type);
9122 break;
9124 case GE_EXPR:
9125 case LE_EXPR:
9126 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9127 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9128 return constant_boolean_node (1, type);
9129 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9131 case NE_EXPR:
9132 /* For NE, we can only do this simplification if the operands
9133 are integral or we don't honor IEEE floating point NaNs. */
9134 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9135 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9136 break;
9137 /* ... fall through ... */
9138 case GT_EXPR:
9139 case LT_EXPR:
9140 return constant_boolean_node (0, type);
9141 default:
9142 gcc_unreachable ();
9146 /* If we are comparing an expression that just has comparisons
9147 of two integer values, arithmetic expressions of those comparisons,
9148 and constants, we can simplify it. There are only three cases
9149 to check: the two values can either be equal, the first can be
9150 greater, or the second can be greater. Fold the expression for
9151 those three values. Since each value must be 0 or 1, we have
9152 eight possibilities, each of which corresponds to the constant 0
9153 or 1 or one of the six possible comparisons.
9155 This handles common cases like (a > b) == 0 but also handles
9156 expressions like ((x > y) - (y > x)) > 0, which supposedly
9157 occur in macroized code. */
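/* Sketch of the method on "((x > y) - (y > x)) > 0": substituting the
   three possible orderings gives 1 for x > y, 0 for x == y, and 0 for
   x < y, i.e. the mask 100, so the whole expression folds to the
   single comparison "x > y".  */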
9159 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9161 tree cval1 = 0, cval2 = 0;
9162 int save_p = 0;
9164 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9165 /* Don't handle degenerate cases here; they should already
9166 have been handled anyway. */
9167 && cval1 != 0 && cval2 != 0
9168 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9169 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9170 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9171 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9172 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9173 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9174 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9176 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9177 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9179 /* We can't just pass T to eval_subst in case cval1 or cval2
9180 was the same as ARG1. */
9182 tree high_result
9183 = fold_build2_loc (loc, code, type,
9184 eval_subst (loc, arg0, cval1, maxval,
9185 cval2, minval),
9186 arg1);
9187 tree equal_result
9188 = fold_build2_loc (loc, code, type,
9189 eval_subst (loc, arg0, cval1, maxval,
9190 cval2, maxval),
9191 arg1);
9192 tree low_result
9193 = fold_build2_loc (loc, code, type,
9194 eval_subst (loc, arg0, cval1, minval,
9195 cval2, maxval),
9196 arg1);
9198 /* All three of these results should be 0 or 1. Confirm they are.
9199 Then use those values to select the proper code to use. */
9201 if (TREE_CODE (high_result) == INTEGER_CST
9202 && TREE_CODE (equal_result) == INTEGER_CST
9203 && TREE_CODE (low_result) == INTEGER_CST)
9205 /* Make a 3-bit mask with the high-order bit being the
9206 value for `>', the next for '=', and the low for '<'. */
9207 switch ((integer_onep (high_result) * 4)
9208 + (integer_onep (equal_result) * 2)
9209 + integer_onep (low_result))
9211 case 0:
9212 /* Always false. */
9213 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9214 case 1:
9215 code = LT_EXPR;
9216 break;
9217 case 2:
9218 code = EQ_EXPR;
9219 break;
9220 case 3:
9221 code = LE_EXPR;
9222 break;
9223 case 4:
9224 code = GT_EXPR;
9225 break;
9226 case 5:
9227 code = NE_EXPR;
9228 break;
9229 case 6:
9230 code = GE_EXPR;
9231 break;
9232 case 7:
9233 /* Always true. */
9234 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9237 if (save_p)
9239 tem = save_expr (build2 (code, type, cval1, cval2));
9240 SET_EXPR_LOCATION (tem, loc);
9241 return tem;
9243 return fold_build2_loc (loc, code, type, cval1, cval2);
9248 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9249 into a single range test. */
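/* E.g. for unsigned x, "x / 4 == 2" holds exactly for x in [8, 11],
   so fold_div_compare can rewrite it as one range test instead of a
   division plus a comparison.  */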
9250 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9251 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9252 && TREE_CODE (arg1) == INTEGER_CST
9253 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9254 && !integer_zerop (TREE_OPERAND (arg0, 1))
9255 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9256 && !TREE_OVERFLOW (arg1))
9258 tem = fold_div_compare (loc, code, type, arg0, arg1);
9259 if (tem != NULL_TREE)
9260 return tem;
9263 /* Fold ~X op ~Y as Y op X. */
9264 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9265 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9267 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9268 return fold_build2_loc (loc, code, type,
9269 fold_convert_loc (loc, cmp_type,
9270 TREE_OPERAND (arg1, 0)),
9271 TREE_OPERAND (arg0, 0));
9274 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
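/* Both rules rely on ~ reversing order: e.g. "~x < 5" becomes
   "x > ~5", i.e. "x > -6" for signed x.  */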
9275 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9276 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9278 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9279 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9280 TREE_OPERAND (arg0, 0),
9281 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9282 fold_convert_loc (loc, cmp_type, arg1)));
9285 return NULL_TREE;
9289 /* Subroutine of fold_binary. Optimize complex multiplications of the
9290 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9291 argument EXPR represents the expression "z" of type TYPE. */
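/* The underlying identity: for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, a complex value
   with zero imaginary part, which is the COMPLEX_EXPR built below.  */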
9293 static tree
9294 fold_mult_zconjz (location_t loc, tree type, tree expr)
9296 tree itype = TREE_TYPE (type);
9297 tree rpart, ipart, tem;
9299 if (TREE_CODE (expr) == COMPLEX_EXPR)
9301 rpart = TREE_OPERAND (expr, 0);
9302 ipart = TREE_OPERAND (expr, 1);
9304 else if (TREE_CODE (expr) == COMPLEX_CST)
9306 rpart = TREE_REALPART (expr);
9307 ipart = TREE_IMAGPART (expr);
9309 else
9311 expr = save_expr (expr);
9312 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9313 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9316 rpart = save_expr (rpart);
9317 ipart = save_expr (ipart);
9318 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9319 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9320 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9321 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9322 build_zero_cst (itype));
9326 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9327 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9328 guarantees that P and N have the same least significant log2(M) bits.
9329 N is not otherwise constrained. In particular, N is not normalized to
9330 0 <= N < M as is common. In general, the precise value of P is unknown.
9331 M is chosen as large as possible such that constant N can be determined.
9333 Returns M and sets *RESIDUE to N.
9335 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9336 account. This is not always possible due to PR 35705.
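/* Example (hypothetical decl, assuming "int a[8]" aligned to 16
   bytes): for EXPR = &a[3] the address is base + 12 with base a
   multiple of 16, so the result would be M = 16 with *RESIDUE = 12.  */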
9339 static unsigned HOST_WIDE_INT
9340 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9341 bool allow_func_align)
9343 enum tree_code code;
9345 *residue = 0;
9347 code = TREE_CODE (expr);
9348 if (code == ADDR_EXPR)
9350 unsigned int bitalign;
9351 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9352 *residue /= BITS_PER_UNIT;
9353 return bitalign / BITS_PER_UNIT;
9355 else if (code == POINTER_PLUS_EXPR)
9357 tree op0, op1;
9358 unsigned HOST_WIDE_INT modulus;
9359 enum tree_code inner_code;
9361 op0 = TREE_OPERAND (expr, 0);
9362 STRIP_NOPS (op0);
9363 modulus = get_pointer_modulus_and_residue (op0, residue,
9364 allow_func_align);
9366 op1 = TREE_OPERAND (expr, 1);
9367 STRIP_NOPS (op1);
9368 inner_code = TREE_CODE (op1);
9369 if (inner_code == INTEGER_CST)
9371 *residue += TREE_INT_CST_LOW (op1);
9372 return modulus;
9374 else if (inner_code == MULT_EXPR)
9376 op1 = TREE_OPERAND (op1, 1);
9377 if (TREE_CODE (op1) == INTEGER_CST)
9379 unsigned HOST_WIDE_INT align;
9381 /* Compute the greatest power-of-2 divisor of op1. */
9382 align = TREE_INT_CST_LOW (op1);
9383 align &= -align;
9385 /* If align is non-zero and less than modulus, replace
9386 modulus with align. If align is 0, then either op1 is 0
9387 or the greatest power-of-2 divisor of op1 doesn't fit in an
9388 unsigned HOST_WIDE_INT. In either case, no additional
9389 constraint is imposed. */
9390 if (align)
9391 modulus = MIN (modulus, align);
9393 return modulus;
9398 /* If we get here, we were unable to determine anything useful about the
9399 expression. */
9400 return 1;
9403 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9404 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9406 static bool
9407 vec_cst_ctor_to_array (tree arg, tree *elts)
9409 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9411 if (TREE_CODE (arg) == VECTOR_CST)
9413 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9414 elts[i] = VECTOR_CST_ELT (arg, i);
9416 else if (TREE_CODE (arg) == CONSTRUCTOR)
9418 constructor_elt *elt;
9420 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9421 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9422 return false;
9423 else
9424 elts[i] = elt->value;
9426 else
9427 return false;
9428 for (; i < nelts; i++)
9429 elts[i]
9430 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9431 return true;
9434 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9435 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9436 NULL_TREE otherwise. */
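/* SEL indexes into the 2*NELTS-element concatenation of ARG0 and
   ARG1.  E.g. with nelts == 4, sel = {0, 4, 1, 5} interleaves the low
   halves: the result is {arg0[0], arg1[0], arg0[1], arg1[1]}.  */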
9438 static tree
9439 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9441 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9442 tree *elts;
9443 bool need_ctor = false;
9445 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9446 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9447 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9448 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9449 return NULL_TREE;
9451 elts = XALLOCAVEC (tree, nelts * 3);
9452 if (!vec_cst_ctor_to_array (arg0, elts)
9453 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9454 return NULL_TREE;
9456 for (i = 0; i < nelts; i++)
9458 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9459 need_ctor = true;
9460 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9463 if (need_ctor)
9465 vec<constructor_elt, va_gc> *v;
9466 vec_alloc (v, nelts);
9467 for (i = 0; i < nelts; i++)
9468 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9469 return build_constructor (type, v);
9471 else
9472 return build_vector (type, &elts[2 * nelts]);
9475 /* Try to fold a pointer difference of type TYPE between two address
9476 expressions of array references AREF0 and AREF1 using location LOC. Return a
9477 simplified expression for the difference or NULL_TREE. */
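/* Roughly, "&a[i] - &a[j]" becomes the byte offset
   (i - j) * sizeof (a[0]) here; the frontend's subsequent exact
   division by the element size then reduces it to i - j.  */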
9479 static tree
9480 fold_addr_of_array_ref_difference (location_t loc, tree type,
9481 tree aref0, tree aref1)
9483 tree base0 = TREE_OPERAND (aref0, 0);
9484 tree base1 = TREE_OPERAND (aref1, 0);
9485 tree base_offset = build_int_cst (type, 0);
9487 /* If the bases are array references as well, recurse. If the bases
9488 are pointer indirections compute the difference of the pointers.
9489 If the bases are equal, we are set. */
9490 if ((TREE_CODE (base0) == ARRAY_REF
9491 && TREE_CODE (base1) == ARRAY_REF
9492 && (base_offset
9493 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9494 || (INDIRECT_REF_P (base0)
9495 && INDIRECT_REF_P (base1)
9496 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9497 TREE_OPERAND (base0, 0),
9498 TREE_OPERAND (base1, 0))))
9499 || operand_equal_p (base0, base1, 0))
9501 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9502 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9503 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9504 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9505 return fold_build2_loc (loc, PLUS_EXPR, type,
9506 base_offset,
9507 fold_build2_loc (loc, MULT_EXPR, type,
9508 diff, esz));
9510 return NULL_TREE;
9513 /* If the real or vector real constant CST of type TYPE has an exact
9514 inverse, return it, else return NULL. */
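/* E.g. 0.25 has the exact binary reciprocal 4.0, allowing x / 0.25 to
   become x * 4.0, whereas 0.1 has none, so NULL_TREE is returned and
   the division is left untouched.  */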
9516 static tree
9517 exact_inverse (tree type, tree cst)
9519 REAL_VALUE_TYPE r;
9520 tree unit_type, *elts;
9521 machine_mode mode;
9522 unsigned vec_nelts, i;
9524 switch (TREE_CODE (cst))
9526 case REAL_CST:
9527 r = TREE_REAL_CST (cst);
9529 if (exact_real_inverse (TYPE_MODE (type), &r))
9530 return build_real (type, r);
9532 return NULL_TREE;
9534 case VECTOR_CST:
9535 vec_nelts = VECTOR_CST_NELTS (cst);
9536 elts = XALLOCAVEC (tree, vec_nelts);
9537 unit_type = TREE_TYPE (type);
9538 mode = TYPE_MODE (unit_type);
9540 for (i = 0; i < vec_nelts; i++)
9542 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9543 if (!exact_real_inverse (mode, &r))
9544 return NULL_TREE;
9545 elts[i] = build_real (unit_type, r);
9548 return build_vector (type, elts);
9550 default:
9551 return NULL_TREE;
9555 /* Mask out the tz least significant bits of X of type TYPE where
9556 tz is the number of trailing zeroes in Y. */
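/* E.g. if Y is 8 (three trailing zero bits), the result is X with its
   low three bits cleared: mask_with_tz (type, 0b10111, 8) yields
   0b10000.  */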
9557 static wide_int
9558 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9560 int tz = wi::ctz (y);
9561 if (tz > 0)
9562 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9563 return x;
9566 /* Return true when T is an address and is known to be nonzero.
9567 For floating point we further ensure that T is not denormal.
9568 Similar logic is present in nonzero_address in rtlanal.h.
9570 If the return value is based on the assumption that signed overflow
9571 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9572 change *STRICT_OVERFLOW_P. */
9574 static bool
9575 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9577 tree type = TREE_TYPE (t);
9578 enum tree_code code;
9580 /* Doing something useful for floating point would need more work. */
9581 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9582 return false;
9584 code = TREE_CODE (t);
9585 switch (TREE_CODE_CLASS (code))
9587 case tcc_unary:
9588 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9589 strict_overflow_p);
9590 case tcc_binary:
9591 case tcc_comparison:
9592 return tree_binary_nonzero_warnv_p (code, type,
9593 TREE_OPERAND (t, 0),
9594 TREE_OPERAND (t, 1),
9595 strict_overflow_p);
9596 case tcc_constant:
9597 case tcc_declaration:
9598 case tcc_reference:
9599 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9601 default:
9602 break;
9605 switch (code)
9607 case TRUTH_NOT_EXPR:
9608 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9609 strict_overflow_p);
9611 case TRUTH_AND_EXPR:
9612 case TRUTH_OR_EXPR:
9613 case TRUTH_XOR_EXPR:
9614 return tree_binary_nonzero_warnv_p (code, type,
9615 TREE_OPERAND (t, 0),
9616 TREE_OPERAND (t, 1),
9617 strict_overflow_p);
9619 case COND_EXPR:
9620 case CONSTRUCTOR:
9621 case OBJ_TYPE_REF:
9622 case ASSERT_EXPR:
9623 case ADDR_EXPR:
9624 case WITH_SIZE_EXPR:
9625 case SSA_NAME:
9626 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9628 case COMPOUND_EXPR:
9629 case MODIFY_EXPR:
9630 case BIND_EXPR:
9631 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9632 strict_overflow_p);
9634 case SAVE_EXPR:
9635 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9636 strict_overflow_p);
9638 case CALL_EXPR:
9640 tree fndecl = get_callee_fndecl (t);
9641 if (!fndecl) return false;
9642 if (flag_delete_null_pointer_checks && !flag_check_new
9643 && DECL_IS_OPERATOR_NEW (fndecl)
9644 && !TREE_NOTHROW (fndecl))
9645 return true;
9646 if (flag_delete_null_pointer_checks
9647 && lookup_attribute ("returns_nonnull",
9648 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9649 return true;
9650 return alloca_call_p (t);
9653 default:
9654 break;
9656 return false;
9659 /* Return true when T is an address and is known to be nonzero.
9660 Handle warnings about undefined signed overflow. */
9662 static bool
9663 tree_expr_nonzero_p (tree t)
9665 bool ret, strict_overflow_p;
9667 strict_overflow_p = false;
9668 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9669 if (strict_overflow_p)
9670 fold_overflow_warning (("assuming signed overflow does not occur when "
9671 "determining that expression is always "
9672 "non-zero"),
9673 WARN_STRICT_OVERFLOW_MISC);
9674 return ret;
9677 /* Fold a binary expression of code CODE and type TYPE with operands
9678 OP0 and OP1. LOC is the location of the resulting expression.
9679 Return the folded expression if folding is successful. Otherwise,
9680 return NULL_TREE. */
9682 tree
9683 fold_binary_loc (location_t loc,
9684 enum tree_code code, tree type, tree op0, tree op1)
9686 enum tree_code_class kind = TREE_CODE_CLASS (code);
9687 tree arg0, arg1, tem;
9688 tree t1 = NULL_TREE;
9689 bool strict_overflow_p;
9690 unsigned int prec;
9692 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9693 && TREE_CODE_LENGTH (code) == 2
9694 && op0 != NULL_TREE
9695 && op1 != NULL_TREE);
9697 arg0 = op0;
9698 arg1 = op1;
9700 /* Strip any conversions that don't change the mode. This is
9701 safe for every expression, except for a comparison expression
9702 because its signedness is derived from its operands. So, in
9703 the latter case, only strip conversions that don't change the
9704 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9705 preserved.
9707 Note that this is done as an internal manipulation within the
9708 constant folder, in order to find the simplest representation
9709 of the arguments so that their form can be studied. In any
9710 case, the appropriate type conversions should be put back in
9711 the tree that will get out of the constant folder. */
9713 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9715 STRIP_SIGN_NOPS (arg0);
9716 STRIP_SIGN_NOPS (arg1);
9718 else
9720 STRIP_NOPS (arg0);
9721 STRIP_NOPS (arg1);
9724 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9725 constant but we can't do arithmetic on them. */
9726 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9727 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9728 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9729 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9730 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9731 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9732 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9734 if (kind == tcc_binary)
9736 /* Make sure type and arg0 have the same saturating flag. */
9737 gcc_assert (TYPE_SATURATING (type)
9738 == TYPE_SATURATING (TREE_TYPE (arg0)));
9739 tem = const_binop (code, arg0, arg1);
9741 else if (kind == tcc_comparison)
9742 tem = fold_relational_const (code, type, arg0, arg1);
9743 else
9744 tem = NULL_TREE;
9746 if (tem != NULL_TREE)
9748 if (TREE_TYPE (tem) != type)
9749 tem = fold_convert_loc (loc, type, tem);
9750 return tem;
9754 /* If this is a commutative operation, and ARG0 is a constant, move it
9755 to ARG1 to reduce the number of tests below. */
9756 if (commutative_tree_code (code)
9757 && tree_swap_operands_p (arg0, arg1, true))
9758 return fold_build2_loc (loc, code, type, op1, op0);
9760 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9761 to ARG1 to reduce the number of tests below. */
9762 if (kind == tcc_comparison
9763 && tree_swap_operands_p (arg0, arg1, true))
9764 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9766 tem = generic_simplify (loc, code, type, op0, op1);
9767 if (tem)
9768 return tem;
9770 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9772 First check for cases where an arithmetic operation is applied to a
9773 compound, conditional, or comparison operation. Push the arithmetic
9774 operation inside the compound or conditional to see if any folding
9775 can then be done. Convert comparison to conditional for this purpose.
9776 This also optimizes non-constant cases that used to be done in
9777 expand_expr.
9779 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9780 where one of the operands is a truth value and the other is a truth
9781 value or a BIT_AND_EXPR with the constant 1. In that case, the
9782 code below would make the expression more complex. Change it to a
9783 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9784 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
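/* For example, "(a < b) & (c < d)" becomes a TRUTH_AND_EXPR,
   "(a < b) != (c < d)" becomes a TRUTH_XOR_EXPR, and
   "(a < b) == (c < d)" becomes the inversion of that TRUTH_XOR_EXPR.  */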
9786 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9787 || code == EQ_EXPR || code == NE_EXPR)
9788 && TREE_CODE (type) != VECTOR_TYPE
9789 && ((truth_value_p (TREE_CODE (arg0))
9790 && (truth_value_p (TREE_CODE (arg1))
9791 || (TREE_CODE (arg1) == BIT_AND_EXPR
9792 && integer_onep (TREE_OPERAND (arg1, 1)))))
9793 || (truth_value_p (TREE_CODE (arg1))
9794 && (truth_value_p (TREE_CODE (arg0))
9795 || (TREE_CODE (arg0) == BIT_AND_EXPR
9796 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9798 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9799 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9800 : TRUTH_XOR_EXPR,
9801 boolean_type_node,
9802 fold_convert_loc (loc, boolean_type_node, arg0),
9803 fold_convert_loc (loc, boolean_type_node, arg1));
9805 if (code == EQ_EXPR)
9806 tem = invert_truthvalue_loc (loc, tem);
9808 return fold_convert_loc (loc, type, tem);
9811 if (TREE_CODE_CLASS (code) == tcc_binary
9812 || TREE_CODE_CLASS (code) == tcc_comparison)
9814 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9816 tem = fold_build2_loc (loc, code, type,
9817 fold_convert_loc (loc, TREE_TYPE (op0),
9818 TREE_OPERAND (arg0, 1)), op1);
9819 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9820 tem);
9822 if (TREE_CODE (arg1) == COMPOUND_EXPR
9823 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9825 tem = fold_build2_loc (loc, code, type, op0,
9826 fold_convert_loc (loc, TREE_TYPE (op1),
9827 TREE_OPERAND (arg1, 1)));
9828 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9829 tem);
9832 if (TREE_CODE (arg0) == COND_EXPR
9833 || TREE_CODE (arg0) == VEC_COND_EXPR
9834 || COMPARISON_CLASS_P (arg0))
9836 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9837 arg0, arg1,
9838 /*cond_first_p=*/1);
9839 if (tem != NULL_TREE)
9840 return tem;
9843 if (TREE_CODE (arg1) == COND_EXPR
9844 || TREE_CODE (arg1) == VEC_COND_EXPR
9845 || COMPARISON_CLASS_P (arg1))
9847 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9848 arg1, arg0,
9849 /*cond_first_p=*/0);
9850 if (tem != NULL_TREE)
9851 return tem;
9855 switch (code)
9857 case MEM_REF:
9858 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9859 if (TREE_CODE (arg0) == ADDR_EXPR
9860 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9862 tree iref = TREE_OPERAND (arg0, 0);
9863 return fold_build2 (MEM_REF, type,
9864 TREE_OPERAND (iref, 0),
9865 int_const_binop (PLUS_EXPR, arg1,
9866 TREE_OPERAND (iref, 1)));
9869 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9870 if (TREE_CODE (arg0) == ADDR_EXPR
9871 && handled_component_p (TREE_OPERAND (arg0, 0)))
9873 tree base;
9874 HOST_WIDE_INT coffset;
9875 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9876 &coffset);
9877 if (!base)
9878 return NULL_TREE;
9879 return fold_build2 (MEM_REF, type,
9880 build_fold_addr_expr (base),
9881 int_const_binop (PLUS_EXPR, arg1,
9882 size_int (coffset)));
9885 return NULL_TREE;
9887 case POINTER_PLUS_EXPR:
9888 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9889 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9890 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9891 return fold_convert_loc (loc, type,
9892 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9893 fold_convert_loc (loc, sizetype,
9894 arg1),
9895 fold_convert_loc (loc, sizetype,
9896 arg0)));
9898 /* PTR_CST +p CST -> CST1 */
9899 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9900 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9901 fold_convert_loc (loc, type, arg1));
9903 return NULL_TREE;
9905 case PLUS_EXPR:
9906 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9908 /* X + (X / CST) * -CST is X % CST. */
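/* E.g. "x + (x / 16) * -16" is x % 16: for truncating division,
   x == (x / 16) * 16 + x % 16 by definition.  */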
9909 if (TREE_CODE (arg1) == MULT_EXPR
9910 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9911 && operand_equal_p (arg0,
9912 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9914 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9915 tree cst1 = TREE_OPERAND (arg1, 1);
9916 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9917 cst1, cst0);
9918 if (sum && integer_zerop (sum))
9919 return fold_convert_loc (loc, type,
9920 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9921 TREE_TYPE (arg0), arg0,
9922 cst0));
9926 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9927 one. Make sure the type is not saturating and has the signedness of
9928 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9929 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9930 if ((TREE_CODE (arg0) == MULT_EXPR
9931 || TREE_CODE (arg1) == MULT_EXPR)
9932 && !TYPE_SATURATING (type)
9933 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9934 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9935 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9937 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9938 if (tem)
9939 return tem;
9942 if (! FLOAT_TYPE_P (type))
9944 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9945 with a constant, and the two constants have no bits in common,
9946 we should treat this as a BIT_IOR_EXPR since this may produce more
9947 simplifications. */
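/* E.g. "(x & 0xF0) + (y & 0x0F)": the masks share no set bits, so no
   carry can cross between the two fields and the sum is the same as
   "(x & 0xF0) | (y & 0x0F)".  */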
9948 if (TREE_CODE (arg0) == BIT_AND_EXPR
9949 && TREE_CODE (arg1) == BIT_AND_EXPR
9950 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9951 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9952 && wi::bit_and (TREE_OPERAND (arg0, 1),
9953 TREE_OPERAND (arg1, 1)) == 0)
9955 code = BIT_IOR_EXPR;
9956 goto bit_ior;
9959 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9960 (plus (plus (mult) (mult)) (foo)) so that we can
9961 take advantage of the factoring cases below. */
9962 if (TYPE_OVERFLOW_WRAPS (type)
9963 && (((TREE_CODE (arg0) == PLUS_EXPR
9964 || TREE_CODE (arg0) == MINUS_EXPR)
9965 && TREE_CODE (arg1) == MULT_EXPR)
9966 || ((TREE_CODE (arg1) == PLUS_EXPR
9967 || TREE_CODE (arg1) == MINUS_EXPR)
9968 && TREE_CODE (arg0) == MULT_EXPR)))
9970 tree parg0, parg1, parg, marg;
9971 enum tree_code pcode;
9973 if (TREE_CODE (arg1) == MULT_EXPR)
9974 parg = arg0, marg = arg1;
9975 else
9976 parg = arg1, marg = arg0;
9977 pcode = TREE_CODE (parg);
9978 parg0 = TREE_OPERAND (parg, 0);
9979 parg1 = TREE_OPERAND (parg, 1);
9980 STRIP_NOPS (parg0);
9981 STRIP_NOPS (parg1);
9983 if (TREE_CODE (parg0) == MULT_EXPR
9984 && TREE_CODE (parg1) != MULT_EXPR)
9985 return fold_build2_loc (loc, pcode, type,
9986 fold_build2_loc (loc, PLUS_EXPR, type,
9987 fold_convert_loc (loc, type,
9988 parg0),
9989 fold_convert_loc (loc, type,
9990 marg)),
9991 fold_convert_loc (loc, type, parg1));
9992 if (TREE_CODE (parg0) != MULT_EXPR
9993 && TREE_CODE (parg1) == MULT_EXPR)
9994 return
9995 fold_build2_loc (loc, PLUS_EXPR, type,
9996 fold_convert_loc (loc, type, parg0),
9997 fold_build2_loc (loc, pcode, type,
9998 fold_convert_loc (loc, type, marg),
9999 fold_convert_loc (loc, type,
10000 parg1)));
10003 else
10005 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10006 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10007 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10009 /* Likewise if the operands are reversed. */
10010 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10011 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10013 /* Convert X + -C into X - C. */
10014 if (TREE_CODE (arg1) == REAL_CST
10015 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10017 tem = fold_negate_const (arg1, type);
10018 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10019 return fold_build2_loc (loc, MINUS_EXPR, type,
10020 fold_convert_loc (loc, type, arg0),
10021 fold_convert_loc (loc, type, tem));
10024 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10025 to __complex__ ( x, y ). This is not the same for SNaNs or
10026 if signed zeros are involved. */
10027 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10028 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10029 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10031 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10032 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10033 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10034 bool arg0rz = false, arg0iz = false;
10035 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10036 || (arg0i && (arg0iz = real_zerop (arg0i))))
10038 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10039 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10040 if (arg0rz && arg1i && real_zerop (arg1i))
10042 tree rp = arg1r ? arg1r
10043 : build1 (REALPART_EXPR, rtype, arg1);
10044 tree ip = arg0i ? arg0i
10045 : build1 (IMAGPART_EXPR, rtype, arg0);
10046 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10048 else if (arg0iz && arg1r && real_zerop (arg1r))
10050 tree rp = arg0r ? arg0r
10051 : build1 (REALPART_EXPR, rtype, arg0);
10052 tree ip = arg1i ? arg1i
10053 : build1 (IMAGPART_EXPR, rtype, arg1);
10054 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10059 if (flag_unsafe_math_optimizations
10060 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10061 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10062 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10063 return tem;
10065 /* Convert x+x into x*2.0. */
10066 if (operand_equal_p (arg0, arg1, 0)
10067 && SCALAR_FLOAT_TYPE_P (type))
10068 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10069 build_real (type, dconst2));
10071 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10072 We associate floats only if the user has specified
10073 -fassociative-math. */
10074 if (flag_associative_math
10075 && TREE_CODE (arg1) == PLUS_EXPR
10076 && TREE_CODE (arg0) != MULT_EXPR)
10078 tree tree10 = TREE_OPERAND (arg1, 0);
10079 tree tree11 = TREE_OPERAND (arg1, 1);
10080 if (TREE_CODE (tree11) == MULT_EXPR
10081 && TREE_CODE (tree10) == MULT_EXPR)
10083 tree tree0;
10084 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10085 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10088 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10089 We associate floats only if the user has specified
10090 -fassociative-math. */
10091 if (flag_associative_math
10092 && TREE_CODE (arg0) == PLUS_EXPR
10093 && TREE_CODE (arg1) != MULT_EXPR)
10095 tree tree00 = TREE_OPERAND (arg0, 0);
10096 tree tree01 = TREE_OPERAND (arg0, 1);
10097 if (TREE_CODE (tree01) == MULT_EXPR
10098 && TREE_CODE (tree00) == MULT_EXPR)
10100 tree tree0;
10101 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10102 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10107 bit_rotate:
10108 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10109 is a rotate of A by C1 bits. */
10110 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10111 is a rotate of A by B bits. */
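/* E.g. for 32-bit unsigned a, "(a << 3) + (a >> 29)" is recognized as
   a left-rotate of a by 3: the shift counts sum to the width, the two
   terms occupy disjoint bits, and + therefore acts as |.  */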
10113 enum tree_code code0, code1;
10114 tree rtype;
10115 code0 = TREE_CODE (arg0);
10116 code1 = TREE_CODE (arg1);
10117 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10118 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10119 && operand_equal_p (TREE_OPERAND (arg0, 0),
10120 TREE_OPERAND (arg1, 0), 0)
10121 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10122 TYPE_UNSIGNED (rtype))
10123 /* Only create rotates in complete modes. Other cases are not
10124 expanded properly. */
10125 && (element_precision (rtype)
10126 == element_precision (TYPE_MODE (rtype))))
10128 tree tree01, tree11;
10129 enum tree_code code01, code11;
10131 tree01 = TREE_OPERAND (arg0, 1);
10132 tree11 = TREE_OPERAND (arg1, 1);
10133 STRIP_NOPS (tree01);
10134 STRIP_NOPS (tree11);
10135 code01 = TREE_CODE (tree01);
10136 code11 = TREE_CODE (tree11);
10137 if (code01 == INTEGER_CST
10138 && code11 == INTEGER_CST
10139 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10140 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10142 tem = build2_loc (loc, LROTATE_EXPR,
10143 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10144 TREE_OPERAND (arg0, 0),
10145 code0 == LSHIFT_EXPR ? tree01 : tree11);
10146 return fold_convert_loc (loc, type, tem);
10148 else if (code11 == MINUS_EXPR)
10150 tree tree110, tree111;
10151 tree110 = TREE_OPERAND (tree11, 0);
10152 tree111 = TREE_OPERAND (tree11, 1);
10153 STRIP_NOPS (tree110);
10154 STRIP_NOPS (tree111);
10155 if (TREE_CODE (tree110) == INTEGER_CST
10156 && 0 == compare_tree_int (tree110,
10157 element_precision
10158 (TREE_TYPE (TREE_OPERAND
10159 (arg0, 0))))
10160 && operand_equal_p (tree01, tree111, 0))
10161 return
10162 fold_convert_loc (loc, type,
10163 build2 ((code0 == LSHIFT_EXPR
10164 ? LROTATE_EXPR
10165 : RROTATE_EXPR),
10166 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10167 TREE_OPERAND (arg0, 0), tree01));
10169 else if (code01 == MINUS_EXPR)
10171 tree tree010, tree011;
10172 tree010 = TREE_OPERAND (tree01, 0);
10173 tree011 = TREE_OPERAND (tree01, 1);
10174 STRIP_NOPS (tree010);
10175 STRIP_NOPS (tree011);
10176 if (TREE_CODE (tree010) == INTEGER_CST
10177 && 0 == compare_tree_int (tree010,
10178 element_precision
10179 (TREE_TYPE (TREE_OPERAND
10180 (arg0, 0))))
10181 && operand_equal_p (tree11, tree011, 0))
10182 return fold_convert_loc
10183 (loc, type,
10184 build2 ((code0 != LSHIFT_EXPR
10185 ? LROTATE_EXPR
10186 : RROTATE_EXPR),
10187 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10188 TREE_OPERAND (arg0, 0), tree11));
10193 associate:
10194 /* In most languages, we can't associate operations on floats through
10195 parentheses. Rather than remember where the parentheses were, we
10196 don't associate floats at all, unless the user has specified
10197 -fassociative-math.
10198 And, we need to make sure type is not saturating. */
10200 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10201 && !TYPE_SATURATING (type))
10203 tree var0, con0, lit0, minus_lit0;
10204 tree var1, con1, lit1, minus_lit1;
10205 tree atype = type;
10206 bool ok = true;
10208 /* Split both trees into variables, constants, and literals. Then
10209 associate each group together, the constants with literals,
10210 then the result with variables. This increases the chances of
10211 literals being recombined later and of generating relocatable
10212 expressions for the sum of a constant and literal. */
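/* E.g. "(x + 1) + (y + 2)" splits into variables {x, y} and literals
   {1, 2}; these recombine as "(x + y) + 3", leaving a single literal
   for later folding.  */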
10213 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10214 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10215 code == MINUS_EXPR);
10217 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10218 if (code == MINUS_EXPR)
10219 code = PLUS_EXPR;
10221 /* With undefined overflow prefer doing association in a type
10222 which wraps on overflow, if that is one of the operand types. */
10223 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10224 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10226 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10227 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10228 atype = TREE_TYPE (arg0);
10229 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10230 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10231 atype = TREE_TYPE (arg1);
10232 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10235 /* With undefined overflow we can only associate constants with one
10236 variable, and constants whose association doesn't overflow. */
10237 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10238 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10240 if (var0 && var1)
10242 tree tmp0 = var0;
10243 tree tmp1 = var1;
10245 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10246 tmp0 = TREE_OPERAND (tmp0, 0);
10247 if (CONVERT_EXPR_P (tmp0)
10248 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10249 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10250 <= TYPE_PRECISION (atype)))
10251 tmp0 = TREE_OPERAND (tmp0, 0);
10252 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10253 tmp1 = TREE_OPERAND (tmp1, 0);
10254 if (CONVERT_EXPR_P (tmp1)
10255 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10256 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10257 <= TYPE_PRECISION (atype)))
10258 tmp1 = TREE_OPERAND (tmp1, 0);
10259 /* The only case we can still associate with two variables
10260 is if they are the same, modulo negation and bit-pattern
10261 preserving conversions. */
10262 if (!operand_equal_p (tmp0, tmp1, 0))
10263 ok = false;
10267 /* Only do something if we found more than two objects. Otherwise,
10268 nothing has changed and we risk infinite recursion. */
10269 if (ok
10270 && (2 < ((var0 != 0) + (var1 != 0)
10271 + (con0 != 0) + (con1 != 0)
10272 + (lit0 != 0) + (lit1 != 0)
10273 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10275 bool any_overflows = false;
10276 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10277 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10278 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10279 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10280 var0 = associate_trees (loc, var0, var1, code, atype);
10281 con0 = associate_trees (loc, con0, con1, code, atype);
10282 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10283 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10284 code, atype);
10286 /* Preserve the MINUS_EXPR if the negative part of the literal is
10287 greater than the positive part. Otherwise, the multiplicative
10288 folding code (i.e. extract_muldiv) may be fooled in case
10289 unsigned constants are subtracted, as in the following
10290 example: ((X*2 + 4) - 8U)/2. */
10291 if (minus_lit0 && lit0)
10293 if (TREE_CODE (lit0) == INTEGER_CST
10294 && TREE_CODE (minus_lit0) == INTEGER_CST
10295 && tree_int_cst_lt (lit0, minus_lit0))
10297 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10298 MINUS_EXPR, atype);
10299 lit0 = 0;
10301 else
10303 lit0 = associate_trees (loc, lit0, minus_lit0,
10304 MINUS_EXPR, atype);
10305 minus_lit0 = 0;
10309 /* Don't introduce overflows through reassociation. */
10310 if (!any_overflows
10311 && ((lit0 && TREE_OVERFLOW (lit0))
10312 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10313 return NULL_TREE;
10315 if (minus_lit0)
10317 if (con0 == 0)
10318 return
10319 fold_convert_loc (loc, type,
10320 associate_trees (loc, var0, minus_lit0,
10321 MINUS_EXPR, atype));
10322 else
10324 con0 = associate_trees (loc, con0, minus_lit0,
10325 MINUS_EXPR, atype);
10326 return
10327 fold_convert_loc (loc, type,
10328 associate_trees (loc, var0, con0,
10329 PLUS_EXPR, atype));
10333 con0 = associate_trees (loc, con0, lit0, code, atype);
10334 return
10335 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10336 code, atype));
10340 return NULL_TREE;
10342 case MINUS_EXPR:
10343 /* Pointer simplifications for subtraction, simple reassociations. */
10344 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10346 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10347 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10348 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10350 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10351 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10352 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10353 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10354 return fold_build2_loc (loc, PLUS_EXPR, type,
10355 fold_build2_loc (loc, MINUS_EXPR, type,
10356 arg00, arg10),
10357 fold_build2_loc (loc, MINUS_EXPR, type,
10358 arg01, arg11));
10360 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10361 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10363 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10364 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10365 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10366 fold_convert_loc (loc, type, arg1));
10367 if (tmp)
10368 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10370 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10371 simplifies. */
10372 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10374 tree arg10 = fold_convert_loc (loc, type,
10375 TREE_OPERAND (arg1, 0));
10376 tree arg11 = fold_convert_loc (loc, type,
10377 TREE_OPERAND (arg1, 1));
10378 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10379 fold_convert_loc (loc, type, arg0),
10380 arg10);
10381 if (tmp)
10382 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10385 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10386 if (TREE_CODE (arg0) == NEGATE_EXPR
10387 && negate_expr_p (arg1)
10388 && reorder_operands_p (arg0, arg1))
10389 return fold_build2_loc (loc, MINUS_EXPR, type,
10390 fold_convert_loc (loc, type,
10391 negate_expr (arg1)),
10392 fold_convert_loc (loc, type,
10393 TREE_OPERAND (arg0, 0)));
10394 /* Convert -A - 1 to ~A. */
10395 if (TREE_CODE (arg0) == NEGATE_EXPR
10396 && integer_each_onep (arg1)
10397 && !TYPE_OVERFLOW_TRAPS (type))
10398 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10399 fold_convert_loc (loc, type,
10400 TREE_OPERAND (arg0, 0)));
10402 /* Convert -1 - A to ~A. */
10403 if (TREE_CODE (type) != COMPLEX_TYPE
10404 && integer_all_onesp (arg0))
10405 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10408 /* X - (X / Y) * Y is X % Y. */
10409 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10410 && TREE_CODE (arg1) == MULT_EXPR
10411 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10412 && operand_equal_p (arg0,
10413 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10414 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10415 TREE_OPERAND (arg1, 1), 0))
10416 return
10417 fold_convert_loc (loc, type,
10418 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10419 arg0, TREE_OPERAND (arg1, 1)));
10421 if (! FLOAT_TYPE_P (type))
10423 if (integer_zerop (arg0))
10424 return negate_expr (fold_convert_loc (loc, type, arg1));
10426 /* Fold A - (A & B) into ~B & A. */
10427 if (!TREE_SIDE_EFFECTS (arg0)
10428 && TREE_CODE (arg1) == BIT_AND_EXPR)
10430 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10432 tree arg10 = fold_convert_loc (loc, type,
10433 TREE_OPERAND (arg1, 0));
10434 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10435 fold_build1_loc (loc, BIT_NOT_EXPR,
10436 type, arg10),
10437 fold_convert_loc (loc, type, arg0));
10439 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10441 tree arg11 = fold_convert_loc (loc,
10442 type, TREE_OPERAND (arg1, 1));
10443 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10444 fold_build1_loc (loc, BIT_NOT_EXPR,
10445 type, arg11),
10446 fold_convert_loc (loc, type, arg0));
10450 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10451 any power of 2 minus 1. */
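/* E.g. with B = 7: "(A & ~7) - (A & 7)" becomes "(A ^ 7) - 7"; both
   sides equal A minus twice its low three bits, so the identity holds
   for any A when B is a power of 2 minus 1.  */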
10452 if (TREE_CODE (arg0) == BIT_AND_EXPR
10453 && TREE_CODE (arg1) == BIT_AND_EXPR
10454 && operand_equal_p (TREE_OPERAND (arg0, 0),
10455 TREE_OPERAND (arg1, 0), 0))
10457 tree mask0 = TREE_OPERAND (arg0, 1);
10458 tree mask1 = TREE_OPERAND (arg1, 1);
10459 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10461 if (operand_equal_p (tem, mask1, 0))
10463 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10464 TREE_OPERAND (arg0, 0), mask1);
10465 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10470 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10471 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10472 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10474 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10475 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10476 (-ARG1 + ARG0) reduces to -ARG1. */
10477 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10478 return negate_expr (fold_convert_loc (loc, type, arg1));
10480 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10481 __complex__ ( x, -y ). This is not the same for SNaNs or if
10482 signed zeros are involved. */
10483 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10484 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10485 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10487 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10488 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10489 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10490 bool arg0rz = false, arg0iz = false;
10491 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10492 || (arg0i && (arg0iz = real_zerop (arg0i))))
10494 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10495 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10496 if (arg0rz && arg1i && real_zerop (arg1i))
10498 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10499 arg1r ? arg1r
10500 : build1 (REALPART_EXPR, rtype, arg1));
10501 tree ip = arg0i ? arg0i
10502 : build1 (IMAGPART_EXPR, rtype, arg0);
10503 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10505 else if (arg0iz && arg1r && real_zerop (arg1r))
10507 tree rp = arg0r ? arg0r
10508 : build1 (REALPART_EXPR, rtype, arg0);
10509 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10510 arg1i ? arg1i
10511 : build1 (IMAGPART_EXPR, rtype, arg1));
10512 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10517 /* A - B -> A + (-B) if B is easily negatable. */
10518 if (negate_expr_p (arg1)
10519 && !TYPE_OVERFLOW_SANITIZED (type)
10520 && ((FLOAT_TYPE_P (type)
10521 /* Avoid this transformation if B is a positive REAL_CST. */
10522 && (TREE_CODE (arg1) != REAL_CST
10523 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10524 || INTEGRAL_TYPE_P (type)))
10525 return fold_build2_loc (loc, PLUS_EXPR, type,
10526 fold_convert_loc (loc, type, arg0),
10527 fold_convert_loc (loc, type,
10528 negate_expr (arg1)));
10530 /* Try folding difference of addresses. */
10532 HOST_WIDE_INT diff;
10534 if ((TREE_CODE (arg0) == ADDR_EXPR
10535 || TREE_CODE (arg1) == ADDR_EXPR)
10536 && ptr_difference_const (arg0, arg1, &diff))
10537 return build_int_cst_type (type, diff);
10540 /* Fold &a[i] - &a[j] to i-j. */
10541 if (TREE_CODE (arg0) == ADDR_EXPR
10542 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10543 && TREE_CODE (arg1) == ADDR_EXPR
10544 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10546 tree tem = fold_addr_of_array_ref_difference (loc, type,
10547 TREE_OPERAND (arg0, 0),
10548 TREE_OPERAND (arg1, 0));
10549 if (tem)
10550 return tem;
10553 if (FLOAT_TYPE_P (type)
10554 && flag_unsafe_math_optimizations
10555 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10556 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10557 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10558 return tem;
10560 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10561 one. Make sure the type is not saturating and has the signedness of
10562 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10563 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10564 if ((TREE_CODE (arg0) == MULT_EXPR
10565 || TREE_CODE (arg1) == MULT_EXPR)
10566 && !TYPE_SATURATING (type)
10567 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10568 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10569 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10571 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10572 if (tem)
10573 return tem;
10576 goto associate;
10578 case MULT_EXPR:
10579 /* (-A) * (-B) -> A * B */
10580 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10581 return fold_build2_loc (loc, MULT_EXPR, type,
10582 fold_convert_loc (loc, type,
10583 TREE_OPERAND (arg0, 0)),
10584 fold_convert_loc (loc, type,
10585 negate_expr (arg1)));
10586 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10587 return fold_build2_loc (loc, MULT_EXPR, type,
10588 fold_convert_loc (loc, type,
10589 negate_expr (arg0)),
10590 fold_convert_loc (loc, type,
10591 TREE_OPERAND (arg1, 0)));
10593 if (! FLOAT_TYPE_P (type))
10595 /* Transform x * -1 into -x. Make sure to do the negation
10596 on the original operand with conversions not stripped
10597 because we can only strip non-sign-changing conversions. */
10598 if (integer_minus_onep (arg1))
10599 return fold_convert_loc (loc, type, negate_expr (op0));
10600 /* Transform x * -C into -x * C if x is easily negatable. */
10601 if (TREE_CODE (arg1) == INTEGER_CST
10602 && tree_int_cst_sgn (arg1) == -1
10603 && negate_expr_p (arg0)
10604 && (tem = negate_expr (arg1)) != arg1
10605 && !TREE_OVERFLOW (tem))
10606 return fold_build2_loc (loc, MULT_EXPR, type,
10607 fold_convert_loc (loc, type,
10608 negate_expr (arg0)),
10609 tem);
10611 /* (a * (1 << b)) is (a << b) */
10612 if (TREE_CODE (arg1) == LSHIFT_EXPR
10613 && integer_onep (TREE_OPERAND (arg1, 0)))
10614 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10615 TREE_OPERAND (arg1, 1));
10616 if (TREE_CODE (arg0) == LSHIFT_EXPR
10617 && integer_onep (TREE_OPERAND (arg0, 0)))
10618 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10619 TREE_OPERAND (arg0, 1));
10621 /* (A + A) * C -> A * 2 * C */
10622 if (TREE_CODE (arg0) == PLUS_EXPR
10623 && TREE_CODE (arg1) == INTEGER_CST
10624 && operand_equal_p (TREE_OPERAND (arg0, 0),
10625 TREE_OPERAND (arg0, 1), 0))
10626 return fold_build2_loc (loc, MULT_EXPR, type,
10627 omit_one_operand_loc (loc, type,
10628 TREE_OPERAND (arg0, 0),
10629 TREE_OPERAND (arg0, 1)),
10630 fold_build2_loc (loc, MULT_EXPR, type,
10631 build_int_cst (type, 2), arg1));
10633 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10634 sign-changing only. */
10635 if (TREE_CODE (arg1) == INTEGER_CST
10636 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10637 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10638 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10640 strict_overflow_p = false;
10641 if (TREE_CODE (arg1) == INTEGER_CST
10642 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10643 &strict_overflow_p)))
10645 if (strict_overflow_p)
10646 fold_overflow_warning (("assuming signed overflow does not "
10647 "occur when simplifying "
10648 "multiplication"),
10649 WARN_STRICT_OVERFLOW_MISC);
10650 return fold_convert_loc (loc, type, tem);
10653 /* Optimize z * conj(z) for integer complex numbers. */
10654 if (TREE_CODE (arg0) == CONJ_EXPR
10655 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10656 return fold_mult_zconjz (loc, type, arg1);
10657 if (TREE_CODE (arg1) == CONJ_EXPR
10658 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10659 return fold_mult_zconjz (loc, type, arg0);
10661 else
10663 /* Maybe fold x * 0 to 0. The expressions aren't the same
10664 when x is NaN, since x * 0 is also NaN. Nor are they the
10665 same in modes with signed zeros, since multiplying a
10666 negative value by 0 gives -0, not +0. */
10667 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10668 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10669 && real_zerop (arg1))
10670 return omit_one_operand_loc (loc, type, arg1, arg0);
10671 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10672 Likewise for complex arithmetic with signed zeros. */
10673 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10674 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10675 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10676 && real_onep (arg1))
10677 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10679 /* Transform x * -1.0 into -x. */
10680 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10681 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10682 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10683 && real_minus_onep (arg1))
10684 return fold_convert_loc (loc, type, negate_expr (arg0));
10686 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10687 the result for floating point types due to rounding, so it is applied
10688 only if -fassociative-math was specified. */
10689 if (flag_associative_math
10690 && TREE_CODE (arg0) == RDIV_EXPR
10691 && TREE_CODE (arg1) == REAL_CST
10692 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10694 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10695 arg1);
10696 if (tem)
10697 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10698 TREE_OPERAND (arg0, 1));
10701 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10702 if (operand_equal_p (arg0, arg1, 0))
10704 tree tem = fold_strip_sign_ops (arg0);
10705 if (tem != NULL_TREE)
10707 tem = fold_convert_loc (loc, type, tem);
10708 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10712 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10713 This is not the same for NaNs or if signed zeros are
10714 involved. */
10715 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10716 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10717 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10718 && TREE_CODE (arg1) == COMPLEX_CST
10719 && real_zerop (TREE_REALPART (arg1)))
10721 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10722 if (real_onep (TREE_IMAGPART (arg1)))
10723 return
10724 fold_build2_loc (loc, COMPLEX_EXPR, type,
10725 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10726 rtype, arg0)),
10727 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10728 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10729 return
10730 fold_build2_loc (loc, COMPLEX_EXPR, type,
10731 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10732 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10733 rtype, arg0)));
10736 /* Optimize z * conj(z) for floating point complex numbers.
10737 Guarded by flag_unsafe_math_optimizations as non-finite
10738 imaginary components don't produce scalar results. */
10739 if (flag_unsafe_math_optimizations
10740 && TREE_CODE (arg0) == CONJ_EXPR
10741 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10742 return fold_mult_zconjz (loc, type, arg1);
10743 if (flag_unsafe_math_optimizations
10744 && TREE_CODE (arg1) == CONJ_EXPR
10745 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10746 return fold_mult_zconjz (loc, type, arg0);
10748 if (flag_unsafe_math_optimizations)
10750 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10751 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10753 /* Optimizations of root(...)*root(...). */
10754 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10756 tree rootfn, arg;
10757 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10758 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10760 /* Optimize sqrt(x)*sqrt(x) as x. */
10761 if (BUILTIN_SQRT_P (fcode0)
10762 && operand_equal_p (arg00, arg10, 0)
10763 && ! HONOR_SNANS (TYPE_MODE (type)))
10764 return arg00;
10766 /* Optimize root(x)*root(y) as root(x*y). */
10767 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10768 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10769 return build_call_expr_loc (loc, rootfn, 1, arg);
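      /* For example, sqrt (x) * sqrt (y) becomes sqrt (x * y), so
         sqrt (2.0) * sqrt (8.0) folds to sqrt (16.0), i.e. 4.0.  This is
         only safe under -funsafe-math-optimizations, e.g. when x or y
         might be negative.  */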
10772 /* Optimize expN(x)*expN(y) as expN(x+y). */
10773 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10775 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10776 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10777 CALL_EXPR_ARG (arg0, 0),
10778 CALL_EXPR_ARG (arg1, 0));
10779 return build_call_expr_loc (loc, expfn, 1, arg);
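      /* For example, exp (a) * exp (b) folds to exp (a + b), replacing
         two calls and a multiply with one call and an add.  */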
10782 /* Optimizations of pow(...)*pow(...). */
10783 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10784 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10785 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10787 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10788 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10789 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10790 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10792 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10793 if (operand_equal_p (arg01, arg11, 0))
10795 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10796 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10797 arg00, arg10);
10798 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10801 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10802 if (operand_equal_p (arg00, arg10, 0))
10804 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10805 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10806 arg01, arg11);
10807 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
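      /* For example, pow (x, 2.0) * pow (y, 2.0) folds to
         pow (x * y, 2.0), and pow (x, 2.0) * pow (x, 3.0) folds to
         pow (x, 5.0).  */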
10811 /* Optimize tan(x)*cos(x) as sin(x). */
10812 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10813 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10814 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10815 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10816 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10817 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10818 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10819 CALL_EXPR_ARG (arg1, 0), 0))
10821 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10823 if (sinfn != NULL_TREE)
10824 return build_call_expr_loc (loc, sinfn, 1,
10825 CALL_EXPR_ARG (arg0, 0));
10828 /* Optimize x*pow(x,c) as pow(x,c+1). */
10829 if (fcode1 == BUILT_IN_POW
10830 || fcode1 == BUILT_IN_POWF
10831 || fcode1 == BUILT_IN_POWL)
10833 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10834 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10835 if (TREE_CODE (arg11) == REAL_CST
10836 && !TREE_OVERFLOW (arg11)
10837 && operand_equal_p (arg0, arg10, 0))
10839 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10840 REAL_VALUE_TYPE c;
10841 tree arg;
10843 c = TREE_REAL_CST (arg11);
10844 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10845 arg = build_real (type, c);
10846 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10850 /* Optimize pow(x,c)*x as pow(x,c+1). */
10851 if (fcode0 == BUILT_IN_POW
10852 || fcode0 == BUILT_IN_POWF
10853 || fcode0 == BUILT_IN_POWL)
10855 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10856 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10857 if (TREE_CODE (arg01) == REAL_CST
10858 && !TREE_OVERFLOW (arg01)
10859 && operand_equal_p (arg1, arg00, 0))
10861 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10862 REAL_VALUE_TYPE c;
10863 tree arg;
10865 c = TREE_REAL_CST (arg01);
10866 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10867 arg = build_real (type, c);
10868 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10872 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10873 if (!in_gimple_form
10874 && optimize
10875 && operand_equal_p (arg0, arg1, 0))
10877 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10879 if (powfn)
10881 tree arg = build_real (type, dconst2);
10882 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10887 goto associate;
10889 case BIT_IOR_EXPR:
10890 bit_ior:
10891 /* ~X | X is -1. */
10892 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10893 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10895 t1 = build_zero_cst (type);
10896 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10897 return omit_one_operand_loc (loc, type, t1, arg1);
10900 /* X | ~X is -1. */
10901 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10902 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10904 t1 = build_zero_cst (type);
10905 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10906 return omit_one_operand_loc (loc, type, t1, arg0);
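      /* For example, with an 8-bit x == 0x5A, ~x | x == 0xA5 | 0x5A == 0xFF,
         i.e. all bits set (-1 in a signed type).  */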
10909 /* Canonicalize (X & C1) | C2. */
10910 if (TREE_CODE (arg0) == BIT_AND_EXPR
10911 && TREE_CODE (arg1) == INTEGER_CST
10912 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10914 int width = TYPE_PRECISION (type), w;
10915 wide_int c1 = TREE_OPERAND (arg0, 1);
10916 wide_int c2 = arg1;
10918 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10919 if ((c1 & c2) == c1)
10920 return omit_one_operand_loc (loc, type, arg1,
10921 TREE_OPERAND (arg0, 0));
10923 wide_int msk = wi::mask (width, false,
10924 TYPE_PRECISION (TREE_TYPE (arg1)));
10926 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10927 if (msk.and_not (c1 | c2) == 0)
10928 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10929 TREE_OPERAND (arg0, 0), arg1);
10931 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10932 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10933 mode which allows further optimizations. */
10934 c1 &= msk;
10935 c2 &= msk;
10936 wide_int c3 = c1.and_not (c2);
10937 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10939 wide_int mask = wi::mask (w, false,
10940 TYPE_PRECISION (type));
10941 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10943 c3 = mask;
10944 break;
10948 if (c3 != c1)
10949 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10950 fold_build2_loc (loc, BIT_AND_EXPR, type,
10951 TREE_OPERAND (arg0, 0),
10952 wide_int_to_tree (type,
10953 c3)),
10954 arg1);
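      /* For example, with 8-bit operands, (x & 0xF0) | 0x0F has
         C1 | C2 == ~0 and so folds to x | 0x0F, while (x & 0x0F) | 0xFF
         has (C1 & C2) == C1 and so folds to 0xFF.  */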
10957 /* (X & ~Y) | (~X & Y) is X ^ Y */
10958 if (TREE_CODE (arg0) == BIT_AND_EXPR
10959 && TREE_CODE (arg1) == BIT_AND_EXPR)
10961 tree a0, a1, l0, l1, n0, n1;
10963 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10964 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10966 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10967 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10969 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10970 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10972 if ((operand_equal_p (n0, a0, 0)
10973 && operand_equal_p (n1, a1, 0))
10974 || (operand_equal_p (n0, a1, 0)
10975 && operand_equal_p (n1, a0, 0)))
10976 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10979 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10980 if (t1 != NULL_TREE)
10981 return t1;
10983 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10985 This results in more efficient code for machines without a NAND
10986 instruction. Combine will canonicalize to the first form
10987 which will allow use of NAND instructions provided by the
10988 backend if they exist. */
10989 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10990 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10992 return
10993 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10994 build2 (BIT_AND_EXPR, type,
10995 fold_convert_loc (loc, type,
10996 TREE_OPERAND (arg0, 0)),
10997 fold_convert_loc (loc, type,
10998 TREE_OPERAND (arg1, 0))));
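      /* This is just De Morgan's law: ~a | ~b == ~(a & b).  */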
11001 /* See if this can be simplified into a rotate first. If that
11002 is unsuccessful continue in the association code. */
11003 goto bit_rotate;
11005 case BIT_XOR_EXPR:
11006 /* ~X ^ X is -1. */
11007 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11008 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11010 t1 = build_zero_cst (type);
11011 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11012 return omit_one_operand_loc (loc, type, t1, arg1);
11015 /* X ^ ~X is -1. */
11016 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11017 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11019 t1 = build_zero_cst (type);
11020 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11021 return omit_one_operand_loc (loc, type, t1, arg0);
11024 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11025 with a constant, and the two constants have no bits in common,
11026 we should treat this as a BIT_IOR_EXPR since this may produce more
11027 simplifications. */
11028 if (TREE_CODE (arg0) == BIT_AND_EXPR
11029 && TREE_CODE (arg1) == BIT_AND_EXPR
11030 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11031 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11032 && wi::bit_and (TREE_OPERAND (arg0, 1),
11033 TREE_OPERAND (arg1, 1)) == 0)
11035 code = BIT_IOR_EXPR;
11036 goto bit_ior;
11039 /* (X | Y) ^ X -> Y & ~X */
11040 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11041 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11043 tree t2 = TREE_OPERAND (arg0, 1);
11044 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11045 arg1);
11046 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11047 fold_convert_loc (loc, type, t2),
11048 fold_convert_loc (loc, type, t1));
11049 return t1;
11052 /* (Y | X) ^ X -> Y & ~X */
11053 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11054 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11056 tree t2 = TREE_OPERAND (arg0, 0);
11057 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11058 arg1);
11059 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11060 fold_convert_loc (loc, type, t2),
11061 fold_convert_loc (loc, type, t1));
11062 return t1;
11065 /* X ^ (X | Y) -> Y & ~X */
11066 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11067 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11069 tree t2 = TREE_OPERAND (arg1, 1);
11070 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11071 arg0);
11072 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11073 fold_convert_loc (loc, type, t2),
11074 fold_convert_loc (loc, type, t1));
11075 return t1;
11078 /* X ^ (Y | X) -> Y & ~X */
11079 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11080 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11082 tree t2 = TREE_OPERAND (arg1, 0);
11083 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11084 arg0);
11085 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11086 fold_convert_loc (loc, type, t2),
11087 fold_convert_loc (loc, type, t1));
11088 return t1;
11091 /* Convert ~X ^ ~Y to X ^ Y. */
11092 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11093 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11094 return fold_build2_loc (loc, code, type,
11095 fold_convert_loc (loc, type,
11096 TREE_OPERAND (arg0, 0)),
11097 fold_convert_loc (loc, type,
11098 TREE_OPERAND (arg1, 0)));
11100 /* Convert ~X ^ C to X ^ ~C. */
11101 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11102 && TREE_CODE (arg1) == INTEGER_CST)
11103 return fold_build2_loc (loc, code, type,
11104 fold_convert_loc (loc, type,
11105 TREE_OPERAND (arg0, 0)),
11106 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11108 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11109 if (TREE_CODE (arg0) == BIT_AND_EXPR
11110 && INTEGRAL_TYPE_P (type)
11111 && integer_onep (TREE_OPERAND (arg0, 1))
11112 && integer_onep (arg1))
11113 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11114 build_zero_cst (TREE_TYPE (arg0)));
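      /* For example, (x & 1) ^ 1 tests for an even x, which is exactly
         (x & 1) == 0.  */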
11116 /* Fold (X & Y) ^ Y as ~X & Y. */
11117 if (TREE_CODE (arg0) == BIT_AND_EXPR
11118 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11120 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11121 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11122 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11123 fold_convert_loc (loc, type, arg1));
11125 /* Fold (X & Y) ^ X as ~Y & X. */
11126 if (TREE_CODE (arg0) == BIT_AND_EXPR
11127 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11128 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11130 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11131 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11132 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11133 fold_convert_loc (loc, type, arg1));
11135 /* Fold X ^ (X & Y) as X & ~Y. */
11136 if (TREE_CODE (arg1) == BIT_AND_EXPR
11137 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11139 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11140 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11141 fold_convert_loc (loc, type, arg0),
11142 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11144 /* Fold X ^ (Y & X) as ~Y & X. */
11145 if (TREE_CODE (arg1) == BIT_AND_EXPR
11146 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11147 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11149 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11150 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11151 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11152 fold_convert_loc (loc, type, arg0));
11155 /* See if this can be simplified into a rotate first. If that
11156 is unsuccessful continue in the association code. */
11157 goto bit_rotate;
11159 case BIT_AND_EXPR:
11160 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11161 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11162 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11163 || (TREE_CODE (arg0) == EQ_EXPR
11164 && integer_zerop (TREE_OPERAND (arg0, 1))))
11165 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11166 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11168 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11169 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11170 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11171 || (TREE_CODE (arg1) == EQ_EXPR
11172 && integer_zerop (TREE_OPERAND (arg1, 1))))
11173 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11174 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11176 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11177 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11178 && INTEGRAL_TYPE_P (type)
11179 && integer_onep (TREE_OPERAND (arg0, 1))
11180 && integer_onep (arg1))
11182 tree tem2;
11183 tem = TREE_OPERAND (arg0, 0);
11184 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11185 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11186 tem, tem2);
11187 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11188 build_zero_cst (TREE_TYPE (tem)));
11190 /* Fold ~X & 1 as (X & 1) == 0. */
11191 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11192 && INTEGRAL_TYPE_P (type)
11193 && integer_onep (arg1))
11195 tree tem2;
11196 tem = TREE_OPERAND (arg0, 0);
11197 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11198 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11199 tem, tem2);
11200 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11201 build_zero_cst (TREE_TYPE (tem)));
11203 /* Fold !X & 1 as X == 0. */
11204 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11205 && integer_onep (arg1))
11207 tem = TREE_OPERAND (arg0, 0);
11208 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11209 build_zero_cst (TREE_TYPE (tem)));
11212 /* Fold (X ^ Y) & Y as ~X & Y. */
11213 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11214 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11216 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11217 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11218 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11219 fold_convert_loc (loc, type, arg1));
11221 /* Fold (X ^ Y) & X as ~Y & X. */
11222 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11223 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11224 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11226 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11227 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11228 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11229 fold_convert_loc (loc, type, arg1));
11231 /* Fold X & (X ^ Y) as X & ~Y. */
11232 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11233 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11235 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11236 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11237 fold_convert_loc (loc, type, arg0),
11238 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11240 /* Fold X & (Y ^ X) as ~Y & X. */
11241 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11242 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11243 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11245 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11246 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11247 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11248 fold_convert_loc (loc, type, arg0));
11251 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11252 multiple of 1 << CST. */
11253 if (TREE_CODE (arg1) == INTEGER_CST)
11255 wide_int cst1 = arg1;
11256 wide_int ncst1 = -cst1;
11257 if ((cst1 & ncst1) == ncst1
11258 && multiple_of_p (type, arg0,
11259 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11260 return fold_convert_loc (loc, type, arg0);
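      /* For example, (x * 8) & -8 folds to x * 8: the mask -8 clears
         only the low three bits, which are already zero in any
         multiple of 8.  */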
11263 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11264 bits from CST2. */
11265 if (TREE_CODE (arg1) == INTEGER_CST
11266 && TREE_CODE (arg0) == MULT_EXPR
11267 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11269 wide_int warg1 = arg1;
11270 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11272 if (masked == 0)
11273 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11274 arg0, arg1);
11275 else if (masked != warg1)
11277 /* Avoid the transform if arg1 is a mask of some
11278 mode which allows further optimizations. */
11279 int pop = wi::popcount (warg1);
11280 if (!(pop >= BITS_PER_UNIT
11281 && exact_log2 (pop) != -1
11282 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11283 return fold_build2_loc (loc, code, type, op0,
11284 wide_int_to_tree (type, masked));
11288 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11289 ((A & N) + B) & M -> (A + B) & M
11290 Similarly if (N & M) == 0,
11291 ((A | N) + B) & M -> (A + B) & M
11292 and for - instead of + (or unary - instead of +)
11293 and/or ^ instead of |.
11294 If B is constant and (B & M) == 0, fold into A & M. */
11295 if (TREE_CODE (arg1) == INTEGER_CST)
11297 wide_int cst1 = arg1;
11298 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11299 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11300 && (TREE_CODE (arg0) == PLUS_EXPR
11301 || TREE_CODE (arg0) == MINUS_EXPR
11302 || TREE_CODE (arg0) == NEGATE_EXPR)
11303 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11304 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11306 tree pmop[2];
11307 int which = 0;
11308 wide_int cst0;
11310 /* Now we know that arg0 is (C + D) or (C - D) or
11311 -C and arg1 (M) is == (1LL << cst) - 1.
11312 Store C into PMOP[0] and D into PMOP[1]. */
11313 pmop[0] = TREE_OPERAND (arg0, 0);
11314 pmop[1] = NULL;
11315 if (TREE_CODE (arg0) != NEGATE_EXPR)
11317 pmop[1] = TREE_OPERAND (arg0, 1);
11318 which = 1;
11321 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11322 which = -1;
11324 for (; which >= 0; which--)
11325 switch (TREE_CODE (pmop[which]))
11327 case BIT_AND_EXPR:
11328 case BIT_IOR_EXPR:
11329 case BIT_XOR_EXPR:
11330 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11331 != INTEGER_CST)
11332 break;
11333 cst0 = TREE_OPERAND (pmop[which], 1);
11334 cst0 &= cst1;
11335 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11337 if (cst0 != cst1)
11338 break;
11340 else if (cst0 != 0)
11341 break;
11342 /* If C or D is of the form (A & N) where
11343 (N & M) == M, or of the form (A | N) or
11344 (A ^ N) where (N & M) == 0, replace it with A. */
11345 pmop[which] = TREE_OPERAND (pmop[which], 0);
11346 break;
11347 case INTEGER_CST:
11348 /* If C or D is a N where (N & M) == 0, it can be
11349 omitted (assumed 0). */
11350 if ((TREE_CODE (arg0) == PLUS_EXPR
11351 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11352 && (cst1 & pmop[which]) == 0)
11353 pmop[which] = NULL;
11354 break;
11355 default:
11356 break;
11359 /* Only build anything new if we optimized one or both arguments
11360 above. */
11361 if (pmop[0] != TREE_OPERAND (arg0, 0)
11362 || (TREE_CODE (arg0) != NEGATE_EXPR
11363 && pmop[1] != TREE_OPERAND (arg0, 1)))
11365 tree utype = TREE_TYPE (arg0);
11366 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11368 /* Perform the operations in a type that has defined
11369 overflow behavior. */
11370 utype = unsigned_type_for (TREE_TYPE (arg0));
11371 if (pmop[0] != NULL)
11372 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11373 if (pmop[1] != NULL)
11374 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11377 if (TREE_CODE (arg0) == NEGATE_EXPR)
11378 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11379 else if (TREE_CODE (arg0) == PLUS_EXPR)
11381 if (pmop[0] != NULL && pmop[1] != NULL)
11382 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11383 pmop[0], pmop[1]);
11384 else if (pmop[0] != NULL)
11385 tem = pmop[0];
11386 else if (pmop[1] != NULL)
11387 tem = pmop[1];
11388 else
11389 return build_int_cst (type, 0);
11391 else if (pmop[0] == NULL)
11392 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11393 else
11394 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11395 pmop[0], pmop[1]);
11396 /* TEM is now the new binary +, - or unary - replacement. */
11397 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11398 fold_convert_loc (loc, utype, arg1));
11399 return fold_convert_loc (loc, type, tem);
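      /* For example, with M == 0x0F: ((a & 0xFF) + b) & 0x0F folds to
         (a + b) & 0x0F because 0xFF & 0x0F == 0x0F, and
         ((a | 0x30) - b) & 0x0F folds to (a - b) & 0x0F because
         0x30 & 0x0F == 0.  */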
11404 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11405 if (t1 != NULL_TREE)
11406 return t1;
11407 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11408 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11409 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11411 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11413 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11414 if (mask == -1)
11415 return
11416 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11419 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11421 This results in more efficient code for machines without a NOR
11422 instruction. Combine will canonicalize to the first form
11423 which will allow use of NOR instructions provided by the
11424 backend if they exist. */
11425 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11426 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11428 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11429 build2 (BIT_IOR_EXPR, type,
11430 fold_convert_loc (loc, type,
11431 TREE_OPERAND (arg0, 0)),
11432 fold_convert_loc (loc, type,
11433 TREE_OPERAND (arg1, 0))));
11436 /* If arg0 is derived from the address of an object or function, we may
11437 be able to fold this expression using the object or function's
11438 alignment. */
11439 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11441 unsigned HOST_WIDE_INT modulus, residue;
11442 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11444 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11445 integer_onep (arg1));
11447 /* This works because modulus is a power of 2. If this weren't the
11448 case, we'd have to replace it by its greatest power-of-2
11449 divisor: modulus & -modulus. */
11450 if (low < modulus)
11451 return build_int_cst (type, residue & low);
11454 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11455 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11456 if the new mask might be further optimized. */
11457 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11458 || TREE_CODE (arg0) == RSHIFT_EXPR)
11459 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11460 && TREE_CODE (arg1) == INTEGER_CST
11461 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11462 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11463 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11464 < TYPE_PRECISION (TREE_TYPE (arg0))))
11466 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11467 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11468 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11469 tree shift_type = TREE_TYPE (arg0);
11471 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11472 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11473 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11474 && TYPE_PRECISION (TREE_TYPE (arg0))
11475 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11477 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11478 tree arg00 = TREE_OPERAND (arg0, 0);
11479 /* See if more bits can be proven as zero because of
11480 zero extension. */
11481 if (TREE_CODE (arg00) == NOP_EXPR
11482 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11484 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11485 if (TYPE_PRECISION (inner_type)
11486 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11487 && TYPE_PRECISION (inner_type) < prec)
11489 prec = TYPE_PRECISION (inner_type);
11490 /* See if we can shorten the right shift. */
11491 if (shiftc < prec)
11492 shift_type = inner_type;
11493 /* Otherwise X >> C1 is all zeros, so we'll optimize
11494 it into (X, 0) later on by making sure zerobits
11495 is all ones. */
11498 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11499 if (shiftc < prec)
11501 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11502 zerobits <<= prec - shiftc;
11504 /* For an arithmetic shift, if the sign bit could be set, zerobits
11505 can actually contain sign bits, so no transformation is
11506 possible unless MASK masks them all away. In that
11507 case the shift needs to be converted into a logical shift. */
11508 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11509 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11511 if ((mask & zerobits) == 0)
11512 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11513 else
11514 zerobits = 0;
11518 /* ((X << 16) & 0xff00) is (X, 0). */
11519 if ((mask & zerobits) == mask)
11520 return omit_one_operand_loc (loc, type,
11521 build_int_cst (type, 0), arg0);
11523 newmask = mask | zerobits;
11524 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11526 /* Only do the transformation if NEWMASK is some integer
11527 mode's mask. */
11528 for (prec = BITS_PER_UNIT;
11529 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11530 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11531 break;
11532 if (prec < HOST_BITS_PER_WIDE_INT
11533 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11535 tree newmaskt;
11537 if (shift_type != TREE_TYPE (arg0))
11539 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11540 fold_convert_loc (loc, shift_type,
11541 TREE_OPERAND (arg0, 0)),
11542 TREE_OPERAND (arg0, 1));
11543 tem = fold_convert_loc (loc, type, tem);
11545 else
11546 tem = op0;
11547 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11548 if (!tree_int_cst_equal (newmaskt, arg1))
11549 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11554 goto associate;
11556 case RDIV_EXPR:
11557 /* Don't touch a floating-point divide by zero unless the mode
11558 of the constant can represent infinity. */
11559 if (TREE_CODE (arg1) == REAL_CST
11560 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11561 && real_zerop (arg1))
11562 return NULL_TREE;
11564 /* Optimize A / A to 1.0 if we don't care about
11565 NaNs or Infinities. Skip the transformation
11566 for non-real operands. */
11567 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11568 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11569 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11570 && operand_equal_p (arg0, arg1, 0))
11572 tree r = build_real (TREE_TYPE (arg0), dconst1);
11574 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11577 /* The complex version of the above A / A optimization. */
11578 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11579 && operand_equal_p (arg0, arg1, 0))
11581 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11582 if (! HONOR_NANS (TYPE_MODE (elem_type))
11583 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11585 tree r = build_real (elem_type, dconst1);
11586 /* omit_two_operands will call fold_convert for us. */
11587 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11591 /* (-A) / (-B) -> A / B */
11592 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11593 return fold_build2_loc (loc, RDIV_EXPR, type,
11594 TREE_OPERAND (arg0, 0),
11595 negate_expr (arg1));
11596 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11597 return fold_build2_loc (loc, RDIV_EXPR, type,
11598 negate_expr (arg0),
11599 TREE_OPERAND (arg1, 0));
11601 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11602 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11603 && real_onep (arg1))
11604 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11606 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11607 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11608 && real_minus_onep (arg1))
11609 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11610 negate_expr (arg0)));
11612 /* If ARG1 is a constant, we can convert this to a multiply by the
11613 reciprocal. This does not have the same rounding properties,
11614 so only do this if -freciprocal-math. We can actually
11615 always safely do it if ARG1 is a power of two, but it's hard to
11616 tell if it is or not in a portable manner. */
11617 if (optimize
11618 && (TREE_CODE (arg1) == REAL_CST
11619 || (TREE_CODE (arg1) == COMPLEX_CST
11620 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11621 || (TREE_CODE (arg1) == VECTOR_CST
11622 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11624 if (flag_reciprocal_math
11625 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11626 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11627 /* Find the reciprocal if optimizing and the result is exact.
11628 TODO: Complex reciprocal not implemented. */
11629 if (TREE_CODE (arg1) != COMPLEX_CST)
11631 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11633 if (inverse)
11634 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11637 /* Convert A/B/C to A/(B*C). */
11638 if (flag_reciprocal_math
11639 && TREE_CODE (arg0) == RDIV_EXPR)
11640 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11641 fold_build2_loc (loc, MULT_EXPR, type,
11642 TREE_OPERAND (arg0, 1), arg1));
11644 /* Convert A/(B/C) to (A/B)*C. */
11645 if (flag_reciprocal_math
11646 && TREE_CODE (arg1) == RDIV_EXPR)
11647 return fold_build2_loc (loc, MULT_EXPR, type,
11648 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11649 TREE_OPERAND (arg1, 0)),
11650 TREE_OPERAND (arg1, 1));
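      /* For example, under -freciprocal-math, a / b / c folds to
         a / (b * c), and a / (b / c) folds to a / b * c.  */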
11652 /* Convert C1/(X*C2) into (C1/C2)/X. */
11653 if (flag_reciprocal_math
11654 && TREE_CODE (arg1) == MULT_EXPR
11655 && TREE_CODE (arg0) == REAL_CST
11656 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11658 tree tem = const_binop (RDIV_EXPR, arg0,
11659 TREE_OPERAND (arg1, 1));
11660 if (tem)
11661 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11662 TREE_OPERAND (arg1, 0));
11665 if (flag_unsafe_math_optimizations)
11667 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11668 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11670 /* Optimize sin(x)/cos(x) as tan(x). */
11671 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11672 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11673 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11674 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11675 CALL_EXPR_ARG (arg1, 0), 0))
11677 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11679 if (tanfn != NULL_TREE)
11680 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11683 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11684 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11685 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11686 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11687 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11688 CALL_EXPR_ARG (arg1, 0), 0))
11690 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11692 if (tanfn != NULL_TREE)
11694 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11695 CALL_EXPR_ARG (arg0, 0));
11696 return fold_build2_loc (loc, RDIV_EXPR, type,
11697 build_real (type, dconst1), tmp);
11701 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11702 NaNs or Infinities. */
11703 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11704 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11705 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11707 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11708 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11710 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11711 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11712 && operand_equal_p (arg00, arg01, 0))
11714 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11716 if (cosfn != NULL_TREE)
11717 return build_call_expr_loc (loc, cosfn, 1, arg00);
11721 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11722 NaNs or Infinities. */
11723 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11724 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11725 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11727 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11728 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11730 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11731 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11732 && operand_equal_p (arg00, arg01, 0))
11734 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11736 if (cosfn != NULL_TREE)
11738 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11739 return fold_build2_loc (loc, RDIV_EXPR, type,
11740 build_real (type, dconst1),
11741 tmp);
11746 /* Optimize pow(x,c)/x as pow(x,c-1). */
11747 if (fcode0 == BUILT_IN_POW
11748 || fcode0 == BUILT_IN_POWF
11749 || fcode0 == BUILT_IN_POWL)
11751 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11752 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11753 if (TREE_CODE (arg01) == REAL_CST
11754 && !TREE_OVERFLOW (arg01)
11755 && operand_equal_p (arg1, arg00, 0))
11757 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11758 REAL_VALUE_TYPE c;
11759 tree arg;
11761 c = TREE_REAL_CST (arg01);
11762 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11763 arg = build_real (type, c);
11764 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11768 /* Optimize a/root(b/c) into a*root(c/b). */
11769 if (BUILTIN_ROOT_P (fcode1))
11771 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11773 if (TREE_CODE (rootarg) == RDIV_EXPR)
11775 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11776 tree b = TREE_OPERAND (rootarg, 0);
11777 tree c = TREE_OPERAND (rootarg, 1);
11779 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11781 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11782 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11786 /* Optimize x/expN(y) into x*expN(-y). */
11787 if (BUILTIN_EXPONENT_P (fcode1))
11789 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11790 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11791 arg1 = build_call_expr_loc (loc,
11792 expfn, 1,
11793 fold_convert_loc (loc, type, arg));
11794 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11797 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11798 if (fcode1 == BUILT_IN_POW
11799 || fcode1 == BUILT_IN_POWF
11800 || fcode1 == BUILT_IN_POWL)
11802 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11803 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11804 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11805 tree neg11 = fold_convert_loc (loc, type,
11806 negate_expr (arg11));
11807 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11808 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11811 return NULL_TREE;
11813 case TRUNC_DIV_EXPR:
11814 /* Optimize (X & (-A)) / A where A is a power of 2,
11815 to X >> log2(A) */
11816 if (TREE_CODE (arg0) == BIT_AND_EXPR
11817 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11818 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11820 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11821 arg1, TREE_OPERAND (arg0, 1));
11822 if (sum && integer_zerop (sum)) {
11823 tree pow2 = build_int_cst (integer_type_node,
11824 wi::exact_log2 (arg1));
11825 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11826 TREE_OPERAND (arg0, 0), pow2);
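      /* For example, (x & -16) / 16 folds to x >> 4; the division is
         exact because the low four bits of x & -16 are already zero.  */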
11830 /* Fall through */
11832 case FLOOR_DIV_EXPR:
11833 /* Simplify A / (B << N) where A and B are positive and B is
11834 a power of 2, to A >> (N + log2(B)). */
11835 strict_overflow_p = false;
11836 if (TREE_CODE (arg1) == LSHIFT_EXPR
11837 && (TYPE_UNSIGNED (type)
11838 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11840 tree sval = TREE_OPERAND (arg1, 0);
11841 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11843 tree sh_cnt = TREE_OPERAND (arg1, 1);
11844 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11845 wi::exact_log2 (sval));
11847 if (strict_overflow_p)
11848 fold_overflow_warning (("assuming signed overflow does not "
11849 "occur when simplifying A / (B << N)"),
11850 WARN_STRICT_OVERFLOW_MISC);
11852 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11853 sh_cnt, pow2);
11854 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11855 fold_convert_loc (loc, type, arg0), sh_cnt);
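      /* For example, a / (4 << n) folds to a >> (n + 2) when a is
         unsigned or known to be non-negative.  */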
11859 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11860 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11861 if (INTEGRAL_TYPE_P (type)
11862 && TYPE_UNSIGNED (type)
11863 && code == FLOOR_DIV_EXPR)
11864 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11866 /* Fall through */
11868 case ROUND_DIV_EXPR:
11869 case CEIL_DIV_EXPR:
11870 case EXACT_DIV_EXPR:
11871 if (integer_zerop (arg1))
11872 return NULL_TREE;
11873 /* X / -1 is -X. */
11874 if (!TYPE_UNSIGNED (type)
11875 && TREE_CODE (arg1) == INTEGER_CST
11876 && wi::eq_p (arg1, -1))
11877 return fold_convert_loc (loc, type, negate_expr (arg0));
11879 /* Convert -A / -B to A / B when the type is signed and overflow is
11880 undefined. */
11881 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11882 && TREE_CODE (arg0) == NEGATE_EXPR
11883 && negate_expr_p (arg1))
11885 if (INTEGRAL_TYPE_P (type))
11886 fold_overflow_warning (("assuming signed overflow does not occur "
11887 "when distributing negation across "
11888 "division"),
11889 WARN_STRICT_OVERFLOW_MISC);
11890 return fold_build2_loc (loc, code, type,
11891 fold_convert_loc (loc, type,
11892 TREE_OPERAND (arg0, 0)),
11893 fold_convert_loc (loc, type,
11894 negate_expr (arg1)));
11896 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11897 && TREE_CODE (arg1) == NEGATE_EXPR
11898 && negate_expr_p (arg0))
11900 if (INTEGRAL_TYPE_P (type))
11901 fold_overflow_warning (("assuming signed overflow does not occur "
11902 "when distributing negation across "
11903 "division"),
11904 WARN_STRICT_OVERFLOW_MISC);
11905 return fold_build2_loc (loc, code, type,
11906 fold_convert_loc (loc, type,
11907 negate_expr (arg0)),
11908 fold_convert_loc (loc, type,
11909 TREE_OPERAND (arg1, 0)));
11912 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11913 operation, EXACT_DIV_EXPR.
11915 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11916 At one time others generated faster code, but it's not clear whether
11917 they still do after the last round of changes to the DIV code in expmed.c. */
11918 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11919 && multiple_of_p (type, arg0, arg1))
11920 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11922 strict_overflow_p = false;
11923 if (TREE_CODE (arg1) == INTEGER_CST
11924 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11925 &strict_overflow_p)))
11927 if (strict_overflow_p)
11928 fold_overflow_warning (("assuming signed overflow does not occur "
11929 "when simplifying division"),
11930 WARN_STRICT_OVERFLOW_MISC);
11931 return fold_convert_loc (loc, type, tem);
11934 return NULL_TREE;
11936 case CEIL_MOD_EXPR:
11937 case FLOOR_MOD_EXPR:
11938 case ROUND_MOD_EXPR:
11939 case TRUNC_MOD_EXPR:
11940 /* X % -1 is zero. */
11941 if (!TYPE_UNSIGNED (type)
11942 && TREE_CODE (arg1) == INTEGER_CST
11943 && wi::eq_p (arg1, -1))
11944 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11946 /* X % -C is the same as X % C. */
11947 if (code == TRUNC_MOD_EXPR
11948 && TYPE_SIGN (type) == SIGNED
11949 && TREE_CODE (arg1) == INTEGER_CST
11950 && !TREE_OVERFLOW (arg1)
11951 && wi::neg_p (arg1)
11952 && !TYPE_OVERFLOW_TRAPS (type)
11953 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11954 && !sign_bit_p (arg1, arg1))
11955 return fold_build2_loc (loc, code, type,
11956 fold_convert_loc (loc, type, arg0),
11957 fold_convert_loc (loc, type,
11958 negate_expr (arg1)));
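      /* For example, x % -16 folds to x % 16: with C's truncating
         division, -7 % 16 == -7 % -16 == -7.  */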
11960 /* X % -Y is the same as X % Y. */
11961 if (code == TRUNC_MOD_EXPR
11962 && !TYPE_UNSIGNED (type)
11963 && TREE_CODE (arg1) == NEGATE_EXPR
11964 && !TYPE_OVERFLOW_TRAPS (type))
11965 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11966 fold_convert_loc (loc, type,
11967 TREE_OPERAND (arg1, 0)));
11969 strict_overflow_p = false;
11970 if (TREE_CODE (arg1) == INTEGER_CST
11971 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11972 &strict_overflow_p)))
11974 if (strict_overflow_p)
11975 fold_overflow_warning (("assuming signed overflow does not occur "
11976 "when simplifying modulus"),
11977 WARN_STRICT_OVERFLOW_MISC);
11978 return fold_convert_loc (loc, type, tem);
11981 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11982 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11983 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11984 && (TYPE_UNSIGNED (type)
11985 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11987 tree c = arg1;
11988 /* Also optimize A % (C << N) where C is a power of 2,
11989 to A & ((C << N) - 1). */
11990 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11991 c = TREE_OPERAND (arg1, 0);
11993 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11995 tree mask
11996 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11997 build_int_cst (TREE_TYPE (arg1), 1));
11998 if (strict_overflow_p)
11999 fold_overflow_warning (("assuming signed overflow does not "
12000 "occur when simplifying "
12001 "X % (power of two)"),
12002 WARN_STRICT_OVERFLOW_MISC);
12003 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12004 fold_convert_loc (loc, type, arg0),
12005 fold_convert_loc (loc, type, mask));
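      /* For example, for unsigned x, x % 16 folds to x & 15, and
         x % (4 << n) folds to x & ((4 << n) - 1).  */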
12009 return NULL_TREE;
12011 case LROTATE_EXPR:
12012 case RROTATE_EXPR:
12013 if (integer_all_onesp (arg0))
12014 return omit_one_operand_loc (loc, type, arg0, arg1);
12015 goto shift;
12017 case RSHIFT_EXPR:
12018 /* Optimize -1 >> x for arithmetic right shifts. */
12019 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12020 && tree_expr_nonnegative_p (arg1))
12021 return omit_one_operand_loc (loc, type, arg0, arg1);
12022 /* ... fall through ... */
12024 case LSHIFT_EXPR:
12025 shift:
12026 if (integer_zerop (arg1))
12027 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12028 if (integer_zerop (arg0))
12029 return omit_one_operand_loc (loc, type, arg0, arg1);
12031 /* Prefer vector1 << scalar to vector1 << vector2
12032 if vector2 is uniform. */
12033 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12034 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12035 return fold_build2_loc (loc, code, type, op0, tem);
12037 /* Since negative shift count is not well-defined,
12038 don't try to compute it in the compiler. */
12039 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12040 return NULL_TREE;
12042 prec = element_precision (type);
12044 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12045 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12046 && tree_to_uhwi (arg1) < prec
12047 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12048 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12050 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12051 + tree_to_uhwi (arg1));
12053 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12054 being well defined. */
12055 if (low >= prec)
12057 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12058 low = low % prec;
12059 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12060 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12061 TREE_OPERAND (arg0, 0));
12062 else
12063 low = prec - 1;
12066 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12067 build_int_cst (TREE_TYPE (arg1), low));
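      /* For example, (x << 3) << 5 folds to x << 8, while for a 32-bit
         unsigned x, (x << 20) << 20 folds to 0.  */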
12070 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12071 into x & ((unsigned)-1 >> c) for unsigned types. */
12072 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12073 || (TYPE_UNSIGNED (type)
12074 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12075 && tree_fits_uhwi_p (arg1)
12076 && tree_to_uhwi (arg1) < prec
12077 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12078 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12080 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12081 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12082 tree lshift;
12083 tree arg00;
12085 if (low0 == low1)
12087 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12089 lshift = build_minus_one_cst (type);
12090 lshift = const_binop (code, lshift, arg1);
12092 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
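      /* For example, (x >> 4) << 4 folds to x & -16, clearing the low
         four bits with a single mask instead of two shifts.  */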
12096 /* Rewrite an LROTATE_EXPR by a constant into an
12097 RROTATE_EXPR by a new constant. */
12098 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12100 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12101 tem = const_binop (MINUS_EXPR, tem, arg1);
12102 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
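      /* For example, rotating a 32-bit value left by 5 becomes rotating
         it right by 27.  */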
12105 /* If we have a rotate of a bit operation with the rotate count and
12106 the second operand of the bit operation both constant,
12107 permute the two operations. */
12108 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12109 && (TREE_CODE (arg0) == BIT_AND_EXPR
12110 || TREE_CODE (arg0) == BIT_IOR_EXPR
12111 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12112 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12113 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12114 fold_build2_loc (loc, code, type,
12115 TREE_OPERAND (arg0, 0), arg1),
12116 fold_build2_loc (loc, code, type,
12117 TREE_OPERAND (arg0, 1), arg1));
12119 /* Two consecutive rotates adding up to some integer
12120 multiple of the precision of the type can be ignored. */
12121 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12122 && TREE_CODE (arg0) == RROTATE_EXPR
12123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12124 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12125 prec) == 0)
12126 return TREE_OPERAND (arg0, 0);
12128 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12129 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12130 if the latter can be further optimized. */
12131 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12132 && TREE_CODE (arg0) == BIT_AND_EXPR
12133 && TREE_CODE (arg1) == INTEGER_CST
12134 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12136 tree mask = fold_build2_loc (loc, code, type,
12137 fold_convert_loc (loc, type,
12138 TREE_OPERAND (arg0, 1)),
12139 arg1);
12140 tree shift = fold_build2_loc (loc, code, type,
12141 fold_convert_loc (loc, type,
12142 TREE_OPERAND (arg0, 0)),
12143 arg1);
12144 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12145 if (tem)
12146 return tem;
12149 return NULL_TREE;
12151 case MIN_EXPR:
12152 if (operand_equal_p (arg0, arg1, 0))
12153 return omit_one_operand_loc (loc, type, arg0, arg1);
12154 if (INTEGRAL_TYPE_P (type)
12155 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12156 return omit_one_operand_loc (loc, type, arg1, arg0);
12157 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12158 if (tem)
12159 return tem;
12160 goto associate;
12162 case MAX_EXPR:
12163 if (operand_equal_p (arg0, arg1, 0))
12164 return omit_one_operand_loc (loc, type, arg0, arg1);
12165 if (INTEGRAL_TYPE_P (type)
12166 && TYPE_MAX_VALUE (type)
12167 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12168 return omit_one_operand_loc (loc, type, arg1, arg0);
12169 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12170 if (tem)
12171 return tem;
12172 goto associate;
12174 case TRUTH_ANDIF_EXPR:
12175 /* Note that the operands of this must be ints
12176 and their values must be 0 or 1.
12177 ("true" is a fixed value perhaps depending on the language.) */
12178 /* If first arg is constant zero, return it. */
12179 if (integer_zerop (arg0))
12180 return fold_convert_loc (loc, type, arg0);
12181 case TRUTH_AND_EXPR:
12182 /* If either arg is constant true, drop it. */
12183 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12184 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12185 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12186 /* Preserve sequence points. */
12187 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12188 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12189 /* If second arg is constant zero, result is zero, but first arg
12190 must be evaluated. */
12191 if (integer_zerop (arg1))
12192 return omit_one_operand_loc (loc, type, arg1, arg0);
12193 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12194 case will be handled here. */
12195 if (integer_zerop (arg0))
12196 return omit_one_operand_loc (loc, type, arg0, arg1);
12198 /* !X && X is always false. */
12199 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12200 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12201 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12202 /* X && !X is always false. */
12203 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12204 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12205 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12207 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12208 means A >= Y && A != MAX, but in this case we know that
12209 A < X <= MAX. */
12211 if (!TREE_SIDE_EFFECTS (arg0)
12212 && !TREE_SIDE_EFFECTS (arg1))
12214 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12215 if (tem && !operand_equal_p (tem, arg0, 0))
12216 return fold_build2_loc (loc, code, type, tem, arg1);
12218 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12219 if (tem && !operand_equal_p (tem, arg1, 0))
12220 return fold_build2_loc (loc, code, type, arg0, tem);
12223 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12224 != NULL_TREE)
12225 return tem;
12227 return NULL_TREE;
12229 case TRUTH_ORIF_EXPR:
12230 /* Note that the operands of this must be ints
12231 and their values must be 0 or 1.
12232 ("true" is a fixed value perhaps depending on the language.) */
12233 /* If first arg is constant true, return it. */
12234 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12235 return fold_convert_loc (loc, type, arg0);
12236 case TRUTH_OR_EXPR:
12237 /* If either arg is constant zero, drop it. */
12238 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12239 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12240 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12241 /* Preserve sequence points. */
12242 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12243 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12244 /* If second arg is constant true, result is true, but we must
12245 evaluate first arg. */
12246 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12247 return omit_one_operand_loc (loc, type, arg1, arg0);
12248 /* Likewise for first arg, but note this only occurs here for
12249 TRUTH_OR_EXPR. */
12250 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12251 return omit_one_operand_loc (loc, type, arg0, arg1);
12253 /* !X || X is always true. */
12254 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12255 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12256 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12257 /* X || !X is always true. */
12258 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12259 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12260 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12262 /* (X && !Y) || (!X && Y) is X ^ Y */
12263 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12264 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12266 tree a0, a1, l0, l1, n0, n1;
12268 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12269 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12271 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12272 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12274 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12275 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12277 if ((operand_equal_p (n0, a0, 0)
12278 && operand_equal_p (n1, a1, 0))
12279 || (operand_equal_p (n0, a1, 0)
12280 && operand_equal_p (n1, a0, 0)))
12281 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
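/* Editorial sketch (not part of the original source): the identity used
   above can be spot-checked with a small self-contained C function;
   check_truth_xor is a hypothetical name.

     #include <assert.h>
     static void
     check_truth_xor (int x, int y)
     {
       assert (((x && !y) || (!x && y)) == (!!x ^ !!y));
     }

   Both sides are 1 exactly when one of X and Y is nonzero and the
   other is zero.  */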
12284 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12285 != NULL_TREE)
12286 return tem;
12288 return NULL_TREE;
12290 case TRUTH_XOR_EXPR:
12291 /* If the second arg is constant zero, drop it. */
12292 if (integer_zerop (arg1))
12293 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12294 /* If the second arg is constant true, this is a logical inversion. */
12295 if (integer_onep (arg1))
12297 tem = invert_truthvalue_loc (loc, arg0);
12298 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12300 /* Identical arguments cancel to zero. */
12301 if (operand_equal_p (arg0, arg1, 0))
12302 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12304 /* !X ^ X is always true. */
12305 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12306 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12307 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12309 /* X ^ !X is always true. */
12310 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12311 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12312 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12314 return NULL_TREE;
12316 case EQ_EXPR:
12317 case NE_EXPR:
12318 STRIP_NOPS (arg0);
12319 STRIP_NOPS (arg1);
12321 tem = fold_comparison (loc, code, type, op0, op1);
12322 if (tem != NULL_TREE)
12323 return tem;
12325 /* bool_var != 0 becomes bool_var. */
12326 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12327 && code == NE_EXPR)
12328 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12330 /* bool_var == 1 becomes bool_var. */
12331 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12332 && code == EQ_EXPR)
12333 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12335 /* bool_var != 1 becomes !bool_var. */
12336 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12337 && code == NE_EXPR)
12338 return fold_convert_loc (loc, type,
12339 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12340 TREE_TYPE (arg0), arg0));
12342 /* bool_var == 0 becomes !bool_var. */
12343 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12344 && code == EQ_EXPR)
12345 return fold_convert_loc (loc, type,
12346 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12347 TREE_TYPE (arg0), arg0));
12349 /* !exp != 0 becomes !exp */
12350 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12351 && code == NE_EXPR)
12352 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12354 /* If this is an equality comparison of the address of two non-weak,
12355 unaliased symbols neither of which are extern (since we do not
12356 have access to attributes for externs), then we know the result. */
12357 if (TREE_CODE (arg0) == ADDR_EXPR
12358 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12359 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12360 && ! lookup_attribute ("alias",
12361 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12362 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12363 && TREE_CODE (arg1) == ADDR_EXPR
12364 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12365 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12366 && ! lookup_attribute ("alias",
12367 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12368 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12370 /* We know that we're looking at the address of two
12371 non-weak, unaliased, static _DECL nodes.
12373 It is both wasteful and incorrect to call operand_equal_p
12374 to compare the two ADDR_EXPR nodes. It is wasteful in that
12375 all we need to do is test pointer equality for the arguments
12376 to the two ADDR_EXPR nodes. It is incorrect to use
12377 operand_equal_p as that function is NOT equivalent to a
12378 C equality test. It can in fact return false for two
12379 objects which would test as equal using the C equality
12380 operator. */
12381 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12382 return constant_boolean_node (equal
12383 ? code == EQ_EXPR : code != EQ_EXPR,
12384 type);
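/* Editorial sketch (not part of the original source): at the C level
   this fold means a comparison such as

     static int a, b;
     static int same_object (void) { return &a == &b; }

   collapses to the constant 0, since two distinct non-weak, unaliased
   statics cannot share an address.  */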
12387 /* Similarly for a NEGATE_EXPR. */
12388 if (TREE_CODE (arg0) == NEGATE_EXPR
12389 && TREE_CODE (arg1) == INTEGER_CST
12390 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12391 arg1)))
12392 && TREE_CODE (tem) == INTEGER_CST
12393 && !TREE_OVERFLOW (tem))
12394 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12396 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12397 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12398 && TREE_CODE (arg1) == INTEGER_CST
12399 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12400 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12401 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12402 fold_convert_loc (loc,
12403 TREE_TYPE (arg0),
12404 arg1),
12405 TREE_OPERAND (arg0, 1)));
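/* Editorial sketch (not part of the original source): the rewrite works
   because XOR by a constant is an involution, so applying ^ C1 to both
   sides of the comparison removes it from the left-hand side:

     #include <assert.h>
     static void
     check_xor_eq (unsigned x)
     {
       assert (((x ^ 0x5u) == 0xCu) == (x == (0x5u ^ 0xCu)));
     }
*/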
12407 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12408 if ((TREE_CODE (arg0) == PLUS_EXPR
12409 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12410 || TREE_CODE (arg0) == MINUS_EXPR)
12411 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12412 0)),
12413 arg1, 0)
12414 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12415 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12417 tree val = TREE_OPERAND (arg0, 1);
12418 return omit_two_operands_loc (loc, type,
12419 fold_build2_loc (loc, code, type,
12420 val,
12421 build_int_cst (TREE_TYPE (val),
12422 0)),
12423 TREE_OPERAND (arg0, 0), arg1);
12426 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12427 if (TREE_CODE (arg0) == MINUS_EXPR
12428 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12429 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12430 1)),
12431 arg1, 0)
12432 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12434 return omit_two_operands_loc (loc, type,
12435 code == NE_EXPR
12436 ? boolean_true_node : boolean_false_node,
12437 TREE_OPERAND (arg0, 1), arg1);
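/* Editorial sketch (not part of the original source): C - X == X would
   require 2*X == C, and 2*X is always even in modulo-2^N arithmetic, so
   an odd C decides the comparison.  E.g., a hypothetical function

     static int never_equal (unsigned x) { return (7u - x) != x; }

   can be folded to return the constant 1.  */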
12440 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12441 if (TREE_CODE (arg0) == ABS_EXPR
12442 && (integer_zerop (arg1) || real_zerop (arg1)))
12443 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12445 /* If this is an EQ or NE comparison with zero and ARG0 is
12446 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12447 two operations, but the latter can be done in one less insn
12448 on machines that have only two-operand insns or on which a
12449 constant cannot be the first operand. */
12450 if (TREE_CODE (arg0) == BIT_AND_EXPR
12451 && integer_zerop (arg1))
12453 tree arg00 = TREE_OPERAND (arg0, 0);
12454 tree arg01 = TREE_OPERAND (arg0, 1);
12455 if (TREE_CODE (arg00) == LSHIFT_EXPR
12456 && integer_onep (TREE_OPERAND (arg00, 0)))
12458 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12459 arg01, TREE_OPERAND (arg00, 1));
12460 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12461 build_int_cst (TREE_TYPE (arg0), 1));
12462 return fold_build2_loc (loc, code, type,
12463 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12464 arg1);
12466 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12467 && integer_onep (TREE_OPERAND (arg01, 0)))
12469 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12470 arg00, TREE_OPERAND (arg01, 1));
12471 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12472 build_int_cst (TREE_TYPE (arg0), 1));
12473 return fold_build2_loc (loc, code, type,
12474 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12475 arg1);
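/* Editorial sketch (not part of the original source): both forms test
   bit FOO of BAR, just from the other side of the AND.  A minimal
   check, assuming FOO is a valid shift count:

     #include <assert.h>
     static void
     check_bit_test (unsigned bar, unsigned foo)
     {
       assert ((((1u << foo) & bar) != 0) == (((bar >> foo) & 1u) != 0));
     }
*/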
12479 /* If this is an NE or EQ comparison of zero against the result of a
12480 signed MOD operation whose second operand is a power of 2, make
12481 the MOD operation unsigned since it is simpler and equivalent. */
12482 if (integer_zerop (arg1)
12483 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12484 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12485 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12486 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12487 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12488 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12490 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12491 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12492 fold_convert_loc (loc, newtype,
12493 TREE_OPERAND (arg0, 0)),
12494 fold_convert_loc (loc, newtype,
12495 TREE_OPERAND (arg0, 1)));
12497 return fold_build2_loc (loc, code, type, newmod,
12498 fold_convert_loc (loc, newtype, arg1));
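/* Editorial sketch (not part of the original source): divisibility by a
   power of two survives the change of signedness because conversion to
   unsigned is reduction modulo 2^N, which is itself divisible by that
   power of two.  A minimal check:

     #include <assert.h>
     static void
     check_unsigned_mod (int x)
     {
       assert ((x % 4 == 0) == ((unsigned) x % 4u == 0));
     }
*/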
12501 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12502 C1 is a valid shift constant, and C2 is a power of two, i.e.
12503 a single bit. */
12504 if (TREE_CODE (arg0) == BIT_AND_EXPR
12505 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12506 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12507 == INTEGER_CST
12508 && integer_pow2p (TREE_OPERAND (arg0, 1))
12509 && integer_zerop (arg1))
12511 tree itype = TREE_TYPE (arg0);
12512 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12513 prec = TYPE_PRECISION (itype);
12515 /* Check for a valid shift count. */
12516 if (wi::ltu_p (arg001, prec))
12518 tree arg01 = TREE_OPERAND (arg0, 1);
12519 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12520 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12521 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12522 can be rewritten as (X & (C2 << C1)) != 0. */
12523 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12525 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12526 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12527 return fold_build2_loc (loc, code, type, tem,
12528 fold_convert_loc (loc, itype, arg1));
12530 /* Otherwise, for signed (arithmetic) shifts,
12531 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12532 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12533 else if (!TYPE_UNSIGNED (itype))
12534 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12535 arg000, build_int_cst (itype, 0));
12536 /* Otherwise, for unsigned (logical) shifts,
12537 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12538 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12539 else
12540 return omit_one_operand_loc (loc, type,
12541 code == EQ_EXPR ? integer_one_node
12542 : integer_zero_node,
12543 arg000);
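/* Editorial sketch (not part of the original source): concrete 32-bit
   int instances of the branches above, assuming the usual arithmetic
   right shift for signed types:

     #include <assert.h>
     static void
     check_shifted_bit_test (int x)
     {
       assert ((((x >> 4) & 2) != 0) == ((x & (2 << 4)) != 0));
       assert ((((x >> 29) & 8) != 0) == (x < 0));
     }

   The first assert is the in-range case (C2 << C1 fits in the
   precision); in the second, C2 << C1 would overflow, so the shift
   leaves only sign-bit copies in the tested position and the test
   becomes a sign test.  */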
12547 /* If we have (A & C) == C where C is a power of 2, convert this into
12548 (A & C) != 0. Similarly for NE_EXPR. */
12549 if (TREE_CODE (arg0) == BIT_AND_EXPR
12550 && integer_pow2p (TREE_OPERAND (arg0, 1))
12551 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12552 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12553 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12554 integer_zero_node));
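/* Editorial sketch (not part of the original source): with a single-bit
   C, A & C can only take the values 0 and C, so the two comparisons are
   equivalent:

     #include <assert.h>
     static void
     check_pow2_mask (unsigned a)
     {
       assert (((a & 8u) == 8u) == ((a & 8u) != 0));
     }
*/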
12556 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12557 bit, then fold the expression into A < 0 or A >= 0. */
12558 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12559 if (tem)
12560 return tem;
12562 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12563 Similarly for NE_EXPR. */
12564 if (TREE_CODE (arg0) == BIT_AND_EXPR
12565 && TREE_CODE (arg1) == INTEGER_CST
12566 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12568 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12569 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12570 TREE_OPERAND (arg0, 1));
12571 tree dandnotc
12572 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12573 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12574 notc);
12575 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12576 if (integer_nonzerop (dandnotc))
12577 return omit_one_operand_loc (loc, type, rslt, arg0);
12580 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12581 Similarly for NE_EXPR. */
12582 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12583 && TREE_CODE (arg1) == INTEGER_CST
12584 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12586 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12587 tree candnotd
12588 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12589 TREE_OPERAND (arg0, 1),
12590 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12591 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12592 if (integer_nonzerop (candnotd))
12593 return omit_one_operand_loc (loc, type, rslt, arg0);
12596 /* If this is a comparison of a field, we may be able to simplify it. */
12597 if ((TREE_CODE (arg0) == COMPONENT_REF
12598 || TREE_CODE (arg0) == BIT_FIELD_REF)
12599 /* Handle the constant case even without -O
12600 to make sure the warnings are given. */
12601 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12603 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12604 if (t1)
12605 return t1;
12608 /* Optimize comparisons of strlen vs zero to a compare of the
12609 first character of the string vs zero. To wit,
12610 strlen(ptr) == 0 => *ptr == 0
12611 strlen(ptr) != 0 => *ptr != 0
12612 Other cases should reduce to one of these two (or a constant)
12613 due to the return value of strlen being unsigned. */
12614 if (TREE_CODE (arg0) == CALL_EXPR
12615 && integer_zerop (arg1))
12617 tree fndecl = get_callee_fndecl (arg0);
12619 if (fndecl
12620 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12621 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12622 && call_expr_nargs (arg0) == 1
12623 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12625 tree iref = build_fold_indirect_ref_loc (loc,
12626 CALL_EXPR_ARG (arg0, 0));
12627 return fold_build2_loc (loc, code, type, iref,
12628 build_int_cst (TREE_TYPE (iref), 0));
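/* Editorial sketch (not part of the original source): at the C level
   this means a test such as

     static int empty (const char *p) { return __builtin_strlen (p) == 0; }

   can be folded to the equivalent of "return *p == 0;", eliminating the
   strlen call entirely.  */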
12632 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12633 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12634 if (TREE_CODE (arg0) == RSHIFT_EXPR
12635 && integer_zerop (arg1)
12636 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12638 tree arg00 = TREE_OPERAND (arg0, 0);
12639 tree arg01 = TREE_OPERAND (arg0, 1);
12640 tree itype = TREE_TYPE (arg00);
12641 if (wi::eq_p (arg01, element_precision (itype) - 1))
12643 if (TYPE_UNSIGNED (itype))
12645 itype = signed_type_for (itype);
12646 arg00 = fold_convert_loc (loc, itype, arg00);
12648 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12649 type, arg00, build_zero_cst (itype));
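/* Editorial sketch (not part of the original source): shifting right by
   the precision minus one isolates the sign bit, so for 32-bit int the
   fold amounts to

     #include <assert.h>
     static void
     check_sign_shift (int x)
     {
       assert (((x >> 31) != 0) == (x < 0));
     }

   assuming the usual arithmetic right shift; an unsigned operand is
   first converted to the signed type, as the code above does.  */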
12653 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12654 if (integer_zerop (arg1)
12655 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12656 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12657 TREE_OPERAND (arg0, 1));
12659 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12660 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12661 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12662 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12663 build_zero_cst (TREE_TYPE (arg0)));
12664 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12665 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12666 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12667 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12668 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12669 build_zero_cst (TREE_TYPE (arg0)));
12671 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12672 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12673 && TREE_CODE (arg1) == INTEGER_CST
12674 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12675 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12676 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12677 TREE_OPERAND (arg0, 1), arg1));
12679 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12680 (X & C) == 0 when C is a single bit. */
12681 if (TREE_CODE (arg0) == BIT_AND_EXPR
12682 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12683 && integer_zerop (arg1)
12684 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12686 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12687 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12688 TREE_OPERAND (arg0, 1));
12689 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12690 type, tem,
12691 fold_convert_loc (loc, TREE_TYPE (arg0),
12692 arg1));
12695 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12696 constant C is a power of two, i.e. a single bit. */
12697 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12698 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12699 && integer_zerop (arg1)
12700 && integer_pow2p (TREE_OPERAND (arg0, 1))
12701 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12702 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12704 tree arg00 = TREE_OPERAND (arg0, 0);
12705 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12706 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12709 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12710 when C is a power of two, i.e. a single bit. */
12711 if (TREE_CODE (arg0) == BIT_AND_EXPR
12712 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12713 && integer_zerop (arg1)
12714 && integer_pow2p (TREE_OPERAND (arg0, 1))
12715 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12716 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12718 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12719 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12720 arg000, TREE_OPERAND (arg0, 1));
12721 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12722 tem, build_int_cst (TREE_TYPE (tem), 0));
12725 if (integer_zerop (arg1)
12726 && tree_expr_nonzero_p (arg0))
12728 tree res = constant_boolean_node (code == NE_EXPR, type);
12729 return omit_one_operand_loc (loc, type, res, arg0);
12732 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12733 if (TREE_CODE (arg0) == NEGATE_EXPR
12734 && TREE_CODE (arg1) == NEGATE_EXPR)
12735 return fold_build2_loc (loc, code, type,
12736 TREE_OPERAND (arg0, 0),
12737 fold_convert_loc (loc, TREE_TYPE (arg0),
12738 TREE_OPERAND (arg1, 0)));
12740 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12741 if (TREE_CODE (arg0) == BIT_AND_EXPR
12742 && TREE_CODE (arg1) == BIT_AND_EXPR)
12744 tree arg00 = TREE_OPERAND (arg0, 0);
12745 tree arg01 = TREE_OPERAND (arg0, 1);
12746 tree arg10 = TREE_OPERAND (arg1, 0);
12747 tree arg11 = TREE_OPERAND (arg1, 1);
12748 tree itype = TREE_TYPE (arg0);
12750 if (operand_equal_p (arg01, arg11, 0))
12751 return fold_build2_loc (loc, code, type,
12752 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12753 fold_build2_loc (loc,
12754 BIT_XOR_EXPR, itype,
12755 arg00, arg10),
12756 arg01),
12757 build_zero_cst (itype));
12759 if (operand_equal_p (arg01, arg10, 0))
12760 return fold_build2_loc (loc, code, type,
12761 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12762 fold_build2_loc (loc,
12763 BIT_XOR_EXPR, itype,
12764 arg00, arg11),
12765 arg01),
12766 build_zero_cst (itype));
12768 if (operand_equal_p (arg00, arg11, 0))
12769 return fold_build2_loc (loc, code, type,
12770 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12771 fold_build2_loc (loc,
12772 BIT_XOR_EXPR, itype,
12773 arg01, arg10),
12774 arg00),
12775 build_zero_cst (itype));
12777 if (operand_equal_p (arg00, arg10, 0))
12778 return fold_build2_loc (loc, code, type,
12779 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12780 fold_build2_loc (loc,
12781 BIT_XOR_EXPR, itype,
12782 arg01, arg11),
12783 arg00),
12784 build_zero_cst (itype));
12787 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12788 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12790 tree arg00 = TREE_OPERAND (arg0, 0);
12791 tree arg01 = TREE_OPERAND (arg0, 1);
12792 tree arg10 = TREE_OPERAND (arg1, 0);
12793 tree arg11 = TREE_OPERAND (arg1, 1);
12794 tree itype = TREE_TYPE (arg0);
12796 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12797 operand_equal_p guarantees no side-effects so we don't need
12798 to use omit_one_operand on Z. */
12799 if (operand_equal_p (arg01, arg11, 0))
12800 return fold_build2_loc (loc, code, type, arg00,
12801 fold_convert_loc (loc, TREE_TYPE (arg00),
12802 arg10));
12803 if (operand_equal_p (arg01, arg10, 0))
12804 return fold_build2_loc (loc, code, type, arg00,
12805 fold_convert_loc (loc, TREE_TYPE (arg00),
12806 arg11));
12807 if (operand_equal_p (arg00, arg11, 0))
12808 return fold_build2_loc (loc, code, type, arg01,
12809 fold_convert_loc (loc, TREE_TYPE (arg01),
12810 arg10));
12811 if (operand_equal_p (arg00, arg10, 0))
12812 return fold_build2_loc (loc, code, type, arg01,
12813 fold_convert_loc (loc, TREE_TYPE (arg01),
12814 arg11));
12816 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12817 if (TREE_CODE (arg01) == INTEGER_CST
12818 && TREE_CODE (arg11) == INTEGER_CST)
12820 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12821 fold_convert_loc (loc, itype, arg11));
12822 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12823 return fold_build2_loc (loc, code, type, tem,
12824 fold_convert_loc (loc, itype, arg10));
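/* Editorial sketch (not part of the original source): XOR-ing both
   sides of the comparison by C2 moves both constants onto one operand:

     #include <assert.h>
     static void
     check_xor_both_sides (unsigned x, unsigned y)
     {
       assert (((x ^ 3u) == (y ^ 5u)) == ((x ^ (3u ^ 5u)) == y));
     }
*/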
12828 /* Attempt to simplify equality/inequality comparisons of complex
12829 values. Only lower the comparison if the result is known or
12830 can be simplified to a single scalar comparison. */
12831 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12832 || TREE_CODE (arg0) == COMPLEX_CST)
12833 && (TREE_CODE (arg1) == COMPLEX_EXPR
12834 || TREE_CODE (arg1) == COMPLEX_CST))
12836 tree real0, imag0, real1, imag1;
12837 tree rcond, icond;
12839 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12841 real0 = TREE_OPERAND (arg0, 0);
12842 imag0 = TREE_OPERAND (arg0, 1);
12844 else
12846 real0 = TREE_REALPART (arg0);
12847 imag0 = TREE_IMAGPART (arg0);
12850 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12852 real1 = TREE_OPERAND (arg1, 0);
12853 imag1 = TREE_OPERAND (arg1, 1);
12855 else
12857 real1 = TREE_REALPART (arg1);
12858 imag1 = TREE_IMAGPART (arg1);
12861 rcond = fold_binary_loc (loc, code, type, real0, real1);
12862 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12864 if (integer_zerop (rcond))
12866 if (code == EQ_EXPR)
12867 return omit_two_operands_loc (loc, type, boolean_false_node,
12868 imag0, imag1);
12869 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12871 else
12873 if (code == NE_EXPR)
12874 return omit_two_operands_loc (loc, type, boolean_true_node,
12875 imag0, imag1);
12876 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12880 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12881 if (icond && TREE_CODE (icond) == INTEGER_CST)
12883 if (integer_zerop (icond))
12885 if (code == EQ_EXPR)
12886 return omit_two_operands_loc (loc, type, boolean_false_node,
12887 real0, real1);
12888 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12890 else
12892 if (code == NE_EXPR)
12893 return omit_two_operands_loc (loc, type, boolean_true_node,
12894 real0, real1);
12895 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12900 return NULL_TREE;
12902 case LT_EXPR:
12903 case GT_EXPR:
12904 case LE_EXPR:
12905 case GE_EXPR:
12906 tem = fold_comparison (loc, code, type, op0, op1);
12907 if (tem != NULL_TREE)
12908 return tem;
12910 /* Transform comparisons of the form X +- C CMP X. */
12911 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12912 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12913 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12914 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12915 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12916 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12918 tree arg01 = TREE_OPERAND (arg0, 1);
12919 enum tree_code code0 = TREE_CODE (arg0);
12920 int is_positive;
12922 if (TREE_CODE (arg01) == REAL_CST)
12923 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12924 else
12925 is_positive = tree_int_cst_sgn (arg01);
12927 /* (X - c) > X becomes false. */
12928 if (code == GT_EXPR
12929 && ((code0 == MINUS_EXPR && is_positive >= 0)
12930 || (code0 == PLUS_EXPR && is_positive <= 0)))
12932 if (TREE_CODE (arg01) == INTEGER_CST
12933 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12934 fold_overflow_warning (("assuming signed overflow does not "
12935 "occur when assuming that (X - c) > X "
12936 "is always false"),
12937 WARN_STRICT_OVERFLOW_ALL);
12938 return constant_boolean_node (0, type);
12941 /* Likewise (X + c) < X becomes false. */
12942 if (code == LT_EXPR
12943 && ((code0 == PLUS_EXPR && is_positive >= 0)
12944 || (code0 == MINUS_EXPR && is_positive <= 0)))
12946 if (TREE_CODE (arg01) == INTEGER_CST
12947 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12948 fold_overflow_warning (("assuming signed overflow does not "
12949 "occur when assuming that "
12950 "(X + c) < X is always false"),
12951 WARN_STRICT_OVERFLOW_ALL);
12952 return constant_boolean_node (0, type);
12955 /* Convert (X - c) <= X to true. */
12956 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12957 && code == LE_EXPR
12958 && ((code0 == MINUS_EXPR && is_positive >= 0)
12959 || (code0 == PLUS_EXPR && is_positive <= 0)))
12961 if (TREE_CODE (arg01) == INTEGER_CST
12962 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12963 fold_overflow_warning (("assuming signed overflow does not "
12964 "occur when assuming that "
12965 "(X - c) <= X is always true"),
12966 WARN_STRICT_OVERFLOW_ALL);
12967 return constant_boolean_node (1, type);
12970 /* Convert (X + c) >= X to true. */
12971 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12972 && code == GE_EXPR
12973 && ((code0 == PLUS_EXPR && is_positive >= 0)
12974 || (code0 == MINUS_EXPR && is_positive <= 0)))
12976 if (TREE_CODE (arg01) == INTEGER_CST
12977 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12978 fold_overflow_warning (("assuming signed overflow does not "
12979 "occur when assuming that "
12980 "(X + c) >= X is always true"),
12981 WARN_STRICT_OVERFLOW_ALL);
12982 return constant_boolean_node (1, type);
12985 if (TREE_CODE (arg01) == INTEGER_CST)
12987 /* Convert X + c > X and X - c < X to true for integers. */
12988 if (code == GT_EXPR
12989 && ((code0 == PLUS_EXPR && is_positive > 0)
12990 || (code0 == MINUS_EXPR && is_positive < 0)))
12992 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12993 fold_overflow_warning (("assuming signed overflow does "
12994 "not occur when assuming that "
12995 "(X + c) > X is always true"),
12996 WARN_STRICT_OVERFLOW_ALL);
12997 return constant_boolean_node (1, type);
13000 if (code == LT_EXPR
13001 && ((code0 == MINUS_EXPR && is_positive > 0)
13002 || (code0 == PLUS_EXPR && is_positive < 0)))
13004 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13005 fold_overflow_warning (("assuming signed overflow does "
13006 "not occur when assuming that "
13007 "(X - c) < X is always true"),
13008 WARN_STRICT_OVERFLOW_ALL);
13009 return constant_boolean_node (1, type);
13012 /* Convert X + c <= X and X - c >= X to false for integers. */
13013 if (code == LE_EXPR
13014 && ((code0 == PLUS_EXPR && is_positive > 0)
13015 || (code0 == MINUS_EXPR && is_positive < 0)))
13017 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13018 fold_overflow_warning (("assuming signed overflow does "
13019 "not occur when assuming that "
13020 "(X + c) <= X is always false"),
13021 WARN_STRICT_OVERFLOW_ALL);
13022 return constant_boolean_node (0, type);
13025 if (code == GE_EXPR
13026 && ((code0 == MINUS_EXPR && is_positive > 0)
13027 || (code0 == PLUS_EXPR && is_positive < 0)))
13029 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13030 fold_overflow_warning (("assuming signed overflow does "
13031 "not occur when assuming that "
13032 "(X - c) >= X is always false"),
13033 WARN_STRICT_OVERFLOW_ALL);
13034 return constant_boolean_node (0, type);
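/* Editorial sketch (not part of the original source): all of the folds
   above rely on signed overflow being undefined, which is why each one
   funnels through fold_overflow_warning.  E.g., a hypothetical function

     static int always (int x) { return x + 1 > x; }

   may be folded to "return 1;" (with -Wstrict-overflow able to report
   the assumption), whereas the analogous unsigned comparison is left
   alone, since x + 1u can legitimately wrap around to 0.  */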
13039 /* Comparisons with the highest or lowest possible integer of
13040 the specified precision will have known values. */
13042 tree arg1_type = TREE_TYPE (arg1);
13043 unsigned int prec = TYPE_PRECISION (arg1_type);
13045 if (TREE_CODE (arg1) == INTEGER_CST
13046 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13048 wide_int max = wi::max_value (arg1_type);
13049 wide_int signed_max = wi::max_value (prec, SIGNED);
13050 wide_int min = wi::min_value (arg1_type);
13052 if (wi::eq_p (arg1, max))
13053 switch (code)
13055 case GT_EXPR:
13056 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13058 case GE_EXPR:
13059 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13061 case LE_EXPR:
13062 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13064 case LT_EXPR:
13065 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13067 /* The GE_EXPR and LT_EXPR cases above are not normally
13068 reached because of previous transformations. */
13070 default:
13071 break;
13073 else if (wi::eq_p (arg1, max - 1))
13074 switch (code)
13076 case GT_EXPR:
13077 arg1 = const_binop (PLUS_EXPR, arg1,
13078 build_int_cst (TREE_TYPE (arg1), 1));
13079 return fold_build2_loc (loc, EQ_EXPR, type,
13080 fold_convert_loc (loc,
13081 TREE_TYPE (arg1), arg0),
13082 arg1);
13083 case LE_EXPR:
13084 arg1 = const_binop (PLUS_EXPR, arg1,
13085 build_int_cst (TREE_TYPE (arg1), 1));
13086 return fold_build2_loc (loc, NE_EXPR, type,
13087 fold_convert_loc (loc, TREE_TYPE (arg1),
13088 arg0),
13089 arg1);
13090 default:
13091 break;
13093 else if (wi::eq_p (arg1, min))
13094 switch (code)
13096 case LT_EXPR:
13097 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13099 case LE_EXPR:
13100 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13102 case GE_EXPR:
13103 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13105 case GT_EXPR:
13106 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13108 default:
13109 break;
13111 else if (wi::eq_p (arg1, min + 1))
13112 switch (code)
13114 case GE_EXPR:
13115 arg1 = const_binop (MINUS_EXPR, arg1,
13116 build_int_cst (TREE_TYPE (arg1), 1));
13117 return fold_build2_loc (loc, NE_EXPR, type,
13118 fold_convert_loc (loc,
13119 TREE_TYPE (arg1), arg0),
13120 arg1);
13121 case LT_EXPR:
13122 arg1 = const_binop (MINUS_EXPR, arg1,
13123 build_int_cst (TREE_TYPE (arg1), 1));
13124 return fold_build2_loc (loc, EQ_EXPR, type,
13125 fold_convert_loc (loc, TREE_TYPE (arg1),
13126 arg0),
13127 arg1);
13128 default:
13129 break;
13132 else if (wi::eq_p (arg1, signed_max)
13133 && TYPE_UNSIGNED (arg1_type)
13134 /* We will flip the signedness of the comparison operator
13135 associated with the mode of arg1, so the sign bit is
13136 specified by this mode. Check that arg1 is the signed
13137 max associated with this sign bit. */
13138 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13139 /* signed_type does not work on pointer types. */
13140 && INTEGRAL_TYPE_P (arg1_type))
13142 /* The following case also applies to X < signed_max+1
13143 and X >= signed_max+1 because previous transformations. */
13144 if (code == LE_EXPR || code == GT_EXPR)
13146 tree st = signed_type_for (arg1_type);
13147 return fold_build2_loc (loc,
13148 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13149 type, fold_convert_loc (loc, st, arg0),
13150 build_int_cst (st, 0));
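/* Editorial sketch (not part of the original source): concrete
   instances of the bound folds above for unsigned char X, whose range
   is [0, 255]:

     x <= 255  folds to 1          x > 255   folds to 0
     x >= 255  becomes x == 255    x < 255   becomes x != 255
     x >= 0    folds to 1          x < 0     folds to 0

   and the signed_max case turns an unsigned test against INT_MAX, such
   as x <= 0x7fffffff for 32-bit unsigned x, into the sign test
   (int) x >= 0.  */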
13156 /* If we are comparing an ABS_EXPR with a constant, we can
13157 convert all the cases into explicit comparisons, but they may
13158 well not be faster than doing the ABS and one comparison.
13159 But ABS (X) <= C is a range comparison, which becomes a subtraction
13160 and a comparison, and is probably faster. */
13161 if (code == LE_EXPR
13162 && TREE_CODE (arg1) == INTEGER_CST
13163 && TREE_CODE (arg0) == ABS_EXPR
13164 && ! TREE_SIDE_EFFECTS (arg0)
13165 && (0 != (tem = negate_expr (arg1)))
13166 && TREE_CODE (tem) == INTEGER_CST
13167 && !TREE_OVERFLOW (tem))
13168 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13169 build2 (GE_EXPR, type,
13170 TREE_OPERAND (arg0, 0), tem),
13171 build2 (LE_EXPR, type,
13172 TREE_OPERAND (arg0, 0), arg1));
13174 /* Convert ABS_EXPR<x> >= 0 to true. */
13175 strict_overflow_p = false;
13176 if (code == GE_EXPR
13177 && (integer_zerop (arg1)
13178 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13179 && real_zerop (arg1)))
13180 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13182 if (strict_overflow_p)
13183 fold_overflow_warning (("assuming signed overflow does not occur "
13184 "when simplifying comparison of "
13185 "absolute value and zero"),
13186 WARN_STRICT_OVERFLOW_CONDITIONAL);
13187 return omit_one_operand_loc (loc, type,
13188 constant_boolean_node (true, type),
13189 arg0);
13192 /* Convert ABS_EXPR<x> < 0 to false. */
13193 strict_overflow_p = false;
13194 if (code == LT_EXPR
13195 && (integer_zerop (arg1) || real_zerop (arg1))
13196 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13198 if (strict_overflow_p)
13199 fold_overflow_warning (("assuming signed overflow does not occur "
13200 "when simplifying comparison of "
13201 "absolute value and zero"),
13202 WARN_STRICT_OVERFLOW_CONDITIONAL);
13203 return omit_one_operand_loc (loc, type,
13204 constant_boolean_node (false, type),
13205 arg0);
13208 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13209 and similarly for >= into !=. */
13210 if ((code == LT_EXPR || code == GE_EXPR)
13211 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13212 && TREE_CODE (arg1) == LSHIFT_EXPR
13213 && integer_onep (TREE_OPERAND (arg1, 0)))
13214 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13215 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13216 TREE_OPERAND (arg1, 1)),
13217 build_zero_cst (TREE_TYPE (arg0)));
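/* Editorial sketch (not part of the original source): for unsigned X,
   X < (1 << Y) says exactly that X has no bits at or above position Y:

     #include <assert.h>
     static void
     check_shift_bound (unsigned x, unsigned y)
     {
       assert ((x < (1u << y)) == ((x >> y) == 0));
     }

   valid only while Y is a legal shift count, which is what the
   narrowing caveat in the comment below is about.  */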
13219 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13220 otherwise Y might be >= # of bits in X's type and thus e.g.
13221 (unsigned char) (1 << Y) for Y == 15 might be 0.
13222 If the cast is widening, then 1 << Y should have unsigned type,
13223 otherwise if Y is number of bits in the signed shift type minus 1,
13224 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13225 Y == 31 might be 0xffffffff80000000. */
13226 if ((code == LT_EXPR || code == GE_EXPR)
13227 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13228 && CONVERT_EXPR_P (arg1)
13229 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13230 && (TYPE_PRECISION (TREE_TYPE (arg1))
13231 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13232 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13233 || (TYPE_PRECISION (TREE_TYPE (arg1))
13234 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13235 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13237 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13238 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13239 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13240 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13241 build_zero_cst (TREE_TYPE (arg0)));
13244 return NULL_TREE;
13246 case UNORDERED_EXPR:
13247 case ORDERED_EXPR:
13248 case UNLT_EXPR:
13249 case UNLE_EXPR:
13250 case UNGT_EXPR:
13251 case UNGE_EXPR:
13252 case UNEQ_EXPR:
13253 case LTGT_EXPR:
13254 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13256 t1 = fold_relational_const (code, type, arg0, arg1);
13257 if (t1 != NULL_TREE)
13258 return t1;
13261 /* If the first operand is NaN, the result is constant. */
13262 if (TREE_CODE (arg0) == REAL_CST
13263 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13264 && (code != LTGT_EXPR || ! flag_trapping_math))
13266 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13267 ? integer_zero_node
13268 : integer_one_node;
13269 return omit_one_operand_loc (loc, type, t1, arg1);
13272 /* If the second operand is NaN, the result is constant. */
13273 if (TREE_CODE (arg1) == REAL_CST
13274 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13275 && (code != LTGT_EXPR || ! flag_trapping_math))
13277 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13278 ? integer_zero_node
13279 : integer_one_node;
13280 return omit_one_operand_loc (loc, type, t1, arg0);
13283 /* Simplify unordered comparison of something with itself. */
13284 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13285 && operand_equal_p (arg0, arg1, 0))
13286 return constant_boolean_node (1, type);
13288 if (code == LTGT_EXPR
13289 && !flag_trapping_math
13290 && operand_equal_p (arg0, arg1, 0))
13291 return constant_boolean_node (0, type);
13293 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13295 tree targ0 = strip_float_extensions (arg0);
13296 tree targ1 = strip_float_extensions (arg1);
13297 tree newtype = TREE_TYPE (targ0);
13299 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13300 newtype = TREE_TYPE (targ1);
13302 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13303 return fold_build2_loc (loc, code, type,
13304 fold_convert_loc (loc, newtype, targ0),
13305 fold_convert_loc (loc, newtype, targ1));
13308 return NULL_TREE;
13310 case COMPOUND_EXPR:
13311 /* When pedantic, a compound expression can be neither an lvalue
13312 nor an integer constant expression. */
13313 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13314 return NULL_TREE;
13315 /* Don't let (0, 0) be a null pointer constant. */
13316 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13317 : fold_convert_loc (loc, type, arg1);
13318 return pedantic_non_lvalue_loc (loc, tem);
13320 case COMPLEX_EXPR:
13321 if ((TREE_CODE (arg0) == REAL_CST
13322 && TREE_CODE (arg1) == REAL_CST)
13323 || (TREE_CODE (arg0) == INTEGER_CST
13324 && TREE_CODE (arg1) == INTEGER_CST))
13325 return build_complex (type, arg0, arg1);
13326 return NULL_TREE;
13328 case ASSERT_EXPR:
13329 /* An ASSERT_EXPR should never be passed to fold_binary. */
13330 gcc_unreachable ();
13332 case VEC_PACK_TRUNC_EXPR:
13333 case VEC_PACK_FIX_TRUNC_EXPR:
13335 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13336 tree *elts;
13338 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13339 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13340 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13341 return NULL_TREE;
13343 elts = XALLOCAVEC (tree, nelts);
13344 if (!vec_cst_ctor_to_array (arg0, elts)
13345 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13346 return NULL_TREE;
13348 for (i = 0; i < nelts; i++)
13350 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13351 ? NOP_EXPR : FIX_TRUNC_EXPR,
13352 TREE_TYPE (type), elts[i]);
13353 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13354 return NULL_TREE;
13357 return build_vector (type, elts);
13360 case VEC_WIDEN_MULT_LO_EXPR:
13361 case VEC_WIDEN_MULT_HI_EXPR:
13362 case VEC_WIDEN_MULT_EVEN_EXPR:
13363 case VEC_WIDEN_MULT_ODD_EXPR:
13365 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13366 unsigned int out, ofs, scale;
13367 tree *elts;
13369 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13370 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13371 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13372 return NULL_TREE;
13374 elts = XALLOCAVEC (tree, nelts * 4);
13375 if (!vec_cst_ctor_to_array (arg0, elts)
13376 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13377 return NULL_TREE;
13379 if (code == VEC_WIDEN_MULT_LO_EXPR)
13380 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13381 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13382 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13383 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13384 scale = 1, ofs = 0;
13385 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13386 scale = 1, ofs = 1;
13388 for (out = 0; out < nelts; out++)
13390 unsigned int in1 = (out << scale) + ofs;
13391 unsigned int in2 = in1 + nelts * 2;
13392 tree t1, t2;
13394 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13395 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13397 if (t1 == NULL_TREE || t2 == NULL_TREE)
13398 return NULL_TREE;
13399 elts[out] = const_binop (MULT_EXPR, t1, t2);
13400 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13401 return NULL_TREE;
13404 return build_vector (type, elts);
13407 default:
13408 return NULL_TREE;
13409 } /* switch (code) */
13412 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13413 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13414 of GOTO_EXPR. */
13416 static tree
13417 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13419 switch (TREE_CODE (*tp))
13421 case LABEL_EXPR:
13422 return *tp;
13424 case GOTO_EXPR:
13425 *walk_subtrees = 0;
13427 /* ... fall through ... */
13429 default:
13430 return NULL_TREE;
13434 /* Return whether the sub-tree ST contains a label which is accessible from
13435 outside the sub-tree. */
13437 static bool
13438 contains_label_p (tree st)
13440 return
13441 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13444 /* Fold a ternary expression of code CODE and type TYPE with operands
13445 OP0, OP1, and OP2. Return the folded expression if folding is
13446 successful. Otherwise, return NULL_TREE. */
13448 tree
13449 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13450 tree op0, tree op1, tree op2)
13452 tree tem;
13453 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13454 enum tree_code_class kind = TREE_CODE_CLASS (code);
13456 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13457 && TREE_CODE_LENGTH (code) == 3);
13459 /* If this is a commutative operation, and OP0 is a constant, move it
13460 to OP1 to reduce the number of tests below. */
13461 if (commutative_ternary_tree_code (code)
13462 && tree_swap_operands_p (op0, op1, true))
13463 return fold_build3_loc (loc, code, type, op1, op0, op2);
13465 tem = generic_simplify (loc, code, type, op0, op1, op2);
13466 if (tem)
13467 return tem;
13469 /* Strip any conversions that don't change the mode. This is safe
13470 for every expression, except for a comparison expression because
13471 its signedness is derived from its operands. So, in the latter
13472 case, only strip conversions that don't change the signedness.
13474 Note that this is done as an internal manipulation within the
13475 constant folder, in order to find the simplest representation of
13476 the arguments so that their form can be studied. In any case,
13477 the appropriate type conversions should be put back in the tree
13478 that will get out of the constant folder. */
13479 if (op0)
13481 arg0 = op0;
13482 STRIP_NOPS (arg0);
13485 if (op1)
13487 arg1 = op1;
13488 STRIP_NOPS (arg1);
13491 if (op2)
13493 arg2 = op2;
13494 STRIP_NOPS (arg2);
13497 switch (code)
13499 case COMPONENT_REF:
13500 if (TREE_CODE (arg0) == CONSTRUCTOR
13501 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13503 unsigned HOST_WIDE_INT idx;
13504 tree field, value;
13505 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13506 if (field == arg1)
13507 return value;
13509 return NULL_TREE;
13511 case COND_EXPR:
13512 case VEC_COND_EXPR:
13513 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13514 so all simple results must be passed through pedantic_non_lvalue. */
13515 if (TREE_CODE (arg0) == INTEGER_CST)
13517 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13518 tem = integer_zerop (arg0) ? op2 : op1;
13519 /* Only optimize constant conditions when the selected branch
13520 has the same type as the COND_EXPR. This avoids optimizing
13521 away "c ? x : throw", where the throw has a void type.
13522 Avoid throwing away an operand that contains a label. */
13523 if ((!TREE_SIDE_EFFECTS (unused_op)
13524 || !contains_label_p (unused_op))
13525 && (! VOID_TYPE_P (TREE_TYPE (tem))
13526 || VOID_TYPE_P (type)))
13527 return pedantic_non_lvalue_loc (loc, tem);
13528 return NULL_TREE;
13530 else if (TREE_CODE (arg0) == VECTOR_CST)
13532 if ((TREE_CODE (arg1) == VECTOR_CST
13533 || TREE_CODE (arg1) == CONSTRUCTOR)
13534 && (TREE_CODE (arg2) == VECTOR_CST
13535 || TREE_CODE (arg2) == CONSTRUCTOR))
13537 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13538 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13539 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13540 for (i = 0; i < nelts; i++)
13542 tree val = VECTOR_CST_ELT (arg0, i);
13543 if (integer_all_onesp (val))
13544 sel[i] = i;
13545 else if (integer_zerop (val))
13546 sel[i] = nelts + i;
13547 else /* Currently unreachable. */
13548 return NULL_TREE;
13550 tree t = fold_vec_perm (type, arg1, arg2, sel);
13551 if (t != NULL_TREE)
13552 return t;
13556 /* If we have A op B ? A : C, we may be able to convert this to a
13557 simpler expression, depending on the operation and the values
13558 of B and C. Signed zeros prevent all of these transformations,
13559 for reasons given above each one.
13561 Also try swapping the arguments and inverting the conditional. */
13562 if (COMPARISON_CLASS_P (arg0)
13563 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13564 arg1, TREE_OPERAND (arg0, 1))
13565 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13567 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13568 if (tem)
13569 return tem;
13572 if (COMPARISON_CLASS_P (arg0)
13573 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13574 op2,
13575 TREE_OPERAND (arg0, 1))
13576 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13578 location_t loc0 = expr_location_or (arg0, loc);
13579 tem = fold_invert_truthvalue (loc0, arg0);
13580 if (tem && COMPARISON_CLASS_P (tem))
13582 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13583 if (tem)
13584 return tem;
13588 /* If the second operand is simpler than the third, swap them
13589 since that produces better jump optimization results. */
13590 if (truth_value_p (TREE_CODE (arg0))
13591 && tree_swap_operands_p (op1, op2, false))
13593 location_t loc0 = expr_location_or (arg0, loc);
13594 /* See if this can be inverted. If it can't, possibly because
13595 it was a floating-point inequality comparison, don't do
13596 anything. */
13597 tem = fold_invert_truthvalue (loc0, arg0);
13598 if (tem)
13599 return fold_build3_loc (loc, code, type, tem, op2, op1);
13602 /* Convert A ? 1 : 0 to simply A. */
13603 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13604 : (integer_onep (op1)
13605 && !VECTOR_TYPE_P (type)))
13606 && integer_zerop (op2)
13607 /* If we try to convert OP0 to our type, the
13608 call to fold will try to move the conversion inside
13609 a COND, which will recurse. In that case, the COND_EXPR
13610 is probably the best choice, so leave it alone. */
13611 && type == TREE_TYPE (arg0))
13612 return pedantic_non_lvalue_loc (loc, arg0);
13614 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13615 over COND_EXPR in cases such as floating point comparisons. */
13616 if (integer_zerop (op1)
13617 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13618 : (integer_onep (op2)
13619 && !VECTOR_TYPE_P (type)))
13620 && truth_value_p (TREE_CODE (arg0)))
13621 return pedantic_non_lvalue_loc (loc,
13622 fold_convert_loc (loc, type,
13623 invert_truthvalue_loc (loc,
13624 arg0)));
13626 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13627 if (TREE_CODE (arg0) == LT_EXPR
13628 && integer_zerop (TREE_OPERAND (arg0, 1))
13629 && integer_zerop (op2)
13630 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13632 /* sign_bit_p looks through both zero and sign extensions,
13633 but for this optimization only sign extensions are
13634 usable. */
13635 tree tem2 = TREE_OPERAND (arg0, 0);
13636 while (tem != tem2)
13638 if (TREE_CODE (tem2) != NOP_EXPR
13639 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13641 tem = NULL_TREE;
13642 break;
13644 tem2 = TREE_OPERAND (tem2, 0);
13646 /* sign_bit_p only checks ARG1 bits within A's precision.
13647 If <sign bit of A> has wider type than A, bits outside
13648 of A's precision in <sign bit of A> need to be checked.
13649 If they are all 0, this optimization needs to be done
13650 in unsigned A's type; if they are all 1, in signed A's type;
13651 otherwise this can't be done. */
13652 if (tem
13653 && TYPE_PRECISION (TREE_TYPE (tem))
13654 < TYPE_PRECISION (TREE_TYPE (arg1))
13655 && TYPE_PRECISION (TREE_TYPE (tem))
13656 < TYPE_PRECISION (type))
13658 int inner_width, outer_width;
13659 tree tem_type;
13661 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13662 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13663 if (outer_width > TYPE_PRECISION (type))
13664 outer_width = TYPE_PRECISION (type);
13666 wide_int mask = wi::shifted_mask
13667 (inner_width, outer_width - inner_width, false,
13668 TYPE_PRECISION (TREE_TYPE (arg1)));
13670 wide_int common = mask & arg1;
13671 if (common == mask)
13673 tem_type = signed_type_for (TREE_TYPE (tem));
13674 tem = fold_convert_loc (loc, tem_type, tem);
13676 else if (common == 0)
13678 tem_type = unsigned_type_for (TREE_TYPE (tem));
13679 tem = fold_convert_loc (loc, tem_type, tem);
13681 else
13682 tem = NULL;
13685 if (tem)
13686 return
13687 fold_convert_loc (loc, type,
13688 fold_build2_loc (loc, BIT_AND_EXPR,
13689 TREE_TYPE (tem), tem,
13690 fold_convert_loc (loc,
13691 TREE_TYPE (tem),
13692 arg1)));
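/* Editorial sketch (not part of the original source): on a
   two's-complement target the transformation reads

     #include <assert.h>
     #include <limits.h>
     static void
     check_signbit_cond (int x)
     {
       assert ((x < 0 ? INT_MIN : 0) == (x & INT_MIN));
     }

   since A & <sign bit of A> is the sign bit exactly when A is
   negative.  */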
13695 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13696 already handled above. */
13697 if (TREE_CODE (arg0) == BIT_AND_EXPR
13698 && integer_onep (TREE_OPERAND (arg0, 1))
13699 && integer_zerop (op2)
13700 && integer_pow2p (arg1))
13702 tree tem = TREE_OPERAND (arg0, 0);
13703 STRIP_NOPS (tem);
13704 if (TREE_CODE (tem) == RSHIFT_EXPR
13705 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13706 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13707 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13708 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13709 TREE_OPERAND (tem, 0), arg1);
13712 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13713 is probably obsolete because the first operand should be a
13714 truth value (that's why we have the two cases above), but let's
13715 leave it in until we can confirm this for all front-ends. */
13716 if (integer_zerop (op2)
13717 && TREE_CODE (arg0) == NE_EXPR
13718 && integer_zerop (TREE_OPERAND (arg0, 1))
13719 && integer_pow2p (arg1)
13720 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13721 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13722 arg1, OEP_ONLY_CONST))
13723 return pedantic_non_lvalue_loc (loc,
13724 fold_convert_loc (loc, type,
13725 TREE_OPERAND (arg0, 0)));
13727 /* Disable the transformations below for vectors, since
13728 fold_binary_op_with_conditional_arg may undo them immediately,
13729 yielding an infinite loop. */
13730 if (code == VEC_COND_EXPR)
13731 return NULL_TREE;
13733 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13734 if (integer_zerop (op2)
13735 && truth_value_p (TREE_CODE (arg0))
13736 && truth_value_p (TREE_CODE (arg1))
13737 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13738 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13739 : TRUTH_ANDIF_EXPR,
13740 type, fold_convert_loc (loc, type, arg0), arg1);
13742 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13743 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13744 && truth_value_p (TREE_CODE (arg0))
13745 && truth_value_p (TREE_CODE (arg1))
13746 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13748 location_t loc0 = expr_location_or (arg0, loc);
13749 /* Only perform transformation if ARG0 is easily inverted. */
13750 tem = fold_invert_truthvalue (loc0, arg0);
13751 if (tem)
13752 return fold_build2_loc (loc, code == VEC_COND_EXPR
13753 ? BIT_IOR_EXPR
13754 : TRUTH_ORIF_EXPR,
13755 type, fold_convert_loc (loc, type, tem),
13756 arg1);
13759 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13760 if (integer_zerop (arg1)
13761 && truth_value_p (TREE_CODE (arg0))
13762 && truth_value_p (TREE_CODE (op2))
13763 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13765 location_t loc0 = expr_location_or (arg0, loc);
13766 /* Only perform transformation if ARG0 is easily inverted. */
13767 tem = fold_invert_truthvalue (loc0, arg0);
13768 if (tem)
13769 return fold_build2_loc (loc, code == VEC_COND_EXPR
13770 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13771 type, fold_convert_loc (loc, type, tem),
13772 op2);
13775 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13776 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13777 && truth_value_p (TREE_CODE (arg0))
13778 && truth_value_p (TREE_CODE (op2))
13779 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13780 return fold_build2_loc (loc, code == VEC_COND_EXPR
13781 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13782 type, fold_convert_loc (loc, type, arg0), op2);
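/* Editorial sketch (not part of the original source): the four
   COND_EXPR conversions above are the familiar truth-table identities;
   for C ints reduced to 0 or 1 they read

     #include <assert.h>
     static void
     check_cond_truth (int a, int b)
     {
       a = !!a; b = !!b;
       assert ((a ? b : 0) == (a && b));
       assert ((a ? b : 1) == (!a || b));
       assert ((a ? 0 : b) == (!a && b));
       assert ((a ? 1 : b) == (a || b));
     }
*/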
13784 return NULL_TREE;
13786 case CALL_EXPR:
13787 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13788 of fold_ternary on them. */
13789 gcc_unreachable ();
13791 case BIT_FIELD_REF:
13792 if ((TREE_CODE (arg0) == VECTOR_CST
13793 || (TREE_CODE (arg0) == CONSTRUCTOR
13794 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13795 && (type == TREE_TYPE (TREE_TYPE (arg0))
13796 || (TREE_CODE (type) == VECTOR_TYPE
13797 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13799 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13800 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13801 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13802 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13804 if (n != 0
13805 && (idx % width) == 0
13806 && (n % width) == 0
13807 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13809 idx = idx / width;
13810 n = n / width;
13812 if (TREE_CODE (arg0) == VECTOR_CST)
13814 if (n == 1)
13815 return VECTOR_CST_ELT (arg0, idx);
13817 tree *vals = XALLOCAVEC (tree, n);
13818 for (unsigned i = 0; i < n; ++i)
13819 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13820 return build_vector (type, vals);
13823 /* Constructor elements can be subvectors. */
13824 unsigned HOST_WIDE_INT k = 1;
13825 if (CONSTRUCTOR_NELTS (arg0) != 0)
13827 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13828 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13829 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13832 /* We keep an exact subset of the constructor elements. */
13833 if ((idx % k) == 0 && (n % k) == 0)
13835 if (CONSTRUCTOR_NELTS (arg0) == 0)
13836 return build_constructor (type, NULL);
13837 idx /= k;
13838 n /= k;
13839 if (n == 1)
13841 if (idx < CONSTRUCTOR_NELTS (arg0))
13842 return CONSTRUCTOR_ELT (arg0, idx)->value;
13843 return build_zero_cst (type);
13846 vec<constructor_elt, va_gc> *vals;
13847 vec_alloc (vals, n);
13848 for (unsigned i = 0;
13849 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13850 ++i)
13851 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13852 CONSTRUCTOR_ELT
13853 (arg0, idx + i)->value);
13854 return build_constructor (type, vals);
13856 /* The bitfield references a single constructor element. */
13857 else if (idx + n <= (idx / k + 1) * k)
13859 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13860 return build_zero_cst (type);
13861 else if (n == k)
13862 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13863 else
13864 return fold_build3_loc (loc, code, type,
13865 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13866 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13871 /* A bit-field-ref that referenced the full argument can be stripped. */
13872 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13873 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13874 && integer_zerop (op2))
13875 return fold_convert_loc (loc, type, arg0);
13877 /* On constants we can use native encode/interpret to constant
13878 fold (nearly) all BIT_FIELD_REFs. */
13879 if (CONSTANT_CLASS_P (arg0)
13880 && can_native_interpret_type_p (type)
13881 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13882 /* This limitation should not be necessary; we just need to
13883 round this up to mode size. */
13884 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13885 /* Need bit-shifting of the buffer to relax the following. */
13886 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13888 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13889 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13890 unsigned HOST_WIDE_INT clen;
13891 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13892 /* ??? We cannot tell native_encode_expr to start at
13893 some random byte only. So limit us to a reasonable amount
13894 of work. */
13895 if (clen <= 4096)
13897 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13898 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13899 if (len > 0
13900 && len * BITS_PER_UNIT >= bitpos + bitsize)
13902 tree v = native_interpret_expr (type,
13903 b + bitpos / BITS_PER_UNIT,
13904 bitsize / BITS_PER_UNIT);
13905 if (v)
13906 return v;
13911 return NULL_TREE;
13913 case FMA_EXPR:
13914 /* For integers we can decompose the FMA if possible. */
13915 if (TREE_CODE (arg0) == INTEGER_CST
13916 && TREE_CODE (arg1) == INTEGER_CST)
13917 return fold_build2_loc (loc, PLUS_EXPR, type,
13918 const_binop (MULT_EXPR, arg0, arg1), arg2);
13919 if (integer_zerop (arg2))
13920 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13922 return fold_fma (loc, type, arg0, arg1, arg2);
13924 case VEC_PERM_EXPR:
13925 if (TREE_CODE (arg2) == VECTOR_CST)
13927 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13928 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13929 unsigned char *sel2 = sel + nelts;
13930 bool need_mask_canon = false;
13931 bool need_mask_canon2 = false;
13932 bool all_in_vec0 = true;
13933 bool all_in_vec1 = true;
13934 bool maybe_identity = true;
13935 bool single_arg = (op0 == op1);
13936 bool changed = false;
13938 mask2 = 2 * nelts - 1;
13939 mask = single_arg ? (nelts - 1) : mask2;
13940 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13941 for (i = 0; i < nelts; i++)
13943 tree val = VECTOR_CST_ELT (arg2, i);
13944 if (TREE_CODE (val) != INTEGER_CST)
13945 return NULL_TREE;
13947 /* Make sure that the perm value is in an acceptable
13948 range. */
13949 wide_int t = val;
13950 need_mask_canon |= wi::gtu_p (t, mask);
13951 need_mask_canon2 |= wi::gtu_p (t, mask2);
13952 sel[i] = t.to_uhwi () & mask;
13953 sel2[i] = t.to_uhwi () & mask2;
13955 if (sel[i] < nelts)
13956 all_in_vec1 = false;
13957 else
13958 all_in_vec0 = false;
13960 if ((sel[i] & (nelts-1)) != i)
13961 maybe_identity = false;
13964 if (maybe_identity)
13966 if (all_in_vec0)
13967 return op0;
13968 if (all_in_vec1)
13969 return op1;
13972 if (all_in_vec0)
13973 op1 = op0;
13974 else if (all_in_vec1)
13976 op0 = op1;
13977 for (i = 0; i < nelts; i++)
13978 sel[i] -= nelts;
13979 need_mask_canon = true;
13982 if ((TREE_CODE (op0) == VECTOR_CST
13983 || TREE_CODE (op0) == CONSTRUCTOR)
13984 && (TREE_CODE (op1) == VECTOR_CST
13985 || TREE_CODE (op1) == CONSTRUCTOR))
13987 tree t = fold_vec_perm (type, op0, op1, sel);
13988 if (t != NULL_TREE)
13989 return t;
13992 if (op0 == op1 && !single_arg)
13993 changed = true;
13995 /* Some targets are deficient and fail to expand a single
13996 argument permutation while still allowing an equivalent
13997 2-argument version. */
13998 if (need_mask_canon && arg2 == op2
13999 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
14000 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
14002 need_mask_canon = need_mask_canon2;
14003 sel = sel2;
14006 if (need_mask_canon && arg2 == op2)
14008 tree *tsel = XALLOCAVEC (tree, nelts);
14009 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14010 for (i = 0; i < nelts; i++)
14011 tsel[i] = build_int_cst (eltype, sel[i]);
14012 op2 = build_vector (TREE_TYPE (arg2), tsel);
14013 changed = true;
14016 if (changed)
14017 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14019 return NULL_TREE;
14021 default:
14022 return NULL_TREE;
14023 } /* switch (code) */
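/* [Editorial sketch -- not part of fold-const.c.]  The VEC_PERM_EXPR case
   above masks each selector element into range and then checks whether the
   permutation is an identity drawn from a single input vector
   ((sel[i] & (nelts - 1)) == i for every lane).  A standalone model of
   that test, assuming a power-of-two lane count; all names are
   illustrative.  */

#include <stdbool.h>
#include <stdio.h>

static bool
perm_is_identity (const unsigned *sel, unsigned nelts)
{
  for (unsigned i = 0; i < nelts; i++)
    if ((sel[i] & (nelts - 1)) != i)   /* mirrors the test above */
      return false;
  return true;
}

int
main (void)
{
  /* Every lane taken, in order, from the second input vector.  */
  unsigned sel[4] = { 4, 5, 6, 7 };
  printf ("identity: %d\n", perm_is_identity (sel, 4));   /* prints 1 */
  return 0;
}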
14026 /* Perform constant folding and related simplification of EXPR.
14027 The related simplifications include x*1 => x, x*0 => 0, etc.,
14028 and application of the associative law.
14029 NOP_EXPR conversions may be removed freely (as long as we
14030 are careful not to change the type of the overall expression).
14031 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14032 but we can constant-fold them if they have constant operands. */
14034 #ifdef ENABLE_FOLD_CHECKING
14035 # define fold(x) fold_1 (x)
14036 static tree fold_1 (tree);
14037 static
14038 #endif
14039 tree
14040 fold (tree expr)
14042 const tree t = expr;
14043 enum tree_code code = TREE_CODE (t);
14044 enum tree_code_class kind = TREE_CODE_CLASS (code);
14045 tree tem;
14046 location_t loc = EXPR_LOCATION (expr);
14048 /* Return right away if a constant. */
14049 if (kind == tcc_constant)
14050 return t;
14052 /* CALL_EXPR-like objects with variable numbers of operands are
14053 treated specially. */
14054 if (kind == tcc_vl_exp)
14056 if (code == CALL_EXPR)
14058 tem = fold_call_expr (loc, expr, false);
14059 return tem ? tem : expr;
14061 return expr;
14064 if (IS_EXPR_CODE_CLASS (kind))
14066 tree type = TREE_TYPE (t);
14067 tree op0, op1, op2;
14069 switch (TREE_CODE_LENGTH (code))
14071 case 1:
14072 op0 = TREE_OPERAND (t, 0);
14073 tem = fold_unary_loc (loc, code, type, op0);
14074 return tem ? tem : expr;
14075 case 2:
14076 op0 = TREE_OPERAND (t, 0);
14077 op1 = TREE_OPERAND (t, 1);
14078 tem = fold_binary_loc (loc, code, type, op0, op1);
14079 return tem ? tem : expr;
14080 case 3:
14081 op0 = TREE_OPERAND (t, 0);
14082 op1 = TREE_OPERAND (t, 1);
14083 op2 = TREE_OPERAND (t, 2);
14084 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14085 return tem ? tem : expr;
14086 default:
14087 break;
14091 switch (code)
14093 case ARRAY_REF:
14095 tree op0 = TREE_OPERAND (t, 0);
14096 tree op1 = TREE_OPERAND (t, 1);
14098 if (TREE_CODE (op1) == INTEGER_CST
14099 && TREE_CODE (op0) == CONSTRUCTOR
14100 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14102 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14103 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14104 unsigned HOST_WIDE_INT begin = 0;
14106 /* Find a matching index by means of a binary search. */
14107 while (begin != end)
14109 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14110 tree index = (*elts)[middle].index;
14112 if (TREE_CODE (index) == INTEGER_CST
14113 && tree_int_cst_lt (index, op1))
14114 begin = middle + 1;
14115 else if (TREE_CODE (index) == INTEGER_CST
14116 && tree_int_cst_lt (op1, index))
14117 end = middle;
14118 else if (TREE_CODE (index) == RANGE_EXPR
14119 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14120 begin = middle + 1;
14121 else if (TREE_CODE (index) == RANGE_EXPR
14122 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14123 end = middle;
14124 else
14125 return (*elts)[middle].value;
14129 return t;
14132 /* Return a VECTOR_CST if possible. */
14133 case CONSTRUCTOR:
14135 tree type = TREE_TYPE (t);
14136 if (TREE_CODE (type) != VECTOR_TYPE)
14137 return t;
14139 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14140 unsigned HOST_WIDE_INT idx, pos = 0;
14141 tree value;
14143 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14145 if (!CONSTANT_CLASS_P (value))
14146 return t;
14147 if (TREE_CODE (value) == VECTOR_CST)
14149 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14150 vec[pos++] = VECTOR_CST_ELT (value, i);
14152 else
14153 vec[pos++] = value;
14155 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14156 vec[pos] = build_zero_cst (TREE_TYPE (type));
14158 return build_vector (type, vec);
14161 case CONST_DECL:
14162 return fold (DECL_INITIAL (t));
14164 default:
14165 return t;
14166 } /* switch (code) */
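/* [Editorial sketch -- not part of fold-const.c.]  The ARRAY_REF case of
   fold above binary-searches the constructor's sorted index/value pairs.
   The same search shape over a plain sorted array, with the RANGE_EXPR
   arms omitted; standalone and illustrative only.  */

#include <stdio.h>

static int
find_initializer (const long *idx, unsigned n, long key)
{
  unsigned begin = 0, end = n;
  while (begin != end)
    {
      unsigned middle = (begin + end) / 2;
      if (idx[middle] < key)
        begin = middle + 1;
      else if (key < idx[middle])
        end = middle;
      else
        return (int) middle;   /* matching initializer found */
    }
  return -1;                   /* no explicit initializer for KEY */
}

int
main (void)
{
  long idx[] = { 0, 2, 5, 9 };
  printf ("%d\n", find_initializer (idx, 4, 5));   /* prints 2 */
  return 0;
}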
14169 #ifdef ENABLE_FOLD_CHECKING
14170 #undef fold
14172 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14173 hash_table<pointer_hash<const tree_node> > *);
14174 static void fold_check_failed (const_tree, const_tree);
14175 void print_fold_checksum (const_tree);
14177 /* When --enable-checking=fold, compute a digest of expr before
14178 and after the actual fold call to verify that fold did not
14179 accidentally change the original expr. */
14181 tree
14182 fold (tree expr)
14184 tree ret;
14185 struct md5_ctx ctx;
14186 unsigned char checksum_before[16], checksum_after[16];
14187 hash_table<pointer_hash<const tree_node> > ht (32);
14189 md5_init_ctx (&ctx);
14190 fold_checksum_tree (expr, &ctx, &ht);
14191 md5_finish_ctx (&ctx, checksum_before);
14192 ht.empty ();
14194 ret = fold_1 (expr);
14196 md5_init_ctx (&ctx);
14197 fold_checksum_tree (expr, &ctx, &ht);
14198 md5_finish_ctx (&ctx, checksum_after);
14200 if (memcmp (checksum_before, checksum_after, 16))
14201 fold_check_failed (expr, ret);
14203 return ret;
14206 void
14207 print_fold_checksum (const_tree expr)
14209 struct md5_ctx ctx;
14210 unsigned char checksum[16], cnt;
14211 hash_table<pointer_hash<const tree_node> > ht (32);
14213 md5_init_ctx (&ctx);
14214 fold_checksum_tree (expr, &ctx, &ht);
14215 md5_finish_ctx (&ctx, checksum);
14216 for (cnt = 0; cnt < 16; ++cnt)
14217 fprintf (stderr, "%02x", checksum[cnt]);
14218 putc ('\n', stderr);
14221 static void
14222 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14224 internal_error ("fold check: original tree changed by fold");
14227 static void
14228 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14229 hash_table<pointer_hash <const tree_node> > *ht)
14231 const tree_node **slot;
14232 enum tree_code code;
14233 union tree_node buf;
14234 int i, len;
14236 recursive_label:
14237 if (expr == NULL)
14238 return;
14239 slot = ht->find_slot (expr, INSERT);
14240 if (*slot != NULL)
14241 return;
14242 *slot = expr;
14243 code = TREE_CODE (expr);
14244 if (TREE_CODE_CLASS (code) == tcc_declaration
14245 && DECL_ASSEMBLER_NAME_SET_P (expr))
14247 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14248 memcpy ((char *) &buf, expr, tree_size (expr));
14249 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14250 expr = (tree) &buf;
14252 else if (TREE_CODE_CLASS (code) == tcc_type
14253 && (TYPE_POINTER_TO (expr)
14254 || TYPE_REFERENCE_TO (expr)
14255 || TYPE_CACHED_VALUES_P (expr)
14256 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14257 || TYPE_NEXT_VARIANT (expr)))
14259 /* Allow these fields to be modified. */
14260 tree tmp;
14261 memcpy ((char *) &buf, expr, tree_size (expr));
14262 expr = tmp = (tree) &buf;
14263 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14264 TYPE_POINTER_TO (tmp) = NULL;
14265 TYPE_REFERENCE_TO (tmp) = NULL;
14266 TYPE_NEXT_VARIANT (tmp) = NULL;
14267 if (TYPE_CACHED_VALUES_P (tmp))
14269 TYPE_CACHED_VALUES_P (tmp) = 0;
14270 TYPE_CACHED_VALUES (tmp) = NULL;
14273 md5_process_bytes (expr, tree_size (expr), ctx);
14274 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14275 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14276 if (TREE_CODE_CLASS (code) != tcc_type
14277 && TREE_CODE_CLASS (code) != tcc_declaration
14278 && code != TREE_LIST
14279 && code != SSA_NAME
14280 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14281 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14282 switch (TREE_CODE_CLASS (code))
14284 case tcc_constant:
14285 switch (code)
14287 case STRING_CST:
14288 md5_process_bytes (TREE_STRING_POINTER (expr),
14289 TREE_STRING_LENGTH (expr), ctx);
14290 break;
14291 case COMPLEX_CST:
14292 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14293 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14294 break;
14295 case VECTOR_CST:
14296 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14297 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14298 break;
14299 default:
14300 break;
14302 break;
14303 case tcc_exceptional:
14304 switch (code)
14306 case TREE_LIST:
14307 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14308 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14309 expr = TREE_CHAIN (expr);
14310 goto recursive_label;
14311 break;
14312 case TREE_VEC:
14313 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14314 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14315 break;
14316 default:
14317 break;
14319 break;
14320 case tcc_expression:
14321 case tcc_reference:
14322 case tcc_comparison:
14323 case tcc_unary:
14324 case tcc_binary:
14325 case tcc_statement:
14326 case tcc_vl_exp:
14327 len = TREE_OPERAND_LENGTH (expr);
14328 for (i = 0; i < len; ++i)
14329 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14330 break;
14331 case tcc_declaration:
14332 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14333 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14334 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14336 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14337 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14338 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14339 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14340 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14343 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14345 if (TREE_CODE (expr) == FUNCTION_DECL)
14347 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14348 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14350 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14352 break;
14353 case tcc_type:
14354 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14355 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14356 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14357 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14358 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14359 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14360 if (INTEGRAL_TYPE_P (expr)
14361 || SCALAR_FLOAT_TYPE_P (expr))
14363 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14364 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14366 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14367 if (TREE_CODE (expr) == RECORD_TYPE
14368 || TREE_CODE (expr) == UNION_TYPE
14369 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14370 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14371 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14372 break;
14373 default:
14374 break;
14378 /* Helper function for outputting the checksum of a tree T. When
14379 debugging with gdb, you can "define mynext" to be "next" followed
14380 by "call debug_fold_checksum (op0)", then just trace down till the
14381 outputs differ. */
14383 DEBUG_FUNCTION void
14384 debug_fold_checksum (const_tree t)
14386 int i;
14387 unsigned char checksum[16];
14388 struct md5_ctx ctx;
14389 hash_table<pointer_hash<const tree_node> > ht (32);
14391 md5_init_ctx (&ctx);
14392 fold_checksum_tree (t, &ctx, &ht);
14393 md5_finish_ctx (&ctx, checksum);
14394 ht.empty ();
14396 for (i = 0; i < 16; i++)
14397 fprintf (stderr, "%d ", checksum[i]);
14399 fprintf (stderr, "\n");
14402 #endif
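/* [Editorial sketch -- not part of fold-const.c.]  The checking code above
   hashes a tree before and after folding and aborts if the two digests
   differ.  The same before/after discipline in miniature, with a 64-bit
   FNV-1a hash standing in for md5 (assumption: any stable digest works
   for this purpose); standalone and illustrative only.  */

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

static uint64_t
digest (const void *p, size_t len)
{
  const unsigned char *b = (const unsigned char *) p;
  uint64_t h = 14695981039346656037ull;   /* FNV-1a offset basis */
  for (size_t i = 0; i < len; i++)
    h = (h ^ b[i]) * 1099511628211ull;    /* FNV-1a prime */
  return h;
}

static int
pure_op (const int *in)   /* contract: must not modify *IN */
{
  return *in * 2;
}

int
main (void)
{
  int x = 21;
  uint64_t before = digest (&x, sizeof x);
  int r = pure_op (&x);
  uint64_t after = digest (&x, sizeof x);
  assert (before == after);   /* else: "original tree changed by fold" */
  return r == 42 ? 0 : 1;
}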
14404 /* Fold a unary tree expression with code CODE of type TYPE with an
14405 operand OP0. LOC is the location of the resulting expression.
14406 Return a folded expression if successful. Otherwise, return a tree
14407 expression with code CODE of type TYPE with an operand OP0. */
14409 tree
14410 fold_build1_stat_loc (location_t loc,
14411 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14413 tree tem;
14414 #ifdef ENABLE_FOLD_CHECKING
14415 unsigned char checksum_before[16], checksum_after[16];
14416 struct md5_ctx ctx;
14417 hash_table<pointer_hash<const tree_node> > ht (32);
14419 md5_init_ctx (&ctx);
14420 fold_checksum_tree (op0, &ctx, &ht);
14421 md5_finish_ctx (&ctx, checksum_before);
14422 ht.empty ();
14423 #endif
14425 tem = fold_unary_loc (loc, code, type, op0);
14426 if (!tem)
14427 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14429 #ifdef ENABLE_FOLD_CHECKING
14430 md5_init_ctx (&ctx);
14431 fold_checksum_tree (op0, &ctx, &ht);
14432 md5_finish_ctx (&ctx, checksum_after);
14434 if (memcmp (checksum_before, checksum_after, 16))
14435 fold_check_failed (op0, tem);
14436 #endif
14437 return tem;
14440 /* Fold a binary tree expression with code CODE of type TYPE with
14441 operands OP0 and OP1. LOC is the location of the resulting
14442 expression. Return a folded expression if successful. Otherwise,
14443 return a tree expression with code CODE of type TYPE with operands
14444 OP0 and OP1. */
14446 tree
14447 fold_build2_stat_loc (location_t loc,
14448 enum tree_code code, tree type, tree op0, tree op1
14449 MEM_STAT_DECL)
14451 tree tem;
14452 #ifdef ENABLE_FOLD_CHECKING
14453 unsigned char checksum_before_op0[16],
14454 checksum_before_op1[16],
14455 checksum_after_op0[16],
14456 checksum_after_op1[16];
14457 struct md5_ctx ctx;
14458 hash_table<pointer_hash<const tree_node> > ht (32);
14460 md5_init_ctx (&ctx);
14461 fold_checksum_tree (op0, &ctx, &ht);
14462 md5_finish_ctx (&ctx, checksum_before_op0);
14463 ht.empty ();
14465 md5_init_ctx (&ctx);
14466 fold_checksum_tree (op1, &ctx, &ht);
14467 md5_finish_ctx (&ctx, checksum_before_op1);
14468 ht.empty ();
14469 #endif
14471 tem = fold_binary_loc (loc, code, type, op0, op1);
14472 if (!tem)
14473 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14475 #ifdef ENABLE_FOLD_CHECKING
14476 md5_init_ctx (&ctx);
14477 fold_checksum_tree (op0, &ctx, &ht);
14478 md5_finish_ctx (&ctx, checksum_after_op0);
14479 ht.empty ();
14481 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14482 fold_check_failed (op0, tem);
14484 md5_init_ctx (&ctx);
14485 fold_checksum_tree (op1, &ctx, &ht);
14486 md5_finish_ctx (&ctx, checksum_after_op1);
14488 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14489 fold_check_failed (op1, tem);
14490 #endif
14491 return tem;
14494 /* Fold a ternary tree expression with code CODE of type TYPE with
14495 operands OP0, OP1, and OP2. Return a folded expression if
14496 successful. Otherwise, return a tree expression with code CODE of
14497 type TYPE with operands OP0, OP1, and OP2. */
14499 tree
14500 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14501 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14503 tree tem;
14504 #ifdef ENABLE_FOLD_CHECKING
14505 unsigned char checksum_before_op0[16],
14506 checksum_before_op1[16],
14507 checksum_before_op2[16],
14508 checksum_after_op0[16],
14509 checksum_after_op1[16],
14510 checksum_after_op2[16];
14511 struct md5_ctx ctx;
14512 hash_table<pointer_hash<const tree_node> > ht (32);
14514 md5_init_ctx (&ctx);
14515 fold_checksum_tree (op0, &ctx, &ht);
14516 md5_finish_ctx (&ctx, checksum_before_op0);
14517 ht.empty ();
14519 md5_init_ctx (&ctx);
14520 fold_checksum_tree (op1, &ctx, &ht);
14521 md5_finish_ctx (&ctx, checksum_before_op1);
14522 ht.empty ();
14524 md5_init_ctx (&ctx);
14525 fold_checksum_tree (op2, &ctx, &ht);
14526 md5_finish_ctx (&ctx, checksum_before_op2);
14527 ht.empty ();
14528 #endif
14530 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14531 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14532 if (!tem)
14533 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14535 #ifdef ENABLE_FOLD_CHECKING
14536 md5_init_ctx (&ctx);
14537 fold_checksum_tree (op0, &ctx, &ht);
14538 md5_finish_ctx (&ctx, checksum_after_op0);
14539 ht.empty ();
14541 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14542 fold_check_failed (op0, tem);
14544 md5_init_ctx (&ctx);
14545 fold_checksum_tree (op1, &ctx, &ht);
14546 md5_finish_ctx (&ctx, checksum_after_op1);
14547 ht.empty ();
14549 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14550 fold_check_failed (op1, tem);
14552 md5_init_ctx (&ctx);
14553 fold_checksum_tree (op2, &ctx, &ht);
14554 md5_finish_ctx (&ctx, checksum_after_op2);
14556 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14557 fold_check_failed (op2, tem);
14558 #endif
14559 return tem;
14562 /* Fold a CALL_EXPR expression of type TYPE, calling FN with the NARGS
14563 arguments in ARGARRAY and a null static chain.
14564 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14565 of type TYPE from the given operands as constructed by build_call_array. */
14567 tree
14568 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14569 int nargs, tree *argarray)
14571 tree tem;
14572 #ifdef ENABLE_FOLD_CHECKING
14573 unsigned char checksum_before_fn[16],
14574 checksum_before_arglist[16],
14575 checksum_after_fn[16],
14576 checksum_after_arglist[16];
14577 struct md5_ctx ctx;
14578 hash_table<pointer_hash<const tree_node> > ht (32);
14579 int i;
14581 md5_init_ctx (&ctx);
14582 fold_checksum_tree (fn, &ctx, &ht);
14583 md5_finish_ctx (&ctx, checksum_before_fn);
14584 ht.empty ();
14586 md5_init_ctx (&ctx);
14587 for (i = 0; i < nargs; i++)
14588 fold_checksum_tree (argarray[i], &ctx, &ht);
14589 md5_finish_ctx (&ctx, checksum_before_arglist);
14590 ht.empty ();
14591 #endif
14593 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14595 #ifdef ENABLE_FOLD_CHECKING
14596 md5_init_ctx (&ctx);
14597 fold_checksum_tree (fn, &ctx, &ht);
14598 md5_finish_ctx (&ctx, checksum_after_fn);
14599 ht.empty ();
14601 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14602 fold_check_failed (fn, tem);
14604 md5_init_ctx (&ctx);
14605 for (i = 0; i < nargs; i++)
14606 fold_checksum_tree (argarray[i], &ctx, &ht);
14607 md5_finish_ctx (&ctx, checksum_after_arglist);
14609 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14610 fold_check_failed (NULL_TREE, tem);
14611 #endif
14612 return tem;
14615 /* Perform constant folding and related simplification of initializer
14616 expression EXPR. These behave identically to "fold_buildN" but ignore
14617 potential run-time traps and exceptions that fold must preserve. */
14619 #define START_FOLD_INIT \
14620 int saved_signaling_nans = flag_signaling_nans;\
14621 int saved_trapping_math = flag_trapping_math;\
14622 int saved_rounding_math = flag_rounding_math;\
14623 int saved_trapv = flag_trapv;\
14624 int saved_folding_initializer = folding_initializer;\
14625 flag_signaling_nans = 0;\
14626 flag_trapping_math = 0;\
14627 flag_rounding_math = 0;\
14628 flag_trapv = 0;\
14629 folding_initializer = 1;
14631 #define END_FOLD_INIT \
14632 flag_signaling_nans = saved_signaling_nans;\
14633 flag_trapping_math = saved_trapping_math;\
14634 flag_rounding_math = saved_rounding_math;\
14635 flag_trapv = saved_trapv;\
14636 folding_initializer = saved_folding_initializer;
14638 tree
14639 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14640 tree type, tree op)
14642 tree result;
14643 START_FOLD_INIT;
14645 result = fold_build1_loc (loc, code, type, op);
14647 END_FOLD_INIT;
14648 return result;
14651 tree
14652 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14653 tree type, tree op0, tree op1)
14655 tree result;
14656 START_FOLD_INIT;
14658 result = fold_build2_loc (loc, code, type, op0, op1);
14660 END_FOLD_INIT;
14661 return result;
14664 tree
14665 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14666 int nargs, tree *argarray)
14668 tree result;
14669 START_FOLD_INIT;
14671 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14673 END_FOLD_INIT;
14674 return result;
14677 #undef START_FOLD_INIT
14678 #undef END_FOLD_INIT
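/* [Editorial sketch -- not part of fold-const.c.]  START_FOLD_INIT /
   END_FOLD_INIT above save the trap-sensitive global flags, clear them
   for the duration of the fold, and restore them afterwards.  The same
   save/clear/restore macro pattern in isolation; standalone and
   illustrative only.  */

#include <stdio.h>

static int flag_traps = 1;   /* stands in for flag_trapping_math etc. */

#define START_INIT \
  int saved_traps = flag_traps; \
  flag_traps = 0;

#define END_INIT \
  flag_traps = saved_traps;

static double
fold_init_div (double x, double y)
{
  START_INIT;
  double r = x / y;   /* evaluated as if traps were disabled */
  END_INIT;
  return r;
}

int
main (void)
{
  printf ("%g %d\n", fold_init_div (1.0, 4.0), flag_traps);
  return 0;   /* prints "0.25 1" -- the flag was restored */
}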
14680 /* Determine if the first argument is a multiple of the second argument.
14681 Return 0 if it is not, or if we cannot easily determine it to be.
14683 An example of the sort of thing we care about (at this point; this routine
14684 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14685 fold cases do now) is discovering that
14687 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14689 is a multiple of
14691 SAVE_EXPR (J * 8)
14693 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14695 This code also handles discovering that
14697 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14699 is a multiple of 8 so we don't have to worry about dealing with a
14700 possible remainder.
14702 Note that we *look* inside a SAVE_EXPR only to determine how it was
14703 calculated; it is not safe for fold to do much of anything else with the
14704 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14705 at run time. For example, the latter example above *cannot* be implemented
14706 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14707 evaluation time of the original SAVE_EXPR is not necessarily the same at
14708 the time the new expression is evaluated. The only optimization of this
14709 sort that would be valid is changing
14711 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14713 divided by 8 to
14715 SAVE_EXPR (I) * SAVE_EXPR (J)
14717 (where the same SAVE_EXPR (J) is used in the original and the
14718 transformed version). */
14720 int
14721 multiple_of_p (tree type, const_tree top, const_tree bottom)
14723 if (operand_equal_p (top, bottom, 0))
14724 return 1;
14726 if (TREE_CODE (type) != INTEGER_TYPE)
14727 return 0;
14729 switch (TREE_CODE (top))
14731 case BIT_AND_EXPR:
14732 /* Bitwise and provides a power of two multiple. If the mask is
14733 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14734 if (!integer_pow2p (bottom))
14735 return 0;
14736 /* FALLTHRU */
14738 case MULT_EXPR:
14739 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14740 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14742 case PLUS_EXPR:
14743 case MINUS_EXPR:
14744 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14745 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14747 case LSHIFT_EXPR:
14748 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14750 tree op1, t1;
14752 op1 = TREE_OPERAND (top, 1);
14753 /* const_binop may not detect overflow correctly,
14754 so check for it explicitly here. */
14755 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14756 && 0 != (t1 = fold_convert (type,
14757 const_binop (LSHIFT_EXPR,
14758 size_one_node,
14759 op1)))
14760 && !TREE_OVERFLOW (t1))
14761 return multiple_of_p (type, t1, bottom);
14763 return 0;
14765 case NOP_EXPR:
14766 /* Can't handle conversions from non-integral or wider integral type. */
14767 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14768 || (TYPE_PRECISION (type)
14769 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14770 return 0;
14772 /* .. fall through ... */
14774 case SAVE_EXPR:
14775 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14777 case COND_EXPR:
14778 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14779 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14781 case INTEGER_CST:
14782 if (TREE_CODE (bottom) != INTEGER_CST
14783 || integer_zerop (bottom)
14784 || (TYPE_UNSIGNED (type)
14785 && (tree_int_cst_sgn (top) < 0
14786 || tree_int_cst_sgn (bottom) < 0)))
14787 return 0;
14788 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14789 SIGNED);
14791 default:
14792 return 0;
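/* [Editorial sketch -- not part of fold-const.c.]  multiple_of_p above
   recurses structurally: a product is a multiple of BOTTOM if either
   factor is, a sum or difference only if both operands are.  The same
   recursion over a toy expression tree, with the SAVE_EXPR, shift and
   overflow handling omitted; standalone and illustrative only.  */

#include <stdbool.h>
#include <stdio.h>

enum toy_code { TOY_CST, TOY_MULT, TOY_PLUS };

struct toy_expr
{
  enum toy_code code;
  long cst;                           /* valid for TOY_CST */
  const struct toy_expr *op0, *op1;   /* valid otherwise */
};

static bool
toy_multiple_of_p (const struct toy_expr *top, long bottom)
{
  switch (top->code)
    {
    case TOY_CST:
      return bottom != 0 && top->cst % bottom == 0;
    case TOY_MULT:
      return (toy_multiple_of_p (top->op0, bottom)
              || toy_multiple_of_p (top->op1, bottom));
    case TOY_PLUS:
      return (toy_multiple_of_p (top->op0, bottom)
              && toy_multiple_of_p (top->op1, bottom));
    }
  return false;
}

int
main (void)
{
  struct toy_expr i = { TOY_CST, 7, 0, 0 };     /* SAVE_EXPR (I) */
  struct toy_expr j8 = { TOY_CST, 24, 0, 0 };   /* SAVE_EXPR (J * 8) */
  struct toy_expr prod = { TOY_MULT, 0, &i, &j8 };
  printf ("%d\n", toy_multiple_of_p (&prod, 8));   /* prints 1 */
  return 0;
}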
14796 /* Return true if CODE or TYPE is known to be non-negative. */
14798 static bool
14799 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14801 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14802 && truth_value_p (code))
14803 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14804 have a signed:1 type (where the value is -1 and 0). */
14805 return true;
14806 return false;
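/* [Editorial sketch -- not part of fold-const.c.]  The signed:1 guard
   above exists because a 1-bit signed field can hold only -1 and 0, so
   a truth value stored there is not non-negative.  Demonstrated with a
   bit-field; standalone and illustrative only.  */

#include <assert.h>

struct s
{
  signed int b : 1;   /* representable values: -1 and 0 */
};

int
main (void)
{
  struct s x;
  x.b = -1;                       /* the only nonzero value */
  assert (x.b != 0 && x.b < 0);   /* "true", yet negative */
  return 0;
}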
14809 /* Return true if (CODE OP0) is known to be non-negative. If the return
14810 value is based on the assumption that signed overflow is undefined,
14811 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14812 *STRICT_OVERFLOW_P. */
14814 bool
14815 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14816 bool *strict_overflow_p)
14818 if (TYPE_UNSIGNED (type))
14819 return true;
14821 switch (code)
14823 case ABS_EXPR:
14824 /* We can't return 1 if flag_wrapv is set because
14825 ABS_EXPR<INT_MIN> = INT_MIN. */
14826 if (!INTEGRAL_TYPE_P (type))
14827 return true;
14828 if (TYPE_OVERFLOW_UNDEFINED (type))
14830 *strict_overflow_p = true;
14831 return true;
14833 break;
14835 case NON_LVALUE_EXPR:
14836 case FLOAT_EXPR:
14837 case FIX_TRUNC_EXPR:
14838 return tree_expr_nonnegative_warnv_p (op0,
14839 strict_overflow_p);
14841 case NOP_EXPR:
14843 tree inner_type = TREE_TYPE (op0);
14844 tree outer_type = type;
14846 if (TREE_CODE (outer_type) == REAL_TYPE)
14848 if (TREE_CODE (inner_type) == REAL_TYPE)
14849 return tree_expr_nonnegative_warnv_p (op0,
14850 strict_overflow_p);
14851 if (INTEGRAL_TYPE_P (inner_type))
14853 if (TYPE_UNSIGNED (inner_type))
14854 return true;
14855 return tree_expr_nonnegative_warnv_p (op0,
14856 strict_overflow_p);
14859 else if (INTEGRAL_TYPE_P (outer_type))
14861 if (TREE_CODE (inner_type) == REAL_TYPE)
14862 return tree_expr_nonnegative_warnv_p (op0,
14863 strict_overflow_p);
14864 if (INTEGRAL_TYPE_P (inner_type))
14865 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14866 && TYPE_UNSIGNED (inner_type);
14869 break;
14871 default:
14872 return tree_simple_nonnegative_warnv_p (code, type);
14875 /* We don't know sign of `t', so be conservative and return false. */
14876 return false;
14879 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14880 value is based on the assumption that signed overflow is undefined,
14881 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14882 *STRICT_OVERFLOW_P. */
14884 bool
14885 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14886 tree op1, bool *strict_overflow_p)
14888 if (TYPE_UNSIGNED (type))
14889 return true;
14891 switch (code)
14893 case POINTER_PLUS_EXPR:
14894 case PLUS_EXPR:
14895 if (FLOAT_TYPE_P (type))
14896 return (tree_expr_nonnegative_warnv_p (op0,
14897 strict_overflow_p)
14898 && tree_expr_nonnegative_warnv_p (op1,
14899 strict_overflow_p));
14901 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14902 both unsigned and at least 2 bits shorter than the result. */
14903 if (TREE_CODE (type) == INTEGER_TYPE
14904 && TREE_CODE (op0) == NOP_EXPR
14905 && TREE_CODE (op1) == NOP_EXPR)
14907 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14908 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14909 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14910 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14912 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14913 TYPE_PRECISION (inner2)) + 1;
14914 return prec < TYPE_PRECISION (type);
14917 break;
14919 case MULT_EXPR:
14920 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14922 /* x * x is always non-negative for floating point x
14923 or without overflow. */
14924 if (operand_equal_p (op0, op1, 0)
14925 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14926 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14928 if (TYPE_OVERFLOW_UNDEFINED (type))
14929 *strict_overflow_p = true;
14930 return true;
14934 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14935 both unsigned and their combined width is narrower than the result.
14936 if (TREE_CODE (type) == INTEGER_TYPE
14937 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14938 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14940 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14941 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14942 : TREE_TYPE (op0);
14943 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14944 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14945 : TREE_TYPE (op1);
14947 bool unsigned0 = TYPE_UNSIGNED (inner0);
14948 bool unsigned1 = TYPE_UNSIGNED (inner1);
14950 if (TREE_CODE (op0) == INTEGER_CST)
14951 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14953 if (TREE_CODE (op1) == INTEGER_CST)
14954 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14956 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14957 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14959 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14960 ? tree_int_cst_min_precision (op0, UNSIGNED)
14961 : TYPE_PRECISION (inner0);
14963 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14964 ? tree_int_cst_min_precision (op1, UNSIGNED)
14965 : TYPE_PRECISION (inner1);
14967 return precision0 + precision1 < TYPE_PRECISION (type);
14970 return false;
14972 case BIT_AND_EXPR:
14973 case MAX_EXPR:
14974 return (tree_expr_nonnegative_warnv_p (op0,
14975 strict_overflow_p)
14976 || tree_expr_nonnegative_warnv_p (op1,
14977 strict_overflow_p));
14979 case BIT_IOR_EXPR:
14980 case BIT_XOR_EXPR:
14981 case MIN_EXPR:
14982 case RDIV_EXPR:
14983 case TRUNC_DIV_EXPR:
14984 case CEIL_DIV_EXPR:
14985 case FLOOR_DIV_EXPR:
14986 case ROUND_DIV_EXPR:
14987 return (tree_expr_nonnegative_warnv_p (op0,
14988 strict_overflow_p)
14989 && tree_expr_nonnegative_warnv_p (op1,
14990 strict_overflow_p));
14992 case TRUNC_MOD_EXPR:
14993 case CEIL_MOD_EXPR:
14994 case FLOOR_MOD_EXPR:
14995 case ROUND_MOD_EXPR:
14996 return tree_expr_nonnegative_warnv_p (op0,
14997 strict_overflow_p);
14998 default:
14999 return tree_simple_nonnegative_warnv_p (code, type);
15002 /* We don't know sign of `t', so be conservative and return false. */
15003 return false;
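/* [Editorial sketch -- not part of fold-const.c.]  The MULT_EXPR case
   above argues that zero-extending two unsigned values whose combined
   width is narrower than the result leaves the sign bit clear: 8 + 8 =
   16 < 32 bits.  Checking that bound exhaustively for 8-bit operands;
   standalone and illustrative only.  */

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (unsigned a = 0; a <= 255; a++)
    for (unsigned b = 0; b <= 255; b++)
      {
        int32_t prod = (int32_t) a * (int32_t) b;   /* at most 65025 */
        assert (prod >= 0);   /* 16 bits of magnitude < 32-bit result */
      }
  return 0;
}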
15006 /* Return true if T is known to be non-negative. If the return
15007 value is based on the assumption that signed overflow is undefined,
15008 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15009 *STRICT_OVERFLOW_P. */
15011 bool
15012 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15014 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15015 return true;
15017 switch (TREE_CODE (t))
15019 case INTEGER_CST:
15020 return tree_int_cst_sgn (t) >= 0;
15022 case REAL_CST:
15023 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15025 case FIXED_CST:
15026 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15028 case COND_EXPR:
15029 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15030 strict_overflow_p)
15031 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15032 strict_overflow_p));
15033 default:
15034 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15035 TREE_TYPE (t));
15037 /* We don't know sign of `t', so be conservative and return false. */
15038 return false;
15041 /* Return true if T is known to be non-negative. If the return
15042 value is based on the assumption that signed overflow is undefined,
15043 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15044 *STRICT_OVERFLOW_P. */
15046 bool
15047 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15048 tree arg0, tree arg1, bool *strict_overflow_p)
15050 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15051 switch (DECL_FUNCTION_CODE (fndecl))
15053 CASE_FLT_FN (BUILT_IN_ACOS):
15054 CASE_FLT_FN (BUILT_IN_ACOSH):
15055 CASE_FLT_FN (BUILT_IN_CABS):
15056 CASE_FLT_FN (BUILT_IN_COSH):
15057 CASE_FLT_FN (BUILT_IN_ERFC):
15058 CASE_FLT_FN (BUILT_IN_EXP):
15059 CASE_FLT_FN (BUILT_IN_EXP10):
15060 CASE_FLT_FN (BUILT_IN_EXP2):
15061 CASE_FLT_FN (BUILT_IN_FABS):
15062 CASE_FLT_FN (BUILT_IN_FDIM):
15063 CASE_FLT_FN (BUILT_IN_HYPOT):
15064 CASE_FLT_FN (BUILT_IN_POW10):
15065 CASE_INT_FN (BUILT_IN_FFS):
15066 CASE_INT_FN (BUILT_IN_PARITY):
15067 CASE_INT_FN (BUILT_IN_POPCOUNT):
15068 CASE_INT_FN (BUILT_IN_CLZ):
15069 CASE_INT_FN (BUILT_IN_CLRSB):
15070 case BUILT_IN_BSWAP32:
15071 case BUILT_IN_BSWAP64:
15072 /* Always true. */
15073 return true;
15075 CASE_FLT_FN (BUILT_IN_SQRT):
15076 /* sqrt(-0.0) is -0.0. */
15077 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15078 return true;
15079 return tree_expr_nonnegative_warnv_p (arg0,
15080 strict_overflow_p);
15082 CASE_FLT_FN (BUILT_IN_ASINH):
15083 CASE_FLT_FN (BUILT_IN_ATAN):
15084 CASE_FLT_FN (BUILT_IN_ATANH):
15085 CASE_FLT_FN (BUILT_IN_CBRT):
15086 CASE_FLT_FN (BUILT_IN_CEIL):
15087 CASE_FLT_FN (BUILT_IN_ERF):
15088 CASE_FLT_FN (BUILT_IN_EXPM1):
15089 CASE_FLT_FN (BUILT_IN_FLOOR):
15090 CASE_FLT_FN (BUILT_IN_FMOD):
15091 CASE_FLT_FN (BUILT_IN_FREXP):
15092 CASE_FLT_FN (BUILT_IN_ICEIL):
15093 CASE_FLT_FN (BUILT_IN_IFLOOR):
15094 CASE_FLT_FN (BUILT_IN_IRINT):
15095 CASE_FLT_FN (BUILT_IN_IROUND):
15096 CASE_FLT_FN (BUILT_IN_LCEIL):
15097 CASE_FLT_FN (BUILT_IN_LDEXP):
15098 CASE_FLT_FN (BUILT_IN_LFLOOR):
15099 CASE_FLT_FN (BUILT_IN_LLCEIL):
15100 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15101 CASE_FLT_FN (BUILT_IN_LLRINT):
15102 CASE_FLT_FN (BUILT_IN_LLROUND):
15103 CASE_FLT_FN (BUILT_IN_LRINT):
15104 CASE_FLT_FN (BUILT_IN_LROUND):
15105 CASE_FLT_FN (BUILT_IN_MODF):
15106 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15107 CASE_FLT_FN (BUILT_IN_RINT):
15108 CASE_FLT_FN (BUILT_IN_ROUND):
15109 CASE_FLT_FN (BUILT_IN_SCALB):
15110 CASE_FLT_FN (BUILT_IN_SCALBLN):
15111 CASE_FLT_FN (BUILT_IN_SCALBN):
15112 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15113 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15114 CASE_FLT_FN (BUILT_IN_SINH):
15115 CASE_FLT_FN (BUILT_IN_TANH):
15116 CASE_FLT_FN (BUILT_IN_TRUNC):
15117 /* True if the 1st argument is nonnegative. */
15118 return tree_expr_nonnegative_warnv_p (arg0,
15119 strict_overflow_p);
15121 CASE_FLT_FN (BUILT_IN_FMAX):
15122 /* True if the 1st OR 2nd arguments are nonnegative. */
15123 return (tree_expr_nonnegative_warnv_p (arg0,
15124 strict_overflow_p)
15125 || (tree_expr_nonnegative_warnv_p (arg1,
15126 strict_overflow_p)));
15128 CASE_FLT_FN (BUILT_IN_FMIN):
15129 /* True if the 1st AND 2nd arguments are nonnegative. */
15130 return (tree_expr_nonnegative_warnv_p (arg0,
15131 strict_overflow_p)
15132 && (tree_expr_nonnegative_warnv_p (arg1,
15133 strict_overflow_p)));
15135 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15136 /* True if the 2nd argument is nonnegative. */
15137 return tree_expr_nonnegative_warnv_p (arg1,
15138 strict_overflow_p);
15140 CASE_FLT_FN (BUILT_IN_POWI):
15141 /* True if the 1st argument is nonnegative or the second
15142 argument is an even integer. */
15143 if (TREE_CODE (arg1) == INTEGER_CST
15144 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15145 return true;
15146 return tree_expr_nonnegative_warnv_p (arg0,
15147 strict_overflow_p);
15149 CASE_FLT_FN (BUILT_IN_POW):
15150 /* True if the 1st argument is nonnegative or the second
15151 argument is an even integer valued real. */
15152 if (TREE_CODE (arg1) == REAL_CST)
15154 REAL_VALUE_TYPE c;
15155 HOST_WIDE_INT n;
15157 c = TREE_REAL_CST (arg1);
15158 n = real_to_integer (&c);
15159 if ((n & 1) == 0)
15161 REAL_VALUE_TYPE cint;
15162 real_from_integer (&cint, VOIDmode, n, SIGNED);
15163 if (real_identical (&c, &cint))
15164 return true;
15167 return tree_expr_nonnegative_warnv_p (arg0,
15168 strict_overflow_p);
15170 default:
15171 break;
15173 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15174 type);
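/* [Editorial sketch -- not part of fold-const.c.]  The BUILT_IN_POW case
   above treats pow (x, y) as non-negative when y is an even integer,
   because (-x)**(2k) == x**(2k).  Checked numerically; standalone and
   illustrative only (link with -lm).  */

#include <assert.h>
#include <math.h>

int
main (void)
{
  assert (pow (-3.0, 4.0) >= 0.0);   /* even integer exponent: 81 */
  assert (pow (-3.0, 3.0) < 0.0);    /* odd exponent keeps the sign */
  return 0;
}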
15177 /* Return true if T is known to be non-negative. If the return
15178 value is based on the assumption that signed overflow is undefined,
15179 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15180 *STRICT_OVERFLOW_P. */
15182 static bool
15183 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15185 enum tree_code code = TREE_CODE (t);
15186 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15187 return true;
15189 switch (code)
15191 case TARGET_EXPR:
15193 tree temp = TARGET_EXPR_SLOT (t);
15194 t = TARGET_EXPR_INITIAL (t);
15196 /* If the initializer is non-void, then it's a normal expression
15197 that will be assigned to the slot. */
15198 if (!VOID_TYPE_P (t))
15199 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15201 /* Otherwise, the initializer sets the slot in some way. One common
15202 way is an assignment statement at the end of the initializer. */
15203 while (1)
15205 if (TREE_CODE (t) == BIND_EXPR)
15206 t = expr_last (BIND_EXPR_BODY (t));
15207 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15208 || TREE_CODE (t) == TRY_CATCH_EXPR)
15209 t = expr_last (TREE_OPERAND (t, 0));
15210 else if (TREE_CODE (t) == STATEMENT_LIST)
15211 t = expr_last (t);
15212 else
15213 break;
15215 if (TREE_CODE (t) == MODIFY_EXPR
15216 && TREE_OPERAND (t, 0) == temp)
15217 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15218 strict_overflow_p);
15220 return false;
15223 case CALL_EXPR:
15225 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15226 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15228 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15229 get_callee_fndecl (t),
15230 arg0,
15231 arg1,
15232 strict_overflow_p);
15234 case COMPOUND_EXPR:
15235 case MODIFY_EXPR:
15236 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15237 strict_overflow_p);
15238 case BIND_EXPR:
15239 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15240 strict_overflow_p);
15241 case SAVE_EXPR:
15242 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15243 strict_overflow_p);
15245 default:
15246 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15247 TREE_TYPE (t));
15250 /* We don't know sign of `t', so be conservative and return false. */
15251 return false;
15254 /* Return true if T is known to be non-negative. If the return
15255 value is based on the assumption that signed overflow is undefined,
15256 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15257 *STRICT_OVERFLOW_P. */
15259 bool
15260 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15262 enum tree_code code;
15263 if (t == error_mark_node)
15264 return false;
15266 code = TREE_CODE (t);
15267 switch (TREE_CODE_CLASS (code))
15269 case tcc_binary:
15270 case tcc_comparison:
15271 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15272 TREE_TYPE (t),
15273 TREE_OPERAND (t, 0),
15274 TREE_OPERAND (t, 1),
15275 strict_overflow_p);
15277 case tcc_unary:
15278 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15279 TREE_TYPE (t),
15280 TREE_OPERAND (t, 0),
15281 strict_overflow_p);
15283 case tcc_constant:
15284 case tcc_declaration:
15285 case tcc_reference:
15286 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15288 default:
15289 break;
15292 switch (code)
15294 case TRUTH_AND_EXPR:
15295 case TRUTH_OR_EXPR:
15296 case TRUTH_XOR_EXPR:
15297 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15298 TREE_TYPE (t),
15299 TREE_OPERAND (t, 0),
15300 TREE_OPERAND (t, 1),
15301 strict_overflow_p);
15302 case TRUTH_NOT_EXPR:
15303 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15304 TREE_TYPE (t),
15305 TREE_OPERAND (t, 0),
15306 strict_overflow_p);
15308 case COND_EXPR:
15309 case CONSTRUCTOR:
15310 case OBJ_TYPE_REF:
15311 case ASSERT_EXPR:
15312 case ADDR_EXPR:
15313 case WITH_SIZE_EXPR:
15314 case SSA_NAME:
15315 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15317 default:
15318 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15322 /* Return true if `t' is known to be non-negative. Handle warnings
15323 about undefined signed overflow. */
15325 bool
15326 tree_expr_nonnegative_p (tree t)
15328 bool ret, strict_overflow_p;
15330 strict_overflow_p = false;
15331 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15332 if (strict_overflow_p)
15333 fold_overflow_warning (("assuming signed overflow does not occur when "
15334 "determining that expression is always "
15335 "non-negative"),
15336 WARN_STRICT_OVERFLOW_MISC);
15337 return ret;
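/* [Editorial sketch -- not part of fold-const.c.]  tree_expr_nonnegative_p
   above turns the *STRICT_OVERFLOW_P flag threaded through the _warnv_p
   helpers into a -Wstrict-overflow warning.  A minimal model of that flag
   protocol, using the ABS_EXPR rule (abs is provably non-negative only
   when signed overflow is undefined, since ABS_EXPR<INT_MIN> == INT_MIN
   under wrapping); standalone and illustrative only.  */

#include <stdbool.h>
#include <stdio.h>

static bool
abs_nonnegative_p (bool overflow_undefined, bool *strict_overflow_p)
{
  if (overflow_undefined)
    {
      *strict_overflow_p = true;   /* the answer rests on the assumption */
      return true;
    }
  return false;   /* with wrapping, abs (INT_MIN) stays negative */
}

int
main (void)
{
  bool strict = false;
  if (abs_nonnegative_p (true, &strict) && strict)
    puts ("assuming signed overflow does not occur ...");
  return 0;
}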
15341 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15342 For floating point we further ensure that T is not denormal.
15343 Similar logic is present in nonzero_address in rtlanal.h.
15345 If the return value is based on the assumption that signed overflow
15346 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15347 change *STRICT_OVERFLOW_P. */
15349 bool
15350 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15351 bool *strict_overflow_p)
15353 switch (code)
15355 case ABS_EXPR:
15356 return tree_expr_nonzero_warnv_p (op0,
15357 strict_overflow_p);
15359 case NOP_EXPR:
15361 tree inner_type = TREE_TYPE (op0);
15362 tree outer_type = type;
15364 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15365 && tree_expr_nonzero_warnv_p (op0,
15366 strict_overflow_p));
15368 break;
15370 case NON_LVALUE_EXPR:
15371 return tree_expr_nonzero_warnv_p (op0,
15372 strict_overflow_p);
15374 default:
15375 break;
15378 return false;
15381 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15382 For floating point we further ensure that T is not denormal.
15383 Similar logic is present in nonzero_address in rtlanal.h.
15385 If the return value is based on the assumption that signed overflow
15386 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15387 change *STRICT_OVERFLOW_P. */
15389 bool
15390 tree_binary_nonzero_warnv_p (enum tree_code code,
15391 tree type,
15392 tree op0,
15393 tree op1, bool *strict_overflow_p)
15395 bool sub_strict_overflow_p;
15396 switch (code)
15398 case POINTER_PLUS_EXPR:
15399 case PLUS_EXPR:
15400 if (TYPE_OVERFLOW_UNDEFINED (type))
15402 /* In the presence of negative values it is hard
15403 to say anything definite. */
15404 sub_strict_overflow_p = false;
15405 if (!tree_expr_nonnegative_warnv_p (op0,
15406 &sub_strict_overflow_p)
15407 || !tree_expr_nonnegative_warnv_p (op1,
15408 &sub_strict_overflow_p))
15409 return false;
15410 /* One of the operands must be positive and the other non-negative. */
15411 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15412 overflows, on a twos-complement machine the sum of two
15413 nonnegative numbers can never be zero. */
15414 return (tree_expr_nonzero_warnv_p (op0,
15415 strict_overflow_p)
15416 || tree_expr_nonzero_warnv_p (op1,
15417 strict_overflow_p));
15419 break;
15421 case MULT_EXPR:
15422 if (TYPE_OVERFLOW_UNDEFINED (type))
15424 if (tree_expr_nonzero_warnv_p (op0,
15425 strict_overflow_p)
15426 && tree_expr_nonzero_warnv_p (op1,
15427 strict_overflow_p))
15429 *strict_overflow_p = true;
15430 return true;
15433 break;
15435 case MIN_EXPR:
15436 sub_strict_overflow_p = false;
15437 if (tree_expr_nonzero_warnv_p (op0,
15438 &sub_strict_overflow_p)
15439 && tree_expr_nonzero_warnv_p (op1,
15440 &sub_strict_overflow_p))
15442 if (sub_strict_overflow_p)
15443 *strict_overflow_p = true;
15445 break;
15447 case MAX_EXPR:
15448 sub_strict_overflow_p = false;
15449 if (tree_expr_nonzero_warnv_p (op0,
15450 &sub_strict_overflow_p))
15452 if (sub_strict_overflow_p)
15453 *strict_overflow_p = true;
15455 /* When both operands are nonzero, then MAX must be too. */
15456 if (tree_expr_nonzero_warnv_p (op1,
15457 strict_overflow_p))
15458 return true;
15460 /* MAX where operand 0 is positive is positive. */
15461 return tree_expr_nonnegative_warnv_p (op0,
15462 strict_overflow_p);
15464 /* MAX where operand 1 is positive is positive. */
15465 else if (tree_expr_nonzero_warnv_p (op1,
15466 &sub_strict_overflow_p)
15467 && tree_expr_nonnegative_warnv_p (op1,
15468 &sub_strict_overflow_p))
15470 if (sub_strict_overflow_p)
15471 *strict_overflow_p = true;
15472 return true;
15474 break;
15476 case BIT_IOR_EXPR:
15477 return (tree_expr_nonzero_warnv_p (op1,
15478 strict_overflow_p)
15479 || tree_expr_nonzero_warnv_p (op0,
15480 strict_overflow_p));
15482 default:
15483 break;
15486 return false;
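/* [Editorial sketch -- not part of fold-const.c.]  The PLUS_EXPR case
   above relies on a two's-complement fact: two non-negative values, at
   least one nonzero, can never wrap around to zero, since their sum is
   at most 2 * INT_MAX == 2^N - 2.  Spot-checking the extreme case with
   explicitly wrapping unsigned arithmetic; standalone and illustrative
   only.  */

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t a = INT32_MAX, b = INT32_MAX;   /* the largest non-negatives */
  assert (a + b == UINT32_MAX - 1);        /* falls short of wrapping to 0 */
  assert (a + b != 0);
  return 0;
}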
15489 /* Return true when T is an address and is known to be nonzero.
15490 For floating point we further ensure that T is not denormal.
15491 Similar logic is present in nonzero_address in rtlanal.h.
15493 If the return value is based on the assumption that signed overflow
15494 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15495 change *STRICT_OVERFLOW_P. */
15497 bool
15498 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15500 bool sub_strict_overflow_p;
15501 switch (TREE_CODE (t))
15503 case INTEGER_CST:
15504 return !integer_zerop (t);
15506 case ADDR_EXPR:
15508 tree base = TREE_OPERAND (t, 0);
15510 if (!DECL_P (base))
15511 base = get_base_address (base);
15513 if (!base)
15514 return false;
15516 /* For objects in the symbol table, check if we know they are non-zero.
15517 Don't do anything for variables and functions before the symtab is built;
15518 it is quite possible that they will be declared weak later. */
15519 if (DECL_P (base) && decl_in_symtab_p (base))
15521 struct symtab_node *symbol;
15523 symbol = symtab_node::get_create (base);
15524 if (symbol)
15525 return symbol->nonzero_address ();
15526 else
15527 return false;
15530 /* Function local objects are never NULL. */
15531 if (DECL_P (base)
15532 && (DECL_CONTEXT (base)
15533 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15534 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15535 return true;
15537 /* Constants are never weak. */
15538 if (CONSTANT_CLASS_P (base))
15539 return true;
15541 return false;
15544 case COND_EXPR:
15545 sub_strict_overflow_p = false;
15546 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15547 &sub_strict_overflow_p)
15548 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15549 &sub_strict_overflow_p))
15551 if (sub_strict_overflow_p)
15552 *strict_overflow_p = true;
15553 return true;
15555 break;
15557 default:
15558 break;
15560 return false;
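/* [Editorial sketch -- not part of fold-const.c.]  The ADDR_EXPR case
   above lets the address of a function-local object fold to "nonzero",
   so comparisons of &local against a null pointer can be folded away;
   standalone and illustrative only.  */

#include <assert.h>

int
main (void)
{
  int local = 0;
  int *p = &local;
  assert (p != (int *) 0);   /* always true; folders exploit this */
  return local;
}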
15563 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15564 attempt to fold the expression to a constant without modifying TYPE,
15565 OP0 or OP1.
15567 If the expression could be simplified to a constant, then return
15568 the constant. If the expression would not be simplified to a
15569 constant, then return NULL_TREE. */
15571 tree
15572 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15574 tree tem = fold_binary (code, type, op0, op1);
15575 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15578 /* Given the components of a unary expression CODE, TYPE and OP0,
15579 attempt to fold the expression to a constant without modifying
15580 TYPE or OP0.
15582 If the expression could be simplified to a constant, then return
15583 the constant. If the expression would not be simplified to a
15584 constant, then return NULL_TREE. */
15586 tree
15587 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15589 tree tem = fold_unary (code, type, op0);
15590 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15593 /* If EXP represents referencing an element in a constant string
15594 (either via pointer arithmetic or array indexing), return the
15595 tree representing the value accessed, otherwise return NULL. */
15597 tree
15598 fold_read_from_constant_string (tree exp)
15600 if ((TREE_CODE (exp) == INDIRECT_REF
15601 || TREE_CODE (exp) == ARRAY_REF)
15602 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15604 tree exp1 = TREE_OPERAND (exp, 0);
15605 tree index;
15606 tree string;
15607 location_t loc = EXPR_LOCATION (exp);
15609 if (TREE_CODE (exp) == INDIRECT_REF)
15610 string = string_constant (exp1, &index);
15611 else
15613 tree low_bound = array_ref_low_bound (exp);
15614 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15616 /* Optimize the special-case of a zero lower bound.
15618 We convert the low_bound to sizetype to avoid some problems
15619 with constant folding. (E.g. suppose the lower bound is 1,
15620 and its mode is QI. Without the conversion, (ARRAY
15621 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15622 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15623 if (! integer_zerop (low_bound))
15624 index = size_diffop_loc (loc, index,
15625 fold_convert_loc (loc, sizetype, low_bound));
15627 string = exp1;
15630 if (string
15631 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15632 && TREE_CODE (string) == STRING_CST
15633 && TREE_CODE (index) == INTEGER_CST
15634 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15635 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15636 == MODE_INT)
15637 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15638 return build_int_cst_type (TREE_TYPE (exp),
15639 (TREE_STRING_POINTER (string)
15640 [TREE_INT_CST_LOW (index)]));
15642 return NULL;
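/* [Editorial sketch -- not part of fold-const.c.]  The routine above lets
   an in-range read from a string constant fold to the character itself,
   e.g. "abc"[1] to 'b'; out-of-range indices are left alone.  Standalone
   and illustrative only.  */

#include <assert.h>

int
main (void)
{
  assert ("abc"[1] == 'b');       /* a compiler can fold this read */
  assert (*("abc" + 2) == 'c');   /* pointer-arithmetic form */
  return 0;
}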
15645 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15646 an integer constant, real, or fixed-point constant.
15648 TYPE is the type of the result. */
15650 static tree
15651 fold_negate_const (tree arg0, tree type)
15653 tree t = NULL_TREE;
15655 switch (TREE_CODE (arg0))
15657 case INTEGER_CST:
15659 bool overflow;
15660 wide_int val = wi::neg (arg0, &overflow);
15661 t = force_fit_type (type, val, 1,
15662 (overflow | TREE_OVERFLOW (arg0))
15663 && !TYPE_UNSIGNED (type));
15664 break;
15667 case REAL_CST:
15668 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15669 break;
15671 case FIXED_CST:
15673 FIXED_VALUE_TYPE f;
15674 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15675 &(TREE_FIXED_CST (arg0)), NULL,
15676 TYPE_SATURATING (type));
15677 t = build_fixed (type, f);
15678 /* Propagate overflow flags. */
15679 if (overflow_p | TREE_OVERFLOW (arg0))
15680 TREE_OVERFLOW (t) = 1;
15681 break;
15684 default:
15685 gcc_unreachable ();
15688 return t;
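/* [Editorial sketch -- not part of fold-const.c.]  fold_negate_const
   above must track overflow because negating the most negative integer
   does not fit; wi::neg reports it and force_fit_type then sets
   TREE_OVERFLOW.  The condition itself; standalone and illustrative
   only.  */

#include <assert.h>
#include <limits.h>

static int
negate_overflows (int v)
{
  return v == INT_MIN;   /* -INT_MIN is not representable in int */
}

int
main (void)
{
  assert (negate_overflows (INT_MIN));
  assert (!negate_overflows (INT_MIN + 1));   /* -(INT_MIN + 1) == INT_MAX */
  return 0;
}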
15691 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15692 an integer constant or real constant.
15694 TYPE is the type of the result. */
15696 tree
15697 fold_abs_const (tree arg0, tree type)
15699 tree t = NULL_TREE;
15701 switch (TREE_CODE (arg0))
15703 case INTEGER_CST:
15705 /* If the value is unsigned or non-negative, then the absolute value
15706 is the same as the ordinary value. */
15707 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15708 t = arg0;
15710 /* If the value is negative, then the absolute value is
15711 its negation. */
15712 else
15714 bool overflow;
15715 wide_int val = wi::neg (arg0, &overflow);
15716 t = force_fit_type (type, val, -1,
15717 overflow | TREE_OVERFLOW (arg0));
15720 break;
15722 case REAL_CST:
15723 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15724 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15725 else
15726 t = arg0;
15727 break;
15729 default:
15730 gcc_unreachable ();
15733 return t;
15736 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15737 constant. TYPE is the type of the result. */
15739 static tree
15740 fold_not_const (const_tree arg0, tree type)
15742 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15744 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
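
/* Illustrative results (editorial comment, not part of the original
   source).  With 'dnan' a REAL_CST holding a NaN, 'done' holding 1.0,
   and boolean_type_node as the target type:

     fold_relational_const (NE_EXPR, boolean_type_node, dnan, done)
       => true  (NaN compares unequal to everything)
     fold_relational_const (UNLT_EXPR, boolean_type_node, dnan, done)
       => true  (unordered comparisons succeed on NaN)
     fold_relational_const (LT_EXPR, boolean_type_node, dnan, done)
       => NULL_TREE if flag_trapping_math, since folding would lose
	  the invalid-operand trap; false otherwise.  */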

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the value being
     returned (or, when it is a MODIFY_EXPR, its right-hand side) has
     side effects.  If it does not, no cleanup point is needed.  We do
     not check the left-hand side of the MODIFY_EXPR because it should
     always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
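
/* Behaviour sketch (editorial comment, not part of the original
   source): a side-effect-free expression such as 'a + 1' comes back
   unchanged, while a side-effecting call is wrapped:

     fold_build_cleanup_point_expr (void_type_node, call_expr)
       => <<cleanup_point call_expr>>

   and 'return <retval> = x' is returned unwrapped because its RHS
   has no side effects.  */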

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_to_shwi (part_width) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
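
/* Summary of the simplifications above in C-like notation (editorial
   comment, not part of the original source):

     *&x                  => x
     *(int *)&iarr        => iarr[0]      (iarr of type int[])
     *(double *)&z        => __real__ z   (z of type _Complex double)
     *(float *)&v         => BIT_FIELD_REF <v, 32, 0>   (v a vector)
     *((int *)&iarr + 4)  => iarr[1]      (assuming sizeof (int) == 4)

   When the required type match fails, NULL_TREE is returned and the
   caller must build the plain INDIRECT_REF itself.  */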

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
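
/* Illustrative folds (editorial comment, not part of the original
   source), with the result of the expression unused:

     fold_ignored_result (x + 1)           => integer_zero_node
     fold_ignored_result (foo () + 3)      => foo ()
     fold_ignored_result ((foo (), 3))     => foo ()
     fold_ignored_result (foo () ? x : y)  => foo ()   (x, y pure)

   Only the side-effecting parts of the tree survive.  */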

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only test this when VALUE is not a constant,
     because for a constant the check costs more than simply doing the
     rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val &= ~(divisor - 1);
	  val += divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
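
/* Worked example (editorial comment, not part of the original source):
   rounding the constant 13 up to a multiple of 8 takes the
   power-of-two path above,

     13 & ~7 == 8,  8 + 8 == 16        (equivalently (13 + 7) & ~7)

   while a non-constant VALUE yields the tree (VALUE + 7) & -8.  */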

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only test this when VALUE is not a constant,
     because for a constant the check costs more than simply doing the
     rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
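
/* Worked example (editorial comment, not part of the original source):
   rounding 13 down to a multiple of 8 is a single mask,

     13 & -8 == 13 & ~7 == 8

   and a non-constant VALUE yields the tree VALUE & -8.  */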

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
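
/* Decomposition sketch (editorial comment, not part of the original
   source): for EXP == &s.f, where field 'f' sits at byte offset 4 of
   's', the expected result is core == &s, *PBITPOS == 32 and
   *POFFSET == NULL_TREE; a variable part, as in &a[i], lands in
   *POFFSET instead.  */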

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
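
/* Example (editorial comment, not part of the original source): with
   'int a[10]' and a 4-byte int,

     ptr_difference_const (&a[3], &a[1], &diff)

   returns true and stores 8 in 'diff'; pointers whose cores are not
   operand_equal_p, or whose offsets differ non-constantly, yield
   false.  */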

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
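
/* Illustrative folds (editorial comment, not part of the original
   source), valid when the caller ignores the sign of the result:

     -x * y            => x * y
     fabs (x) / -y     => x / y
     copysign (x, y)   => x          (y kept only for side effects)
     sin (-x)          => sin (x)    ("odd" math function)

   NULL_TREE is returned when nothing could be stripped.  */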