Remove VEC_RSHIFT_EXPR tree code, now unused
gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
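
/* The encoding packs the three possible orderings plus "unordered"
   into four bits: LT = 1, EQ = 2, GT = 4, UNORD = 8.  ORing two
   compcodes then corresponds to ORing the comparisons they encode.
   An illustrative sketch, not part of the original file:  */
#if 0
  /* "a <= b" is "a < b || a == b".  */
  gcc_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  /* "a != b" is every outcome except EQ, including unordered.  */
  gcc_assert ((COMPCODE_TRUE & ~COMPCODE_EQ) == COMPCODE_NE);
#endif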
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
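
/* Usage sketch (illustrative, not part of the original file; "type" is
   an assumed integral type tree already at hand):  */
#if 0
  tree a = build_int_cst (type, 12);
  tree q = div_if_zero_remainder (a, build_int_cst (type, 4));
  /* q is the INTEGER_CST 3.  */
  tree r = div_if_zero_remainder (a, build_int_cst (type, 5));
  /* r is NULL_TREE: 12 % 5 != 0.  */
#endif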
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
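
/* Typical defer/undefer pattern (an illustrative sketch, not from this
   file; "expr" and "stmt" are assumed to be at hand):  */
#if 0
  fold_defer_overflow_warnings ();
  tree folded = fold (expr);
  bool used = folded != NULL_TREE && TREE_CODE (folded) == INTEGER_CST;
  /* Warn (at STMT's location) only if the folded result is used.  */
  fold_undefer_overflow_warnings (used, stmt, 0);
#endif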
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
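
/* Oddness example (illustrative, not part of the original file): sin is
   odd, so the folder may rewrite -sin(x) as sin(-x), eliminating the
   outer negation:  */
#if 0
  double y = -__builtin_sin (x);   /* folded as ...  */
  double z = __builtin_sin (-x);   /* ... this; same value for all x.  */
#endif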
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
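
/* Example (illustrative): in a 32-bit int type every value except
   INT_MIN can be negated; INT_MIN is exactly the "only the sign bit is
   set" case rejected above:  */
#if 0
  may_negate_without_overflow_p (build_int_cst (integer_type_node, 42));
  /* -> true */
  may_negate_without_overflow_p (TYPE_MIN_VALUE (integer_type_node));
  /* -> false: -INT_MIN does not fit in int.  */
#endif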
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
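
/* Sign-bit shift example (illustrative, not part of the original file):
   for 32-bit int, x >> 31 is 0 or -1, so -(x >> 31) equals
   ((unsigned) x) >> 31, i.e. 0 or 1 -- the RSHIFT_EXPR transformation
   tested for above:  */
#if 0
  int x = -5;
  int a = -(x >> 31);                    /* 1 */
  int b = (int) (((unsigned) x) >> 31);  /* 1 */
#endif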
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
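
/* Two's complement identity behind the BIT_NOT_EXPR case in
   fold_negate_expr above (illustrative): -~a == a + 1.  */
#if 0
  int a = 7;
  int lhs = -(~a);    /* 8 */
  int rhs = a + 1;    /* 8 */
#endif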
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
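
/* Decomposition sketch (illustrative, not from this file; "sum" is an
   assumed tree at hand): splitting "x + 5" with CODE == PLUS_EXPR
   returns the variable part x, stores the literal 5 in *LITP and leaves
   *CONP null; for "x - 5" the 5 lands in *MINUS_LITP instead:  */
#if 0
  tree con, lit, minus_lit;
  tree var = split_tree (sum, PLUS_EXPR, &con, &lit, &minus_lit, 0);
#endif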
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
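
/* Usage sketch (illustrative, not part of the original file): folding
   6 * 7 to the INTEGER_CST 42 at compile time:  */
#if 0
  tree a = build_int_cst (integer_type_node, 6);
  tree b = build_int_cst (integer_type_node, 7);
  tree c = int_const_binop (MULT_EXPR, a, b);  /* INTEGER_CST 42 */
#endif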
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
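
/* The flag_complex_method == 0 expansion in const_binop above, written
   out as plain C for doubles (an illustrative sketch, not part of the
   original file):  */
#if 0
  /* (ar + i*ai) / (br + i*bi):  */
  double t  = br * br + bi * bi;
  double tr = (ar * br + ai * bi) / t;   /* real part */
  double ti = (ai * br - ar * bi) / t;   /* imaginary part */
#endif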
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
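
/* Usage sketch (illustrative, not part of the original file): adding
   two sizetype constants, e.g. when accumulating field offsets:  */
#if 0
  tree off = size_binop (PLUS_EXPR, size_int (4), size_int (8));
  /* off is the sizetype INTEGER_CST 12.  */
#endif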
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
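
/* Saturation behavior (illustrative) when folding a FIX_TRUNC_EXPR of a
   REAL_CST to 32-bit int:
     3.75   ->  3         (plain truncation)
     NaN    ->  0         (TREE_OVERFLOW set)
     1e30   ->  INT_MAX   (TREE_OVERFLOW set)
     -1e30  ->  INT_MIN   (TREE_OVERFLOW set)  */
#if 0
  double d = 3.75e9;
  int i = (int) d;   /* At run time this overflow is undefined; the
			folder instead saturates to INT_MAX and sets
			TREE_OVERFLOW on the result.  */
#endif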
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any fractional bits are nonzero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
2064 /* Return true if X might be an lvalue, false if it certainly
2065 cannot be one. */
2067 static bool
2068 maybe_lvalue_p (const_tree x)
2070 /* We only need to wrap lvalue tree codes. */
2071 switch (TREE_CODE (x))
2073 case VAR_DECL:
2074 case PARM_DECL:
2075 case RESULT_DECL:
2076 case LABEL_DECL:
2077 case FUNCTION_DECL:
2078 case SSA_NAME:
2080 case COMPONENT_REF:
2081 case MEM_REF:
2082 case INDIRECT_REF:
2083 case ARRAY_REF:
2084 case ARRAY_RANGE_REF:
2085 case BIT_FIELD_REF:
2086 case OBJ_TYPE_REF:
2088 case REALPART_EXPR:
2089 case IMAGPART_EXPR:
2090 case PREINCREMENT_EXPR:
2091 case PREDECREMENT_EXPR:
2092 case SAVE_EXPR:
2093 case TRY_CATCH_EXPR:
2094 case WITH_CLEANUP_EXPR:
2095 case COMPOUND_EXPR:
2096 case MODIFY_EXPR:
2097 case TARGET_EXPR:
2098 case COND_EXPR:
2099 case BIND_EXPR:
2100 break;
2102 default:
2103 /* Assume the worst for front-end tree codes. */
2104 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2105 break;
2106 return false;
2109 return true;
2112 /* Return an expr equal to X but certainly not valid as an lvalue. */
2114 tree
2115 non_lvalue_loc (location_t loc, tree x)
2117 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2118 us. */
2119 if (in_gimple_form)
2120 return x;
2122 if (! maybe_lvalue_p (x))
2123 return x;
2124 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2127 /* When pedantic, return an expr equal to X but certainly not valid as a
2128 pedantic lvalue. Otherwise, return X. */
2130 static tree
2131 pedantic_non_lvalue_loc (location_t loc, tree x)
2133 return protected_set_expr_location_unshare (x, loc);
2136 /* Given a tree comparison code, return the code that is the logical inverse.
2137 It is generally not safe to do this for floating-point comparisons, except
2138 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2139 ERROR_MARK in this case. */
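/* For instance, when NaNs are honored the inverse of LT_EXPR is
   UNGE_EXPR rather than GE_EXPR: if either operand is a NaN, "a < b"
   and "a >= b" are both false, so only the unordered-or-greater-equal
   form is a true logical negation.  */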
2141 enum tree_code
2142 invert_tree_comparison (enum tree_code code, bool honor_nans)
2144 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2145 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2146 return ERROR_MARK;
2148 switch (code)
2150 case EQ_EXPR:
2151 return NE_EXPR;
2152 case NE_EXPR:
2153 return EQ_EXPR;
2154 case GT_EXPR:
2155 return honor_nans ? UNLE_EXPR : LE_EXPR;
2156 case GE_EXPR:
2157 return honor_nans ? UNLT_EXPR : LT_EXPR;
2158 case LT_EXPR:
2159 return honor_nans ? UNGE_EXPR : GE_EXPR;
2160 case LE_EXPR:
2161 return honor_nans ? UNGT_EXPR : GT_EXPR;
2162 case LTGT_EXPR:
2163 return UNEQ_EXPR;
2164 case UNEQ_EXPR:
2165 return LTGT_EXPR;
2166 case UNGT_EXPR:
2167 return LE_EXPR;
2168 case UNGE_EXPR:
2169 return LT_EXPR;
2170 case UNLT_EXPR:
2171 return GE_EXPR;
2172 case UNLE_EXPR:
2173 return GT_EXPR;
2174 case ORDERED_EXPR:
2175 return UNORDERED_EXPR;
2176 case UNORDERED_EXPR:
2177 return ORDERED_EXPR;
2178 default:
2179 gcc_unreachable ();
2183 /* Similar, but return the comparison that results if the operands are
2184 swapped. This is safe for floating-point. */
2186 enum tree_code
2187 swap_tree_comparison (enum tree_code code)
2189 switch (code)
2191 case EQ_EXPR:
2192 case NE_EXPR:
2193 case ORDERED_EXPR:
2194 case UNORDERED_EXPR:
2195 case LTGT_EXPR:
2196 case UNEQ_EXPR:
2197 return code;
2198 case GT_EXPR:
2199 return LT_EXPR;
2200 case GE_EXPR:
2201 return LE_EXPR;
2202 case LT_EXPR:
2203 return GT_EXPR;
2204 case LE_EXPR:
2205 return GE_EXPR;
2206 case UNGT_EXPR:
2207 return UNLT_EXPR;
2208 case UNGE_EXPR:
2209 return UNLE_EXPR;
2210 case UNLT_EXPR:
2211 return UNGT_EXPR;
2212 case UNLE_EXPR:
2213 return UNGE_EXPR;
2214 default:
2215 gcc_unreachable ();
2220 /* Convert a comparison tree code from an enum tree_code representation
2221 into a compcode bit-based encoding. This function is the inverse of
2222 compcode_to_comparison. */
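/* The encoding gives one bit each to the mutually exclusive outcomes
   "less", "equal", "greater" and "unordered", so composite codes are
   unions of those bits; e.g. COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ.
   Bitwise AND and OR of two encodings therefore compute the
   conjunction and disjunction of the corresponding predicates, which
   is what combine_comparisons below relies on.  */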
2224 static enum comparison_code
2225 comparison_to_compcode (enum tree_code code)
2227 switch (code)
2229 case LT_EXPR:
2230 return COMPCODE_LT;
2231 case EQ_EXPR:
2232 return COMPCODE_EQ;
2233 case LE_EXPR:
2234 return COMPCODE_LE;
2235 case GT_EXPR:
2236 return COMPCODE_GT;
2237 case NE_EXPR:
2238 return COMPCODE_NE;
2239 case GE_EXPR:
2240 return COMPCODE_GE;
2241 case ORDERED_EXPR:
2242 return COMPCODE_ORD;
2243 case UNORDERED_EXPR:
2244 return COMPCODE_UNORD;
2245 case UNLT_EXPR:
2246 return COMPCODE_UNLT;
2247 case UNEQ_EXPR:
2248 return COMPCODE_UNEQ;
2249 case UNLE_EXPR:
2250 return COMPCODE_UNLE;
2251 case UNGT_EXPR:
2252 return COMPCODE_UNGT;
2253 case LTGT_EXPR:
2254 return COMPCODE_LTGT;
2255 case UNGE_EXPR:
2256 return COMPCODE_UNGE;
2257 default:
2258 gcc_unreachable ();
2262 /* Convert a compcode bit-based encoding of a comparison operator back
2263 to GCC's enum tree_code representation. This function is the
2264 inverse of comparison_to_compcode. */
2266 static enum tree_code
2267 compcode_to_comparison (enum comparison_code code)
2269 switch (code)
2271 case COMPCODE_LT:
2272 return LT_EXPR;
2273 case COMPCODE_EQ:
2274 return EQ_EXPR;
2275 case COMPCODE_LE:
2276 return LE_EXPR;
2277 case COMPCODE_GT:
2278 return GT_EXPR;
2279 case COMPCODE_NE:
2280 return NE_EXPR;
2281 case COMPCODE_GE:
2282 return GE_EXPR;
2283 case COMPCODE_ORD:
2284 return ORDERED_EXPR;
2285 case COMPCODE_UNORD:
2286 return UNORDERED_EXPR;
2287 case COMPCODE_UNLT:
2288 return UNLT_EXPR;
2289 case COMPCODE_UNEQ:
2290 return UNEQ_EXPR;
2291 case COMPCODE_UNLE:
2292 return UNLE_EXPR;
2293 case COMPCODE_UNGT:
2294 return UNGT_EXPR;
2295 case COMPCODE_LTGT:
2296 return LTGT_EXPR;
2297 case COMPCODE_UNGE:
2298 return UNGE_EXPR;
2299 default:
2300 gcc_unreachable ();
2304 /* Return a tree for the comparison which is the combination of
2305 doing the AND or OR (depending on CODE) of the two operations LCODE
2306 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2307 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2308 if this makes the transformation invalid. */
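/* A worked example: combining (x < y) with (x == y) under
   TRUTH_ORIF_EXPR gives lcompcode | rcompcode
   == COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so the pair folds to
   x <= y; the AND of the same operands gives COMPCODE_FALSE and folds
   to a constant false node.  */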
2310 tree
2311 combine_comparisons (location_t loc,
2312 enum tree_code code, enum tree_code lcode,
2313 enum tree_code rcode, tree truth_type,
2314 tree ll_arg, tree lr_arg)
2316 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2317 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2318 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2319 int compcode;
2321 switch (code)
2323 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2324 compcode = lcompcode & rcompcode;
2325 break;
2327 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2328 compcode = lcompcode | rcompcode;
2329 break;
2331 default:
2332 return NULL_TREE;
2335 if (!honor_nans)
2337 /* Eliminate unordered comparisons, as well as LTGT and ORD
2338 which are not used unless the mode has NaNs. */
2339 compcode &= ~COMPCODE_UNORD;
2340 if (compcode == COMPCODE_LTGT)
2341 compcode = COMPCODE_NE;
2342 else if (compcode == COMPCODE_ORD)
2343 compcode = COMPCODE_TRUE;
2345 else if (flag_trapping_math)
2347 /* Check that the original operation and the optimized ones will trap
2348 under the same condition. */
2349 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2350 && (lcompcode != COMPCODE_EQ)
2351 && (lcompcode != COMPCODE_ORD);
2352 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2353 && (rcompcode != COMPCODE_EQ)
2354 && (rcompcode != COMPCODE_ORD);
2355 bool trap = (compcode & COMPCODE_UNORD) == 0
2356 && (compcode != COMPCODE_EQ)
2357 && (compcode != COMPCODE_ORD);
2359 /* In a short-circuited boolean expression the LHS might be
2360 such that the RHS, if evaluated, will never trap. For
2361 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2362 if neither x nor y is NaN. (This is a mixed blessing: for
2363 example, the expression above will never trap, hence
2364 optimizing it to x < y would be invalid). */
2365 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2366 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2367 rtrap = false;
2369 /* If the comparison was short-circuited, and only the RHS
2370 trapped, we may now generate a spurious trap. */
2371 if (rtrap && !ltrap
2372 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2373 return NULL_TREE;
2375 /* If we changed the conditions that cause a trap, we lose. */
2376 if ((ltrap || rtrap) != trap)
2377 return NULL_TREE;
2380 if (compcode == COMPCODE_TRUE)
2381 return constant_boolean_node (true, truth_type);
2382 else if (compcode == COMPCODE_FALSE)
2383 return constant_boolean_node (false, truth_type);
2384 else
2386 enum tree_code tcode;
2388 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2389 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2393 /* Return nonzero if two operands (typically of the same tree node)
2394 are necessarily equal. If either argument has side-effects this
2395 function returns zero. FLAGS modifies behavior as follows:
2397 If OEP_ONLY_CONST is set, only return nonzero for constants.
2398 This function tests whether the operands are indistinguishable;
2399 it does not test whether they are equal using C's == operation.
2400 The distinction is important for IEEE floating point, because
2401 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2402 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2404 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2405 even though it may hold multiple values during a function.
2406 This is because a GCC tree node guarantees that nothing else is
2407 executed between the evaluation of its "operands" (which may often
2408 be evaluated in arbitrary order). Hence if the operands themselves
2409 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2410 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2411 unset means assuming isochronic (or instantaneous) tree equivalence.
2412 Unless comparing arbitrary expression trees, such as from different
2413 statements, this flag can usually be left unset.
2415 If OEP_PURE_SAME is set, then pure functions with identical arguments
2416 are considered the same. It is used when the caller has other ways
2417 to ensure that global memory is unchanged in between. */
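/* For example, a + b and b + a compare equal because the tcc_binary
   case below retries commutative operands in the swapped order, while
   two identical calls f (x) compare equal only if f is ECF_CONST (or
   ECF_PURE when OEP_PURE_SAME is set).  */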
2420 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2422 /* If either is ERROR_MARK, they aren't equal. */
2423 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2424 || TREE_TYPE (arg0) == error_mark_node
2425 || TREE_TYPE (arg1) == error_mark_node)
2426 return 0;
2428 /* Similar, if either does not have a type (like a released SSA name),
2429 they aren't equal. */
2430 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2431 return 0;
2433 /* Check equality of integer constants before bailing out due to
2434 precision differences. */
2435 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2436 return tree_int_cst_equal (arg0, arg1);
2438 /* If both types don't have the same signedness, then we can't consider
2439 them equal. We must check this before the STRIP_NOPS calls
2440 because they may change the signedness of the arguments. As pointers
2441 strictly don't have a signedness, require either two pointers or
2442 two non-pointers as well. */
2443 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2444 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2445 return 0;
2447 /* We cannot consider pointers to different address space equal. */
2448 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2449 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2450 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2451 return 0;
2453 /* If both types don't have the same precision, then it is not safe
2454 to strip NOPs. */
2455 if (element_precision (TREE_TYPE (arg0))
2456 != element_precision (TREE_TYPE (arg1)))
2457 return 0;
2459 STRIP_NOPS (arg0);
2460 STRIP_NOPS (arg1);
2462 /* In case both args are comparisons but with different comparison
2463 code, try to swap the comparison operands of one arg to produce
2464 a match and compare that variant. */
2465 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2466 && COMPARISON_CLASS_P (arg0)
2467 && COMPARISON_CLASS_P (arg1))
2469 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2471 if (TREE_CODE (arg0) == swap_code)
2472 return operand_equal_p (TREE_OPERAND (arg0, 0),
2473 TREE_OPERAND (arg1, 1), flags)
2474 && operand_equal_p (TREE_OPERAND (arg0, 1),
2475 TREE_OPERAND (arg1, 0), flags);
2478 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2479 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2480 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2481 return 0;
2483 /* This is needed for conversions and for COMPONENT_REF.
2484 Might as well play it safe and always test this. */
2485 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2486 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2487 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2488 return 0;
2490 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2491 We don't care about side effects in that case because the SAVE_EXPR
2492 takes care of that for us. In all other cases, two expressions are
2493 equal if they have no side effects. If we have two identical
2494 expressions with side effects that should be treated the same due
2495 to the only side effects being identical SAVE_EXPR's, that will
2496 be detected in the recursive calls below.
2497 If we are taking an invariant address of two identical objects
2498 they are necessarily equal as well. */
2499 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2500 && (TREE_CODE (arg0) == SAVE_EXPR
2501 || (flags & OEP_CONSTANT_ADDRESS_OF)
2502 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2503 return 1;
2505 /* Next handle constant cases, those for which we can return 1 even
2506 if ONLY_CONST is set. */
2507 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2508 switch (TREE_CODE (arg0))
2510 case INTEGER_CST:
2511 return tree_int_cst_equal (arg0, arg1);
2513 case FIXED_CST:
2514 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2515 TREE_FIXED_CST (arg1));
2517 case REAL_CST:
2518 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2519 TREE_REAL_CST (arg1)))
2520 return 1;
2523 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2525 /* If we do not distinguish between signed and unsigned zero,
2526 consider them equal. */
2527 if (real_zerop (arg0) && real_zerop (arg1))
2528 return 1;
2530 return 0;
2532 case VECTOR_CST:
2534 unsigned i;
2536 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2537 return 0;
2539 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2541 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2542 VECTOR_CST_ELT (arg1, i), flags))
2543 return 0;
2545 return 1;
2548 case COMPLEX_CST:
2549 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2550 flags)
2551 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2552 flags));
2554 case STRING_CST:
2555 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2556 && ! memcmp (TREE_STRING_POINTER (arg0),
2557 TREE_STRING_POINTER (arg1),
2558 TREE_STRING_LENGTH (arg0)));
2560 case ADDR_EXPR:
2561 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2562 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2563 ? OEP_CONSTANT_ADDRESS_OF : 0);
2564 default:
2565 break;
2568 if (flags & OEP_ONLY_CONST)
2569 return 0;
2571 /* Define macros to test an operand from arg0 and arg1 for equality and a
2572 variant that allows null and views null as being different from any
2573 non-null value. In the latter case, if either is null, then both
2574 must be; otherwise, do the normal comparison. */
2575 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2576 TREE_OPERAND (arg1, N), flags)
2578 #define OP_SAME_WITH_NULL(N) \
2579 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2580 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2582 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2584 case tcc_unary:
2585 /* Two conversions are equal only if signedness and modes match. */
2586 switch (TREE_CODE (arg0))
2588 CASE_CONVERT:
2589 case FIX_TRUNC_EXPR:
2590 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2591 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2592 return 0;
2593 break;
2594 default:
2595 break;
2598 return OP_SAME (0);
2601 case tcc_comparison:
2602 case tcc_binary:
2603 if (OP_SAME (0) && OP_SAME (1))
2604 return 1;
2606 /* For commutative ops, allow the other order. */
2607 return (commutative_tree_code (TREE_CODE (arg0))
2608 && operand_equal_p (TREE_OPERAND (arg0, 0),
2609 TREE_OPERAND (arg1, 1), flags)
2610 && operand_equal_p (TREE_OPERAND (arg0, 1),
2611 TREE_OPERAND (arg1, 0), flags));
2613 case tcc_reference:
2614 /* If either of the pointer (or reference) expressions we are
2615 dereferencing contain a side effect, these cannot be equal,
2616 but their addresses can be. */
2617 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2618 && (TREE_SIDE_EFFECTS (arg0)
2619 || TREE_SIDE_EFFECTS (arg1)))
2620 return 0;
2622 switch (TREE_CODE (arg0))
2624 case INDIRECT_REF:
2625 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2626 return OP_SAME (0);
2628 case REALPART_EXPR:
2629 case IMAGPART_EXPR:
2630 return OP_SAME (0);
2632 case TARGET_MEM_REF:
2633 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2634 /* Require equal extra operands and then fall through to MEM_REF
2635 handling of the two common operands. */
2636 if (!OP_SAME_WITH_NULL (2)
2637 || !OP_SAME_WITH_NULL (3)
2638 || !OP_SAME_WITH_NULL (4))
2639 return 0;
2640 /* Fallthru. */
2641 case MEM_REF:
2642 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2643 /* Require equal access sizes, and similar pointer types.
2644 We can have incomplete types for array references of
2645 variable-sized arrays from the Fortran frontend
2646 though. Also verify the types are compatible. */
2647 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2648 || (TYPE_SIZE (TREE_TYPE (arg0))
2649 && TYPE_SIZE (TREE_TYPE (arg1))
2650 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2651 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2652 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2653 && alias_ptr_types_compatible_p
2654 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2655 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2656 && OP_SAME (0) && OP_SAME (1));
2658 case ARRAY_REF:
2659 case ARRAY_RANGE_REF:
2660 /* Operands 2 and 3 may be null.
2661 Compare the array index by value first if it is constant, as we
2662 may have different types but the same value here. */
2663 if (!OP_SAME (0))
2664 return 0;
2665 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2666 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2667 TREE_OPERAND (arg1, 1))
2668 || OP_SAME (1))
2669 && OP_SAME_WITH_NULL (2)
2670 && OP_SAME_WITH_NULL (3));
2672 case COMPONENT_REF:
2673 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2674 may be NULL when we're called to compare MEM_EXPRs. */
2675 if (!OP_SAME_WITH_NULL (0)
2676 || !OP_SAME (1))
2677 return 0;
2678 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2679 return OP_SAME_WITH_NULL (2);
2681 case BIT_FIELD_REF:
2682 if (!OP_SAME (0))
2683 return 0;
2684 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2685 return OP_SAME (1) && OP_SAME (2);
2687 default:
2688 return 0;
2691 case tcc_expression:
2692 switch (TREE_CODE (arg0))
2694 case ADDR_EXPR:
2695 case TRUTH_NOT_EXPR:
2696 return OP_SAME (0);
2698 case TRUTH_ANDIF_EXPR:
2699 case TRUTH_ORIF_EXPR:
2700 return OP_SAME (0) && OP_SAME (1);
2702 case FMA_EXPR:
2703 case WIDEN_MULT_PLUS_EXPR:
2704 case WIDEN_MULT_MINUS_EXPR:
2705 if (!OP_SAME (2))
2706 return 0;
2707 /* The multiplication operands are commutative. */
2708 /* FALLTHRU */
2710 case TRUTH_AND_EXPR:
2711 case TRUTH_OR_EXPR:
2712 case TRUTH_XOR_EXPR:
2713 if (OP_SAME (0) && OP_SAME (1))
2714 return 1;
2716 /* Otherwise take into account this is a commutative operation. */
2717 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2718 TREE_OPERAND (arg1, 1), flags)
2719 && operand_equal_p (TREE_OPERAND (arg0, 1),
2720 TREE_OPERAND (arg1, 0), flags));
2722 case COND_EXPR:
2723 case VEC_COND_EXPR:
2724 case DOT_PROD_EXPR:
2725 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2727 default:
2728 return 0;
2731 case tcc_vl_exp:
2732 switch (TREE_CODE (arg0))
2734 case CALL_EXPR:
2735 /* If the CALL_EXPRs call different functions, then they
2736 clearly can not be equal. */
2737 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2738 flags))
2739 return 0;
2742 unsigned int cef = call_expr_flags (arg0);
2743 if (flags & OEP_PURE_SAME)
2744 cef &= ECF_CONST | ECF_PURE;
2745 else
2746 cef &= ECF_CONST;
2747 if (!cef)
2748 return 0;
2751 /* Now see if all the arguments are the same. */
2753 const_call_expr_arg_iterator iter0, iter1;
2754 const_tree a0, a1;
2755 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2756 a1 = first_const_call_expr_arg (arg1, &iter1);
2757 a0 && a1;
2758 a0 = next_const_call_expr_arg (&iter0),
2759 a1 = next_const_call_expr_arg (&iter1))
2760 if (! operand_equal_p (a0, a1, flags))
2761 return 0;
2763 /* If we get here and both argument lists are exhausted
2764 then the CALL_EXPRs are equal. */
2765 return ! (a0 || a1);
2767 default:
2768 return 0;
2771 case tcc_declaration:
2772 /* Consider __builtin_sqrt equal to sqrt. */
2773 return (TREE_CODE (arg0) == FUNCTION_DECL
2774 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2775 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2776 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2778 default:
2779 return 0;
2782 #undef OP_SAME
2783 #undef OP_SAME_WITH_NULL
2786 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2787 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2789 When in doubt, return 0. */
2791 static int
2792 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2794 int unsignedp1, unsignedpo;
2795 tree primarg0, primarg1, primother;
2796 unsigned int correct_width;
2798 if (operand_equal_p (arg0, arg1, 0))
2799 return 1;
2801 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2802 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2803 return 0;
2805 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2806 and see if the inner values are the same. This removes any
2807 signedness comparison, which doesn't matter here. */
2808 primarg0 = arg0, primarg1 = arg1;
2809 STRIP_NOPS (primarg0);
2810 STRIP_NOPS (primarg1);
2811 if (operand_equal_p (primarg0, primarg1, 0))
2812 return 1;
2814 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2815 actual comparison operand, ARG0.
2817 First throw away any conversions to wider types
2818 already present in the operands. */
2820 primarg1 = get_narrower (arg1, &unsignedp1);
2821 primother = get_narrower (other, &unsignedpo);
2823 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2824 if (unsignedp1 == unsignedpo
2825 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2826 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2828 tree type = TREE_TYPE (arg0);
2830 /* Make sure the shorter operand is extended the right way
2831 to match the longer operand. */
2832 primarg1 = fold_convert (signed_or_unsigned_type_for
2833 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2835 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2836 return 1;
2839 return 0;
2842 /* See if ARG is an expression that is either a comparison or is performing
2843 arithmetic on comparisons. The comparisons must only be comparing
2844 two different values, which will be stored in *CVAL1 and *CVAL2; if
2845 they are nonzero it means that some operands have already been found.
2846 No variables may be used anywhere else in the expression except in the
2847 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2848 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2850 If this is true, return 1. Otherwise, return zero. */
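/* E.g. for (x < y) || (x == y), both comparisons mention only x and y,
   so *CVAL1 and *CVAL2 become x and y and the function returns 1;
   (x < y) || (x == z) fails because a third value is involved.  */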
2852 static int
2853 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2855 enum tree_code code = TREE_CODE (arg);
2856 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2858 /* We can handle some of the tcc_expression cases here. */
2859 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2860 tclass = tcc_unary;
2861 else if (tclass == tcc_expression
2862 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2863 || code == COMPOUND_EXPR))
2864 tclass = tcc_binary;
2866 else if (tclass == tcc_expression && code == SAVE_EXPR
2867 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2869 /* If we've already found a CVAL1 or CVAL2, this expression is
2870 too complex to handle. */
2871 if (*cval1 || *cval2)
2872 return 0;
2874 tclass = tcc_unary;
2875 *save_p = 1;
2878 switch (tclass)
2880 case tcc_unary:
2881 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2883 case tcc_binary:
2884 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2885 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2886 cval1, cval2, save_p));
2888 case tcc_constant:
2889 return 1;
2891 case tcc_expression:
2892 if (code == COND_EXPR)
2893 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2894 cval1, cval2, save_p)
2895 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2896 cval1, cval2, save_p)
2897 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2898 cval1, cval2, save_p));
2899 return 0;
2901 case tcc_comparison:
2902 /* First see if we can handle the first operand, then the second. For
2903 the second operand, we know *CVAL1 can't be zero. Each side of
2904 the comparison must be one of the two values; test for the
2905 case where this isn't true by failing if the two operands
2906 are the same. */
2908 if (operand_equal_p (TREE_OPERAND (arg, 0),
2909 TREE_OPERAND (arg, 1), 0))
2910 return 0;
2912 if (*cval1 == 0)
2913 *cval1 = TREE_OPERAND (arg, 0);
2914 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2916 else if (*cval2 == 0)
2917 *cval2 = TREE_OPERAND (arg, 0);
2918 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2920 else
2921 return 0;
2923 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2925 else if (*cval2 == 0)
2926 *cval2 = TREE_OPERAND (arg, 1);
2927 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2929 else
2930 return 0;
2932 return 1;
2934 default:
2935 return 0;
2939 /* ARG is a tree that is known to contain just arithmetic operations and
2940 comparisons. Evaluate the operations in the tree substituting NEW0 for
2941 any occurrence of OLD0 as an operand of a comparison and likewise for
2942 NEW1 and OLD1. */
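/* For instance, eval_subst on the tree (x < y) && (y < x) with
   OLD0 = x, NEW0 = a, OLD1 = y, NEW1 = b rebuilds it as
   (a < b) && (b < a): the logical structure is reconstructed and the
   substitution happens at the comparison operands.  */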
2944 static tree
2945 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2946 tree old1, tree new1)
2948 tree type = TREE_TYPE (arg);
2949 enum tree_code code = TREE_CODE (arg);
2950 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2952 /* We can handle some of the tcc_expression cases here. */
2953 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2954 tclass = tcc_unary;
2955 else if (tclass == tcc_expression
2956 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2957 tclass = tcc_binary;
2959 switch (tclass)
2961 case tcc_unary:
2962 return fold_build1_loc (loc, code, type,
2963 eval_subst (loc, TREE_OPERAND (arg, 0),
2964 old0, new0, old1, new1));
2966 case tcc_binary:
2967 return fold_build2_loc (loc, code, type,
2968 eval_subst (loc, TREE_OPERAND (arg, 0),
2969 old0, new0, old1, new1),
2970 eval_subst (loc, TREE_OPERAND (arg, 1),
2971 old0, new0, old1, new1));
2973 case tcc_expression:
2974 switch (code)
2976 case SAVE_EXPR:
2977 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2978 old1, new1);
2980 case COMPOUND_EXPR:
2981 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2982 old1, new1);
2984 case COND_EXPR:
2985 return fold_build3_loc (loc, code, type,
2986 eval_subst (loc, TREE_OPERAND (arg, 0),
2987 old0, new0, old1, new1),
2988 eval_subst (loc, TREE_OPERAND (arg, 1),
2989 old0, new0, old1, new1),
2990 eval_subst (loc, TREE_OPERAND (arg, 2),
2991 old0, new0, old1, new1));
2992 default:
2993 break;
2995 /* Fall through - ??? */
2997 case tcc_comparison:
2999 tree arg0 = TREE_OPERAND (arg, 0);
3000 tree arg1 = TREE_OPERAND (arg, 1);
3002 /* We need to check both for exact equality and tree equality. The
3003 former will be true if the operand has a side-effect. In that
3004 case, we know the operand occurred exactly once. */
3006 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3007 arg0 = new0;
3008 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3009 arg0 = new1;
3011 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3012 arg1 = new0;
3013 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3014 arg1 = new1;
3016 return fold_build2_loc (loc, code, type, arg0, arg1);
3019 default:
3020 return arg;
3024 /* Return a tree for the case when the result of an expression is RESULT
3025 converted to TYPE and OMITTED was previously an operand of the expression
3026 but is now not needed (e.g., we folded OMITTED * 0).
3028 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3029 the conversion of RESULT to TYPE. */
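/* For example, when f () * 0 folds to 0 but f () has side effects,
   the result becomes the COMPOUND_EXPR (f (), 0) so that the call is
   still evaluated exactly once.  */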
3031 tree
3032 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3034 tree t = fold_convert_loc (loc, type, result);
3036 /* If the resulting operand is an empty statement, just return the omitted
3037 statement cast to void. */
3038 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3039 return build1_loc (loc, NOP_EXPR, void_type_node,
3040 fold_ignored_result (omitted));
3042 if (TREE_SIDE_EFFECTS (omitted))
3043 return build2_loc (loc, COMPOUND_EXPR, type,
3044 fold_ignored_result (omitted), t);
3046 return non_lvalue_loc (loc, t);
3049 /* Return a tree for the case when the result of an expression is RESULT
3050 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3051 of the expression but are now not needed.
3053 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3054 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3055 evaluated before OMITTED2. Otherwise, if neither has side effects,
3056 just do the conversion of RESULT to TYPE. */
3058 tree
3059 omit_two_operands_loc (location_t loc, tree type, tree result,
3060 tree omitted1, tree omitted2)
3062 tree t = fold_convert_loc (loc, type, result);
3064 if (TREE_SIDE_EFFECTS (omitted2))
3065 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3066 if (TREE_SIDE_EFFECTS (omitted1))
3067 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3069 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3073 /* Return a simplified tree node for the truth-negation of ARG. This
3074 never alters ARG itself. We assume that ARG is an operation that
3075 returns a truth value (0 or 1).
3077 FIXME: one would think we would fold the result, but it causes
3078 problems with the dominator optimizer. */
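/* The interesting cases are De Morgan rewrites and inversion through
   control structure: !(a && b) becomes !a || !b, !(a ? b : c) becomes
   a ? !b : !c, and a comparison is simply replaced by its inverse
   whenever invert_tree_comparison allows it.  */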
3080 static tree
3081 fold_truth_not_expr (location_t loc, tree arg)
3083 tree type = TREE_TYPE (arg);
3084 enum tree_code code = TREE_CODE (arg);
3085 location_t loc1, loc2;
3087 /* If this is a comparison, we can simply invert it, except for
3088 floating-point non-equality comparisons, in which case we just
3089 enclose a TRUTH_NOT_EXPR around what we have. */
3091 if (TREE_CODE_CLASS (code) == tcc_comparison)
3093 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3094 if (FLOAT_TYPE_P (op_type)
3095 && flag_trapping_math
3096 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3097 && code != NE_EXPR && code != EQ_EXPR)
3098 return NULL_TREE;
3100 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3101 if (code == ERROR_MARK)
3102 return NULL_TREE;
3104 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3105 TREE_OPERAND (arg, 1));
3108 switch (code)
3110 case INTEGER_CST:
3111 return constant_boolean_node (integer_zerop (arg), type);
3113 case TRUTH_AND_EXPR:
3114 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3115 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3116 return build2_loc (loc, TRUTH_OR_EXPR, type,
3117 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3118 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3120 case TRUTH_OR_EXPR:
3121 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3122 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3123 return build2_loc (loc, TRUTH_AND_EXPR, type,
3124 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3125 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3127 case TRUTH_XOR_EXPR:
3128 /* Here we can invert either operand. We invert the first operand
3129 unless the second operand is a TRUTH_NOT_EXPR in which case our
3130 result is the XOR of the first operand with the inside of the
3131 negation of the second operand. */
3133 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3134 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3135 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3136 else
3137 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3138 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3139 TREE_OPERAND (arg, 1));
3141 case TRUTH_ANDIF_EXPR:
3142 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3143 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3144 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3145 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3146 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3148 case TRUTH_ORIF_EXPR:
3149 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3150 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3151 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3152 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3153 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3155 case TRUTH_NOT_EXPR:
3156 return TREE_OPERAND (arg, 0);
3158 case COND_EXPR:
3160 tree arg1 = TREE_OPERAND (arg, 1);
3161 tree arg2 = TREE_OPERAND (arg, 2);
3163 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3164 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3166 /* A COND_EXPR may have a throw as one operand, which
3167 then has void type. Just leave void operands
3168 as they are. */
3169 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3170 VOID_TYPE_P (TREE_TYPE (arg1))
3171 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3172 VOID_TYPE_P (TREE_TYPE (arg2))
3173 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3176 case COMPOUND_EXPR:
3177 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3178 return build2_loc (loc, COMPOUND_EXPR, type,
3179 TREE_OPERAND (arg, 0),
3180 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3182 case NON_LVALUE_EXPR:
3183 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3184 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3186 CASE_CONVERT:
3187 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3188 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3190 /* ... fall through ... */
3192 case FLOAT_EXPR:
3193 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3194 return build1_loc (loc, TREE_CODE (arg), type,
3195 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3197 case BIT_AND_EXPR:
3198 if (!integer_onep (TREE_OPERAND (arg, 1)))
3199 return NULL_TREE;
3200 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3202 case SAVE_EXPR:
3203 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3205 case CLEANUP_POINT_EXPR:
3206 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3207 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3208 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3210 default:
3211 return NULL_TREE;
3215 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3216 assume that ARG is an operation that returns a truth value (0 or 1
3217 for scalars, 0 or -1 for vectors). Return the folded expression if
3218 folding is successful. Otherwise, return NULL_TREE. */
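/* BIT_NOT_EXPR is the correct negation for vectors because vector
   comparisons produce all-ones (-1) or all-zeros lanes; flipping
   every bit maps one truth value onto the other lane-wise.  */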
3220 static tree
3221 fold_invert_truthvalue (location_t loc, tree arg)
3223 tree type = TREE_TYPE (arg);
3224 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3225 ? BIT_NOT_EXPR
3226 : TRUTH_NOT_EXPR,
3227 type, arg);
3230 /* Return a simplified tree node for the truth-negation of ARG. This
3231 never alters ARG itself. We assume that ARG is an operation that
3232 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3234 tree
3235 invert_truthvalue_loc (location_t loc, tree arg)
3237 if (TREE_CODE (arg) == ERROR_MARK)
3238 return arg;
3240 tree type = TREE_TYPE (arg);
3241 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3242 ? BIT_NOT_EXPR
3243 : TRUTH_NOT_EXPR,
3244 type, arg);
3247 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3248 operands are another bit-wise operation with a common input. If so,
3249 distribute the bit operations to save an operation and possibly two if
3250 constants are involved. For example, convert
3251 (A | B) & (A | C) into A | (B & C)
3252 Further simplification will occur if B and C are constants.
3254 If this optimization cannot be done, 0 will be returned. */
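/* A concrete instance with constants: (x | 3) & (x | 5) becomes
   x | (3 & 5), and the inner operation then folds to x | 1,
   replacing three bit operations by one.  */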
3256 static tree
3257 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3258 tree arg0, tree arg1)
3260 tree common;
3261 tree left, right;
3263 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3264 || TREE_CODE (arg0) == code
3265 || (TREE_CODE (arg0) != BIT_AND_EXPR
3266 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3267 return 0;
3269 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3271 common = TREE_OPERAND (arg0, 0);
3272 left = TREE_OPERAND (arg0, 1);
3273 right = TREE_OPERAND (arg1, 1);
3275 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3277 common = TREE_OPERAND (arg0, 0);
3278 left = TREE_OPERAND (arg0, 1);
3279 right = TREE_OPERAND (arg1, 0);
3281 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3283 common = TREE_OPERAND (arg0, 1);
3284 left = TREE_OPERAND (arg0, 0);
3285 right = TREE_OPERAND (arg1, 1);
3287 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3289 common = TREE_OPERAND (arg0, 1);
3290 left = TREE_OPERAND (arg0, 0);
3291 right = TREE_OPERAND (arg1, 0);
3293 else
3294 return 0;
3296 common = fold_convert_loc (loc, type, common);
3297 left = fold_convert_loc (loc, type, left);
3298 right = fold_convert_loc (loc, type, right);
3299 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3300 fold_build2_loc (loc, code, type, left, right));
3303 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3304 with code CODE. This optimization is unsafe. */
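/* It is unsafe because floating-point division does not distribute
   over addition under rounding: a/c + b/c and (a + b)/c may round
   differently, and a + b can overflow where the separate divisions
   would not, so this is only valid when unsafe math optimizations
   are permitted.  */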
3305 static tree
3306 distribute_real_division (location_t loc, enum tree_code code, tree type,
3307 tree arg0, tree arg1)
3309 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3310 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3312 /* (A / C) +- (B / C) -> (A +- B) / C. */
3313 if (mul0 == mul1
3314 && operand_equal_p (TREE_OPERAND (arg0, 1),
3315 TREE_OPERAND (arg1, 1), 0))
3316 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3317 fold_build2_loc (loc, code, type,
3318 TREE_OPERAND (arg0, 0),
3319 TREE_OPERAND (arg1, 0)),
3320 TREE_OPERAND (arg0, 1));
3322 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3323 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3324 TREE_OPERAND (arg1, 0), 0)
3325 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3326 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3328 REAL_VALUE_TYPE r0, r1;
3329 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3330 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3331 if (!mul0)
3332 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3333 if (!mul1)
3334 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3335 real_arithmetic (&r0, code, &r0, &r1);
3336 return fold_build2_loc (loc, MULT_EXPR, type,
3337 TREE_OPERAND (arg0, 0),
3338 build_real (type, r0));
3341 return NULL_TREE;
3344 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3345 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3347 static tree
3348 make_bit_field_ref (location_t loc, tree inner, tree type,
3349 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3351 tree result, bftype;
3353 if (bitpos == 0)
3355 tree size = TYPE_SIZE (TREE_TYPE (inner));
3356 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3357 || POINTER_TYPE_P (TREE_TYPE (inner)))
3358 && tree_fits_shwi_p (size)
3359 && tree_to_shwi (size) == bitsize)
3360 return fold_convert_loc (loc, type, inner);
3363 bftype = type;
3364 if (TYPE_PRECISION (bftype) != bitsize
3365 || TYPE_UNSIGNED (bftype) == !unsignedp)
3366 bftype = build_nonstandard_integer_type (bitsize, 0);
3368 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3369 size_int (bitsize), bitsize_int (bitpos));
3371 if (bftype != type)
3372 result = fold_convert_loc (loc, type, result);
3374 return result;
3377 /* Optimize a bit-field compare.
3379 There are two cases: First is a compare against a constant and the
3380 second is a comparison of two items where the fields are at the same
3381 bit position relative to the start of a chunk (byte, halfword, word)
3382 large enough to contain it. In these cases we can avoid the shift
3383 implicit in bitfield extractions.
3385 For constants, we emit a compare of the shifted constant with the
3386 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3387 compared. For two fields at the same position, we do the ANDs with the
3388 similar mask and compare the result of the ANDs.
3390 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3391 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3392 are the left and right operands of the comparison, respectively.
3394 If the optimization described above can be done, we return the resulting
3395 tree. Otherwise we return zero. */
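/* As a sketch, on a target where a field occupies bits 3..6 of a
   byte, s.field == 3 can be rewritten as (byte & 0x78) == 0x18: one
   load, one AND and one compare instead of a load, shift, mask and
   compare.  */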
3397 static tree
3398 optimize_bit_field_compare (location_t loc, enum tree_code code,
3399 tree compare_type, tree lhs, tree rhs)
3401 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3402 tree type = TREE_TYPE (lhs);
3403 tree unsigned_type;
3404 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3405 machine_mode lmode, rmode, nmode;
3406 int lunsignedp, runsignedp;
3407 int lvolatilep = 0, rvolatilep = 0;
3408 tree linner, rinner = NULL_TREE;
3409 tree mask;
3410 tree offset;
3412 /* Get all the information about the extractions being done. If the bit size
3413 is the same as the size of the underlying object, we aren't doing an
3414 extraction at all and so can do nothing. We also don't want to
3415 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3416 then will no longer be able to replace it. */
3417 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3418 &lunsignedp, &lvolatilep, false);
3419 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3420 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3421 return 0;
3423 if (!const_p)
3425 /* If this is not a constant, we can only do something if bit positions,
3426 sizes, and signedness are the same. */
3427 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3428 &runsignedp, &rvolatilep, false);
3430 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3431 || lunsignedp != runsignedp || offset != 0
3432 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3433 return 0;
3436 /* See if we can find a mode to refer to this field. We should be able to,
3437 but fail if we can't. */
3438 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3439 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3440 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3441 TYPE_ALIGN (TREE_TYPE (rinner))),
3442 word_mode, false);
3443 if (nmode == VOIDmode)
3444 return 0;
3446 /* Set signed and unsigned types of the precision of this mode for the
3447 shifts below. */
3448 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3450 /* Compute the bit position and size for the new reference and our offset
3451 within it. If the new reference is the same size as the original, we
3452 won't optimize anything, so return zero. */
3453 nbitsize = GET_MODE_BITSIZE (nmode);
3454 nbitpos = lbitpos & ~ (nbitsize - 1);
3455 lbitpos -= nbitpos;
3456 if (nbitsize == lbitsize)
3457 return 0;
3459 if (BYTES_BIG_ENDIAN)
3460 lbitpos = nbitsize - lbitsize - lbitpos;
3462 /* Make the mask to be used against the extracted field. */
3463 mask = build_int_cst_type (unsigned_type, -1);
3464 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3465 mask = const_binop (RSHIFT_EXPR, mask,
3466 size_int (nbitsize - lbitsize - lbitpos));
3468 if (! const_p)
3469 /* If not comparing with a constant, just rework the comparison
3470 and return. */
3471 return fold_build2_loc (loc, code, compare_type,
3472 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3473 make_bit_field_ref (loc, linner,
3474 unsigned_type,
3475 nbitsize, nbitpos,
3477 mask),
3478 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3479 make_bit_field_ref (loc, rinner,
3480 unsigned_type,
3481 nbitsize, nbitpos,
3483 mask));
3485 /* Otherwise, we are handling the constant case. See if the constant is too
3486 big for the field. Warn and return a tree for 0 (false) if so. We do
3487 this not only for its own sake, but to avoid having to test for this
3488 error case below. If we didn't, we might generate wrong code.
3490 For unsigned fields, the constant shifted right by the field length should
3491 be all zero. For signed fields, the high-order bits should agree with
3492 the sign bit. */
3494 if (lunsignedp)
3496 if (wi::lrshift (rhs, lbitsize) != 0)
3498 warning (0, "comparison is always %d due to width of bit-field",
3499 code == NE_EXPR);
3500 return constant_boolean_node (code == NE_EXPR, compare_type);
3503 else
3505 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3506 if (tem != 0 && tem != -1)
3508 warning (0, "comparison is always %d due to width of bit-field",
3509 code == NE_EXPR);
3510 return constant_boolean_node (code == NE_EXPR, compare_type);
3514 /* Single-bit compares should always be against zero. */
3515 if (lbitsize == 1 && ! integer_zerop (rhs))
3517 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3518 rhs = build_int_cst (type, 0);
3521 /* Make a new bitfield reference, shift the constant over the
3522 appropriate number of bits and mask it with the computed mask
3523 (in case this was a signed field). If we changed it, make a new one. */
3524 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3526 rhs = const_binop (BIT_AND_EXPR,
3527 const_binop (LSHIFT_EXPR,
3528 fold_convert_loc (loc, unsigned_type, rhs),
3529 size_int (lbitpos)),
3530 mask);
3532 lhs = build2_loc (loc, code, compare_type,
3533 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3534 return lhs;
3537 /* Subroutine for fold_truth_andor_1: decode a field reference.
3539 If EXP is a comparison reference, we return the innermost reference.
3541 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3542 set to the starting bit number.
3544 If the innermost field can be completely contained in a mode-sized
3545 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3547 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3548 otherwise it is not changed.
3550 *PUNSIGNEDP is set to the signedness of the field.
3552 *PMASK is set to the mask used. This is either contained in a
3553 BIT_AND_EXPR or derived from the width of the field.
3555 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3557 Return 0 if this is not a component reference or is one that we can't
3558 do anything with. */
3560 static tree
3561 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3562 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3563 int *punsignedp, int *pvolatilep,
3564 tree *pmask, tree *pand_mask)
3566 tree outer_type = 0;
3567 tree and_mask = 0;
3568 tree mask, inner, offset;
3569 tree unsigned_type;
3570 unsigned int precision;
3572 /* All the optimizations using this function assume integer fields.
3573 There are problems with FP fields since the type_for_size call
3574 below can fail for, e.g., XFmode. */
3575 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3576 return 0;
3578 /* We are interested in the bare arrangement of bits, so strip everything
3579 that doesn't affect the machine mode. However, record the type of the
3580 outermost expression if it may matter below. */
3581 if (CONVERT_EXPR_P (exp)
3582 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3583 outer_type = TREE_TYPE (exp);
3584 STRIP_NOPS (exp);
3586 if (TREE_CODE (exp) == BIT_AND_EXPR)
3588 and_mask = TREE_OPERAND (exp, 1);
3589 exp = TREE_OPERAND (exp, 0);
3590 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3591 if (TREE_CODE (and_mask) != INTEGER_CST)
3592 return 0;
3595 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3596 punsignedp, pvolatilep, false);
3597 if ((inner == exp && and_mask == 0)
3598 || *pbitsize < 0 || offset != 0
3599 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3600 return 0;
3602 /* If the number of bits in the reference is the same as the bitsize of
3603 the outer type, then the outer type gives the signedness. Otherwise
3604 (in case of a small bitfield) the signedness is unchanged. */
3605 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3606 *punsignedp = TYPE_UNSIGNED (outer_type);
3608 /* Compute the mask to access the bitfield. */
3609 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3610 precision = TYPE_PRECISION (unsigned_type);
3612 mask = build_int_cst_type (unsigned_type, -1);
3614 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3615 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
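/* The shift pair above leaves exactly *PBITSIZE low-order ones; e.g.
   with a precision of 32 and *PBITSIZE == 5 it computes
   (~0U << 27) >> 27 == 0x1f in ordinary C terms.  */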
3617 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3618 if (and_mask != 0)
3619 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3620 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3622 *pmask = mask;
3623 *pand_mask = and_mask;
3624 return inner;
3627 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3628 bit positions and MASK is SIGNED. */
3630 static int
3631 all_ones_mask_p (const_tree mask, unsigned int size)
3633 tree type = TREE_TYPE (mask);
3634 unsigned int precision = TYPE_PRECISION (type);
3636 /* If this function returns true when the type of the mask is
3637 UNSIGNED, then there will be errors. In particular see
3638 gcc.c-torture/execute/990326-1.c. There does not appear to be
3639 any documentation paper trail as to why this is so. But the
3640 pre-wide-int code worked with that restriction and it has been preserved
3641 here. */
3642 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3643 return false;
3645 return wi::mask (size, false, precision) == mask;
3648 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3649 represents the sign bit of EXP's type. If EXP represents a sign
3650 or zero extension, also test VAL against the unextended type.
3651 The return value is the (sub)expression whose sign bit is VAL,
3652 or NULL_TREE otherwise. */
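/* For a 16-bit type the only matching VAL is 0x8000.  The recursion
   below handles widening conversions: for EXP = (int) c with an
   8-bit variable c, VAL = 0x80 still identifies the sign bit of the
   unextended operand.  */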
3654 static tree
3655 sign_bit_p (tree exp, const_tree val)
3657 int width;
3658 tree t;
3660 /* Tree EXP must have an integral type. */
3661 t = TREE_TYPE (exp);
3662 if (! INTEGRAL_TYPE_P (t))
3663 return NULL_TREE;
3665 /* Tree VAL must be an integer constant. */
3666 if (TREE_CODE (val) != INTEGER_CST
3667 || TREE_OVERFLOW (val))
3668 return NULL_TREE;
3670 width = TYPE_PRECISION (t);
3671 if (wi::only_sign_bit_p (val, width))
3672 return exp;
3674 /* Handle extension from a narrower type. */
3675 if (TREE_CODE (exp) == NOP_EXPR
3676 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3677 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3679 return NULL_TREE;
3682 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3683 to be evaluated unconditionally. */
3685 static int
3686 simple_operand_p (const_tree exp)
3688 /* Strip any conversions that don't change the machine mode. */
3689 STRIP_NOPS (exp);
3691 return (CONSTANT_CLASS_P (exp)
3692 || TREE_CODE (exp) == SSA_NAME
3693 || (DECL_P (exp)
3694 && ! TREE_ADDRESSABLE (exp)
3695 && ! TREE_THIS_VOLATILE (exp)
3696 && ! DECL_NONLOCAL (exp)
3697 /* Don't regard global variables as simple. They may be
3698 allocated in ways unknown to the compiler (shared memory,
3699 #pragma weak, etc). */
3700 && ! TREE_PUBLIC (exp)
3701 && ! DECL_EXTERNAL (exp)
3702 /* Weakrefs are not safe to be read, since they can be NULL.
3703 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3704 have DECL_WEAK flag set. */
3705 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3706 /* Loading a static variable is unduly expensive, but global
3707 registers aren't expensive. */
3708 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3711 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3712 to be evaluated unconditionally.
3713 In addition to simple_operand_p, we assume that comparisons, conversions,
3714 and logic-not operations are simple if their operands are simple, too. */
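/* E.g. a == b is simple when both a and b are, and so are
   !(a == b) and (char) a == b, provided nothing in the expression
   can trap or has side effects.  */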
3716 static bool
3717 simple_operand_p_2 (tree exp)
3719 enum tree_code code;
3721 if (TREE_SIDE_EFFECTS (exp)
3722 || tree_could_trap_p (exp))
3723 return false;
3725 while (CONVERT_EXPR_P (exp))
3726 exp = TREE_OPERAND (exp, 0);
3728 code = TREE_CODE (exp);
3730 if (TREE_CODE_CLASS (code) == tcc_comparison)
3731 return (simple_operand_p (TREE_OPERAND (exp, 0))
3732 && simple_operand_p (TREE_OPERAND (exp, 1)));
3734 if (code == TRUTH_NOT_EXPR)
3735 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3737 return simple_operand_p (exp);
3741 /* The following functions are subroutines to fold_range_test and allow it to
3742 try to change a logical combination of comparisons into a range test.
3744 For example, both
3745 X == 2 || X == 3 || X == 4 || X == 5
3746    and
3747    X >= 2 && X <= 5
3748 are converted to
3749 (unsigned) (X - 2) <= 3
3751 We describe each set of comparisons as being either inside or outside
3752 a range, using a variable named like IN_P, and then describe the
3753 range with a lower and upper bound. If one of the bounds is omitted,
3754 it represents either the highest or lowest value of the type.
3756 In the comments below, we represent a range by two numbers in brackets
3757 preceded by a "+" to designate being inside that range, or a "-" to
3758 designate being outside that range, so the condition can be inverted by
3759 flipping the prefix. An omitted bound is represented by a "-". For
3760 example, "- [-, 10]" means being outside the range starting at the lowest
3761 possible value and ending at 10, in other words, being greater than 10.
3762 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3763 always false.
3765 We set up things so that the missing bounds are handled in a consistent
3766 manner so neither a missing bound nor "true" and "false" need to be
3767 handled using a special case. */
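/* A worked instance of the notation above, assuming a 32-bit int X:

     X == 2 || X == 3 || X == 4 || X == 5
       each equality is + [c, c]; merging yields  + [2, 5]
     X > 10
       is - [-, 10], i.e. outside [lowest, 10]
   and the final range check for + [2, 5] is emitted as
     (unsigned) (X - 2) <= 3.  */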
3769 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3770 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3771 and UPPER1_P are nonzero if the respective argument is an upper bound
3772 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3773 must be specified for a comparison. ARG1 will be converted to ARG0's
3774 type if both are specified. */
3776 static tree
3777 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3778 tree arg1, int upper1_p)
3780 tree tem;
3781 int result;
3782 int sgn0, sgn1;
3784 /* If neither arg represents infinity, do the normal operation.
3785 Else, if not a comparison, return infinity. Else handle the special
3786 comparison rules. Note that most of the cases below won't occur, but
3787 are handled for consistency. */
3789 if (arg0 != 0 && arg1 != 0)
3791 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3792 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3793 STRIP_NOPS (tem);
3794 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3797 if (TREE_CODE_CLASS (code) != tcc_comparison)
3798 return 0;
3800 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3801     for neither.  In real maths, we cannot assume open-ended ranges are
3802     the same.  But, this is computer arithmetic, where numbers are finite.
3803     We can therefore represent any unbounded bound by a value Z, Z
3804     being greater than any representable number.  This permits
3805     us to treat unbounded ranges as equal.  */
3806 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3807 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3808 switch (code)
3810 case EQ_EXPR:
3811 result = sgn0 == sgn1;
3812 break;
3813 case NE_EXPR:
3814 result = sgn0 != sgn1;
3815 break;
3816 case LT_EXPR:
3817 result = sgn0 < sgn1;
3818 break;
3819 case LE_EXPR:
3820 result = sgn0 <= sgn1;
3821 break;
3822 case GT_EXPR:
3823 result = sgn0 > sgn1;
3824 break;
3825 case GE_EXPR:
3826 result = sgn0 >= sgn1;
3827 break;
3828 default:
3829 gcc_unreachable ();
3832 return constant_boolean_node (result, type);
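/* Sketch of the infinity handling above: comparing a missing lower
   bound (SGN == -1) against a missing upper bound (SGN == 1) with
   LT_EXPR gives -1 < 1, i.e. true.  A missing lower bound always
   sorts below a missing upper bound, which is what lets "- [-, 10]"
   behave as "greater than 10" without special-casing.  */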
3835 /* Helper routine for make_range. Perform one step for it, return
3836 new expression if the loop should continue or NULL_TREE if it should
3837 stop. */
3839 tree
3840 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3841 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3842 bool *strict_overflow_p)
3844 tree arg0_type = TREE_TYPE (arg0);
3845 tree n_low, n_high, low = *p_low, high = *p_high;
3846 int in_p = *p_in_p, n_in_p;
3848 switch (code)
3850 case TRUTH_NOT_EXPR:
3851 /* We can only do something if the range is testing for zero. */
3852 if (low == NULL_TREE || high == NULL_TREE
3853 || ! integer_zerop (low) || ! integer_zerop (high))
3854 return NULL_TREE;
3855 *p_in_p = ! in_p;
3856 return arg0;
3858 case EQ_EXPR: case NE_EXPR:
3859 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3860 /* We can only do something if the range is testing for zero
3861 and if the second operand is an integer constant. Note that
3862 saying something is "in" the range we make is done by
3863    complementing IN_P, since it will be set in the initial case of
3864 being not equal to zero; "out" is leaving it alone. */
3865 if (low == NULL_TREE || high == NULL_TREE
3866 || ! integer_zerop (low) || ! integer_zerop (high)
3867 || TREE_CODE (arg1) != INTEGER_CST)
3868 return NULL_TREE;
3870 switch (code)
3872 case NE_EXPR: /* - [c, c] */
3873 low = high = arg1;
3874 break;
3875 case EQ_EXPR: /* + [c, c] */
3876 in_p = ! in_p, low = high = arg1;
3877 break;
3878 case GT_EXPR: /* - [-, c] */
3879 low = 0, high = arg1;
3880 break;
3881 case GE_EXPR: /* + [c, -] */
3882 in_p = ! in_p, low = arg1, high = 0;
3883 break;
3884 case LT_EXPR: /* - [c, -] */
3885 low = arg1, high = 0;
3886 break;
3887 case LE_EXPR: /* + [-, c] */
3888 in_p = ! in_p, low = 0, high = arg1;
3889 break;
3890 default:
3891 gcc_unreachable ();
3894 /* If this is an unsigned comparison, we also know that EXP is
3895 greater than or equal to zero. We base the range tests we make
3896 on that fact, so we record it here so we can parse existing
3897 range tests. We test arg0_type since often the return type
3898 of, e.g. EQ_EXPR, is boolean. */
3899 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3901 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3902 in_p, low, high, 1,
3903 build_int_cst (arg0_type, 0),
3904 NULL_TREE))
3905 return NULL_TREE;
3907 in_p = n_in_p, low = n_low, high = n_high;
3909 /* If the high bound is missing, but we have a nonzero low
3910 bound, reverse the range so it goes from zero to the low bound
3911 minus 1. */
3912 if (high == 0 && low && ! integer_zerop (low))
3914 in_p = ! in_p;
3915 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3916 build_int_cst (TREE_TYPE (low), 1), 0);
3917 low = build_int_cst (arg0_type, 0);
3921 *p_low = low;
3922 *p_high = high;
3923 *p_in_p = in_p;
3924 return arg0;
3926 case NEGATE_EXPR:
3927 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3928 low and high are non-NULL, then normalize will DTRT. */
3929 if (!TYPE_UNSIGNED (arg0_type)
3930 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3932 if (low == NULL_TREE)
3933 low = TYPE_MIN_VALUE (arg0_type);
3934 if (high == NULL_TREE)
3935 high = TYPE_MAX_VALUE (arg0_type);
3938 /* (-x) IN [a,b] -> x in [-b, -a] */
3939 n_low = range_binop (MINUS_EXPR, exp_type,
3940 build_int_cst (exp_type, 0),
3941 0, high, 1);
3942 n_high = range_binop (MINUS_EXPR, exp_type,
3943 build_int_cst (exp_type, 0),
3944 0, low, 0);
3945 if (n_high != 0 && TREE_OVERFLOW (n_high))
3946 return NULL_TREE;
3947 goto normalize;
3949 case BIT_NOT_EXPR:
3950 /* ~ X -> -X - 1 */
3951 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3952 build_int_cst (exp_type, 1));
3954 case PLUS_EXPR:
3955 case MINUS_EXPR:
3956 if (TREE_CODE (arg1) != INTEGER_CST)
3957 return NULL_TREE;
3959 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3960 move a constant to the other side. */
3961 if (!TYPE_UNSIGNED (arg0_type)
3962 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3963 return NULL_TREE;
3965 /* If EXP is signed, any overflow in the computation is undefined,
3966 so we don't worry about it so long as our computations on
3967 the bounds don't overflow. For unsigned, overflow is defined
3968 and this is exactly the right thing. */
3969 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3970 arg0_type, low, 0, arg1, 0);
3971 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3972 arg0_type, high, 1, arg1, 0);
3973 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3974 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3975 return NULL_TREE;
3977 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3978 *strict_overflow_p = true;
3980 normalize:
3981 /* Check for an unsigned range which has wrapped around the maximum
3982 value thus making n_high < n_low, and normalize it. */
3983 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3985 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3986 build_int_cst (TREE_TYPE (n_high), 1), 0);
3987 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3988 build_int_cst (TREE_TYPE (n_low), 1), 0);
3990 /* If the range is of the form +/- [ x+1, x ], we won't
3991 be able to normalize it. But then, it represents the
3992 whole range or the empty set, so make it
3993 +/- [ -, - ]. */
3994 if (tree_int_cst_equal (n_low, low)
3995 && tree_int_cst_equal (n_high, high))
3996 low = high = 0;
3997 else
3998 in_p = ! in_p;
4000 else
4001 low = n_low, high = n_high;
4003 *p_low = low;
4004 *p_high = high;
4005 *p_in_p = in_p;
4006 return arg0;
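      /* Example of the normalization above, assuming 8-bit unsigned
	 arithmetic: for X - 250 in + [0, 9], i.e.
	 (unsigned char) (X - 250) < 10, the MINUS_EXPR step adds 250
	 back and gets n_low = 250, n_high = 3 (9 + 250 wraps), so
	 n_high < n_low.  The range has wrapped around the maximum
	 value and is rewritten as its complement - [4, 249].  */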
4008 CASE_CONVERT:
4009 case NON_LVALUE_EXPR:
4010 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4011 return NULL_TREE;
4013 if (! INTEGRAL_TYPE_P (arg0_type)
4014 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4015 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4016 return NULL_TREE;
4018 n_low = low, n_high = high;
4020 if (n_low != 0)
4021 n_low = fold_convert_loc (loc, arg0_type, n_low);
4023 if (n_high != 0)
4024 n_high = fold_convert_loc (loc, arg0_type, n_high);
4026	  /* If we're converting arg0 from an unsigned type to exp's
4027	     signed type, we will be doing the comparison as unsigned.
4028 The tests above have already verified that LOW and HIGH
4029 are both positive.
4031 So we have to ensure that we will handle large unsigned
4032 values the same way that the current signed bounds treat
4033 negative values. */
4035 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4037 tree high_positive;
4038 tree equiv_type;
4039 /* For fixed-point modes, we need to pass the saturating flag
4040 as the 2nd parameter. */
4041 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4042 equiv_type
4043 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4044 TYPE_SATURATING (arg0_type));
4045 else
4046 equiv_type
4047 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4049 /* A range without an upper bound is, naturally, unbounded.
4050 Since convert would have cropped a very large value, use
4051 the max value for the destination type. */
4052 high_positive
4053 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4054 : TYPE_MAX_VALUE (arg0_type);
4056 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4057 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4058 fold_convert_loc (loc, arg0_type,
4059 high_positive),
4060 build_int_cst (arg0_type, 1));
4062 /* If the low bound is specified, "and" the range with the
4063 range for which the original unsigned value will be
4064 positive. */
4065 if (low != 0)
4067 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4068 1, fold_convert_loc (loc, arg0_type,
4069 integer_zero_node),
4070 high_positive))
4071 return NULL_TREE;
4073 in_p = (n_in_p == in_p);
4075 else
4077 /* Otherwise, "or" the range with the range of the input
4078 that will be interpreted as negative. */
4079 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4080 1, fold_convert_loc (loc, arg0_type,
4081 integer_zero_node),
4082 high_positive))
4083 return NULL_TREE;
4085 in_p = (in_p != n_in_p);
4089 *p_low = n_low;
4090 *p_high = n_high;
4091 *p_in_p = in_p;
4092 return arg0;
4094 default:
4095 return NULL_TREE;
4099 /* Given EXP, a logical expression, set the range it is testing into
4100 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4101 actually being tested. *PLOW and *PHIGH will be made of the same
4102 type as the returned expression. If EXP is not a comparison, we
4103 will most likely not be returning a useful value and range. Set
4104 *STRICT_OVERFLOW_P to true if the return value is only valid
4105 because signed overflow is undefined; otherwise, do not change
4106 *STRICT_OVERFLOW_P. */
4108 tree
4109 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4110 bool *strict_overflow_p)
4112 enum tree_code code;
4113 tree arg0, arg1 = NULL_TREE;
4114 tree exp_type, nexp;
4115 int in_p;
4116 tree low, high;
4117 location_t loc = EXPR_LOCATION (exp);
4119 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4120     and see if we can refine the range.  Some of the cases below may not
4121     happen, but it doesn't seem worth worrying about this.  We keep
4122     iterating as long as make_range_step refines the range; once it
4123     returns NULL_TREE we stop with the range computed so far.  */
4125 in_p = 0;
4126 low = high = build_int_cst (TREE_TYPE (exp), 0);
4128 while (1)
4130 code = TREE_CODE (exp);
4131 exp_type = TREE_TYPE (exp);
4132 arg0 = NULL_TREE;
4134 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4136 if (TREE_OPERAND_LENGTH (exp) > 0)
4137 arg0 = TREE_OPERAND (exp, 0);
4138 if (TREE_CODE_CLASS (code) == tcc_binary
4139 || TREE_CODE_CLASS (code) == tcc_comparison
4140 || (TREE_CODE_CLASS (code) == tcc_expression
4141 && TREE_OPERAND_LENGTH (exp) > 1))
4142 arg1 = TREE_OPERAND (exp, 1);
4144 if (arg0 == NULL_TREE)
4145 break;
4147 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4148 &high, &in_p, strict_overflow_p);
4149 if (nexp == NULL_TREE)
4150 break;
4151 exp = nexp;
4154 /* If EXP is a constant, we can evaluate whether this is true or false. */
4155 if (TREE_CODE (exp) == INTEGER_CST)
4157 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4158 exp, 0, low, 0))
4159 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4160 exp, 1, high, 1)));
4161 low = high = 0;
4162 exp = 0;
4165 *pin_p = in_p, *plow = low, *phigh = high;
4166 return exp;
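/* A short trace, assuming a signed 32-bit int x with undefined
   overflow: for EXP = x + 2 > 5,
     the GT_EXPR step makes the range    - [-, 5]  on x + 2,
     the PLUS_EXPR step shifts it to     - [-, 3]  on x,
   so make_range returns x with *pin_p == 0, *plow == NULL_TREE and
   *phigh == 3 ("x > 3"), and sets *strict_overflow_p because moving
   the constant relies on signed overflow being undefined.  */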
4169 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4170 type, TYPE, return an expression to test if EXP is in (or out of, depending
4171 on IN_P) the range. Return 0 if the test couldn't be created. */
4173 tree
4174 build_range_check (location_t loc, tree type, tree exp, int in_p,
4175 tree low, tree high)
4177 tree etype = TREE_TYPE (exp), value;
4179 #ifdef HAVE_canonicalize_funcptr_for_compare
4180 /* Disable this optimization for function pointer expressions
4181 on targets that require function pointer canonicalization. */
4182 if (HAVE_canonicalize_funcptr_for_compare
4183 && TREE_CODE (etype) == POINTER_TYPE
4184 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4185 return NULL_TREE;
4186 #endif
4188 if (! in_p)
4190 value = build_range_check (loc, type, exp, 1, low, high);
4191 if (value != 0)
4192 return invert_truthvalue_loc (loc, value);
4194 return 0;
4197 if (low == 0 && high == 0)
4198 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4200 if (low == 0)
4201 return fold_build2_loc (loc, LE_EXPR, type, exp,
4202 fold_convert_loc (loc, etype, high));
4204 if (high == 0)
4205 return fold_build2_loc (loc, GE_EXPR, type, exp,
4206 fold_convert_loc (loc, etype, low));
4208 if (operand_equal_p (low, high, 0))
4209 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4210 fold_convert_loc (loc, etype, low));
4212 if (integer_zerop (low))
4214 if (! TYPE_UNSIGNED (etype))
4216 etype = unsigned_type_for (etype);
4217 high = fold_convert_loc (loc, etype, high);
4218 exp = fold_convert_loc (loc, etype, exp);
4220 return build_range_check (loc, type, exp, 1, 0, high);
4223 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4224 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4226 int prec = TYPE_PRECISION (etype);
4228 if (wi::mask (prec - 1, false, prec) == high)
4230 if (TYPE_UNSIGNED (etype))
4232 tree signed_etype = signed_type_for (etype);
4233 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4234 etype
4235 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4236 else
4237 etype = signed_etype;
4238 exp = fold_convert_loc (loc, etype, exp);
4240 return fold_build2_loc (loc, GT_EXPR, type, exp,
4241 build_int_cst (etype, 0));
4245 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4246     This requires wrap-around arithmetic for the type of the expression.
4247     First make sure that arithmetic in this type is valid, then make sure
4248 that it wraps around. */
4249 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4250 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4251 TYPE_UNSIGNED (etype));
4253 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4255 tree utype, minv, maxv;
4257 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4258 for the type in question, as we rely on this here. */
4259 utype = unsigned_type_for (etype);
4260 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4261 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4262 build_int_cst (TREE_TYPE (maxv), 1), 1);
4263 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4265 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4266 minv, 1, maxv, 1)))
4267 etype = utype;
4268 else
4269 return 0;
4272 high = fold_convert_loc (loc, etype, high);
4273 low = fold_convert_loc (loc, etype, low);
4274 exp = fold_convert_loc (loc, etype, exp);
4276 value = const_binop (MINUS_EXPR, high, low);
4279 if (POINTER_TYPE_P (etype))
4281 if (value != 0 && !TREE_OVERFLOW (value))
4283 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4284 return build_range_check (loc, type,
4285 fold_build_pointer_plus_loc (loc, exp, low),
4286 1, build_int_cst (etype, 0), value);
4288 return 0;
4291 if (value != 0 && !TREE_OVERFLOW (value))
4292 return build_range_check (loc, type,
4293 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4294 1, build_int_cst (etype, 0), value);
4296 return 0;
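/* Usage sketch: asking whether c is in [97, 122] ('a' .. 'z'),
   assuming a 32-bit int c,

     build_range_check (loc, boolean_type_node, c, 1,
			build_int_cst (integer_type_node, 97),
			build_int_cst (integer_type_node, 122))

   produces (unsigned int) (c - 97) <= 25, one comparison instead
   of two.  */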
4299 /* Return the predecessor of VAL in its type, handling the infinite case. */
4301 static tree
4302 range_predecessor (tree val)
4304 tree type = TREE_TYPE (val);
4306 if (INTEGRAL_TYPE_P (type)
4307 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4308 return 0;
4309 else
4310 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4311 build_int_cst (TREE_TYPE (val), 1), 0);
4314 /* Return the successor of VAL in its type, handling the infinite case. */
4316 static tree
4317 range_successor (tree val)
4319 tree type = TREE_TYPE (val);
4321 if (INTEGRAL_TYPE_P (type)
4322 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4323 return 0;
4324 else
4325 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4326 build_int_cst (TREE_TYPE (val), 1), 0);
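/* E.g. for unsigned char, range_predecessor (0) and
   range_successor (255) both return 0: there is no such value, and
   the callers treat that as "punt".  */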
4329 /* Given two ranges, see if we can merge them into one. Return 1 if we
4330 can, 0 if we can't. Set the output range into the specified parameters. */
4332 bool
4333 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4334 tree high0, int in1_p, tree low1, tree high1)
4336 int no_overlap;
4337 int subset;
4338 int temp;
4339 tree tem;
4340 int in_p;
4341 tree low, high;
4342 int lowequal = ((low0 == 0 && low1 == 0)
4343 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4344 low0, 0, low1, 0)));
4345 int highequal = ((high0 == 0 && high1 == 0)
4346 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4347 high0, 1, high1, 1)));
4349 /* Make range 0 be the range that starts first, or ends last if they
4350     start at the same value.  Swap them if that is not already the case.  */
4351 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4352 low0, 0, low1, 0))
4353 || (lowequal
4354 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4355 high1, 1, high0, 1))))
4357 temp = in0_p, in0_p = in1_p, in1_p = temp;
4358 tem = low0, low0 = low1, low1 = tem;
4359 tem = high0, high0 = high1, high1 = tem;
4362 /* Now flag two cases, whether the ranges are disjoint or whether the
4363 second range is totally subsumed in the first. Note that the tests
4364 below are simplified by the ones above. */
4365 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4366 high0, 1, low1, 0));
4367 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4368 high1, 1, high0, 1));
4370 /* We now have four cases, depending on whether we are including or
4371 excluding the two ranges. */
4372 if (in0_p && in1_p)
4374 /* If they don't overlap, the result is false. If the second range
4375 is a subset it is the result. Otherwise, the range is from the start
4376 of the second to the end of the first. */
4377 if (no_overlap)
4378 in_p = 0, low = high = 0;
4379 else if (subset)
4380 in_p = 1, low = low1, high = high1;
4381 else
4382 in_p = 1, low = low1, high = high0;
4385 else if (in0_p && ! in1_p)
4387 /* If they don't overlap, the result is the first range. If they are
4388 equal, the result is false. If the second range is a subset of the
4389 first, and the ranges begin at the same place, we go from just after
4390 the end of the second range to the end of the first. If the second
4391 range is not a subset of the first, or if it is a subset and both
4392 ranges end at the same place, the range starts at the start of the
4393 first range and ends just before the second range.
4394 Otherwise, we can't describe this as a single range. */
4395 if (no_overlap)
4396 in_p = 1, low = low0, high = high0;
4397 else if (lowequal && highequal)
4398 in_p = 0, low = high = 0;
4399 else if (subset && lowequal)
4401 low = range_successor (high1);
4402 high = high0;
4403 in_p = 1;
4404 if (low == 0)
4406 /* We are in the weird situation where high0 > high1 but
4407 high1 has no successor. Punt. */
4408 return 0;
4411 else if (! subset || highequal)
4413 low = low0;
4414 high = range_predecessor (low1);
4415 in_p = 1;
4416 if (high == 0)
4418 /* low0 < low1 but low1 has no predecessor. Punt. */
4419 return 0;
4422 else
4423 return 0;
4426 else if (! in0_p && in1_p)
4428 /* If they don't overlap, the result is the second range. If the second
4429 is a subset of the first, the result is false. Otherwise,
4430 the range starts just after the first range and ends at the
4431 end of the second. */
4432 if (no_overlap)
4433 in_p = 1, low = low1, high = high1;
4434 else if (subset || highequal)
4435 in_p = 0, low = high = 0;
4436 else
4438 low = range_successor (high0);
4439 high = high1;
4440 in_p = 1;
4441 if (low == 0)
4443 /* high1 > high0 but high0 has no successor. Punt. */
4444 return 0;
4449 else
4451 /* The case where we are excluding both ranges. Here the complex case
4452 is if they don't overlap. In that case, the only time we have a
4453 range is if they are adjacent. If the second is a subset of the
4454 first, the result is the first. Otherwise, the range to exclude
4455 starts at the beginning of the first range and ends at the end of the
4456 second. */
4457 if (no_overlap)
4459 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4460 range_successor (high0),
4461 1, low1, 0)))
4462 in_p = 0, low = low0, high = high1;
4463 else
4465 /* Canonicalize - [min, x] into - [-, x]. */
4466 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4467 switch (TREE_CODE (TREE_TYPE (low0)))
4469 case ENUMERAL_TYPE:
4470 if (TYPE_PRECISION (TREE_TYPE (low0))
4471 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4472 break;
4473 /* FALLTHROUGH */
4474 case INTEGER_TYPE:
4475 if (tree_int_cst_equal (low0,
4476 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4477 low0 = 0;
4478 break;
4479 case POINTER_TYPE:
4480 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4481 && integer_zerop (low0))
4482 low0 = 0;
4483 break;
4484 default:
4485 break;
4488 /* Canonicalize - [x, max] into - [x, -]. */
4489 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4490 switch (TREE_CODE (TREE_TYPE (high1)))
4492 case ENUMERAL_TYPE:
4493 if (TYPE_PRECISION (TREE_TYPE (high1))
4494 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4495 break;
4496 /* FALLTHROUGH */
4497 case INTEGER_TYPE:
4498 if (tree_int_cst_equal (high1,
4499 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4500 high1 = 0;
4501 break;
4502 case POINTER_TYPE:
4503 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4504 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4505 high1, 1,
4506 build_int_cst (TREE_TYPE (high1), 1),
4507 1)))
4508 high1 = 0;
4509 break;
4510 default:
4511 break;
4514 /* The ranges might be also adjacent between the maximum and
4515 minimum values of the given type. For
4516 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4517 return + [x + 1, y - 1]. */
4518 if (low0 == 0 && high1 == 0)
4520 low = range_successor (high0);
4521 high = range_predecessor (low1);
4522 if (low == 0 || high == 0)
4523 return 0;
4525 in_p = 1;
4527 else
4528 return 0;
4531 else if (subset)
4532 in_p = 0, low = low0, high = high0;
4533 else
4534 in_p = 0, low = low0, high = high1;
4537 *pin_p = in_p, *plow = low, *phigh = high;
4538 return 1;
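/* Worked example, assuming int: merging + [2, 5] with + [4, 9] is
   the in0_p && in1_p case with overlap but no subset, so the result
   is + [4, 5] -- the low bound from the second range, the high bound
   from the first.  Merging - [2, 5] with - [4, 9] instead gives the
   single excluded range - [2, 9].  */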
4542 /* Subroutine of fold, looking inside expressions of the form
4543 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4544 of the COND_EXPR. This function is being used also to optimize
4545 A op B ? C : A, by reversing the comparison first.
4547 Return a folded expression whose code is not a COND_EXPR
4548 anymore, or NULL_TREE if no folding opportunity is found. */
4550 static tree
4551 fold_cond_expr_with_comparison (location_t loc, tree type,
4552 tree arg0, tree arg1, tree arg2)
4554 enum tree_code comp_code = TREE_CODE (arg0);
4555 tree arg00 = TREE_OPERAND (arg0, 0);
4556 tree arg01 = TREE_OPERAND (arg0, 1);
4557 tree arg1_type = TREE_TYPE (arg1);
4558 tree tem;
4560 STRIP_NOPS (arg1);
4561 STRIP_NOPS (arg2);
4563 /* If we have A op 0 ? A : -A, consider applying the following
4564 transformations:
4566 A == 0? A : -A same as -A
4567 A != 0? A : -A same as A
4568 A >= 0? A : -A same as abs (A)
4569 A > 0? A : -A same as abs (A)
4570 A <= 0? A : -A same as -abs (A)
4571 A < 0? A : -A same as -abs (A)
4573 None of these transformations work for modes with signed
4574 zeros. If A is +/-0, the first two transformations will
4575 change the sign of the result (from +0 to -0, or vice
4576 versa). The last four will fix the sign of the result,
4577 even though the original expressions could be positive or
4578 negative, depending on the sign of A.
4580 Note that all these transformations are correct if A is
4581 NaN, since the two alternatives (A and -A) are also NaNs. */
4582 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4583 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4584 ? real_zerop (arg01)
4585 : integer_zerop (arg01))
4586 && ((TREE_CODE (arg2) == NEGATE_EXPR
4587 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4588 /* In the case that A is of the form X-Y, '-A' (arg2) may
4589 have already been folded to Y-X, check for that. */
4590 || (TREE_CODE (arg1) == MINUS_EXPR
4591 && TREE_CODE (arg2) == MINUS_EXPR
4592 && operand_equal_p (TREE_OPERAND (arg1, 0),
4593 TREE_OPERAND (arg2, 1), 0)
4594 && operand_equal_p (TREE_OPERAND (arg1, 1),
4595 TREE_OPERAND (arg2, 0), 0))))
4596 switch (comp_code)
4598 case EQ_EXPR:
4599 case UNEQ_EXPR:
4600 tem = fold_convert_loc (loc, arg1_type, arg1);
4601 return pedantic_non_lvalue_loc (loc,
4602 fold_convert_loc (loc, type,
4603 negate_expr (tem)));
4604 case NE_EXPR:
4605 case LTGT_EXPR:
4606 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4607 case UNGE_EXPR:
4608 case UNGT_EXPR:
4609 if (flag_trapping_math)
4610 break;
4611 /* Fall through. */
4612 case GE_EXPR:
4613 case GT_EXPR:
4614 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4615 arg1 = fold_convert_loc (loc, signed_type_for
4616 (TREE_TYPE (arg1)), arg1);
4617 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4618 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4619 case UNLE_EXPR:
4620 case UNLT_EXPR:
4621 if (flag_trapping_math)
4622	    break;
	  /* Fall through.  */
4623 case LE_EXPR:
4624 case LT_EXPR:
4625 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4626 arg1 = fold_convert_loc (loc, signed_type_for
4627 (TREE_TYPE (arg1)), arg1);
4628 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4629 return negate_expr (fold_convert_loc (loc, type, tem));
4630 default:
4631 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4632 break;
4635 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4636 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4637 both transformations are correct when A is NaN: A != 0
4638 is then true, and A == 0 is false. */
4640 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4641 && integer_zerop (arg01) && integer_zerop (arg2))
4643 if (comp_code == NE_EXPR)
4644 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4645 else if (comp_code == EQ_EXPR)
4646 return build_zero_cst (type);
4649 /* Try some transformations of A op B ? A : B.
4651 A == B? A : B same as B
4652 A != B? A : B same as A
4653 A >= B? A : B same as max (A, B)
4654 A > B? A : B same as max (B, A)
4655 A <= B? A : B same as min (A, B)
4656 A < B? A : B same as min (B, A)
4658 As above, these transformations don't work in the presence
4659 of signed zeros. For example, if A and B are zeros of
4660 opposite sign, the first two transformations will change
4661 the sign of the result. In the last four, the original
4662 expressions give different results for (A=+0, B=-0) and
4663 (A=-0, B=+0), but the transformed expressions do not.
4665 The first two transformations are correct if either A or B
4666 is a NaN. In the first transformation, the condition will
4667 be false, and B will indeed be chosen. In the case of the
4668 second transformation, the condition A != B will be true,
4669 and A will be chosen.
4671 The conversions to max() and min() are not correct if B is
4672 a number and A is not. The conditions in the original
4673 expressions will be false, so all four give B. The min()
4674 and max() versions would give a NaN instead. */
4675 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4676 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4677 /* Avoid these transformations if the COND_EXPR may be used
4678 as an lvalue in the C++ front-end. PR c++/19199. */
4679 && (in_gimple_form
4680 || VECTOR_TYPE_P (type)
4681 || (strcmp (lang_hooks.name, "GNU C++") != 0
4682 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4683 || ! maybe_lvalue_p (arg1)
4684 || ! maybe_lvalue_p (arg2)))
4686 tree comp_op0 = arg00;
4687 tree comp_op1 = arg01;
4688 tree comp_type = TREE_TYPE (comp_op0);
4690 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4691 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4693 comp_type = type;
4694 comp_op0 = arg1;
4695 comp_op1 = arg2;
4698 switch (comp_code)
4700 case EQ_EXPR:
4701 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4702 case NE_EXPR:
4703 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4704 case LE_EXPR:
4705 case LT_EXPR:
4706 case UNLE_EXPR:
4707 case UNLT_EXPR:
4708 /* In C++ a ?: expression can be an lvalue, so put the
4709 operand which will be used if they are equal first
4710 so that we can convert this back to the
4711 corresponding COND_EXPR. */
4712 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4714 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4715 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4716 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4717 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4718 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4719 comp_op1, comp_op0);
4720 return pedantic_non_lvalue_loc (loc,
4721 fold_convert_loc (loc, type, tem));
4723 break;
4724 case GE_EXPR:
4725 case GT_EXPR:
4726 case UNGE_EXPR:
4727 case UNGT_EXPR:
4728 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4730 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4731 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4732 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4733 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4734 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4735 comp_op1, comp_op0);
4736 return pedantic_non_lvalue_loc (loc,
4737 fold_convert_loc (loc, type, tem));
4739 break;
4740 case UNEQ_EXPR:
4741 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4742 return pedantic_non_lvalue_loc (loc,
4743 fold_convert_loc (loc, type, arg2));
4744 break;
4745 case LTGT_EXPR:
4746 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4747 return pedantic_non_lvalue_loc (loc,
4748 fold_convert_loc (loc, type, arg1));
4749 break;
4750 default:
4751 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4752 break;
4756 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4757 we might still be able to simplify this. For example,
4758 if C1 is one less or one more than C2, this might have started
4759 out as a MIN or MAX and been transformed by this function.
4760 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4762 if (INTEGRAL_TYPE_P (type)
4763 && TREE_CODE (arg01) == INTEGER_CST
4764 && TREE_CODE (arg2) == INTEGER_CST)
4765 switch (comp_code)
4767 case EQ_EXPR:
4768 if (TREE_CODE (arg1) == INTEGER_CST)
4769 break;
4770 /* We can replace A with C1 in this case. */
4771 arg1 = fold_convert_loc (loc, type, arg01);
4772 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4774 case LT_EXPR:
4775 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4776 MIN_EXPR, to preserve the signedness of the comparison. */
4777 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4778 OEP_ONLY_CONST)
4779 && operand_equal_p (arg01,
4780 const_binop (PLUS_EXPR, arg2,
4781 build_int_cst (type, 1)),
4782 OEP_ONLY_CONST))
4784 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4785 fold_convert_loc (loc, TREE_TYPE (arg00),
4786 arg2));
4787 return pedantic_non_lvalue_loc (loc,
4788 fold_convert_loc (loc, type, tem));
4790 break;
4792 case LE_EXPR:
4793 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4794 as above. */
4795 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4796 OEP_ONLY_CONST)
4797 && operand_equal_p (arg01,
4798 const_binop (MINUS_EXPR, arg2,
4799 build_int_cst (type, 1)),
4800 OEP_ONLY_CONST))
4802 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4803 fold_convert_loc (loc, TREE_TYPE (arg00),
4804 arg2));
4805 return pedantic_non_lvalue_loc (loc,
4806 fold_convert_loc (loc, type, tem));
4808 break;
4810 case GT_EXPR:
4811 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4812 MAX_EXPR, to preserve the signedness of the comparison. */
4813 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4814 OEP_ONLY_CONST)
4815 && operand_equal_p (arg01,
4816 const_binop (MINUS_EXPR, arg2,
4817 build_int_cst (type, 1)),
4818 OEP_ONLY_CONST))
4820 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4821 fold_convert_loc (loc, TREE_TYPE (arg00),
4822 arg2));
4823 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4825 break;
4827 case GE_EXPR:
4828 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4829 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4830 OEP_ONLY_CONST)
4831 && operand_equal_p (arg01,
4832 const_binop (PLUS_EXPR, arg2,
4833 build_int_cst (type, 1)),
4834 OEP_ONLY_CONST))
4836 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4837 fold_convert_loc (loc, TREE_TYPE (arg00),
4838 arg2));
4839 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4841 break;
4842 case NE_EXPR:
4843 break;
4844 default:
4845 gcc_unreachable ();
4848 return NULL_TREE;
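/* Example of the C1/C2 case above, assuming int x: (x < 4 ? x : 3)
   is A op C1 ? A : C2 with C1 == C2 + 1, so it is rewritten back
   into MIN_EXPR <x, 3>.  */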
4853 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4854 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4855 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4856 false) >= 2)
4857 #endif
4859 /* EXP is some logical combination of boolean tests. See if we can
4860 merge it into some range test. Return the new tree if so. */
4862 static tree
4863 fold_range_test (location_t loc, enum tree_code code, tree type,
4864 tree op0, tree op1)
4866 int or_op = (code == TRUTH_ORIF_EXPR
4867 || code == TRUTH_OR_EXPR);
4868 int in0_p, in1_p, in_p;
4869 tree low0, low1, low, high0, high1, high;
4870 bool strict_overflow_p = false;
4871 tree tem, lhs, rhs;
4872 const char * const warnmsg = G_("assuming signed overflow does not occur "
4873 "when simplifying range test");
4875 if (!INTEGRAL_TYPE_P (type))
4876 return 0;
4878 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4879 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4881 /* If this is an OR operation, invert both sides; we will invert
4882 again at the end. */
4883 if (or_op)
4884 in0_p = ! in0_p, in1_p = ! in1_p;
4886 /* If both expressions are the same, if we can merge the ranges, and we
4887 can build the range test, return it or it inverted. If one of the
4888 ranges is always true or always false, consider it to be the same
4889 expression as the other. */
4890 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4891 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4892 in1_p, low1, high1)
4893 && 0 != (tem = (build_range_check (loc, type,
4894 lhs != 0 ? lhs
4895 : rhs != 0 ? rhs : integer_zero_node,
4896 in_p, low, high))))
4898 if (strict_overflow_p)
4899 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4900 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4903 /* On machines where the branch cost is expensive, if this is a
4904 short-circuited branch and the underlying object on both sides
4905 is the same, make a non-short-circuit operation. */
4906 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4907 && lhs != 0 && rhs != 0
4908 && (code == TRUTH_ANDIF_EXPR
4909 || code == TRUTH_ORIF_EXPR)
4910 && operand_equal_p (lhs, rhs, 0))
4912 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4913 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4914 which cases we can't do this. */
4915 if (simple_operand_p (lhs))
4916 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4917 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4918 type, op0, op1);
4920 else if (!lang_hooks.decls.global_bindings_p ()
4921 && !CONTAINS_PLACEHOLDER_P (lhs))
4923 tree common = save_expr (lhs);
4925 if (0 != (lhs = build_range_check (loc, type, common,
4926 or_op ? ! in0_p : in0_p,
4927 low0, high0))
4928 && (0 != (rhs = build_range_check (loc, type, common,
4929 or_op ? ! in1_p : in1_p,
4930 low1, high1))))
4932 if (strict_overflow_p)
4933 fold_overflow_warning (warnmsg,
4934 WARN_STRICT_OVERFLOW_COMPARISON);
4935 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4936 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4937 type, lhs, rhs);
4942 return 0;
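/* Example, assuming a plain int ch: for ch >= '0' && ch <= '9' the
   two make_range calls give + [48, -] and + [-, 57] on the same
   operand, merge_ranges combines them into + [48, 57], and
   build_range_check emits (unsigned) (ch - 48) <= 9.  */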
4945 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
4946    P-bit value.  Arrange things so the extra bits will be set to zero if and
4947    only if C is sign-extended to its full width.  If MASK is nonzero,
4948 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4950 static tree
4951 unextend (tree c, int p, int unsignedp, tree mask)
4953 tree type = TREE_TYPE (c);
4954 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4955 tree temp;
4957 if (p == modesize || unsignedp)
4958 return c;
4960 /* We work by getting just the sign bit into the low-order bit, then
4961 into the high-order bit, then sign-extend. We then XOR that value
4962 with C. */
4963 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
4965 /* We must use a signed type in order to get an arithmetic right shift.
4966 However, we must also avoid introducing accidental overflows, so that
4967 a subsequent call to integer_zerop will work. Hence we must
4968 do the type conversion here. At this point, the constant is either
4969 zero or one, and the conversion to a signed type can never overflow.
4970 We could get an overflow if this conversion is done anywhere else. */
4971 if (TYPE_UNSIGNED (type))
4972 temp = fold_convert (signed_type_for (type), temp);
4974 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4975 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4976 if (mask != 0)
4977 temp = const_binop (BIT_AND_EXPR, temp,
4978 fold_convert (TREE_TYPE (c), mask));
4979 /* If necessary, convert the type back to match the type of C. */
4980 if (TYPE_UNSIGNED (type))
4981 temp = fold_convert (type, temp);
4983 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
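/* Numeric sketch, assuming P == 3, an 8-bit mode (MODESIZE == 8),
   UNSIGNEDP == 0, MASK == 0 and C == 5 (binary 101, field sign bit
   set): the extracted sign bit 1 is shifted left to bit 7 (0x80),
   arithmetic-shifted right by 4 to 0xF8, and XORed with C, giving
   0xFD -- C sign-extended from 3 bits to the full mode width.  */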
4986 /* For an expression that has the form
4987 (A && B) || ~B
4988    or
4989    (A || B) && ~B,
4990    we can drop one of the inner expressions and simplify to
4991    A || ~B
4992    or
4993    A && ~B
4994 LOC is the location of the resulting expression. OP is the inner
4995 logical operation; the left-hand side in the examples above, while CMPOP
4996 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4997 removing a condition that guards another, as in
4998 (A != NULL && A->...) || A == NULL
4999 which we must not transform. If RHS_ONLY is true, only eliminate the
5000 right-most operand of the inner logical operation. */
5002 static tree
5003 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5004 bool rhs_only)
5006 tree type = TREE_TYPE (cmpop);
5007 enum tree_code code = TREE_CODE (cmpop);
5008 enum tree_code truthop_code = TREE_CODE (op);
5009 tree lhs = TREE_OPERAND (op, 0);
5010 tree rhs = TREE_OPERAND (op, 1);
5011 tree orig_lhs = lhs, orig_rhs = rhs;
5012 enum tree_code rhs_code = TREE_CODE (rhs);
5013 enum tree_code lhs_code = TREE_CODE (lhs);
5014 enum tree_code inv_code;
5016 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5017 return NULL_TREE;
5019 if (TREE_CODE_CLASS (code) != tcc_comparison)
5020 return NULL_TREE;
5022 if (rhs_code == truthop_code)
5024 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5025 if (newrhs != NULL_TREE)
5027 rhs = newrhs;
5028 rhs_code = TREE_CODE (rhs);
5031 if (lhs_code == truthop_code && !rhs_only)
5033 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5034 if (newlhs != NULL_TREE)
5036 lhs = newlhs;
5037 lhs_code = TREE_CODE (lhs);
5041 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5042 if (inv_code == rhs_code
5043 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5044 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5045 return lhs;
5046 if (!rhs_only && inv_code == lhs_code
5047 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5048 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5049 return rhs;
5050 if (rhs != orig_rhs || lhs != orig_lhs)
5051 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5052 lhs, rhs);
5053 return NULL_TREE;
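/* Example of a successful merge: in (a > 0 && b > 0) || b <= 0, the
   inner b > 0 is the inverse comparison of the b <= 0 arm, so it is
   dropped and the whole expression becomes a > 0 || b <= 0, assuming
   integer operands and no side effects or traps.  */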
5056 /* Find ways of folding logical expressions of LHS and RHS:
5057 Try to merge two comparisons to the same innermost item.
5058 Look for range tests like "ch >= '0' && ch <= '9'".
5059 Look for combinations of simple terms on machines with expensive branches
5060 and evaluate the RHS unconditionally.
5062 For example, if we have p->a == 2 && p->b == 4 and we can make an
5063 object large enough to span both A and B, we can do this with a comparison
5064 against the object ANDed with the a mask.
5066 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5067 operations to do this with one comparison.
5069    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5070    function and the one above.
5072 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5073 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5075 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5076 two operands.
5078 We return the simplified tree or 0 if no optimization is possible. */
5080 static tree
5081 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5082 tree lhs, tree rhs)
5084 /* If this is the "or" of two comparisons, we can do something if
5085 the comparisons are NE_EXPR. If this is the "and", we can do something
5086 if the comparisons are EQ_EXPR. I.e.,
5087 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5089 WANTED_CODE is this operation code. For single bit fields, we can
5090 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5091 comparison for one-bit fields. */
5093 enum tree_code wanted_code;
5094 enum tree_code lcode, rcode;
5095 tree ll_arg, lr_arg, rl_arg, rr_arg;
5096 tree ll_inner, lr_inner, rl_inner, rr_inner;
5097 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5098 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5099 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5100 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5101 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5102 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5103 machine_mode lnmode, rnmode;
5104 tree ll_mask, lr_mask, rl_mask, rr_mask;
5105 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5106 tree l_const, r_const;
5107 tree lntype, rntype, result;
5108 HOST_WIDE_INT first_bit, end_bit;
5109 int volatilep;
5111 /* Start by getting the comparison codes. Fail if anything is volatile.
5112 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5113 it were surrounded with a NE_EXPR. */
5115 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5116 return 0;
5118 lcode = TREE_CODE (lhs);
5119 rcode = TREE_CODE (rhs);
5121 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5123 lhs = build2 (NE_EXPR, truth_type, lhs,
5124 build_int_cst (TREE_TYPE (lhs), 0));
5125 lcode = NE_EXPR;
5128 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5130 rhs = build2 (NE_EXPR, truth_type, rhs,
5131 build_int_cst (TREE_TYPE (rhs), 0));
5132 rcode = NE_EXPR;
5135 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5136 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5137 return 0;
5139 ll_arg = TREE_OPERAND (lhs, 0);
5140 lr_arg = TREE_OPERAND (lhs, 1);
5141 rl_arg = TREE_OPERAND (rhs, 0);
5142 rr_arg = TREE_OPERAND (rhs, 1);
5144 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5145 if (simple_operand_p (ll_arg)
5146 && simple_operand_p (lr_arg))
5148 if (operand_equal_p (ll_arg, rl_arg, 0)
5149 && operand_equal_p (lr_arg, rr_arg, 0))
5151 result = combine_comparisons (loc, code, lcode, rcode,
5152 truth_type, ll_arg, lr_arg);
5153 if (result)
5154 return result;
5156 else if (operand_equal_p (ll_arg, rr_arg, 0)
5157 && operand_equal_p (lr_arg, rl_arg, 0))
5159 result = combine_comparisons (loc, code, lcode,
5160 swap_tree_comparison (rcode),
5161 truth_type, ll_arg, lr_arg);
5162 if (result)
5163 return result;
5167 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5168 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5170 /* If the RHS can be evaluated unconditionally and its operands are
5171 simple, it wins to evaluate the RHS unconditionally on machines
5172 with expensive branches. In this case, this isn't a comparison
5173 that can be merged. */
5175 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5176 false) >= 2
5177 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5178 && simple_operand_p (rl_arg)
5179 && simple_operand_p (rr_arg))
5181 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5182 if (code == TRUTH_OR_EXPR
5183 && lcode == NE_EXPR && integer_zerop (lr_arg)
5184 && rcode == NE_EXPR && integer_zerop (rr_arg)
5185 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5186 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5187 return build2_loc (loc, NE_EXPR, truth_type,
5188 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5189 ll_arg, rl_arg),
5190 build_int_cst (TREE_TYPE (ll_arg), 0));
5192 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5193 if (code == TRUTH_AND_EXPR
5194 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5195 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5196 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5197 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5198 return build2_loc (loc, EQ_EXPR, truth_type,
5199 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5200 ll_arg, rl_arg),
5201 build_int_cst (TREE_TYPE (ll_arg), 0));
5204 /* See if the comparisons can be merged. Then get all the parameters for
5205 each side. */
5207 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5208 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5209 return 0;
5211 volatilep = 0;
5212 ll_inner = decode_field_reference (loc, ll_arg,
5213 &ll_bitsize, &ll_bitpos, &ll_mode,
5214 &ll_unsignedp, &volatilep, &ll_mask,
5215 &ll_and_mask);
5216 lr_inner = decode_field_reference (loc, lr_arg,
5217 &lr_bitsize, &lr_bitpos, &lr_mode,
5218 &lr_unsignedp, &volatilep, &lr_mask,
5219 &lr_and_mask);
5220 rl_inner = decode_field_reference (loc, rl_arg,
5221 &rl_bitsize, &rl_bitpos, &rl_mode,
5222 &rl_unsignedp, &volatilep, &rl_mask,
5223 &rl_and_mask);
5224 rr_inner = decode_field_reference (loc, rr_arg,
5225 &rr_bitsize, &rr_bitpos, &rr_mode,
5226 &rr_unsignedp, &volatilep, &rr_mask,
5227 &rr_and_mask);
5229	  /* The inner operation on the lhs of each comparison must be the
5230	     same if we are to be able to do anything.
5231 Then see if we have constants. If not, the same must be true for
5232 the rhs's. */
5233 if (volatilep || ll_inner == 0 || rl_inner == 0
5234 || ! operand_equal_p (ll_inner, rl_inner, 0))
5235 return 0;
5237 if (TREE_CODE (lr_arg) == INTEGER_CST
5238 && TREE_CODE (rr_arg) == INTEGER_CST)
5239 l_const = lr_arg, r_const = rr_arg;
5240 else if (lr_inner == 0 || rr_inner == 0
5241 || ! operand_equal_p (lr_inner, rr_inner, 0))
5242 return 0;
5243 else
5244 l_const = r_const = 0;
5246 /* If either comparison code is not correct for our logical operation,
5247 fail. However, we can convert a one-bit comparison against zero into
5248 the opposite comparison against that bit being set in the field. */
5250 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5251 if (lcode != wanted_code)
5253 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5255 /* Make the left operand unsigned, since we are only interested
5256 in the value of one bit. Otherwise we are doing the wrong
5257 thing below. */
5258 ll_unsignedp = 1;
5259 l_const = ll_mask;
5261 else
5262 return 0;
5265 /* This is analogous to the code for l_const above. */
5266 if (rcode != wanted_code)
5268 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5270 rl_unsignedp = 1;
5271 r_const = rl_mask;
5273 else
5274 return 0;
5277 /* See if we can find a mode that contains both fields being compared on
5278 the left. If we can't, fail. Otherwise, update all constants and masks
5279 to be relative to a field of that size. */
5280 first_bit = MIN (ll_bitpos, rl_bitpos);
5281 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5282 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5283 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5284 volatilep);
5285 if (lnmode == VOIDmode)
5286 return 0;
5288 lnbitsize = GET_MODE_BITSIZE (lnmode);
5289 lnbitpos = first_bit & ~ (lnbitsize - 1);
5290 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5291 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5293 if (BYTES_BIG_ENDIAN)
5295 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5296 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5299 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5300 size_int (xll_bitpos));
5301 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5302 size_int (xrl_bitpos));
5304 if (l_const)
5306 l_const = fold_convert_loc (loc, lntype, l_const);
5307 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5308 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5309 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5310 fold_build1_loc (loc, BIT_NOT_EXPR,
5311 lntype, ll_mask))))
5313 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5315 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5318 if (r_const)
5320 r_const = fold_convert_loc (loc, lntype, r_const);
5321 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5322 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5323 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5324 fold_build1_loc (loc, BIT_NOT_EXPR,
5325 lntype, rl_mask))))
5327 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5329 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5333	  /* If the right sides are not constant, do the same for them.  Also,
5334 disallow this optimization if a size or signedness mismatch occurs
5335 between the left and right sides. */
5336 if (l_const == 0)
5338 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5339 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5340 /* Make sure the two fields on the right
5341 correspond to the left without being swapped. */
5342 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5343 return 0;
5345 first_bit = MIN (lr_bitpos, rr_bitpos);
5346 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5347 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5348 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5349 volatilep);
5350 if (rnmode == VOIDmode)
5351 return 0;
5353 rnbitsize = GET_MODE_BITSIZE (rnmode);
5354 rnbitpos = first_bit & ~ (rnbitsize - 1);
5355 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5356 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5358 if (BYTES_BIG_ENDIAN)
5360 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5361 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5364 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5365 rntype, lr_mask),
5366 size_int (xlr_bitpos));
5367 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5368 rntype, rr_mask),
5369 size_int (xrr_bitpos));
5371 /* Make a mask that corresponds to both fields being compared.
5372 Do this for both items being compared. If the operands are the
5373 same size and the bits being compared are in the same position
5374 then we can do this by masking both and comparing the masked
5375 results. */
5376 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5377 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5378 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5380 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5381 ll_unsignedp || rl_unsignedp);
5382 if (! all_ones_mask_p (ll_mask, lnbitsize))
5383 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5385 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5386 lr_unsignedp || rr_unsignedp);
5387 if (! all_ones_mask_p (lr_mask, rnbitsize))
5388 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5390 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5393 /* There is still another way we can do something: If both pairs of
5394 fields being compared are adjacent, we may be able to make a wider
5395 field containing them both.
5397 Note that we still must mask the lhs/rhs expressions. Furthermore,
5398 the mask must be shifted to account for the shift done by
5399 make_bit_field_ref. */
5400 if ((ll_bitsize + ll_bitpos == rl_bitpos
5401 && lr_bitsize + lr_bitpos == rr_bitpos)
5402 || (ll_bitpos == rl_bitpos + rl_bitsize
5403 && lr_bitpos == rr_bitpos + rr_bitsize))
5405 tree type;
5407 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5408 ll_bitsize + rl_bitsize,
5409 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5410 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5411 lr_bitsize + rr_bitsize,
5412 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5414 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5415 size_int (MIN (xll_bitpos, xrl_bitpos)));
5416 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5417 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5419 /* Convert to the smaller type before masking out unwanted bits. */
5420 type = lntype;
5421 if (lntype != rntype)
5423 if (lnbitsize > rnbitsize)
5425 lhs = fold_convert_loc (loc, rntype, lhs);
5426 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5427 type = rntype;
5429 else if (lnbitsize < rnbitsize)
5431 rhs = fold_convert_loc (loc, lntype, rhs);
5432 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5433 type = lntype;
5437 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5438 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5440 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5441 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5443 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5446 return 0;
5449 /* Handle the case of comparisons with constants. If there is something in
5450 common between the masks, those bits of the constants must be the same.
5451 If not, the condition is always false. Test for this to avoid generating
5452 incorrect code below. */
5453 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5454 if (! integer_zerop (result)
5455 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5456 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5458 if (wanted_code == NE_EXPR)
5460 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5461 return constant_boolean_node (true, truth_type);
5463 else
5465 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5466 return constant_boolean_node (false, truth_type);
5470 /* Construct the expression we will return. First get the component
5471 reference we will make. Unless the mask is all ones the width of
5472 that field, perform the mask operation. Then compare with the
5473 merged constant. */
5474 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5475 ll_unsignedp || rl_unsignedp);
5477 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5478 if (! all_ones_mask_p (ll_mask, lnbitsize))
5479 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5481 return build2_loc (loc, wanted_code, truth_type, result,
5482 const_binop (BIT_IOR_EXPR, l_const, r_const));
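/* Editor's sketch (not part of this file, guarded out of compilation):
   a hypothetical standalone demo of the merge above -- two equality
   tests on adjacent bit-fields behave like one masked compare of the
   containing byte.  Assumes an x86-style little-endian bit-field
   layout.  */
#if 0
#include <assert.h>
#include <string.h>

struct two_fields { unsigned a : 4; unsigned b : 4; };

int
main (void)
{
  for (unsigned v = 0; v < 256; v++)
    {
      struct two_fields x = { v & 0xf, (v >> 4) & 0xf };
      unsigned char raw;
      memcpy (&raw, &x, 1);
      /* Separate tests on the two fields...  */
      int separate = (x.a == 3 && x.b == 5);
      /* ...versus one compare against the merged constant.  */
      int merged = (raw == ((5u << 4) | 3u));
      assert (separate == merged);
    }
  return 0;
}
#endif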
5485 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5486 constant. */
5488 static tree
5489 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5490 tree op0, tree op1)
5492 tree arg0 = op0;
5493 enum tree_code op_code;
5494 tree comp_const;
5495 tree minmax_const;
5496 int consts_equal, consts_lt;
5497 tree inner;
5499 STRIP_SIGN_NOPS (arg0);
5501 op_code = TREE_CODE (arg0);
5502 minmax_const = TREE_OPERAND (arg0, 1);
5503 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5504 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5505 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5506 inner = TREE_OPERAND (arg0, 0);
5508 /* If something does not permit us to optimize, return the original tree. */
5509 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5510 || TREE_CODE (comp_const) != INTEGER_CST
5511 || TREE_OVERFLOW (comp_const)
5512 || TREE_CODE (minmax_const) != INTEGER_CST
5513 || TREE_OVERFLOW (minmax_const))
5514 return NULL_TREE;
5516 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5517 and GT_EXPR, doing the rest with recursive calls using logical
5518 simplifications. */
5519 switch (code)
5521 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5523 tree tem
5524 = optimize_minmax_comparison (loc,
5525 invert_tree_comparison (code, false),
5526 type, op0, op1);
5527 if (tem)
5528 return invert_truthvalue_loc (loc, tem);
5529 return NULL_TREE;
5532 case GE_EXPR:
5533 return
5534 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5535 optimize_minmax_comparison
5536 (loc, EQ_EXPR, type, arg0, comp_const),
5537 optimize_minmax_comparison
5538 (loc, GT_EXPR, type, arg0, comp_const));
5540 case EQ_EXPR:
5541 if (op_code == MAX_EXPR && consts_equal)
5542 /* MAX (X, 0) == 0 -> X <= 0 */
5543 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5545 else if (op_code == MAX_EXPR && consts_lt)
5546 /* MAX (X, 0) == 5 -> X == 5 */
5547 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5549 else if (op_code == MAX_EXPR)
5550 /* MAX (X, 0) == -1 -> false */
5551 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5553 else if (consts_equal)
5554 /* MIN (X, 0) == 0 -> X >= 0 */
5555 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5557 else if (consts_lt)
5558 /* MIN (X, 0) == 5 -> false */
5559 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5561 else
5562 /* MIN (X, 0) == -1 -> X == -1 */
5563 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5565 case GT_EXPR:
5566 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5567 /* MAX (X, 0) > 0 -> X > 0
5568 MAX (X, 0) > 5 -> X > 5 */
5569 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5571 else if (op_code == MAX_EXPR)
5572 /* MAX (X, 0) > -1 -> true */
5573 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5575 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5576 /* MIN (X, 0) > 0 -> false
5577 MIN (X, 0) > 5 -> false */
5578 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5580 else
5581 /* MIN (X, 0) > -1 -> X > -1 */
5582 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5584 default:
5585 return NULL_TREE;
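/* Editor's sketch (not compiled here): a standalone check of a few of
   the MIN/MAX identities above over a small range.  imax and imin are
   hypothetical stand-ins for MAX_EXPR and MIN_EXPR.  */
#if 0
#include <assert.h>

static int imax (int a, int b) { return a > b ? a : b; }
static int imin (int a, int b) { return a < b ? a : b; }

int
main (void)
{
  for (int x = -10; x <= 10; x++)
    {
      assert ((imax (x, 0) == 0) == (x <= 0));   /* MAX (X, 0) == 0 -> X <= 0 */
      assert (imax (x, 0) > -1);                 /* MAX (X, 0) > -1 -> true */
      assert ((imin (x, 0) == 0) == (x >= 0));   /* MIN (X, 0) == 0 -> X >= 0 */
      assert (!(imin (x, 0) > 0));               /* MIN (X, 0) > 0  -> false */
    }
  return 0;
}
#endif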
5589 /* T is an integer expression that is being multiplied, divided, or taken a
5590 modulus (CODE says which and what kind of divide or modulus) by a
5591 constant C. See if we can eliminate that operation by folding it with
5592 other operations already in T. WIDE_TYPE, if non-null, is a type that
5593 should be used for the computation if wider than our type.
5595 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5596 (X * 2) + (Y * 4). We must, however, be assured that either the original
5597 expression would not overflow or that overflow is undefined for the type
5598 in the language in question.
5600 If we return a non-null expression, it is an equivalent form of the
5601 original computation, but need not be in the original type.
5603 We set *STRICT_OVERFLOW_P to true if the return value depends on
5604 signed overflow being undefined. Otherwise we do not change
5605 *STRICT_OVERFLOW_P. */
5607 static tree
5608 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5609 bool *strict_overflow_p)
5611 /* To avoid exponential search depth, refuse to allow recursion past
5612 three levels. Beyond that (1) it's highly unlikely that we'll find
5613 something interesting and (2) we've probably processed it before
5614 when we built the inner expression. */
5616 static int depth;
5617 tree ret;
5619 if (depth > 3)
5620 return NULL;
5622 depth++;
5623 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5624 depth--;
5626 return ret;
5629 static tree
5630 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5631 bool *strict_overflow_p)
5633 tree type = TREE_TYPE (t);
5634 enum tree_code tcode = TREE_CODE (t);
5635 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5636 > GET_MODE_SIZE (TYPE_MODE (type)))
5637 ? wide_type : type);
5638 tree t1, t2;
5639 int same_p = tcode == code;
5640 tree op0 = NULL_TREE, op1 = NULL_TREE;
5641 bool sub_strict_overflow_p;
5643 /* Don't deal with constants of zero here; they confuse the code below. */
5644 if (integer_zerop (c))
5645 return NULL_TREE;
5647 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5648 op0 = TREE_OPERAND (t, 0);
5650 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5651 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5653 /* Note that we need not handle conditional operations here since fold
5654 already handles those cases. So just do arithmetic here. */
5655 switch (tcode)
5657 case INTEGER_CST:
5658 /* For a constant, we can always simplify if we are a multiply
5659 or (for divide and modulus) if it is a multiple of our constant. */
5660 if (code == MULT_EXPR
5661 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5662 return const_binop (code, fold_convert (ctype, t),
5663 fold_convert (ctype, c));
5664 break;
5666 CASE_CONVERT: case NON_LVALUE_EXPR:
5667 /* If op0 is an expression ... */
5668 if ((COMPARISON_CLASS_P (op0)
5669 || UNARY_CLASS_P (op0)
5670 || BINARY_CLASS_P (op0)
5671 || VL_EXP_CLASS_P (op0)
5672 || EXPRESSION_CLASS_P (op0))
5673 /* ... and has wrapping overflow, and its type is smaller
5674 than ctype, then we cannot pass through as widening. */
5675 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5676 && (TYPE_PRECISION (ctype)
5677 > TYPE_PRECISION (TREE_TYPE (op0))))
5678 /* ... or this is a truncation (t is narrower than op0),
5679 then we cannot pass through this narrowing. */
5680 || (TYPE_PRECISION (type)
5681 < TYPE_PRECISION (TREE_TYPE (op0)))
5682 /* ... or signedness changes for division or modulus,
5683 then we cannot pass through this conversion. */
5684 || (code != MULT_EXPR
5685 && (TYPE_UNSIGNED (ctype)
5686 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5687 /* ... or has undefined overflow while the converted to
5688 type has not, we cannot do the operation in the inner type
5689 as that would introduce undefined overflow. */
5690 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5691 && !TYPE_OVERFLOW_UNDEFINED (type))))
5692 break;
5694 /* Pass the constant down and see if we can make a simplification. If
5695 we can, replace this expression with the inner simplification for
5696 possible later conversion to our or some other type. */
5697 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5698 && TREE_CODE (t2) == INTEGER_CST
5699 && !TREE_OVERFLOW (t2)
5700 && (0 != (t1 = extract_muldiv (op0, t2, code,
5701 code == MULT_EXPR
5702 ? ctype : NULL_TREE,
5703 strict_overflow_p))))
5704 return t1;
5705 break;
5707 case ABS_EXPR:
5708 /* If widening the type changes it from signed to unsigned, then we
5709 must avoid building ABS_EXPR itself as unsigned. */
5710 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5712 tree cstype = (*signed_type_for) (ctype);
5713 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5714 != 0)
5716 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5717 return fold_convert (ctype, t1);
5719 break;
5721 /* If the constant is negative, we cannot simplify this. */
5722 if (tree_int_cst_sgn (c) == -1)
5723 break;
5724 /* FALLTHROUGH */
5725 case NEGATE_EXPR:
5726 /* For division and modulus, type can't be unsigned, as e.g.
5727 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5728 For signed types, even with wrapping overflow, this is fine. */
5729 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5730 break;
5731 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5732 != 0)
5733 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5734 break;
5736 case MIN_EXPR: case MAX_EXPR:
5737 /* If widening the type changes the signedness, then we can't perform
5738 this optimization as that changes the result. */
5739 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5740 break;
5742 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5743 sub_strict_overflow_p = false;
5744 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5745 &sub_strict_overflow_p)) != 0
5746 && (t2 = extract_muldiv (op1, c, code, wide_type,
5747 &sub_strict_overflow_p)) != 0)
5749 if (tree_int_cst_sgn (c) < 0)
5750 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5751 if (sub_strict_overflow_p)
5752 *strict_overflow_p = true;
5753 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5754 fold_convert (ctype, t2));
5756 break;
5758 case LSHIFT_EXPR: case RSHIFT_EXPR:
5759 /* If the second operand is constant, this is a multiplication
5760 or floor division, by a power of two, so we can treat it that
5761 way unless the multiplier or divisor overflows. Signed
5762 left-shift overflow is implementation-defined rather than
5763 undefined in C90, so do not convert signed left shift into
5764 multiplication. */
5765 if (TREE_CODE (op1) == INTEGER_CST
5766 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5767 /* const_binop may not detect overflow correctly,
5768 so check for it explicitly here. */
5769 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5770 && 0 != (t1 = fold_convert (ctype,
5771 const_binop (LSHIFT_EXPR,
5772 size_one_node,
5773 op1)))
5774 && !TREE_OVERFLOW (t1))
5775 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5776 ? MULT_EXPR : FLOOR_DIV_EXPR,
5777 ctype,
5778 fold_convert (ctype, op0),
5779 t1),
5780 c, code, wide_type, strict_overflow_p);
5781 break;
5783 case PLUS_EXPR: case MINUS_EXPR:
5784 /* See if we can eliminate the operation on both sides. If we can, we
5785 can return a new PLUS or MINUS. If we can't, the only remaining
5786 cases where we can do anything are if the second operand is a
5787 constant. */
5788 sub_strict_overflow_p = false;
5789 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5790 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5791 if (t1 != 0 && t2 != 0
5792 && (code == MULT_EXPR
5793 /* If not multiplication, we can only do this if both operands
5794 are divisible by c. */
5795 || (multiple_of_p (ctype, op0, c)
5796 && multiple_of_p (ctype, op1, c))))
5798 if (sub_strict_overflow_p)
5799 *strict_overflow_p = true;
5800 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5801 fold_convert (ctype, t2));
5804 /* If this was a subtraction, negate OP1 and set it to be an addition.
5805 This simplifies the logic below. */
5806 if (tcode == MINUS_EXPR)
5808 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5809 /* If OP1 was not easily negatable, the constant may be OP0. */
5810 if (TREE_CODE (op0) == INTEGER_CST)
5812 tree tem = op0;
5813 op0 = op1;
5814 op1 = tem;
5815 tem = t1;
5816 t1 = t2;
5817 t2 = tem;
5821 if (TREE_CODE (op1) != INTEGER_CST)
5822 break;
5824 /* If either OP1 or C is negative, this optimization is not safe for
5825 some of the division and remainder types while for others we need
5826 to change the code. */
5827 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5829 if (code == CEIL_DIV_EXPR)
5830 code = FLOOR_DIV_EXPR;
5831 else if (code == FLOOR_DIV_EXPR)
5832 code = CEIL_DIV_EXPR;
5833 else if (code != MULT_EXPR
5834 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5835 break;
5838 /* If it's a multiply or a division/modulus operation of a multiple
5839 of our constant, do the operation and verify it doesn't overflow. */
5840 if (code == MULT_EXPR
5841 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5843 op1 = const_binop (code, fold_convert (ctype, op1),
5844 fold_convert (ctype, c));
5845 /* We allow the constant to overflow with wrapping semantics. */
5846 if (op1 == 0
5847 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5848 break;
5850 else
5851 break;
5853 /* If we have an unsigned type, we cannot widen the operation since it
5854 will change the result if the original computation overflowed. */
5855 if (TYPE_UNSIGNED (ctype) && ctype != type)
5856 break;
5858 /* If we were able to eliminate our operation from the first side,
5859 apply our operation to the second side and reform the PLUS. */
5860 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5861 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5863 /* The last case is if we are a multiply. In that case, we can
5864 apply the distributive law to commute the multiply and addition
5865 if the multiplication of the constants doesn't overflow
5866 and overflow is defined. With undefined overflow
5867 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5868 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5869 return fold_build2 (tcode, ctype,
5870 fold_build2 (code, ctype,
5871 fold_convert (ctype, op0),
5872 fold_convert (ctype, c)),
5873 op1);
5875 break;
5877 case MULT_EXPR:
5878 /* We have a special case here if we are doing something like
5879 (C * 8) % 4 since we know that's zero. */
5880 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5881 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5882 /* If the multiplication can overflow we cannot optimize this. */
5883 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5884 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5885 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5887 *strict_overflow_p = true;
5888 return omit_one_operand (type, integer_zero_node, op0);
5891 /* ... fall through ... */
5893 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5894 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5895 /* If we can extract our operation from the LHS, do so and return a
5896 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5897 do something only if the second operand is a constant. */
5898 if (same_p
5899 && (t1 = extract_muldiv (op0, c, code, wide_type,
5900 strict_overflow_p)) != 0)
5901 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5902 fold_convert (ctype, op1));
5903 else if (tcode == MULT_EXPR && code == MULT_EXPR
5904 && (t1 = extract_muldiv (op1, c, code, wide_type,
5905 strict_overflow_p)) != 0)
5906 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5907 fold_convert (ctype, t1));
5908 else if (TREE_CODE (op1) != INTEGER_CST)
5909 return 0;
5911 /* If these are the same operation types, we can associate them
5912 assuming no overflow. */
5913 if (tcode == code)
5915 bool overflow_p = false;
5916 bool overflow_mul_p;
5917 signop sign = TYPE_SIGN (ctype);
5918 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5919 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5920 if (overflow_mul_p
5921 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5922 overflow_p = true;
5923 if (!overflow_p)
5924 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5925 wide_int_to_tree (ctype, mul));
5928 /* If these operations "cancel" each other, we have the main
5929 optimizations of this pass, which occur when either constant is a
5930 multiple of the other, in which case we replace this with either an
5931 operation of CODE or TCODE.
5933 If we have an unsigned type, we cannot do this since it will change
5934 the result if the original computation overflowed. */
5935 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5936 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5937 || (tcode == MULT_EXPR
5938 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5939 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5940 && code != MULT_EXPR)))
5942 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5944 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5945 *strict_overflow_p = true;
5946 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5947 fold_convert (ctype,
5948 const_binop (TRUNC_DIV_EXPR,
5949 op1, c)));
5951 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
5953 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5954 *strict_overflow_p = true;
5955 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5956 fold_convert (ctype,
5957 const_binop (TRUNC_DIV_EXPR,
5958 c, op1)));
5961 break;
5963 default:
5964 break;
5967 return 0;
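/* Editor's sketch (not compiled here): a standalone check of the
   worked example from the comment above extract_muldiv, with ranges
   kept small so no signed overflow occurs.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (long x = -100; x <= 100; x++)
    for (long y = -100; y <= 100; y++)
      /* (X * 8 + Y * 16) / 4 folds to X * 2 + Y * 4.  */
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}
#endif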
5970 /* Return a node which has the indicated constant VALUE (either 0 or
5971 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5972 and is of the indicated TYPE. */
5974 tree
5975 constant_boolean_node (bool value, tree type)
5977 if (type == integer_type_node)
5978 return value ? integer_one_node : integer_zero_node;
5979 else if (type == boolean_type_node)
5980 return value ? boolean_true_node : boolean_false_node;
5981 else if (TREE_CODE (type) == VECTOR_TYPE)
5982 return build_vector_from_val (type,
5983 build_int_cst (TREE_TYPE (type),
5984 value ? -1 : 0));
5985 else
5986 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5990 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5991 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5992 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5993 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5994 COND is the first argument to CODE; otherwise (as in the example
5995 given here), it is the second argument. TYPE is the type of the
5996 original expression. Return NULL_TREE if no simplification is
5997 possible. */
5999 static tree
6000 fold_binary_op_with_conditional_arg (location_t loc,
6001 enum tree_code code,
6002 tree type, tree op0, tree op1,
6003 tree cond, tree arg, int cond_first_p)
6005 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6006 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6007 tree test, true_value, false_value;
6008 tree lhs = NULL_TREE;
6009 tree rhs = NULL_TREE;
6010 enum tree_code cond_code = COND_EXPR;
6012 if (TREE_CODE (cond) == COND_EXPR
6013 || TREE_CODE (cond) == VEC_COND_EXPR)
6015 test = TREE_OPERAND (cond, 0);
6016 true_value = TREE_OPERAND (cond, 1);
6017 false_value = TREE_OPERAND (cond, 2);
6018 /* If this operand throws an exception, then it does not make
6019 sense to try to perform a logical or arithmetic operation
6020 involving it. */
6021 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6022 lhs = true_value;
6023 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6024 rhs = false_value;
6026 else
6028 tree testtype = TREE_TYPE (cond);
6029 test = cond;
6030 true_value = constant_boolean_node (true, testtype);
6031 false_value = constant_boolean_node (false, testtype);
6034 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6035 cond_code = VEC_COND_EXPR;
6037 /* This transformation is only worthwhile if we don't have to wrap ARG
6038 in a SAVE_EXPR and the operation can be simplified without recursing
6039 on at least one of the branches once it's pushed inside the COND_EXPR. */
6040 if (!TREE_CONSTANT (arg)
6041 && (TREE_SIDE_EFFECTS (arg)
6042 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6043 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6044 return NULL_TREE;
6046 arg = fold_convert_loc (loc, arg_type, arg);
6047 if (lhs == 0)
6049 true_value = fold_convert_loc (loc, cond_type, true_value);
6050 if (cond_first_p)
6051 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6052 else
6053 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6055 if (rhs == 0)
6057 false_value = fold_convert_loc (loc, cond_type, false_value);
6058 if (cond_first_p)
6059 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6060 else
6061 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6064 /* Check that we have simplified at least one of the branches. */
6065 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6066 return NULL_TREE;
6068 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
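/* Editor's sketch (not compiled here): the distribution above,
   observed directly in C -- a + (b ? x : y) evaluates the same as
   b ? (a + x) : (a + y).  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 7, x = 1, y = 2;
  for (int b = 0; b <= 1; b++)
    assert (a + (b ? x : y) == (b ? a + x : a + y));
  return 0;
}
#endif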
6072 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6074 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6075 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6076 ADDEND is the same as X.
6078 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6079 and finite. The problematic cases are when X is zero, and its mode
6080 has signed zeros. In the case of rounding towards -infinity,
6081 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6082 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6084 bool
6085 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6087 if (!real_zerop (addend))
6088 return false;
6090 /* Don't allow the fold with -fsignaling-nans. */
6091 if (HONOR_SNANS (TYPE_MODE (type)))
6092 return false;
6094 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6095 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6096 return true;
6098 /* In a vector or complex, we would need to check the sign of all zeros. */
6099 if (TREE_CODE (addend) != REAL_CST)
6100 return false;
6102 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6103 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6104 negate = !negate;
6106 /* The mode has signed zeros, and we have to honor their sign.
6107 In this situation, there is only one case we can return true for.
6108 X - 0 is the same as X unless rounding towards -infinity is
6109 supported. */
6110 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
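/* Editor's sketch (not compiled here): why only the subtraction fold
   is generally safe when signed zeros are honored.  Assumes IEEE 754
   arithmetic in the default round-to-nearest mode.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double nz = -0.0;
  /* X - 0.0 preserves the sign of X even for X == -0.0...  */
  assert (signbit (nz - 0.0));
  /* ...but X + 0.0 does not: -0.0 + 0.0 is +0.0, so X + 0.0 cannot
     be folded to X when the sign of zero matters.  */
  assert (!signbit (nz + 0.0));
  return 0;
}
#endif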
6113 /* Subroutine of fold() that checks comparisons of built-in math
6114 functions against real constants.
6116 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6117 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6118 is the type of the result and ARG0 and ARG1 are the operands of the
6119 comparison. ARG1 must be a TREE_REAL_CST.
6121 The function returns the constant folded tree if a simplification
6122 can be made, and NULL_TREE otherwise. */
6124 static tree
6125 fold_mathfn_compare (location_t loc,
6126 enum built_in_function fcode, enum tree_code code,
6127 tree type, tree arg0, tree arg1)
6129 REAL_VALUE_TYPE c;
6131 if (BUILTIN_SQRT_P (fcode))
6133 tree arg = CALL_EXPR_ARG (arg0, 0);
6134 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6136 c = TREE_REAL_CST (arg1);
6137 if (REAL_VALUE_NEGATIVE (c))
6139 /* sqrt(x) < y is always false, if y is negative. */
6140 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6141 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6143 /* sqrt(x) > y is always true, if y is negative and we
6144 don't care about NaNs, i.e. negative values of x. */
6145 if (code == NE_EXPR || !HONOR_NANS (mode))
6146 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6148 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6149 return fold_build2_loc (loc, GE_EXPR, type, arg,
6150 build_real (TREE_TYPE (arg), dconst0));
6152 else if (code == GT_EXPR || code == GE_EXPR)
6154 REAL_VALUE_TYPE c2;
6156 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6157 real_convert (&c2, mode, &c2);
6159 if (REAL_VALUE_ISINF (c2))
6161 /* sqrt(x) > y is x == +Inf, when y is very large. */
6162 if (HONOR_INFINITIES (mode))
6163 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6164 build_real (TREE_TYPE (arg), c2));
6166 /* sqrt(x) > y is always false, when y is very large
6167 and we don't care about infinities. */
6168 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6171 /* sqrt(x) > c is the same as x > c*c. */
6172 return fold_build2_loc (loc, code, type, arg,
6173 build_real (TREE_TYPE (arg), c2));
6175 else if (code == LT_EXPR || code == LE_EXPR)
6177 REAL_VALUE_TYPE c2;
6179 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6180 real_convert (&c2, mode, &c2);
6182 if (REAL_VALUE_ISINF (c2))
6184 /* sqrt(x) < y is always true, when y is a very large
6185 value and we don't care about NaNs or Infinities. */
6186 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6187 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6189 /* sqrt(x) < y is x != +Inf when y is very large and we
6190 don't care about NaNs. */
6191 if (! HONOR_NANS (mode))
6192 return fold_build2_loc (loc, NE_EXPR, type, arg,
6193 build_real (TREE_TYPE (arg), c2));
6195 /* sqrt(x) < y is x >= 0 when y is very large and we
6196 don't care about Infinities. */
6197 if (! HONOR_INFINITIES (mode))
6198 return fold_build2_loc (loc, GE_EXPR, type, arg,
6199 build_real (TREE_TYPE (arg), dconst0));
6201 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6202 arg = save_expr (arg);
6203 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6204 fold_build2_loc (loc, GE_EXPR, type, arg,
6205 build_real (TREE_TYPE (arg),
6206 dconst0)),
6207 fold_build2_loc (loc, NE_EXPR, type, arg,
6208 build_real (TREE_TYPE (arg),
6209 c2)));
6212 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6213 if (! HONOR_NANS (mode))
6214 return fold_build2_loc (loc, code, type, arg,
6215 build_real (TREE_TYPE (arg), c2));
6217 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6218 arg = save_expr (arg);
6219 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6220 fold_build2_loc (loc, GE_EXPR, type, arg,
6221 build_real (TREE_TYPE (arg),
6222 dconst0)),
6223 fold_build2_loc (loc, code, type, arg,
6224 build_real (TREE_TYPE (arg),
6225 c2)));
6229 return NULL_TREE;
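/* Editor's sketch (not compiled here): the basic sqrt fold above, for
   nonnegative x and a constant c whose square is exactly representable;
   the sample points deliberately avoid the rounding boundary at c*c.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  const double c = 3.0;               /* c * c == 9.0 exactly */
  for (double x = 0.0; x <= 100.0; x += 0.25)
    assert ((sqrt (x) > c) == (x > c * c));
  return 0;
}
#endif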
6232 /* Subroutine of fold() that optimizes comparisons against Infinities,
6233 either +Inf or -Inf.
6235 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6236 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6237 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6239 The function returns the constant folded tree if a simplification
6240 can be made, and NULL_TREE otherwise. */
6242 static tree
6243 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6244 tree arg0, tree arg1)
6246 machine_mode mode;
6247 REAL_VALUE_TYPE max;
6248 tree temp;
6249 bool neg;
6251 mode = TYPE_MODE (TREE_TYPE (arg0));
6253 /* For negative infinity swap the sense of the comparison. */
6254 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6255 if (neg)
6256 code = swap_tree_comparison (code);
6258 switch (code)
6260 case GT_EXPR:
6261 /* x > +Inf is always false, if we ignore sNaNs. */
6262 if (HONOR_SNANS (mode))
6263 return NULL_TREE;
6264 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6266 case LE_EXPR:
6267 /* x <= +Inf is always true, if we don't care about NaNs. */
6268 if (! HONOR_NANS (mode))
6269 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6271 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6272 arg0 = save_expr (arg0);
6273 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6275 case EQ_EXPR:
6276 case GE_EXPR:
6277 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6278 real_maxval (&max, neg, mode);
6279 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6280 arg0, build_real (TREE_TYPE (arg0), max));
6282 case LT_EXPR:
6283 /* x < +Inf is always equal to x <= DBL_MAX. */
6284 real_maxval (&max, neg, mode);
6285 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6286 arg0, build_real (TREE_TYPE (arg0), max));
6288 case NE_EXPR:
6289 /* x != +Inf is always equal to !(x > DBL_MAX). */
6290 real_maxval (&max, neg, mode);
6291 if (! HONOR_NANS (mode))
6292 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6293 arg0, build_real (TREE_TYPE (arg0), max));
6295 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6296 arg0, build_real (TREE_TYPE (arg0), max));
6297 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6299 default:
6300 break;
6303 return NULL_TREE;
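/* Editor's sketch (not compiled here): the +Inf folds above for
   non-NaN operands -- x < +Inf behaves like x <= DBL_MAX, and
   x >= +Inf like x > DBL_MAX.  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double vals[] = { -INFINITY, -1.0, 0.0, 1.0, DBL_MAX, INFINITY };
  for (unsigned i = 0; i < sizeof vals / sizeof vals[0]; i++)
    {
      assert ((vals[i] < INFINITY) == (vals[i] <= DBL_MAX));
      assert ((vals[i] >= INFINITY) == (vals[i] > DBL_MAX));
    }
  return 0;
}
#endif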
6306 /* Subroutine of fold() that optimizes comparisons of a division by
6307 a nonzero integer constant against an integer constant, i.e.
6308 X/C1 op C2.
6310 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6311 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6312 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6314 The function returns the constant folded tree if a simplification
6315 can be made, and NULL_TREE otherwise. */
6317 static tree
6318 fold_div_compare (location_t loc,
6319 enum tree_code code, tree type, tree arg0, tree arg1)
6321 tree prod, tmp, hi, lo;
6322 tree arg00 = TREE_OPERAND (arg0, 0);
6323 tree arg01 = TREE_OPERAND (arg0, 1);
6324 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6325 bool neg_overflow = false;
6326 bool overflow;
6328 /* We have to do this the hard way to detect unsigned overflow.
6329 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6330 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6331 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6332 neg_overflow = false;
6334 if (sign == UNSIGNED)
6336 tmp = int_const_binop (MINUS_EXPR, arg01,
6337 build_int_cst (TREE_TYPE (arg01), 1));
6338 lo = prod;
6340 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6341 val = wi::add (prod, tmp, sign, &overflow);
6342 hi = force_fit_type (TREE_TYPE (arg00), val,
6343 -1, overflow | TREE_OVERFLOW (prod));
6345 else if (tree_int_cst_sgn (arg01) >= 0)
6347 tmp = int_const_binop (MINUS_EXPR, arg01,
6348 build_int_cst (TREE_TYPE (arg01), 1));
6349 switch (tree_int_cst_sgn (arg1))
6351 case -1:
6352 neg_overflow = true;
6353 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6354 hi = prod;
6355 break;
6357 case 0:
6358 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6359 hi = tmp;
6360 break;
6362 case 1:
6363 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6364 lo = prod;
6365 break;
6367 default:
6368 gcc_unreachable ();
6371 else
6373 /* A negative divisor reverses the relational operators. */
6374 code = swap_tree_comparison (code);
6376 tmp = int_const_binop (PLUS_EXPR, arg01,
6377 build_int_cst (TREE_TYPE (arg01), 1));
6378 switch (tree_int_cst_sgn (arg1))
6380 case -1:
6381 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6382 lo = prod;
6383 break;
6385 case 0:
6386 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6387 lo = tmp;
6388 break;
6390 case 1:
6391 neg_overflow = true;
6392 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6393 hi = prod;
6394 break;
6396 default:
6397 gcc_unreachable ();
6401 switch (code)
6403 case EQ_EXPR:
6404 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6405 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6406 if (TREE_OVERFLOW (hi))
6407 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6408 if (TREE_OVERFLOW (lo))
6409 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6410 return build_range_check (loc, type, arg00, 1, lo, hi);
6412 case NE_EXPR:
6413 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6414 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6415 if (TREE_OVERFLOW (hi))
6416 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6417 if (TREE_OVERFLOW (lo))
6418 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6419 return build_range_check (loc, type, arg00, 0, lo, hi);
6421 case LT_EXPR:
6422 if (TREE_OVERFLOW (lo))
6424 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6425 return omit_one_operand_loc (loc, type, tmp, arg00);
6427 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6429 case LE_EXPR:
6430 if (TREE_OVERFLOW (hi))
6432 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6433 return omit_one_operand_loc (loc, type, tmp, arg00);
6435 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6437 case GT_EXPR:
6438 if (TREE_OVERFLOW (hi))
6440 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6441 return omit_one_operand_loc (loc, type, tmp, arg00);
6443 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6445 case GE_EXPR:
6446 if (TREE_OVERFLOW (lo))
6448 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6449 return omit_one_operand_loc (loc, type, tmp, arg00);
6451 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6453 default:
6454 break;
6457 return NULL_TREE;
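/* Editor's sketch (not compiled here): the range check built above --
   with truncating division and positive C1 and C2, X/C1 == C2 holds
   exactly on [C1*C2, C1*C2 + C1 - 1].  */
#if 0
#include <assert.h>

int
main (void)
{
  const int c1 = 4, c2 = 3;
  const int lo = c1 * c2, hi = c1 * c2 + c1 - 1;
  for (int x = -100; x <= 100; x++)
    assert ((x / c1 == c2) == (x >= lo && x <= hi));
  return 0;
}
#endif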
6461 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6462 equality/inequality test, then return a simplified form of the test
6463 using a sign test. Otherwise return NULL. TYPE is the desired
6464 result type. */
6466 static tree
6467 fold_single_bit_test_into_sign_test (location_t loc,
6468 enum tree_code code, tree arg0, tree arg1,
6469 tree result_type)
6471 /* If this is testing a single bit, we can optimize the test. */
6472 if ((code == NE_EXPR || code == EQ_EXPR)
6473 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6474 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6476 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6477 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6478 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6480 if (arg00 != NULL_TREE
6481 /* This is only a win if casting to a signed type is cheap,
6482 i.e. when arg00's type is not a partial mode. */
6483 && TYPE_PRECISION (TREE_TYPE (arg00))
6484 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6486 tree stype = signed_type_for (TREE_TYPE (arg00));
6487 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6488 result_type,
6489 fold_convert_loc (loc, stype, arg00),
6490 build_int_cst (stype, 0));
6494 return NULL_TREE;
6497 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6498 equality/inequality test, then return a simplified form of
6499 the test using shifts and logical operations. Otherwise return
6500 NULL. TYPE is the desired result type. */
6502 tree
6503 fold_single_bit_test (location_t loc, enum tree_code code,
6504 tree arg0, tree arg1, tree result_type)
6506 /* If this is testing a single bit, we can optimize the test. */
6507 if ((code == NE_EXPR || code == EQ_EXPR)
6508 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6509 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6511 tree inner = TREE_OPERAND (arg0, 0);
6512 tree type = TREE_TYPE (arg0);
6513 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6514 machine_mode operand_mode = TYPE_MODE (type);
6515 int ops_unsigned;
6516 tree signed_type, unsigned_type, intermediate_type;
6517 tree tem, one;
6519 /* First, see if we can fold the single bit test into a sign-bit
6520 test. */
6521 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6522 result_type);
6523 if (tem)
6524 return tem;
6526 /* Otherwise we have (A & C) != 0 where C is a single bit,
6527 convert that into ((A >> C2) & 1), where C2 = log2 (C).
6528 Similarly for (A & C) == 0. */
6530 /* If INNER is a right shift of a constant and it plus BITNUM does
6531 not overflow, adjust BITNUM and INNER. */
6532 if (TREE_CODE (inner) == RSHIFT_EXPR
6533 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6534 && bitnum < TYPE_PRECISION (type)
6535 && wi::ltu_p (TREE_OPERAND (inner, 1),
6536 TYPE_PRECISION (type) - bitnum))
6538 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6539 inner = TREE_OPERAND (inner, 0);
6542 /* If we are going to be able to omit the AND below, we must do our
6543 operations as unsigned. If we must use the AND, we have a choice.
6544 Normally unsigned is faster, but for some machines signed is. */
6545 #ifdef LOAD_EXTEND_OP
6546 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6547 && !flag_syntax_only) ? 0 : 1;
6548 #else
6549 ops_unsigned = 1;
6550 #endif
6552 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6553 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6554 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6555 inner = fold_convert_loc (loc, intermediate_type, inner);
6557 if (bitnum != 0)
6558 inner = build2 (RSHIFT_EXPR, intermediate_type,
6559 inner, size_int (bitnum));
6561 one = build_int_cst (intermediate_type, 1);
6563 if (code == EQ_EXPR)
6564 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6566 /* Put the AND last so it can combine with more things. */
6567 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6569 /* Make sure to return the proper type. */
6570 inner = fold_convert_loc (loc, result_type, inner);
6572 return inner;
6574 return NULL_TREE;
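/* Editor's sketch (not compiled here): the shift form built by
   fold_single_bit_test -- (A & C) != 0 behaves like (A >> C2) & 1 for
   a single-bit constant C with C2 = log2 (C).  */
#if 0
#include <assert.h>

int
main (void)
{
  const unsigned c2 = 5;
  const unsigned c = 1u << c2;
  for (unsigned a = 0; a < 1024; a++)
    assert (((a & c) != 0) == ((a >> c2) & 1));
  return 0;
}
#endif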
6577 /* Check whether we are allowed to reorder operands arg0 and arg1,
6578 such that the evaluation of arg1 occurs before arg0. */
6580 static bool
6581 reorder_operands_p (const_tree arg0, const_tree arg1)
6583 if (! flag_evaluation_order)
6584 return true;
6585 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6586 return true;
6587 return ! TREE_SIDE_EFFECTS (arg0)
6588 && ! TREE_SIDE_EFFECTS (arg1);
6591 /* Test whether it is preferable to swap two operands, ARG0 and
6592 ARG1, for example because ARG0 is an integer constant and ARG1
6593 isn't. If REORDER is true, only recommend swapping if we can
6594 evaluate the operands in reverse order. */
6596 bool
6597 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6599 if (CONSTANT_CLASS_P (arg1))
6600 return 0;
6601 if (CONSTANT_CLASS_P (arg0))
6602 return 1;
6604 STRIP_SIGN_NOPS (arg0);
6605 STRIP_SIGN_NOPS (arg1);
6607 if (TREE_CONSTANT (arg1))
6608 return 0;
6609 if (TREE_CONSTANT (arg0))
6610 return 1;
6612 if (reorder && flag_evaluation_order
6613 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6614 return 0;
6616 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6617 for commutative and comparison operators. Ensuring a canonical
6618 form allows the optimizers to find additional redundancies without
6619 having to explicitly check for both orderings. */
6620 if (TREE_CODE (arg0) == SSA_NAME
6621 && TREE_CODE (arg1) == SSA_NAME
6622 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6623 return 1;
6625 /* Put SSA_NAMEs last. */
6626 if (TREE_CODE (arg1) == SSA_NAME)
6627 return 0;
6628 if (TREE_CODE (arg0) == SSA_NAME)
6629 return 1;
6631 /* Put variables last. */
6632 if (DECL_P (arg1))
6633 return 0;
6634 if (DECL_P (arg0))
6635 return 1;
6637 return 0;
6640 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6641 ARG0 is extended to a wider type. */
6643 static tree
6644 fold_widened_comparison (location_t loc, enum tree_code code,
6645 tree type, tree arg0, tree arg1)
6647 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6648 tree arg1_unw;
6649 tree shorter_type, outer_type;
6650 tree min, max;
6651 bool above, below;
6653 if (arg0_unw == arg0)
6654 return NULL_TREE;
6655 shorter_type = TREE_TYPE (arg0_unw);
6657 #ifdef HAVE_canonicalize_funcptr_for_compare
6658 /* Disable this optimization if we're casting a function pointer
6659 type on targets that require function pointer canonicalization. */
6660 if (HAVE_canonicalize_funcptr_for_compare
6661 && TREE_CODE (shorter_type) == POINTER_TYPE
6662 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6663 return NULL_TREE;
6664 #endif
6666 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6667 return NULL_TREE;
6669 arg1_unw = get_unwidened (arg1, NULL_TREE);
6671 /* If possible, express the comparison in the shorter mode. */
6672 if ((code == EQ_EXPR || code == NE_EXPR
6673 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6674 && (TREE_TYPE (arg1_unw) == shorter_type
6675 || ((TYPE_PRECISION (shorter_type)
6676 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6677 && (TYPE_UNSIGNED (shorter_type)
6678 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6679 || (TREE_CODE (arg1_unw) == INTEGER_CST
6680 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6681 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6682 && int_fits_type_p (arg1_unw, shorter_type))))
6683 return fold_build2_loc (loc, code, type, arg0_unw,
6684 fold_convert_loc (loc, shorter_type, arg1_unw));
6686 if (TREE_CODE (arg1_unw) != INTEGER_CST
6687 || TREE_CODE (shorter_type) != INTEGER_TYPE
6688 || !int_fits_type_p (arg1_unw, shorter_type))
6689 return NULL_TREE;
6691 /* If we are comparing with an integer that does not fit into the range
6692 of the shorter type, the result is known. */
6693 outer_type = TREE_TYPE (arg1_unw);
6694 min = lower_bound_in_type (outer_type, shorter_type);
6695 max = upper_bound_in_type (outer_type, shorter_type);
6697 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6698 max, arg1_unw));
6699 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6700 arg1_unw, min));
6702 switch (code)
6704 case EQ_EXPR:
6705 if (above || below)
6706 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6707 break;
6709 case NE_EXPR:
6710 if (above || below)
6711 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6712 break;
6714 case LT_EXPR:
6715 case LE_EXPR:
6716 if (above)
6717 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6718 else if (below)
6719 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6721 case GT_EXPR:
6722 case GE_EXPR:
6723 if (above)
6724 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6725 else if (below)
6726 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6728 default:
6729 break;
6732 return NULL_TREE;
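/* Editor's sketch (not compiled here): the known-result case above --
   widening a signed char and comparing against a constant outside
   [SCHAR_MIN, SCHAR_MAX] has a compile-time answer.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  for (int i = SCHAR_MIN; i <= SCHAR_MAX; i++)
    {
      signed char c = (signed char) i;
      assert (!((int) c == 1000));   /* folds to false */
      assert ((int) c < 1000);       /* folds to true */
    }
  return 0;
}
#endif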
6735 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6736 ARG0 just the signedness is changed. */
6738 static tree
6739 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6740 tree arg0, tree arg1)
6742 tree arg0_inner;
6743 tree inner_type, outer_type;
6745 if (!CONVERT_EXPR_P (arg0))
6746 return NULL_TREE;
6748 outer_type = TREE_TYPE (arg0);
6749 arg0_inner = TREE_OPERAND (arg0, 0);
6750 inner_type = TREE_TYPE (arg0_inner);
6752 #ifdef HAVE_canonicalize_funcptr_for_compare
6753 /* Disable this optimization if we're casting a function pointer
6754 type on targets that require function pointer canonicalization. */
6755 if (HAVE_canonicalize_funcptr_for_compare
6756 && TREE_CODE (inner_type) == POINTER_TYPE
6757 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6758 return NULL_TREE;
6759 #endif
6761 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6762 return NULL_TREE;
6764 if (TREE_CODE (arg1) != INTEGER_CST
6765 && !(CONVERT_EXPR_P (arg1)
6766 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6767 return NULL_TREE;
6769 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6770 && code != NE_EXPR
6771 && code != EQ_EXPR)
6772 return NULL_TREE;
6774 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6775 return NULL_TREE;
6777 if (TREE_CODE (arg1) == INTEGER_CST)
6778 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6779 TREE_OVERFLOW (arg1));
6780 else
6781 arg1 = fold_convert_loc (loc, inner_type, arg1);
6783 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6787 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6788 means A >= Y && A != MAX, but in this case we know that
6789 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6791 static tree
6792 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6794 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6796 if (TREE_CODE (bound) == LT_EXPR)
6797 a = TREE_OPERAND (bound, 0);
6798 else if (TREE_CODE (bound) == GT_EXPR)
6799 a = TREE_OPERAND (bound, 1);
6800 else
6801 return NULL_TREE;
6803 typea = TREE_TYPE (a);
6804 if (!INTEGRAL_TYPE_P (typea)
6805 && !POINTER_TYPE_P (typea))
6806 return NULL_TREE;
6808 if (TREE_CODE (ineq) == LT_EXPR)
6810 a1 = TREE_OPERAND (ineq, 1);
6811 y = TREE_OPERAND (ineq, 0);
6813 else if (TREE_CODE (ineq) == GT_EXPR)
6815 a1 = TREE_OPERAND (ineq, 0);
6816 y = TREE_OPERAND (ineq, 1);
6818 else
6819 return NULL_TREE;
6821 if (TREE_TYPE (a1) != typea)
6822 return NULL_TREE;
6824 if (POINTER_TYPE_P (typea))
6826 /* Convert the pointer types into integer before taking the difference. */
6827 tree ta = fold_convert_loc (loc, ssizetype, a);
6828 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6829 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6831 else
6832 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6834 if (!diff || !integer_onep (diff))
6835 return NULL_TREE;
6837 return fold_build2_loc (loc, GE_EXPR, type, a, y);
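/* Editor's sketch (not compiled here): the fold above -- when A + 1
   cannot overflow, A < X && A + 1 > Y agrees with A < X && A >= Y.  */
#if 0
#include <assert.h>

int
main (void)
{
  const int X = 50, Y = 10;
  for (int a = -100; a < 100; a++)
    assert ((a < X && a + 1 > Y) == (a < X && a >= Y));
  return 0;
}
#endif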
6840 /* Fold a sum or difference of at least one multiplication.
6841 Returns the folded tree or NULL if no simplification could be made. */
6843 static tree
6844 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6845 tree arg0, tree arg1)
6847 tree arg00, arg01, arg10, arg11;
6848 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6850 /* (A * C) +- (B * C) -> (A+-B) * C.
6851 (A * C) +- A -> A * (C+-1).
6852 We are most concerned about the case where C is a constant,
6853 but other combinations show up during loop reduction. Since
6854 it is not difficult, try all four possibilities. */
6856 if (TREE_CODE (arg0) == MULT_EXPR)
6858 arg00 = TREE_OPERAND (arg0, 0);
6859 arg01 = TREE_OPERAND (arg0, 1);
6861 else if (TREE_CODE (arg0) == INTEGER_CST)
6863 arg00 = build_one_cst (type);
6864 arg01 = arg0;
6866 else
6868 /* We cannot generate constant 1 for fract. */
6869 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6870 return NULL_TREE;
6871 arg00 = arg0;
6872 arg01 = build_one_cst (type);
6874 if (TREE_CODE (arg1) == MULT_EXPR)
6876 arg10 = TREE_OPERAND (arg1, 0);
6877 arg11 = TREE_OPERAND (arg1, 1);
6879 else if (TREE_CODE (arg1) == INTEGER_CST)
6881 arg10 = build_one_cst (type);
6882 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6883 the purpose of this canonicalization. */
6884 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6885 && negate_expr_p (arg1)
6886 && code == PLUS_EXPR)
6888 arg11 = negate_expr (arg1);
6889 code = MINUS_EXPR;
6891 else
6892 arg11 = arg1;
6894 else
6896 /* We cannot generate constant 1 for fract. */
6897 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6898 return NULL_TREE;
6899 arg10 = arg1;
6900 arg11 = build_one_cst (type);
6902 same = NULL_TREE;
6904 if (operand_equal_p (arg01, arg11, 0))
6905 same = arg01, alt0 = arg00, alt1 = arg10;
6906 else if (operand_equal_p (arg00, arg10, 0))
6907 same = arg00, alt0 = arg01, alt1 = arg11;
6908 else if (operand_equal_p (arg00, arg11, 0))
6909 same = arg00, alt0 = arg01, alt1 = arg10;
6910 else if (operand_equal_p (arg01, arg10, 0))
6911 same = arg01, alt0 = arg00, alt1 = arg11;
6913 /* No identical multiplicands; see if we can find a common
6914 power-of-two factor in non-power-of-two multiplies. This
6915 can help in multi-dimensional array access. */
6916 else if (tree_fits_shwi_p (arg01)
6917 && tree_fits_shwi_p (arg11))
6919 HOST_WIDE_INT int01, int11, tmp;
6920 bool swap = false;
6921 tree maybe_same;
6922 int01 = tree_to_shwi (arg01);
6923 int11 = tree_to_shwi (arg11);
6925 /* Move min of absolute values to int11. */
6926 if (absu_hwi (int01) < absu_hwi (int11))
6928 tmp = int01, int01 = int11, int11 = tmp;
6929 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6930 maybe_same = arg01;
6931 swap = true;
6933 else
6934 maybe_same = arg11;
6936 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6937 /* The remainder should not be a constant, otherwise we
6938 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6939 increase the number of multiplications needed. */
6940 && TREE_CODE (arg10) != INTEGER_CST)
6942 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6943 build_int_cst (TREE_TYPE (arg00),
6944 int01 / int11));
6945 alt1 = arg10;
6946 same = maybe_same;
6947 if (swap)
6948 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6952 if (same)
6953 return fold_build2_loc (loc, MULT_EXPR, type,
6954 fold_build2_loc (loc, code, type,
6955 fold_convert_loc (loc, type, alt0),
6956 fold_convert_loc (loc, type, alt1)),
6957 fold_convert_loc (loc, type, same));
6959 return NULL_TREE;
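/* Editor's sketch (not compiled here): the factorings above, over
   ranges small enough that no signed overflow occurs.  */
#if 0
#include <assert.h>

int
main (void)
{
  const int c = 9;
  for (int a = -50; a <= 50; a++)
    for (int b = -50; b <= 50; b++)
      {
        assert (a * c + b * c == (a + b) * c);   /* (A*C) + (B*C) -> (A+B)*C */
        assert (a * c + a == a * (c + 1));       /* (A*C) + A     -> A*(C+1) */
      }
  return 0;
}
#endif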
6962 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6963 specified by EXPR into the buffer PTR of length LEN bytes.
6964 Return the number of bytes placed in the buffer, or zero
6965 upon failure. */
6967 static int
6968 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6970 tree type = TREE_TYPE (expr);
6971 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6972 int byte, offset, word, words;
6973 unsigned char value;
6975 if ((off == -1 && total_bytes > len)
6976 || off >= total_bytes)
6977 return 0;
6978 if (off == -1)
6979 off = 0;
6980 words = total_bytes / UNITS_PER_WORD;
6982 for (byte = 0; byte < total_bytes; byte++)
6984 int bitpos = byte * BITS_PER_UNIT;
6985 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6986 number of bytes. */
6987 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
6989 if (total_bytes > UNITS_PER_WORD)
6991 word = byte / UNITS_PER_WORD;
6992 if (WORDS_BIG_ENDIAN)
6993 word = (words - 1) - word;
6994 offset = word * UNITS_PER_WORD;
6995 if (BYTES_BIG_ENDIAN)
6996 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6997 else
6998 offset += byte % UNITS_PER_WORD;
7000 else
7001 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7002 if (offset >= off
7003 && offset - off < len)
7004 ptr[offset - off] = value;
7006 return MIN (len, total_bytes - off);
7010 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7011 specified by EXPR into the buffer PTR of length LEN bytes.
7012 Return the number of bytes placed in the buffer, or zero
7013 upon failure. */
7015 static int
7016 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7018 tree type = TREE_TYPE (expr);
7019 machine_mode mode = TYPE_MODE (type);
7020 int total_bytes = GET_MODE_SIZE (mode);
7021 FIXED_VALUE_TYPE value;
7022 tree i_value, i_type;
7024 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7025 return 0;
7027 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7029 if (NULL_TREE == i_type
7030 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7031 return 0;
7033 value = TREE_FIXED_CST (expr);
7034 i_value = double_int_to_tree (i_type, value.data);
7036 return native_encode_int (i_value, ptr, len, off);
7040 /* Subroutine of native_encode_expr. Encode the REAL_CST
7041 specified by EXPR into the buffer PTR of length LEN bytes.
7042 Return the number of bytes placed in the buffer, or zero
7043 upon failure. */
7045 static int
7046 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7048 tree type = TREE_TYPE (expr);
7049 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7050 int byte, offset, word, words, bitpos;
7051 unsigned char value;
7053 /* There are always 32 bits in each long, no matter the size of
7054 the host's long. We handle floating point representations with
7055 up to 192 bits. */
7056 long tmp[6];
7058 if ((off == -1 && total_bytes > len)
7059 || off >= total_bytes)
7060 return 0;
7061 if (off == -1)
7062 off = 0;
7063 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7065 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7067 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7068 bitpos += BITS_PER_UNIT)
7070 byte = (bitpos / BITS_PER_UNIT) & 3;
7071 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7073 if (UNITS_PER_WORD < 4)
7075 word = byte / UNITS_PER_WORD;
7076 if (WORDS_BIG_ENDIAN)
7077 word = (words - 1) - word;
7078 offset = word * UNITS_PER_WORD;
7079 if (BYTES_BIG_ENDIAN)
7080 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7081 else
7082 offset += byte % UNITS_PER_WORD;
7084 else
7085 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7086 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7087 if (offset >= off
7088 && offset - off < len)
7089 ptr[offset - off] = value;
7091 return MIN (len, total_bytes - off);
7094 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7095 specified by EXPR into the buffer PTR of length LEN bytes.
7096 Return the number of bytes placed in the buffer, or zero
7097 upon failure. */
7099 static int
7100 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7102 int rsize, isize;
7103 tree part;
7105 part = TREE_REALPART (expr);
7106 rsize = native_encode_expr (part, ptr, len, off);
7107 if (off == -1
7108 && rsize == 0)
7109 return 0;
7110 part = TREE_IMAGPART (expr);
7111 if (off != -1)
7112 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7113 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7114 if (off == -1
7115 && isize != rsize)
7116 return 0;
7117 return rsize + isize;
7121 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7122 specified by EXPR into the buffer PTR of length LEN bytes.
7123 Return the number of bytes placed in the buffer, or zero
7124 upon failure. */
7126 static int
7127 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7129 unsigned i, count;
7130 int size, offset;
7131 tree itype, elem;
7133 offset = 0;
7134 count = VECTOR_CST_NELTS (expr);
7135 itype = TREE_TYPE (TREE_TYPE (expr));
7136 size = GET_MODE_SIZE (TYPE_MODE (itype));
7137 for (i = 0; i < count; i++)
7139 if (off >= size)
7141 off -= size;
7142 continue;
7144 elem = VECTOR_CST_ELT (expr, i);
7145 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7146 if ((off == -1 && res != size)
7147 || res == 0)
7148 return 0;
7149 offset += res;
7150 if (offset >= len)
7151 return offset;
7152 if (off != -1)
7153 off = 0;
7155 return offset;
7159 /* Subroutine of native_encode_expr. Encode the STRING_CST
7160 specified by EXPR into the buffer PTR of length LEN bytes.
7161 Return the number of bytes placed in the buffer, or zero
7162 upon failure. */
7164 static int
7165 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7167 tree type = TREE_TYPE (expr);
7168 HOST_WIDE_INT total_bytes;
7170 if (TREE_CODE (type) != ARRAY_TYPE
7171 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7172 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7173 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7174 return 0;
7175 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7176 if ((off == -1 && total_bytes > len)
7177 || off >= total_bytes)
7178 return 0;
7179 if (off == -1)
7180 off = 0;
7181 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7183 int written = 0;
7184 if (off < TREE_STRING_LENGTH (expr))
7186 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7187 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7189 memset (ptr + written, 0,
7190 MIN (total_bytes - written, len - written));
7192 else
7193 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7194 return MIN (total_bytes - off, len);
7198 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7199 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7200 buffer PTR of length LEN bytes. If OFF is not -1 then start
7201 the encoding at byte offset OFF and encode at most LEN bytes.
7202 Return the number of bytes placed in the buffer, or zero upon failure. */
7204 int
7205 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7207 switch (TREE_CODE (expr))
7209 case INTEGER_CST:
7210 return native_encode_int (expr, ptr, len, off);
7212 case REAL_CST:
7213 return native_encode_real (expr, ptr, len, off);
7215 case FIXED_CST:
7216 return native_encode_fixed (expr, ptr, len, off);
7218 case COMPLEX_CST:
7219 return native_encode_complex (expr, ptr, len, off);
7221 case VECTOR_CST:
7222 return native_encode_vector (expr, ptr, len, off);
7224 case STRING_CST:
7225 return native_encode_string (expr, ptr, len, off);
7227 default:
7228 return 0;
7233 /* Subroutine of native_interpret_expr. Interpret the contents of
7234 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7235 If the buffer cannot be interpreted, return NULL_TREE. */
7237 static tree
7238 native_interpret_int (tree type, const unsigned char *ptr, int len)
7240 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7242 if (total_bytes > len
7243 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7244 return NULL_TREE;
7246 wide_int result = wi::from_buffer (ptr, total_bytes);
7248 return wide_int_to_tree (type, result);
7252 /* Subroutine of native_interpret_expr. Interpret the contents of
7253 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7254 If the buffer cannot be interpreted, return NULL_TREE. */
7256 static tree
7257 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7259 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7260 double_int result;
7261 FIXED_VALUE_TYPE fixed_value;
7263 if (total_bytes > len
7264 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7265 return NULL_TREE;
7267 result = double_int::from_buffer (ptr, total_bytes);
7268 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7270 return build_fixed (type, fixed_value);
7274 /* Subroutine of native_interpret_expr. Interpret the contents of
7275 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7276 If the buffer cannot be interpreted, return NULL_TREE. */
7278 static tree
7279 native_interpret_real (tree type, const unsigned char *ptr, int len)
7281 machine_mode mode = TYPE_MODE (type);
7282 int total_bytes = GET_MODE_SIZE (mode);
7283 int byte, offset, word, words, bitpos;
7284 unsigned char value;
7285 /* There are always 32 bits in each long, no matter the size of
7286 the host's long.  We handle floating point representations with
7287 up to 192 bits. */
7288 REAL_VALUE_TYPE r;
7289 long tmp[6];
7291 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7292 if (total_bytes > len || total_bytes > 24)
7293 return NULL_TREE;
7294 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7296 memset (tmp, 0, sizeof (tmp));
7297 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7298 bitpos += BITS_PER_UNIT)
7300 byte = (bitpos / BITS_PER_UNIT) & 3;
7301 if (UNITS_PER_WORD < 4)
7303 word = byte / UNITS_PER_WORD;
7304 if (WORDS_BIG_ENDIAN)
7305 word = (words - 1) - word;
7306 offset = word * UNITS_PER_WORD;
7307 if (BYTES_BIG_ENDIAN)
7308 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7309 else
7310 offset += byte % UNITS_PER_WORD;
7312 else
7313 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7314 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7316 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7319 real_from_target (&r, tmp, mode);
7320 return build_real (type, r);
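/* Illustrative note (added commentary): the loop above undoes the
   word/byte shuffling done by the matching encoder, packing the
   target bytes into 32-bit groups regardless of how wide the host's
   long is, since real_from_target expects 32 bits per element.
   E.g. a 4-byte IEEE single on a little-endian target with 4-byte
   words takes the "offset = byte" branch and simply accumulates
   ptr[0..3] into tmp[0].  */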
7324 /* Subroutine of native_interpret_expr. Interpret the contents of
7325 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7326 If the buffer cannot be interpreted, return NULL_TREE. */
7328 static tree
7329 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7331 tree etype, rpart, ipart;
7332 int size;
7334 etype = TREE_TYPE (type);
7335 size = GET_MODE_SIZE (TYPE_MODE (etype));
7336 if (size * 2 > len)
7337 return NULL_TREE;
7338 rpart = native_interpret_expr (etype, ptr, size);
7339 if (!rpart)
7340 return NULL_TREE;
7341 ipart = native_interpret_expr (etype, ptr+size, size);
7342 if (!ipart)
7343 return NULL_TREE;
7344 return build_complex (type, rpart, ipart);
7348 /* Subroutine of native_interpret_expr. Interpret the contents of
7349 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7350 If the buffer cannot be interpreted, return NULL_TREE. */
7352 static tree
7353 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7355 tree etype, elem;
7356 int i, size, count;
7357 tree *elements;
7359 etype = TREE_TYPE (type);
7360 size = GET_MODE_SIZE (TYPE_MODE (etype));
7361 count = TYPE_VECTOR_SUBPARTS (type);
7362 if (size * count > len)
7363 return NULL_TREE;
7365 elements = XALLOCAVEC (tree, count);
7366 for (i = count - 1; i >= 0; i--)
7368 elem = native_interpret_expr (etype, ptr+(i*size), size);
7369 if (!elem)
7370 return NULL_TREE;
7371 elements[i] = elem;
7373 return build_vector (type, elements);
7377 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7378 the buffer PTR of length LEN as a constant of type TYPE. For
7379 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7380 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7381 return NULL_TREE. */
7383 tree
7384 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7386 switch (TREE_CODE (type))
7388 case INTEGER_TYPE:
7389 case ENUMERAL_TYPE:
7390 case BOOLEAN_TYPE:
7391 case POINTER_TYPE:
7392 case REFERENCE_TYPE:
7393 return native_interpret_int (type, ptr, len);
7395 case REAL_TYPE:
7396 return native_interpret_real (type, ptr, len);
7398 case FIXED_POINT_TYPE:
7399 return native_interpret_fixed (type, ptr, len);
7401 case COMPLEX_TYPE:
7402 return native_interpret_complex (type, ptr, len);
7404 case VECTOR_TYPE:
7405 return native_interpret_vector (type, ptr, len);
7407 default:
7408 return NULL_TREE;
7412 /* Returns true if we can interpret the contents of a native encoding
7413 as TYPE. */
7415 static bool
7416 can_native_interpret_type_p (tree type)
7418 switch (TREE_CODE (type))
7420 case INTEGER_TYPE:
7421 case ENUMERAL_TYPE:
7422 case BOOLEAN_TYPE:
7423 case POINTER_TYPE:
7424 case REFERENCE_TYPE:
7425 case FIXED_POINT_TYPE:
7426 case REAL_TYPE:
7427 case COMPLEX_TYPE:
7428 case VECTOR_TYPE:
7429 return true;
7430 default:
7431 return false;
7435 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7436 TYPE at compile-time. If we're unable to perform the conversion
7437 return NULL_TREE. */
7439 static tree
7440 fold_view_convert_expr (tree type, tree expr)
7442 /* We support up to 512-bit values (for V8DFmode). */
7443 unsigned char buffer[64];
7444 int len;
7446 /* Check that the host and target are sane. */
7447 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7448 return NULL_TREE;
7450 len = native_encode_expr (expr, buffer, sizeof (buffer));
7451 if (len == 0)
7452 return NULL_TREE;
7454 return native_interpret_expr (type, buffer, len);
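/* Illustrative sketch (added commentary): the encode/interpret pair
   gives VIEW_CONVERT_EXPR folding its source-level meaning of
   reinterpreting bytes.  Assuming 32-bit IEEE floats, the constant
   analogue of

     union { float f; unsigned int u; } x = { 1.0f };
     unsigned int bits = x.u;   // 0x3f800000 on such a target

   folds VIEW_CONVERT_EXPR<unsigned int>(1.0f) directly to the
   INTEGER_CST 0x3f800000, leaving no runtime reinterpretation.
   (The three-argument call above relies on OFF defaulting to -1 in
   the declaration, i.e. encoding from the start.)  */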
7457 /* Build an expression for the address of T. Folds away INDIRECT_REF
7458 to avoid confusing the gimplify process. */
7460 tree
7461 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7463 /* The size of the object is not relevant when talking about its address. */
7464 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7465 t = TREE_OPERAND (t, 0);
7467 if (TREE_CODE (t) == INDIRECT_REF)
7469 t = TREE_OPERAND (t, 0);
7471 if (TREE_TYPE (t) != ptrtype)
7472 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7474 else if (TREE_CODE (t) == MEM_REF
7475 && integer_zerop (TREE_OPERAND (t, 1)))
7476 return TREE_OPERAND (t, 0);
7477 else if (TREE_CODE (t) == MEM_REF
7478 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7479 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7480 TREE_OPERAND (t, 0),
7481 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7482 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7484 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7486 if (TREE_TYPE (t) != ptrtype)
7487 t = fold_convert_loc (loc, ptrtype, t);
7489 else
7490 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7492 return t;
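/* Illustrative examples (added commentary) of the folds above:

     &*p           -> p          (INDIRECT_REF case, plus a cast to
                                  PTRTYPE if the types differ)
     &MEM[p + 0]   -> p          (zero-offset MEM_REF case)
     &MEM[cst + 8] -> cst p+ 8   (constant-base MEM_REF case)

   In each case no fresh ADDR_EXPR needs to be built at all.  */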
7495 /* Build an expression for the address of T. */
7497 tree
7498 build_fold_addr_expr_loc (location_t loc, tree t)
7500 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7502 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7505 static bool vec_cst_ctor_to_array (tree, tree *);
7507 /* Fold a unary expression of code CODE and type TYPE with operand
7508 OP0. Return the folded expression if folding is successful.
7509 Otherwise, return NULL_TREE. */
7511 tree
7512 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7514 tree tem;
7515 tree arg0;
7516 enum tree_code_class kind = TREE_CODE_CLASS (code);
7518 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7519 && TREE_CODE_LENGTH (code) == 1);
7521 tem = generic_simplify (loc, code, type, op0);
7522 if (tem)
7523 return tem;
7525 arg0 = op0;
7526 if (arg0)
7528 if (CONVERT_EXPR_CODE_P (code)
7529 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7531 /* Don't use STRIP_NOPS, because signedness of argument type
7532 matters. */
7533 STRIP_SIGN_NOPS (arg0);
7535 else
7537 /* Strip any conversions that don't change the mode. This
7538 is safe for every expression, except for a comparison
7539 expression because its signedness is derived from its
7540 operands.
7542 Note that this is done as an internal manipulation within
7543 the constant folder, in order to find the simplest
7544 representation of the arguments so that their form can be
7545 studied.  In any case, the appropriate type conversions
7546 should be put back in the tree that will get out of the
7547 constant folder. */
7548 STRIP_NOPS (arg0);
7552 if (TREE_CODE_CLASS (code) == tcc_unary)
7554 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7555 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7556 fold_build1_loc (loc, code, type,
7557 fold_convert_loc (loc, TREE_TYPE (op0),
7558 TREE_OPERAND (arg0, 1))));
7559 else if (TREE_CODE (arg0) == COND_EXPR)
7561 tree arg01 = TREE_OPERAND (arg0, 1);
7562 tree arg02 = TREE_OPERAND (arg0, 2);
7563 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7564 arg01 = fold_build1_loc (loc, code, type,
7565 fold_convert_loc (loc,
7566 TREE_TYPE (op0), arg01));
7567 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7568 arg02 = fold_build1_loc (loc, code, type,
7569 fold_convert_loc (loc,
7570 TREE_TYPE (op0), arg02));
7571 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7572 arg01, arg02);
7574 /* If this was a conversion, and all we did was to move into
7575 inside the COND_EXPR, bring it back out. But leave it if
7576 it is a conversion from integer to integer and the
7577 result precision is no wider than a word since such a
7578 conversion is cheap and may be optimized away by combine,
7579 while it couldn't if it were outside the COND_EXPR. Then return
7580 so we don't get into an infinite recursion loop taking the
7581 conversion out and then back in. */
7583 if ((CONVERT_EXPR_CODE_P (code)
7584 || code == NON_LVALUE_EXPR)
7585 && TREE_CODE (tem) == COND_EXPR
7586 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7587 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7588 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7589 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7590 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7591 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7592 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7593 && (INTEGRAL_TYPE_P
7594 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7595 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7596 || flag_syntax_only))
7597 tem = build1_loc (loc, code, type,
7598 build3 (COND_EXPR,
7599 TREE_TYPE (TREE_OPERAND
7600 (TREE_OPERAND (tem, 1), 0)),
7601 TREE_OPERAND (tem, 0),
7602 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7603 TREE_OPERAND (TREE_OPERAND (tem, 2),
7604 0)));
7605 return tem;
7609 switch (code)
7611 case NON_LVALUE_EXPR:
7612 if (!maybe_lvalue_p (op0))
7613 return fold_convert_loc (loc, type, op0);
7614 return NULL_TREE;
7616 CASE_CONVERT:
7617 case FLOAT_EXPR:
7618 case FIX_TRUNC_EXPR:
7619 if (COMPARISON_CLASS_P (op0))
7621 /* If we have (type) (a CMP b) and type is an integral type, return
7622 new expression involving the new type. Canonicalize
7623 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7624 non-integral type.
7625 Do not fold the result, as that would not simplify further;
7626 folding it again would only result in infinite recursion. */
7627 if (TREE_CODE (type) == BOOLEAN_TYPE)
7628 return build2_loc (loc, TREE_CODE (op0), type,
7629 TREE_OPERAND (op0, 0),
7630 TREE_OPERAND (op0, 1));
7631 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7632 && TREE_CODE (type) != VECTOR_TYPE)
7633 return build3_loc (loc, COND_EXPR, type, op0,
7634 constant_boolean_node (true, type),
7635 constant_boolean_node (false, type));
7638 /* Handle (T *)&A.B.C for A being of type T and B and C
7639 living at offset zero. This occurs frequently in
7640 C++ upcasting and then accessing the base. */
7641 if (TREE_CODE (op0) == ADDR_EXPR
7642 && POINTER_TYPE_P (type)
7643 && handled_component_p (TREE_OPERAND (op0, 0)))
7645 HOST_WIDE_INT bitsize, bitpos;
7646 tree offset;
7647 machine_mode mode;
7648 int unsignedp, volatilep;
7649 tree base = TREE_OPERAND (op0, 0);
7650 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7651 &mode, &unsignedp, &volatilep, false);
7652 /* If the reference was to a (constant) zero offset, we can use
7653 the address of the base if it has the same base type
7654 as the result type and the pointer type is unqualified. */
7655 if (! offset && bitpos == 0
7656 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7657 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7658 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7659 return fold_convert_loc (loc, type,
7660 build_fold_addr_expr_loc (loc, base));
7663 if (TREE_CODE (op0) == MODIFY_EXPR
7664 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7665 /* Detect assigning a bitfield. */
7666 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7667 && DECL_BIT_FIELD
7668 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7670 /* Don't leave an assignment inside a conversion
7671 unless assigning a bitfield. */
7672 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7673 /* First do the assignment, then return converted constant. */
7674 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7675 TREE_NO_WARNING (tem) = 1;
7676 TREE_USED (tem) = 1;
7677 return tem;
7680 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7681 constants (if x has signed type, the sign bit cannot be set
7682 in c). This folds extension into the BIT_AND_EXPR.
7683 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7684 very likely don't have maximal range for their precision and this
7685 transformation effectively doesn't preserve non-maximal ranges. */
7686 if (TREE_CODE (type) == INTEGER_TYPE
7687 && TREE_CODE (op0) == BIT_AND_EXPR
7688 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7690 tree and_expr = op0;
7691 tree and0 = TREE_OPERAND (and_expr, 0);
7692 tree and1 = TREE_OPERAND (and_expr, 1);
7693 int change = 0;
7695 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7696 || (TYPE_PRECISION (type)
7697 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7698 change = 1;
7699 else if (TYPE_PRECISION (TREE_TYPE (and1))
7700 <= HOST_BITS_PER_WIDE_INT
7701 && tree_fits_uhwi_p (and1))
7703 unsigned HOST_WIDE_INT cst;
7705 cst = tree_to_uhwi (and1);
7706 cst &= HOST_WIDE_INT_M1U
7707 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7708 change = (cst == 0);
7709 #ifdef LOAD_EXTEND_OP
7710 if (change
7711 && !flag_syntax_only
7712 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7713 == ZERO_EXTEND))
7715 tree uns = unsigned_type_for (TREE_TYPE (and0));
7716 and0 = fold_convert_loc (loc, uns, and0);
7717 and1 = fold_convert_loc (loc, uns, and1);
7719 #endif
7721 if (change)
7723 tem = force_fit_type (type, wi::to_widest (and1), 0,
7724 TREE_OVERFLOW (and1));
7725 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7726 fold_convert_loc (loc, type, and0), tem);
7730 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7731 when one of the new casts will fold away. Conservatively we assume
7732 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7733 if (POINTER_TYPE_P (type)
7734 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7735 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7736 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7737 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7738 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7740 tree arg00 = TREE_OPERAND (arg0, 0);
7741 tree arg01 = TREE_OPERAND (arg0, 1);
7743 return fold_build_pointer_plus_loc
7744 (loc, fold_convert_loc (loc, type, arg00), arg01);
7747 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7748 of the same precision, and X is an integer type not narrower than
7749 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7750 if (INTEGRAL_TYPE_P (type)
7751 && TREE_CODE (op0) == BIT_NOT_EXPR
7752 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7753 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7754 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7756 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7757 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7758 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7759 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7760 fold_convert_loc (loc, type, tem));
7763 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7764 type of X and Y (integer types only). */
7765 if (INTEGRAL_TYPE_P (type)
7766 && TREE_CODE (op0) == MULT_EXPR
7767 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7768 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7770 /* Be careful not to introduce new overflows. */
7771 tree mult_type;
7772 if (TYPE_OVERFLOW_WRAPS (type))
7773 mult_type = type;
7774 else
7775 mult_type = unsigned_type_for (type);
7777 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7779 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7780 fold_convert_loc (loc, mult_type,
7781 TREE_OPERAND (op0, 0)),
7782 fold_convert_loc (loc, mult_type,
7783 TREE_OPERAND (op0, 1)));
7784 return fold_convert_loc (loc, type, tem);
7788 tem = fold_convert_const (code, type, arg0);
7789 return tem ? tem : NULL_TREE;
7791 case ADDR_SPACE_CONVERT_EXPR:
7792 if (integer_zerop (arg0))
7793 return fold_convert_const (code, type, arg0);
7794 return NULL_TREE;
7796 case FIXED_CONVERT_EXPR:
7797 tem = fold_convert_const (code, type, arg0);
7798 return tem ? tem : NULL_TREE;
7800 case VIEW_CONVERT_EXPR:
7801 if (TREE_CODE (op0) == MEM_REF)
7802 return fold_build2_loc (loc, MEM_REF, type,
7803 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7805 return fold_view_convert_expr (type, op0);
7807 case NEGATE_EXPR:
7808 tem = fold_negate_expr (loc, arg0);
7809 if (tem)
7810 return fold_convert_loc (loc, type, tem);
7811 return NULL_TREE;
7813 case ABS_EXPR:
7814 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7815 return fold_abs_const (arg0, type);
7816 /* Convert fabs((double)float) into (double)fabsf(float). */
7817 else if (TREE_CODE (arg0) == NOP_EXPR
7818 && TREE_CODE (type) == REAL_TYPE)
7820 tree targ0 = strip_float_extensions (arg0);
7821 if (targ0 != arg0)
7822 return fold_convert_loc (loc, type,
7823 fold_build1_loc (loc, ABS_EXPR,
7824 TREE_TYPE (targ0),
7825 targ0));
7827 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7828 else if (TREE_CODE (arg0) == ABS_EXPR)
7829 return arg0;
7831 /* Strip sign ops from argument. */
7832 if (TREE_CODE (type) == REAL_TYPE)
7834 tem = fold_strip_sign_ops (arg0);
7835 if (tem)
7836 return fold_build1_loc (loc, ABS_EXPR, type,
7837 fold_convert_loc (loc, type, tem));
7839 return NULL_TREE;
7841 case CONJ_EXPR:
7842 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7843 return fold_convert_loc (loc, type, arg0);
7844 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7846 tree itype = TREE_TYPE (type);
7847 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7848 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7849 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7850 negate_expr (ipart));
7852 if (TREE_CODE (arg0) == COMPLEX_CST)
7854 tree itype = TREE_TYPE (type);
7855 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
7856 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
7857 return build_complex (type, rpart, negate_expr (ipart));
7859 if (TREE_CODE (arg0) == CONJ_EXPR)
7860 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7861 return NULL_TREE;
7863 case BIT_NOT_EXPR:
7864 if (TREE_CODE (arg0) == INTEGER_CST)
7865 return fold_not_const (arg0, type);
7866 /* Convert ~ (-A) to A - 1. */
7867 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7868 return fold_build2_loc (loc, MINUS_EXPR, type,
7869 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
7870 build_int_cst (type, 1));
7871 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7872 else if (INTEGRAL_TYPE_P (type)
7873 && ((TREE_CODE (arg0) == MINUS_EXPR
7874 && integer_onep (TREE_OPERAND (arg0, 1)))
7875 || (TREE_CODE (arg0) == PLUS_EXPR
7876 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7877 return fold_build1_loc (loc, NEGATE_EXPR, type,
7878 fold_convert_loc (loc, type,
7879 TREE_OPERAND (arg0, 0)));
7880 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7881 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7882 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7883 fold_convert_loc (loc, type,
7884 TREE_OPERAND (arg0, 0)))))
7885 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7886 fold_convert_loc (loc, type,
7887 TREE_OPERAND (arg0, 1)));
7888 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7889 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7890 fold_convert_loc (loc, type,
7891 TREE_OPERAND (arg0, 1)))))
7892 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7893 fold_convert_loc (loc, type,
7894 TREE_OPERAND (arg0, 0)), tem);
7895 /* Perform BIT_NOT_EXPR on each element individually. */
7896 else if (TREE_CODE (arg0) == VECTOR_CST)
7898 tree *elements;
7899 tree elem;
7900 unsigned count = VECTOR_CST_NELTS (arg0), i;
7902 elements = XALLOCAVEC (tree, count);
7903 for (i = 0; i < count; i++)
7905 elem = VECTOR_CST_ELT (arg0, i);
7906 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
7907 if (elem == NULL_TREE)
7908 break;
7909 elements[i] = elem;
7911 if (i == count)
7912 return build_vector (type, elements);
7915 return NULL_TREE;
7917 case TRUTH_NOT_EXPR:
7918 /* Note that the operand of this must be an int
7919 and its values must be 0 or 1.
7920 ("true" is a fixed value perhaps depending on the language,
7921 but we don't handle values other than 1 correctly yet.) */
7922 tem = fold_truth_not_expr (loc, arg0);
7923 if (!tem)
7924 return NULL_TREE;
7925 return fold_convert_loc (loc, type, tem);
7927 case REALPART_EXPR:
7928 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7929 return fold_convert_loc (loc, type, arg0);
7930 if (TREE_CODE (arg0) == COMPLEX_CST)
7931 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
7932 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7934 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7935 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7936 fold_build1_loc (loc, REALPART_EXPR, itype,
7937 TREE_OPERAND (arg0, 0)),
7938 fold_build1_loc (loc, REALPART_EXPR, itype,
7939 TREE_OPERAND (arg0, 1)));
7940 return fold_convert_loc (loc, type, tem);
7942 if (TREE_CODE (arg0) == CONJ_EXPR)
7944 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7945 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
7946 TREE_OPERAND (arg0, 0));
7947 return fold_convert_loc (loc, type, tem);
7949 if (TREE_CODE (arg0) == CALL_EXPR)
7951 tree fn = get_callee_fndecl (arg0);
7952 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7953 switch (DECL_FUNCTION_CODE (fn))
7955 CASE_FLT_FN (BUILT_IN_CEXPI):
7956 fn = mathfn_built_in (type, BUILT_IN_COS);
7957 if (fn)
7958 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
7959 break;
7961 default:
7962 break;
7965 return NULL_TREE;
7967 case IMAGPART_EXPR:
7968 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7969 return build_zero_cst (type);
7970 if (TREE_CODE (arg0) == COMPLEX_CST)
7971 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
7972 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7974 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7975 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7976 fold_build1_loc (loc, IMAGPART_EXPR, itype,
7977 TREE_OPERAND (arg0, 0)),
7978 fold_build1_loc (loc, IMAGPART_EXPR, itype,
7979 TREE_OPERAND (arg0, 1)));
7980 return fold_convert_loc (loc, type, tem);
7982 if (TREE_CODE (arg0) == CONJ_EXPR)
7984 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7985 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7986 return fold_convert_loc (loc, type, negate_expr (tem));
7988 if (TREE_CODE (arg0) == CALL_EXPR)
7990 tree fn = get_callee_fndecl (arg0);
7991 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7992 switch (DECL_FUNCTION_CODE (fn))
7994 CASE_FLT_FN (BUILT_IN_CEXPI):
7995 fn = mathfn_built_in (type, BUILT_IN_SIN);
7996 if (fn)
7997 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
7998 break;
8000 default:
8001 break;
8004 return NULL_TREE;
8006 case INDIRECT_REF:
8007 /* Fold *&X to X if X is an lvalue. */
8008 if (TREE_CODE (op0) == ADDR_EXPR)
8010 tree op00 = TREE_OPERAND (op0, 0);
8011 if ((TREE_CODE (op00) == VAR_DECL
8012 || TREE_CODE (op00) == PARM_DECL
8013 || TREE_CODE (op00) == RESULT_DECL)
8014 && !TREE_READONLY (op00))
8015 return op00;
8017 return NULL_TREE;
8019 case VEC_UNPACK_LO_EXPR:
8020 case VEC_UNPACK_HI_EXPR:
8021 case VEC_UNPACK_FLOAT_LO_EXPR:
8022 case VEC_UNPACK_FLOAT_HI_EXPR:
8024 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8025 tree *elts;
8026 enum tree_code subcode;
8028 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8029 if (TREE_CODE (arg0) != VECTOR_CST)
8030 return NULL_TREE;
8032 elts = XALLOCAVEC (tree, nelts * 2);
8033 if (!vec_cst_ctor_to_array (arg0, elts))
8034 return NULL_TREE;
8036 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8037 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8038 elts += nelts;
8040 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8041 subcode = NOP_EXPR;
8042 else
8043 subcode = FLOAT_EXPR;
8045 for (i = 0; i < nelts; i++)
8047 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8048 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8049 return NULL_TREE;
8052 return build_vector (type, elts);
8055 case REDUC_MIN_EXPR:
8056 case REDUC_MAX_EXPR:
8057 case REDUC_PLUS_EXPR:
8059 unsigned int nelts, i;
8060 tree *elts;
8061 enum tree_code subcode;
8063 if (TREE_CODE (op0) != VECTOR_CST)
8064 return NULL_TREE;
8065 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8067 elts = XALLOCAVEC (tree, nelts);
8068 if (!vec_cst_ctor_to_array (op0, elts))
8069 return NULL_TREE;
8071 switch (code)
8073 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8074 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8075 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8076 default: gcc_unreachable ();
8079 for (i = 1; i < nelts; i++)
8081 elts[0] = const_binop (subcode, elts[0], elts[i]);
8082 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8083 return NULL_TREE;
8086 return elts[0];
8089 default:
8090 return NULL_TREE;
8091 } /* switch (code) */
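/* Illustrative examples (added commentary) of fold_unary_loc on the
   cases above, for a signed integer type with two's complement
   semantics:

     ~(-a)         ->  a - 1    (BIT_NOT_EXPR of NEGATE_EXPR)
     ~(a - 1)      ->  -a       (BIT_NOT_EXPR of A - 1)
     abs (abs (x)) ->  abs (x)  (nested ABS_EXPR)

   The first two follow from ~y == -y - 1; the last holds even with
   -fwrapv because ABS_EXPR is idempotent.  */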
8095 /* If the operation was a conversion do _not_ mark a resulting constant
8096 with TREE_OVERFLOW if the original constant was not. These conversions
8097 have implementation defined behavior and retaining the TREE_OVERFLOW
8098 flag here would confuse later passes such as VRP. */
8099 tree
8100 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8101 tree type, tree op0)
8103 tree res = fold_unary_loc (loc, code, type, op0);
8104 if (res
8105 && TREE_CODE (res) == INTEGER_CST
8106 && TREE_CODE (op0) == INTEGER_CST
8107 && CONVERT_EXPR_CODE_P (code))
8108 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8110 return res;
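/* Illustrative sketch (added commentary): folding the conversion
   (signed char) 300 from an int constant yields 44.  Although 300
   does not fit in the target type, the conversion has
   implementation-defined rather than undefined behavior, so the
   resulting INTEGER_CST must not be marked TREE_OVERFLOW unless the
   operand already was; otherwise VRP would treat the folded
   constant as tainted.  */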
8113 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8114 operands OP0 and OP1. LOC is the location of the resulting expression.
8115 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8116 Return the folded expression if folding is successful. Otherwise,
8117 return NULL_TREE. */
8118 static tree
8119 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8120 tree arg0, tree arg1, tree op0, tree op1)
8122 tree tem;
8124 /* We only do these simplifications if we are optimizing. */
8125 if (!optimize)
8126 return NULL_TREE;
8128 /* Check for things like (A || B) && (A || C). We can convert this
8129 to A || (B && C). Note that either operator can be any of the four
8130 truth and/or operations and the transformation will still be
8131 valid. Also note that we only care about order for the
8132 ANDIF and ORIF operators. If B contains side effects, this
8133 might change the truth-value of A. */
8134 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8135 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8136 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8137 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8138 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8139 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8141 tree a00 = TREE_OPERAND (arg0, 0);
8142 tree a01 = TREE_OPERAND (arg0, 1);
8143 tree a10 = TREE_OPERAND (arg1, 0);
8144 tree a11 = TREE_OPERAND (arg1, 1);
8145 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8146 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8147 && (code == TRUTH_AND_EXPR
8148 || code == TRUTH_OR_EXPR));
8150 if (operand_equal_p (a00, a10, 0))
8151 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8152 fold_build2_loc (loc, code, type, a01, a11));
8153 else if (commutative && operand_equal_p (a00, a11, 0))
8154 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8155 fold_build2_loc (loc, code, type, a01, a10));
8156 else if (commutative && operand_equal_p (a01, a10, 0))
8157 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8158 fold_build2_loc (loc, code, type, a00, a11));
8160 /* This case is tricky because we must either have commutative
8161 operators or else A10 must not have side-effects. */
8163 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8164 && operand_equal_p (a01, a11, 0))
8165 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8166 fold_build2_loc (loc, code, type, a00, a10),
8167 a01);
8170 /* See if we can build a range comparison. */
8171 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8172 return tem;
8174 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8175 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8177 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8178 if (tem)
8179 return fold_build2_loc (loc, code, type, tem, arg1);
8182 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8183 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8185 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8186 if (tem)
8187 return fold_build2_loc (loc, code, type, arg0, tem);
8190 /* Check for the possibility of merging component references. If our
8191 lhs is another similar operation, try to merge its rhs with our
8192 rhs. Then try to merge our lhs and rhs. */
8193 if (TREE_CODE (arg0) == code
8194 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8195 TREE_OPERAND (arg0, 1), arg1)))
8196 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8198 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8199 return tem;
8201 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8202 && (code == TRUTH_AND_EXPR
8203 || code == TRUTH_ANDIF_EXPR
8204 || code == TRUTH_OR_EXPR
8205 || code == TRUTH_ORIF_EXPR))
8207 enum tree_code ncode, icode;
8209 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8210 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8211 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8213 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8214 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8215 We don't want to pack more than two leaves into a non-IF AND/OR
8216 expression.
8217 If the tree code of the left-hand operand isn't an AND/OR-IF code
8218 and isn't equal to IF-CODE, then we don't want to add the
8219 right-hand operand.  If the inner right-hand side of the
8220 left-hand operand has side-effects, or isn't simple, then we
8221 can't add to it, as otherwise we might destroy the if-sequence. */
8222 if (TREE_CODE (arg0) == icode
8223 && simple_operand_p_2 (arg1)
8224 /* Needed for sequence points to handle trapping operations
8225 and side-effects. */
8226 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8228 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8229 arg1);
8230 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8231 tem);
8233 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8234 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8235 else if (TREE_CODE (arg1) == icode
8236 && simple_operand_p_2 (arg0)
8237 /* Needed for sequence points to handle trapping operations
8238 and side-effects. */
8239 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8241 tem = fold_build2_loc (loc, ncode, type,
8242 arg0, TREE_OPERAND (arg1, 0));
8243 return fold_build2_loc (loc, icode, type, tem,
8244 TREE_OPERAND (arg1, 1));
8246 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8247 into (A OR B).
8248 For sequence point consistency, we need to check for trapping,
8249 and side-effects. */
8250 else if (code == icode && simple_operand_p_2 (arg0)
8251 && simple_operand_p_2 (arg1))
8252 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8255 return NULL_TREE;
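/* Illustrative sketch (added commentary): with side-effect-free
   operands a, b, c, the distribution rule above rewrites

     (a || b) && (a || c)   ->   a || (b && c)

   and, when LOGICAL_OP_NON_SHORT_CIRCUIT says branch-free
   evaluation is acceptable, ((a && b) && c) is repacked as
   (a && (b AND c)) with a non-short-circuit inner TRUTH_AND_EXPR,
   keeping at most two leaves per non-IF operation.  */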
8258 /* Fold a binary expression of code CODE and type TYPE with operands
8259 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8260 Return the folded expression if folding is successful. Otherwise,
8261 return NULL_TREE. */
8263 static tree
8264 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8266 enum tree_code compl_code;
8268 if (code == MIN_EXPR)
8269 compl_code = MAX_EXPR;
8270 else if (code == MAX_EXPR)
8271 compl_code = MIN_EXPR;
8272 else
8273 gcc_unreachable ();
8275 /* MIN (MAX (a, b), b) == b. */
8276 if (TREE_CODE (op0) == compl_code
8277 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8278 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8280 /* MIN (MAX (b, a), b) == b. */
8281 if (TREE_CODE (op0) == compl_code
8282 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8283 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8284 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8286 /* MIN (a, MAX (a, b)) == a. */
8287 if (TREE_CODE (op1) == compl_code
8288 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8289 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8290 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8292 /* MIN (a, MAX (b, a)) == a. */
8293 if (TREE_CODE (op1) == compl_code
8294 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8295 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8296 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8298 return NULL_TREE;
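/* Illustrative sketch (added commentary): the rules above are
   instances of the lattice identities

     MIN (MAX (a, b), b) -> b      MAX (MIN (a, b), b) -> b
     MIN (a, MAX (a, b)) -> a      MAX (a, MIN (a, b)) -> a

   which hold because MAX (a, b) >= b and MIN (a, b) <= b for every
   a, so the outer operation is decided without knowing A (hence
   omit_one_operand_loc, which preserves A's side-effects).  */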
8301 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8302 by changing CODE to reduce the magnitude of constants involved in
8303 ARG0 of the comparison.
8304 Returns a canonicalized comparison tree if a simplification was
8305 possible, otherwise returns NULL_TREE.
8306 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8307 valid if signed overflow is undefined. */
8309 static tree
8310 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8311 tree arg0, tree arg1,
8312 bool *strict_overflow_p)
8314 enum tree_code code0 = TREE_CODE (arg0);
8315 tree t, cst0 = NULL_TREE;
8316 int sgn0;
8317 bool swap = false;
8319 /* Match A +- CST code arg1 and CST code arg1. We can change the
8320 first form only if overflow is undefined. */
8321 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8322 /* In principle pointers also have undefined overflow behavior,
8323 but that causes problems elsewhere. */
8324 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8325 && (code0 == MINUS_EXPR
8326 || code0 == PLUS_EXPR)
8327 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8328 || code0 == INTEGER_CST))
8329 return NULL_TREE;
8331 /* Identify the constant in arg0 and its sign. */
8332 if (code0 == INTEGER_CST)
8333 cst0 = arg0;
8334 else
8335 cst0 = TREE_OPERAND (arg0, 1);
8336 sgn0 = tree_int_cst_sgn (cst0);
8338 /* Overflowed constants and zero will cause problems. */
8339 if (integer_zerop (cst0)
8340 || TREE_OVERFLOW (cst0))
8341 return NULL_TREE;
8343 /* See if we can reduce the magnitude of the constant in
8344 arg0 by changing the comparison code. */
8345 if (code0 == INTEGER_CST)
8347 /* CST <= arg1 -> CST-1 < arg1. */
8348 if (code == LE_EXPR && sgn0 == 1)
8349 code = LT_EXPR;
8350 /* -CST < arg1 -> -CST-1 <= arg1. */
8351 else if (code == LT_EXPR && sgn0 == -1)
8352 code = LE_EXPR;
8353 /* CST > arg1 -> CST-1 >= arg1. */
8354 else if (code == GT_EXPR && sgn0 == 1)
8355 code = GE_EXPR;
8356 /* -CST >= arg1 -> -CST-1 > arg1. */
8357 else if (code == GE_EXPR && sgn0 == -1)
8358 code = GT_EXPR;
8359 else
8360 return NULL_TREE;
8361 /* arg1 code' CST' might be more canonical. */
8362 swap = true;
8364 else
8366 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8367 if (code == LT_EXPR
8368 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8369 code = LE_EXPR;
8370 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8371 else if (code == GT_EXPR
8372 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8373 code = GE_EXPR;
8374 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8375 else if (code == LE_EXPR
8376 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8377 code = LT_EXPR;
8378 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8379 else if (code == GE_EXPR
8380 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8381 code = GT_EXPR;
8382 else
8383 return NULL_TREE;
8384 *strict_overflow_p = true;
8387 /* Now build the constant reduced in magnitude. But not if that
8388 would produce one outside of its type's range. */
8389 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8390 && ((sgn0 == 1
8391 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8392 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8393 || (sgn0 == -1
8394 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8395 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8396 /* We cannot swap the comparison here as that would cause us to
8397 endlessly recurse. */
8398 return NULL_TREE;
8400 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8401 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8402 if (code0 != INTEGER_CST)
8403 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8404 t = fold_convert (TREE_TYPE (arg1), t);
8406 /* If swapping might yield a more canonical form, do so. */
8407 if (swap)
8408 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8409 else
8410 return fold_build2_loc (loc, code, type, t, arg1);
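/* Illustrative sketch (added commentary): for signed int x, y with
   undefined overflow, the reductions above give, e.g.,

     4 <= x     ->  3 < x  ->  x > 3   (constant form, then swapped)
     x + 2 > y  ->  x + 1 >= y         (A +- CST form)

   shrinking the constant's magnitude by one while keeping the
   comparison equivalent, so more expressions end up in the same
   canonical form.  */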
8413 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8414 overflow further. Try to decrease the magnitude of constants involved
8415 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8416 and put sole constants at the second argument position.
8417 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8419 static tree
8420 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8421 tree arg0, tree arg1)
8423 tree t;
8424 bool strict_overflow_p;
8425 const char * const warnmsg = G_("assuming signed overflow does not occur "
8426 "when reducing constant in comparison");
8428 /* Try canonicalization by simplifying arg0. */
8429 strict_overflow_p = false;
8430 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8431 &strict_overflow_p);
8432 if (t)
8434 if (strict_overflow_p)
8435 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8436 return t;
8439 /* Try canonicalization by simplifying arg1 using the swapped
8440 comparison. */
8441 code = swap_tree_comparison (code);
8442 strict_overflow_p = false;
8443 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8444 &strict_overflow_p);
8445 if (t && strict_overflow_p)
8446 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8447 return t;
8450 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8451 space. This is used to avoid issuing overflow warnings for
8452 expressions like &p->x which cannot wrap. */
8454 static bool
8455 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8457 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8458 return true;
8460 if (bitpos < 0)
8461 return true;
8463 wide_int wi_offset;
8464 int precision = TYPE_PRECISION (TREE_TYPE (base));
8465 if (offset == NULL_TREE)
8466 wi_offset = wi::zero (precision);
8467 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8468 return true;
8469 else
8470 wi_offset = offset;
8472 bool overflow;
8473 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8474 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8475 if (overflow)
8476 return true;
8478 if (!wi::fits_uhwi_p (total))
8479 return true;
8481 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8482 if (size <= 0)
8483 return true;
8485 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8486 array. */
8487 if (TREE_CODE (base) == ADDR_EXPR)
8489 HOST_WIDE_INT base_size;
8491 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8492 if (base_size > 0 && size < base_size)
8493 size = base_size;
8496 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
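/* Illustrative sketch (added commentary, assuming a 4-byte int): for

     struct s { int a; int x; } *p;
     ... &p->x ...

   BASE is P, OFFSET is NULL_TREE and BITPOS is 32, so TOTAL is 4
   bytes, which does not exceed the 8-byte size of *P: the address
   cannot wrap around, and comparisons involving it need no overflow
   warning.  */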
8499 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8500 kind INTEGER_CST. This makes sure to properly sign-extend the
8501 constant. */
8503 static HOST_WIDE_INT
8504 size_low_cst (const_tree t)
8506 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8507 int prec = TYPE_PRECISION (TREE_TYPE (t));
8508 if (prec < HOST_BITS_PER_WIDE_INT)
8509 return sext_hwi (w, prec);
8510 return w;
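/* Illustrative sketch (added commentary): with a 64-bit
   HOST_WIDE_INT and a 32-bit sizetype, the low element of the
   constant 0xffffffff may be stored zero-extended as 4294967295;
   sext_hwi (w, 32) turns it into -1, i.e. the sizetype encoding of
   a negative offset, which is what the pointer-offset comparisons
   below rely on.  */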
8513 /* Subroutine of fold_binary. This routine performs all of the
8514 transformations that are common to the equality/inequality
8515 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8516 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8517 fold_binary should use fold_binary, not this routine. Fold a comparison with
8518 tree code CODE and type TYPE with operands OP0 and OP1. Return
8519 the folded comparison or NULL_TREE. */
8521 static tree
8522 fold_comparison (location_t loc, enum tree_code code, tree type,
8523 tree op0, tree op1)
8525 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8526 tree arg0, arg1, tem;
8528 arg0 = op0;
8529 arg1 = op1;
8531 STRIP_SIGN_NOPS (arg0);
8532 STRIP_SIGN_NOPS (arg1);
8534 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8535 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8536 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8537 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8538 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8539 && TREE_CODE (arg1) == INTEGER_CST
8540 && !TREE_OVERFLOW (arg1))
8542 const enum tree_code
8543 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8544 tree const1 = TREE_OPERAND (arg0, 1);
8545 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8546 tree variable = TREE_OPERAND (arg0, 0);
8547 tree new_const = int_const_binop (reverse_op, const2, const1);
8549 /* If the constant operation overflowed this can be
8550 simplified as a comparison against INT_MAX/INT_MIN. */
8551 if (TREE_OVERFLOW (new_const)
8552 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8554 int const1_sgn = tree_int_cst_sgn (const1);
8555 enum tree_code code2 = code;
8557 /* Get the sign of the constant on the lhs if the
8558 operation were VARIABLE + CONST1. */
8559 if (TREE_CODE (arg0) == MINUS_EXPR)
8560 const1_sgn = -const1_sgn;
8562 /* The sign of the constant determines if we overflowed
8563 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8564 Canonicalize to the INT_MIN overflow by swapping the comparison
8565 if necessary. */
8566 if (const1_sgn == -1)
8567 code2 = swap_tree_comparison (code);
8569 /* We can now look at the canonicalized case
8570 VARIABLE + 1 CODE2 INT_MIN
8571 and decide on the result. */
8572 switch (code2)
8574 case EQ_EXPR:
8575 case LT_EXPR:
8576 case LE_EXPR:
8577 return
8578 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8580 case NE_EXPR:
8581 case GE_EXPR:
8582 case GT_EXPR:
8583 return
8584 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8586 default:
8587 gcc_unreachable ();
8590 else
8592 if (!equality_code)
8593 fold_overflow_warning ("assuming signed overflow does not occur "
8594 "when changing X +- C1 cmp C2 to "
8595 "X cmp C2 -+ C1",
8596 WARN_STRICT_OVERFLOW_COMPARISON);
8597 return fold_build2_loc (loc, code, type, variable, new_const);
8601 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8602 if (TREE_CODE (arg0) == MINUS_EXPR
8603 && equality_code
8604 && integer_zerop (arg1))
8606 /* ??? The transformation is valid for the other operators if overflow
8607 is undefined for the type, but performing it here badly interacts
8608 with the transformation in fold_cond_expr_with_comparison which
8609 attempts to synthesize ABS_EXPR. */
8610 if (!equality_code)
8611 fold_overflow_warning ("assuming signed overflow does not occur "
8612 "when changing X - Y cmp 0 to X cmp Y",
8613 WARN_STRICT_OVERFLOW_COMPARISON);
8614 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8615 TREE_OPERAND (arg0, 1));
8618 /* For comparisons of pointers we can decompose it to a compile time
8619 comparison of the base objects and the offsets into the object.
8620 This requires at least one operand being an ADDR_EXPR or a
8621 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8622 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8623 && (TREE_CODE (arg0) == ADDR_EXPR
8624 || TREE_CODE (arg1) == ADDR_EXPR
8625 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8626 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8628 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8629 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8630 machine_mode mode;
8631 int volatilep, unsignedp;
8632 bool indirect_base0 = false, indirect_base1 = false;
8634 /* Get base and offset for the access. Strip ADDR_EXPR for
8635 get_inner_reference, but put it back by stripping INDIRECT_REF
8636 off the base object if possible. indirect_baseN will be true
8637 if baseN is not an address but refers to the object itself. */
8638 base0 = arg0;
8639 if (TREE_CODE (arg0) == ADDR_EXPR)
8641 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8642 &bitsize, &bitpos0, &offset0, &mode,
8643 &unsignedp, &volatilep, false);
8644 if (TREE_CODE (base0) == INDIRECT_REF)
8645 base0 = TREE_OPERAND (base0, 0);
8646 else
8647 indirect_base0 = true;
8649 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8651 base0 = TREE_OPERAND (arg0, 0);
8652 STRIP_SIGN_NOPS (base0);
8653 if (TREE_CODE (base0) == ADDR_EXPR)
8655 base0 = TREE_OPERAND (base0, 0);
8656 indirect_base0 = true;
8658 offset0 = TREE_OPERAND (arg0, 1);
8659 if (tree_fits_shwi_p (offset0))
8661 HOST_WIDE_INT off = size_low_cst (offset0);
8662 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8663 * BITS_PER_UNIT)
8664 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8666 bitpos0 = off * BITS_PER_UNIT;
8667 offset0 = NULL_TREE;
8672 base1 = arg1;
8673 if (TREE_CODE (arg1) == ADDR_EXPR)
8675 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8676 &bitsize, &bitpos1, &offset1, &mode,
8677 &unsignedp, &volatilep, false);
8678 if (TREE_CODE (base1) == INDIRECT_REF)
8679 base1 = TREE_OPERAND (base1, 0);
8680 else
8681 indirect_base1 = true;
8683 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8685 base1 = TREE_OPERAND (arg1, 0);
8686 STRIP_SIGN_NOPS (base1);
8687 if (TREE_CODE (base1) == ADDR_EXPR)
8689 base1 = TREE_OPERAND (base1, 0);
8690 indirect_base1 = true;
8692 offset1 = TREE_OPERAND (arg1, 1);
8693 if (tree_fits_shwi_p (offset1))
8695 HOST_WIDE_INT off = size_low_cst (offset1);
8696 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8697 * BITS_PER_UNIT)
8698 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8700 bitpos1 = off * BITS_PER_UNIT;
8701 offset1 = NULL_TREE;
8706 /* A local variable can never be pointed to by
8707 the default SSA name of an incoming parameter. */
8708 if ((TREE_CODE (arg0) == ADDR_EXPR
8709 && indirect_base0
8710 && TREE_CODE (base0) == VAR_DECL
8711 && auto_var_in_fn_p (base0, current_function_decl)
8712 && !indirect_base1
8713 && TREE_CODE (base1) == SSA_NAME
8714 && SSA_NAME_IS_DEFAULT_DEF (base1)
8715 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8716 || (TREE_CODE (arg1) == ADDR_EXPR
8717 && indirect_base1
8718 && TREE_CODE (base1) == VAR_DECL
8719 && auto_var_in_fn_p (base1, current_function_decl)
8720 && !indirect_base0
8721 && TREE_CODE (base0) == SSA_NAME
8722 && SSA_NAME_IS_DEFAULT_DEF (base0)
8723 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8725 if (code == NE_EXPR)
8726 return constant_boolean_node (1, type);
8727 else if (code == EQ_EXPR)
8728 return constant_boolean_node (0, type);
8730 /* If we have equivalent bases we might be able to simplify. */
8731 else if (indirect_base0 == indirect_base1
8732 && operand_equal_p (base0, base1, 0))
8734 /* We can fold this expression to a constant if the non-constant
8735 offset parts are equal. */
8736 if ((offset0 == offset1
8737 || (offset0 && offset1
8738 && operand_equal_p (offset0, offset1, 0)))
8739 && (code == EQ_EXPR
8740 || code == NE_EXPR
8741 || (indirect_base0 && DECL_P (base0))
8742 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8745 if (!equality_code
8746 && bitpos0 != bitpos1
8747 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8748 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8749 fold_overflow_warning (("assuming pointer wraparound does not "
8750 "occur when comparing P +- C1 with "
8751 "P +- C2"),
8752 WARN_STRICT_OVERFLOW_CONDITIONAL);
8754 switch (code)
8756 case EQ_EXPR:
8757 return constant_boolean_node (bitpos0 == bitpos1, type);
8758 case NE_EXPR:
8759 return constant_boolean_node (bitpos0 != bitpos1, type);
8760 case LT_EXPR:
8761 return constant_boolean_node (bitpos0 < bitpos1, type);
8762 case LE_EXPR:
8763 return constant_boolean_node (bitpos0 <= bitpos1, type);
8764 case GE_EXPR:
8765 return constant_boolean_node (bitpos0 >= bitpos1, type);
8766 case GT_EXPR:
8767 return constant_boolean_node (bitpos0 > bitpos1, type);
8768 default:;
8771 /* We can simplify the comparison to a comparison of the variable
8772 offset parts if the constant offset parts are equal.
8773 Be careful to use signed sizetype here because otherwise we
8774 mess with array offsets in the wrong way. This is possible
8775 because pointer arithmetic is restricted to remain within an
8776 object and overflow on pointer differences is undefined as of
8777 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8778 else if (bitpos0 == bitpos1
8779 && (equality_code
8780 || (indirect_base0 && DECL_P (base0))
8781 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8783 /* By converting to signed sizetype we cover middle-end pointer
8784 arithmetic, which operates on unsigned pointer types of
8785 sizetype width, and ARRAY_REF offsets, which are properly
8786 sign- or zero-extended from their type in case it is narrower
8787 than sizetype. */
8788 if (offset0 == NULL_TREE)
8789 offset0 = build_int_cst (ssizetype, 0);
8790 else
8791 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8792 if (offset1 == NULL_TREE)
8793 offset1 = build_int_cst (ssizetype, 0);
8794 else
8795 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8797 if (!equality_code
8798 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8799 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8800 fold_overflow_warning (("assuming pointer wraparound does not "
8801 "occur when comparing P +- C1 with "
8802 "P +- C2"),
8803 WARN_STRICT_OVERFLOW_COMPARISON);
8805 return fold_build2_loc (loc, code, type, offset0, offset1);
8808 /* For non-equal bases we can simplify if they are addresses
8809 of local binding decls or constants. */
8810 else if (indirect_base0 && indirect_base1
8811 /* We know that !operand_equal_p (base0, base1, 0)
8812 because the if condition was false. But make
8813 sure the two decls are not the same. */
8814 && base0 != base1
8815 && TREE_CODE (arg0) == ADDR_EXPR
8816 && TREE_CODE (arg1) == ADDR_EXPR
8817 && (((TREE_CODE (base0) == VAR_DECL
8818 || TREE_CODE (base0) == PARM_DECL)
8819 && (targetm.binds_local_p (base0)
8820 || CONSTANT_CLASS_P (base1)))
8821 || CONSTANT_CLASS_P (base0))
8822 && (((TREE_CODE (base1) == VAR_DECL
8823 || TREE_CODE (base1) == PARM_DECL)
8824 && (targetm.binds_local_p (base1)
8825 || CONSTANT_CLASS_P (base0)))
8826 || CONSTANT_CLASS_P (base1)))
8828 if (code == EQ_EXPR)
8829 return omit_two_operands_loc (loc, type, boolean_false_node,
8830 arg0, arg1);
8831 else if (code == NE_EXPR)
8832 return omit_two_operands_loc (loc, type, boolean_true_node,
8833 arg0, arg1);
8835 /* For equal offsets we can simplify to a comparison of the
8836 base addresses. */
8837 else if (bitpos0 == bitpos1
8838 && (indirect_base0
8839 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8840 && (indirect_base1
8841 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8842 && ((offset0 == offset1)
8843 || (offset0 && offset1
8844 && operand_equal_p (offset0, offset1, 0))))
8846 if (indirect_base0)
8847 base0 = build_fold_addr_expr_loc (loc, base0);
8848 if (indirect_base1)
8849 base1 = build_fold_addr_expr_loc (loc, base1);
8850 return fold_build2_loc (loc, code, type, base0, base1);
8854 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8855 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8856 the resulting offset is smaller in absolute value than the
8857 original one and has the same sign. */
8858 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8859 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8860 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8861 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8862 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8863 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8864 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8866 tree const1 = TREE_OPERAND (arg0, 1);
8867 tree const2 = TREE_OPERAND (arg1, 1);
8868 tree variable1 = TREE_OPERAND (arg0, 0);
8869 tree variable2 = TREE_OPERAND (arg1, 0);
8870 tree cst;
8871 const char * const warnmsg = G_("assuming signed overflow does not "
8872 "occur when combining constants around "
8873 "a comparison");
8875 /* Put the constant on the side where it doesn't overflow and is
8876 of lower absolute value and of the same sign as before. */
8877 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8878 ? MINUS_EXPR : PLUS_EXPR,
8879 const2, const1);
8880 if (!TREE_OVERFLOW (cst)
8881 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8882 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8884 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8885 return fold_build2_loc (loc, code, type,
8886 variable1,
8887 fold_build2_loc (loc, TREE_CODE (arg1),
8888 TREE_TYPE (arg1),
8889 variable2, cst));
8892 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8893 ? MINUS_EXPR : PLUS_EXPR,
8894 const1, const2);
8895 if (!TREE_OVERFLOW (cst)
8896 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8897 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8899 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8900 return fold_build2_loc (loc, code, type,
8901 fold_build2_loc (loc, TREE_CODE (arg0),
8902 TREE_TYPE (arg0),
8903 variable1, cst),
8904 variable2);
8908 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8909 signed arithmetic case. That form is created by the compiler
8910 often enough for folding it to be of value. One example is in
8911 computing loop trip counts after Operator Strength Reduction. */
8912 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8913 && TREE_CODE (arg0) == MULT_EXPR
8914 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8915 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8916 && integer_zerop (arg1))
8918 tree const1 = TREE_OPERAND (arg0, 1);
8919 tree const2 = arg1; /* zero */
8920 tree variable1 = TREE_OPERAND (arg0, 0);
8921 enum tree_code cmp_code = code;
8923 /* Handle unfolded multiplication by zero. */
8924 if (integer_zerop (const1))
8925 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8927 fold_overflow_warning (("assuming signed overflow does not occur when "
8928 "eliminating multiplication in comparison "
8929 "with zero"),
8930 WARN_STRICT_OVERFLOW_COMPARISON);
8932 /* If const1 is negative we swap the sense of the comparison. */
8933 if (tree_int_cst_sgn (const1) < 0)
8934 cmp_code = swap_tree_comparison (cmp_code);
8936 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
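/* Illustrative example (signed x with undefined overflow):
   "x * 4 > 0" folds to "x > 0", while "x * -4 > 0" swaps the
   comparison and folds to "x < 0".  An unfolded "x * 0 > 0" is
   handled first and reduces to "0 > 0".  */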
8939 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8940 if (tem)
8941 return tem;
8943 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8945 tree targ0 = strip_float_extensions (arg0);
8946 tree targ1 = strip_float_extensions (arg1);
8947 tree newtype = TREE_TYPE (targ0);
8949 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8950 newtype = TREE_TYPE (targ1);
8952 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8953 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8954 return fold_build2_loc (loc, code, type,
8955 fold_convert_loc (loc, newtype, targ0),
8956 fold_convert_loc (loc, newtype, targ1));
8958 /* (-a) CMP (-b) -> b CMP a */
8959 if (TREE_CODE (arg0) == NEGATE_EXPR
8960 && TREE_CODE (arg1) == NEGATE_EXPR)
8961 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8962 TREE_OPERAND (arg0, 0));
8964 if (TREE_CODE (arg1) == REAL_CST)
8966 REAL_VALUE_TYPE cst;
8967 cst = TREE_REAL_CST (arg1);
8969 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8970 if (TREE_CODE (arg0) == NEGATE_EXPR)
8971 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8972 TREE_OPERAND (arg0, 0),
8973 build_real (TREE_TYPE (arg1),
8974 real_value_negate (&cst)));
8976 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8977 /* a CMP (-0) -> a CMP 0 */
8978 if (REAL_VALUE_MINUS_ZERO (cst))
8979 return fold_build2_loc (loc, code, type, arg0,
8980 build_real (TREE_TYPE (arg1), dconst0));
8982 /* x != NaN is always true, other ops are always false. */
8983 if (REAL_VALUE_ISNAN (cst)
8984 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8986 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8987 return omit_one_operand_loc (loc, type, tem, arg0);
8990 /* Fold comparisons against infinity. */
8991 if (REAL_VALUE_ISINF (cst)
8992 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8994 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8995 if (tem != NULL_TREE)
8996 return tem;
9000 /* If this is a comparison of a real constant with a PLUS_EXPR
9001 or a MINUS_EXPR of a real constant, we can convert it into a
9002 comparison with a revised real constant, provided that
9003 unsafe_math_optimizations are enabled and no overflow occurs. */
9004 if (flag_unsafe_math_optimizations
9005 && TREE_CODE (arg1) == REAL_CST
9006 && (TREE_CODE (arg0) == PLUS_EXPR
9007 || TREE_CODE (arg0) == MINUS_EXPR)
9008 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9009 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9010 ? MINUS_EXPR : PLUS_EXPR,
9011 arg1, TREE_OPERAND (arg0, 1)))
9012 && !TREE_OVERFLOW (tem))
9013 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9015 /* Likewise, we can simplify a comparison of a real constant with
9016 a MINUS_EXPR whose first operand is also a real constant, i.e.
9017 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9018 floating-point types only if -fassociative-math is set. */
9019 if (flag_associative_math
9020 && TREE_CODE (arg1) == REAL_CST
9021 && TREE_CODE (arg0) == MINUS_EXPR
9022 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9023 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9024 arg1))
9025 && !TREE_OVERFLOW (tem))
9026 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9027 TREE_OPERAND (arg0, 1), tem);
9029 /* Fold comparisons against built-in math functions. */
9030 if (TREE_CODE (arg1) == REAL_CST
9031 && flag_unsafe_math_optimizations
9032 && ! flag_errno_math)
9034 enum built_in_function fcode = builtin_mathfn_code (arg0);
9036 if (fcode != END_BUILTINS)
9038 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9039 if (tem != NULL_TREE)
9040 return tem;
9045 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9046 && CONVERT_EXPR_P (arg0))
9048 /* If we are widening one operand of an integer comparison,
9049 see if the other operand is similarly being widened. Perhaps we
9050 can do the comparison in the narrower type. */
9051 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9052 if (tem)
9053 return tem;
9055 /* Or if we are changing signedness. */
9056 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9057 if (tem)
9058 return tem;
9061 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9062 constant, we can simplify it. */
9063 if (TREE_CODE (arg1) == INTEGER_CST
9064 && (TREE_CODE (arg0) == MIN_EXPR
9065 || TREE_CODE (arg0) == MAX_EXPR)
9066 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9068 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9069 if (tem)
9070 return tem;
9073 /* Simplify comparison of something with itself. (For IEEE
9074 floating-point, we can only do some of these simplifications.) */
9075 if (operand_equal_p (arg0, arg1, 0))
9077 switch (code)
9079 case EQ_EXPR:
9080 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9081 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9082 return constant_boolean_node (1, type);
9083 break;
9085 case GE_EXPR:
9086 case LE_EXPR:
9087 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9088 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9089 return constant_boolean_node (1, type);
9090 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9092 case NE_EXPR:
9093 /* For NE, we can only do this simplification if the operands are
9094 integral or we don't honor IEEE floating-point NaNs. */
9095 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9096 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9097 break;
9098 /* ... fall through ... */
9099 case GT_EXPR:
9100 case LT_EXPR:
9101 return constant_boolean_node (0, type);
9102 default:
9103 gcc_unreachable ();
9107 /* If we are comparing an expression that just has comparisons
9108 of two integer values, arithmetic expressions of those comparisons,
9109 and constants, we can simplify it. There are only three cases
9110 to check: the two values can either be equal, the first can be
9111 greater, or the second can be greater. Fold the expression for
9112 those three values. Since each value must be 0 or 1, we have
9113 eight possibilities, each of which corresponds to the constant 0
9114 or 1 or one of the six possible comparisons.
9116 This handles common cases like (a > b) == 0 but also handles
9117 expressions like ((x > y) - (y > x)) > 0, which supposedly
9118 occur in macroized code. */
9120 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9122 tree cval1 = 0, cval2 = 0;
9123 int save_p = 0;
9125 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9126 /* Don't handle degenerate cases here; they should already
9127 have been handled anyway. */
9128 && cval1 != 0 && cval2 != 0
9129 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9130 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9131 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9132 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9133 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9134 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9135 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9137 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9138 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9140 /* We can't just pass T to eval_subst in case cval1 or cval2
9141 was the same as ARG1. */
9143 tree high_result
9144 = fold_build2_loc (loc, code, type,
9145 eval_subst (loc, arg0, cval1, maxval,
9146 cval2, minval),
9147 arg1);
9148 tree equal_result
9149 = fold_build2_loc (loc, code, type,
9150 eval_subst (loc, arg0, cval1, maxval,
9151 cval2, maxval),
9152 arg1);
9153 tree low_result
9154 = fold_build2_loc (loc, code, type,
9155 eval_subst (loc, arg0, cval1, minval,
9156 cval2, maxval),
9157 arg1);
9159 /* All three of these results should be 0 or 1. Confirm they are.
9160 Then use those values to select the proper code to use. */
9162 if (TREE_CODE (high_result) == INTEGER_CST
9163 && TREE_CODE (equal_result) == INTEGER_CST
9164 && TREE_CODE (low_result) == INTEGER_CST)
9166 /* Make a 3-bit mask with the high-order bit being the
9167 value for `>', the next for `=', and the low for `<'. */
9168 switch ((integer_onep (high_result) * 4)
9169 + (integer_onep (equal_result) * 2)
9170 + integer_onep (low_result))
9172 case 0:
9173 /* Always false. */
9174 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9175 case 1:
9176 code = LT_EXPR;
9177 break;
9178 case 2:
9179 code = EQ_EXPR;
9180 break;
9181 case 3:
9182 code = LE_EXPR;
9183 break;
9184 case 4:
9185 code = GT_EXPR;
9186 break;
9187 case 5:
9188 code = NE_EXPR;
9189 break;
9190 case 6:
9191 code = GE_EXPR;
9192 break;
9193 case 7:
9194 /* Always true. */
9195 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9198 if (save_p)
9200 tem = save_expr (build2 (code, type, cval1, cval2));
9201 SET_EXPR_LOCATION (tem, loc);
9202 return tem;
9204 return fold_build2_loc (loc, code, type, cval1, cval2);
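/* Illustrative example: for "(a > b) == 0" the three probes give
   high_result 0, equal_result 1 and low_result 1, i.e. mask 011 = 3,
   so the whole expression is refolded as "a <= b".  */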
9209 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9210 into a single range test. */
9211 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9212 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9213 && TREE_CODE (arg1) == INTEGER_CST
9214 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9215 && !integer_zerop (TREE_OPERAND (arg0, 1))
9216 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9217 && !TREE_OVERFLOW (arg1))
9219 tem = fold_div_compare (loc, code, type, arg0, arg1);
9220 if (tem != NULL_TREE)
9221 return tem;
9224 /* Fold ~X op ~Y as Y op X. */
9225 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9226 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9228 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9229 return fold_build2_loc (loc, code, type,
9230 fold_convert_loc (loc, cmp_type,
9231 TREE_OPERAND (arg1, 0)),
9232 TREE_OPERAND (arg0, 0));
9235 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9236 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9237 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9239 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9240 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9241 TREE_OPERAND (arg0, 0),
9242 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9243 fold_convert_loc (loc, cmp_type, arg1)));
9246 return NULL_TREE;
9250 /* Subroutine of fold_binary. Optimize complex multiplications of the
9251 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9252 argument EXPR represents the expression "z" of type TYPE. */
9254 static tree
9255 fold_mult_zconjz (location_t loc, tree type, tree expr)
9257 tree itype = TREE_TYPE (type);
9258 tree rpart, ipart, tem;
9260 if (TREE_CODE (expr) == COMPLEX_EXPR)
9262 rpart = TREE_OPERAND (expr, 0);
9263 ipart = TREE_OPERAND (expr, 1);
9265 else if (TREE_CODE (expr) == COMPLEX_CST)
9267 rpart = TREE_REALPART (expr);
9268 ipart = TREE_IMAGPART (expr);
9270 else
9272 expr = save_expr (expr);
9273 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9274 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9277 rpart = save_expr (rpart);
9278 ipart = save_expr (ipart);
9279 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9280 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9281 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9282 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9283 build_zero_cst (itype));
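/* Illustrative example: for a complex value z with real part a and
   imaginary part b, "z * conj(z)" becomes
   COMPLEX_EXPR <a*a + b*b, 0>, avoiding the general complex
   multiplication.  */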
9287 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9288 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9289 guarantees that P and N have the same least significant log2(M) bits.
9290 N is not otherwise constrained. In particular, N is not normalized to
9291 0 <= N < M as is common. In general, the precise value of P is unknown.
9292 M is chosen as large as possible such that constant N can be determined.
9294 Returns M and sets *RESIDUE to N.
9296 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9297 account. This is not always possible due to PR 35705.
9300 static unsigned HOST_WIDE_INT
9301 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9302 bool allow_func_align)
9304 enum tree_code code;
9306 *residue = 0;
9308 code = TREE_CODE (expr);
9309 if (code == ADDR_EXPR)
9311 unsigned int bitalign;
9312 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9313 *residue /= BITS_PER_UNIT;
9314 return bitalign / BITS_PER_UNIT;
9316 else if (code == POINTER_PLUS_EXPR)
9318 tree op0, op1;
9319 unsigned HOST_WIDE_INT modulus;
9320 enum tree_code inner_code;
9322 op0 = TREE_OPERAND (expr, 0);
9323 STRIP_NOPS (op0);
9324 modulus = get_pointer_modulus_and_residue (op0, residue,
9325 allow_func_align);
9327 op1 = TREE_OPERAND (expr, 1);
9328 STRIP_NOPS (op1);
9329 inner_code = TREE_CODE (op1);
9330 if (inner_code == INTEGER_CST)
9332 *residue += TREE_INT_CST_LOW (op1);
9333 return modulus;
9335 else if (inner_code == MULT_EXPR)
9337 op1 = TREE_OPERAND (op1, 1);
9338 if (TREE_CODE (op1) == INTEGER_CST)
9340 unsigned HOST_WIDE_INT align;
9342 /* Compute the greatest power-of-2 divisor of op1. */
9343 align = TREE_INT_CST_LOW (op1);
9344 align &= -align;
9346 /* If align is non-zero and less than modulus, replace
9347 modulus with align. If align is 0, then either op1 is 0
9348 or the greatest power-of-2 divisor of op1 doesn't fit in an
9349 unsigned HOST_WIDE_INT. In either case, no additional
9350 constraint is imposed. */
9351 if (align)
9352 modulus = MIN (modulus, align);
9354 return modulus;
9359 /* If we get here, we were unable to determine anything useful about the
9360 expression. */
9361 return 1;
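/* Worked example (assuming a 16-byte-aligned object "buf"):
   for "&buf p+ i * 8" the ADDR_EXPR gives modulus 16 with residue 0,
   and the MULT_EXPR contributes its largest power-of-2 divisor 8,
   so the result is modulus 8, *RESIDUE 0: the pointer is known to be
   8-byte aligned.  */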
9364 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9365 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9367 static bool
9368 vec_cst_ctor_to_array (tree arg, tree *elts)
9370 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9372 if (TREE_CODE (arg) == VECTOR_CST)
9374 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9375 elts[i] = VECTOR_CST_ELT (arg, i);
9377 else if (TREE_CODE (arg) == CONSTRUCTOR)
9379 constructor_elt *elt;
9381 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9382 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9383 return false;
9384 else
9385 elts[i] = elt->value;
9387 else
9388 return false;
9389 for (; i < nelts; i++)
9390 elts[i]
9391 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9392 return true;
9395 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9396 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9397 NULL_TREE otherwise. */
9399 static tree
9400 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9402 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9403 tree *elts;
9404 bool need_ctor = false;
9406 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9407 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9408 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9409 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9410 return NULL_TREE;
9412 elts = XALLOCAVEC (tree, nelts * 3);
9413 if (!vec_cst_ctor_to_array (arg0, elts)
9414 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9415 return NULL_TREE;
9417 for (i = 0; i < nelts; i++)
9419 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9420 need_ctor = true;
9421 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9424 if (need_ctor)
9426 vec<constructor_elt, va_gc> *v;
9427 vec_alloc (v, nelts);
9428 for (i = 0; i < nelts; i++)
9429 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9430 return build_constructor (type, v);
9432 else
9433 return build_vector (type, &elts[2 * nelts]);
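/* Illustrative example: with nelts == 4, ARG0 = {a,b,c,d},
   ARG1 = {e,f,g,h} and SEL = {0,4,1,5}, the result is the low-half
   interleave {a,e,b,f}; a VECTOR_CST is built when all selected
   elements are constants, a CONSTRUCTOR otherwise.  */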
9436 /* Try to fold a pointer difference of type TYPE between two address
9437 expressions of array references AREF0 and AREF1 using location LOC.
9438 Return a simplified expression for the difference or NULL_TREE. */
9440 static tree
9441 fold_addr_of_array_ref_difference (location_t loc, tree type,
9442 tree aref0, tree aref1)
9444 tree base0 = TREE_OPERAND (aref0, 0);
9445 tree base1 = TREE_OPERAND (aref1, 0);
9446 tree base_offset = build_int_cst (type, 0);
9448 /* If the bases are array references as well, recurse. If the bases
9449 are pointer indirections compute the difference of the pointers.
9450 If the bases are equal, we are set. */
9451 if ((TREE_CODE (base0) == ARRAY_REF
9452 && TREE_CODE (base1) == ARRAY_REF
9453 && (base_offset
9454 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9455 || (INDIRECT_REF_P (base0)
9456 && INDIRECT_REF_P (base1)
9457 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9458 TREE_OPERAND (base0, 0),
9459 TREE_OPERAND (base1, 0))))
9460 || operand_equal_p (base0, base1, 0))
9462 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9463 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9464 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9465 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9466 return fold_build2_loc (loc, PLUS_EXPR, type,
9467 base_offset,
9468 fold_build2_loc (loc, MULT_EXPR, type,
9469 diff, esz));
9471 return NULL_TREE;
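/* Illustrative example (assuming int a[] with 4-byte elements):
   "&a[i] - &a[j]" folds to "(i - j) * 4".  For nested references
   such as "&a[i][1] - &a[j][0]" the recursion folds the row bases
   first and adds the inner index difference on top.  */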
9474 /* If the real or vector real constant CST of type TYPE has an exact
9475 inverse, return it, else return NULL. */
9477 static tree
9478 exact_inverse (tree type, tree cst)
9480 REAL_VALUE_TYPE r;
9481 tree unit_type, *elts;
9482 machine_mode mode;
9483 unsigned vec_nelts, i;
9485 switch (TREE_CODE (cst))
9487 case REAL_CST:
9488 r = TREE_REAL_CST (cst);
9490 if (exact_real_inverse (TYPE_MODE (type), &r))
9491 return build_real (type, r);
9493 return NULL_TREE;
9495 case VECTOR_CST:
9496 vec_nelts = VECTOR_CST_NELTS (cst);
9497 elts = XALLOCAVEC (tree, vec_nelts);
9498 unit_type = TREE_TYPE (type);
9499 mode = TYPE_MODE (unit_type);
9501 for (i = 0; i < vec_nelts; i++)
9503 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9504 if (!exact_real_inverse (mode, &r))
9505 return NULL_TREE;
9506 elts[i] = build_real (unit_type, r);
9509 return build_vector (type, elts);
9511 default:
9512 return NULL_TREE;
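/* Illustrative example: 4.0 yields the exact inverse 0.25, while 3.0
   yields NULL_TREE because 1/3 is not exactly representable; the
   VECTOR_CST {2.0, 0.5} maps elementwise to {0.5, 2.0}.  */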
9516 /* Mask out the tz least significant bits of X of type TYPE where
9517 tz is the number of trailing zeroes in Y. */
9518 static wide_int
9519 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9521 int tz = wi::ctz (y);
9522 if (tz > 0)
9523 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9524 return x;
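/* Worked example: Y = 24 (binary 11000) has three trailing zeroes,
   so the result is X & ~7, i.e. X with its low 3 bits cleared; for
   odd Y (tz == 0), X is returned unchanged.  */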
9527 /* Return true when T is an address and is known to be nonzero.
9528 For floating point we further ensure that T is not denormal.
9529 Similar logic is present in nonzero_address in rtlanal.h.
9531 If the return value is based on the assumption that signed overflow
9532 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9533 change *STRICT_OVERFLOW_P. */
9535 static bool
9536 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9538 tree type = TREE_TYPE (t);
9539 enum tree_code code;
9541 /* Doing something useful for floating point would need more work. */
9542 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9543 return false;
9545 code = TREE_CODE (t);
9546 switch (TREE_CODE_CLASS (code))
9548 case tcc_unary:
9549 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9550 strict_overflow_p);
9551 case tcc_binary:
9552 case tcc_comparison:
9553 return tree_binary_nonzero_warnv_p (code, type,
9554 TREE_OPERAND (t, 0),
9555 TREE_OPERAND (t, 1),
9556 strict_overflow_p);
9557 case tcc_constant:
9558 case tcc_declaration:
9559 case tcc_reference:
9560 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9562 default:
9563 break;
9566 switch (code)
9568 case TRUTH_NOT_EXPR:
9569 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9570 strict_overflow_p);
9572 case TRUTH_AND_EXPR:
9573 case TRUTH_OR_EXPR:
9574 case TRUTH_XOR_EXPR:
9575 return tree_binary_nonzero_warnv_p (code, type,
9576 TREE_OPERAND (t, 0),
9577 TREE_OPERAND (t, 1),
9578 strict_overflow_p);
9580 case COND_EXPR:
9581 case CONSTRUCTOR:
9582 case OBJ_TYPE_REF:
9583 case ASSERT_EXPR:
9584 case ADDR_EXPR:
9585 case WITH_SIZE_EXPR:
9586 case SSA_NAME:
9587 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9589 case COMPOUND_EXPR:
9590 case MODIFY_EXPR:
9591 case BIND_EXPR:
9592 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9593 strict_overflow_p);
9595 case SAVE_EXPR:
9596 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9597 strict_overflow_p);
9599 case CALL_EXPR:
9601 tree fndecl = get_callee_fndecl (t);
9602 if (!fndecl) return false;
9603 if (flag_delete_null_pointer_checks && !flag_check_new
9604 && DECL_IS_OPERATOR_NEW (fndecl)
9605 && !TREE_NOTHROW (fndecl))
9606 return true;
9607 if (flag_delete_null_pointer_checks
9608 && lookup_attribute ("returns_nonnull",
9609 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9610 return true;
9611 return alloca_call_p (t);
9614 default:
9615 break;
9617 return false;
9620 /* Return true when T is an address and is known to be nonzero.
9621 Handle warnings about undefined signed overflow. */
9623 static bool
9624 tree_expr_nonzero_p (tree t)
9626 bool ret, strict_overflow_p;
9628 strict_overflow_p = false;
9629 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9630 if (strict_overflow_p)
9631 fold_overflow_warning (("assuming signed overflow does not occur when "
9632 "determining that expression is always "
9633 "non-zero"),
9634 WARN_STRICT_OVERFLOW_MISC);
9635 return ret;
9638 /* Fold a binary expression of code CODE and type TYPE with operands
9639 OP0 and OP1. LOC is the location of the resulting expression.
9640 Return the folded expression if folding is successful. Otherwise,
9641 return NULL_TREE. */
9643 tree
9644 fold_binary_loc (location_t loc,
9645 enum tree_code code, tree type, tree op0, tree op1)
9647 enum tree_code_class kind = TREE_CODE_CLASS (code);
9648 tree arg0, arg1, tem;
9649 tree t1 = NULL_TREE;
9650 bool strict_overflow_p;
9651 unsigned int prec;
9653 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9654 && TREE_CODE_LENGTH (code) == 2
9655 && op0 != NULL_TREE
9656 && op1 != NULL_TREE);
9658 arg0 = op0;
9659 arg1 = op1;
9661 /* Strip any conversions that don't change the mode. This is
9662 safe for every expression, except for a comparison expression
9663 because its signedness is derived from its operands. So, in
9664 the latter case, only strip conversions that don't change the
9665 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9666 preserved.
9668 Note that this is done as an internal manipulation within the
9669 constant folder, in order to find the simplest representation
9670 of the arguments so that their form can be studied. In any
9671 case, the appropriate type conversions should be put back in
9672 the tree that will get out of the constant folder. */
9674 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9676 STRIP_SIGN_NOPS (arg0);
9677 STRIP_SIGN_NOPS (arg1);
9679 else
9681 STRIP_NOPS (arg0);
9682 STRIP_NOPS (arg1);
9685 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9686 constant but we can't do arithmetic on them. */
9687 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9688 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9689 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9690 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9691 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9692 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9693 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9695 if (kind == tcc_binary)
9697 /* Make sure type and arg0 have the same saturating flag. */
9698 gcc_assert (TYPE_SATURATING (type)
9699 == TYPE_SATURATING (TREE_TYPE (arg0)));
9700 tem = const_binop (code, arg0, arg1);
9702 else if (kind == tcc_comparison)
9703 tem = fold_relational_const (code, type, arg0, arg1);
9704 else
9705 tem = NULL_TREE;
9707 if (tem != NULL_TREE)
9709 if (TREE_TYPE (tem) != type)
9710 tem = fold_convert_loc (loc, type, tem);
9711 return tem;
9715 /* If this is a commutative operation, and ARG0 is a constant, move it
9716 to ARG1 to reduce the number of tests below. */
9717 if (commutative_tree_code (code)
9718 && tree_swap_operands_p (arg0, arg1, true))
9719 return fold_build2_loc (loc, code, type, op1, op0);
9721 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9722 to ARG1 to reduce the number of tests below. */
9723 if (kind == tcc_comparison
9724 && tree_swap_operands_p (arg0, arg1, true))
9725 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9727 tem = generic_simplify (loc, code, type, op0, op1);
9728 if (tem)
9729 return tem;
9731 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9733 First check for cases where an arithmetic operation is applied to a
9734 compound, conditional, or comparison operation. Push the arithmetic
9735 operation inside the compound or conditional to see if any folding
9736 can then be done. Convert comparison to conditional for this purpose.
9737 This also optimizes non-constant cases that used to be done in
9738 expand_expr.
9740 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9741 where one operand is a comparison and the other is a comparison, a
9742 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9743 code below would make the expression more complex. Change it to a
9744 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9745 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9747 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9748 || code == EQ_EXPR || code == NE_EXPR)
9749 && TREE_CODE (type) != VECTOR_TYPE
9750 && ((truth_value_p (TREE_CODE (arg0))
9751 && (truth_value_p (TREE_CODE (arg1))
9752 || (TREE_CODE (arg1) == BIT_AND_EXPR
9753 && integer_onep (TREE_OPERAND (arg1, 1)))))
9754 || (truth_value_p (TREE_CODE (arg1))
9755 && (truth_value_p (TREE_CODE (arg0))
9756 || (TREE_CODE (arg0) == BIT_AND_EXPR
9757 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9759 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9760 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9761 : TRUTH_XOR_EXPR,
9762 boolean_type_node,
9763 fold_convert_loc (loc, boolean_type_node, arg0),
9764 fold_convert_loc (loc, boolean_type_node, arg1));
9766 if (code == EQ_EXPR)
9767 tem = invert_truthvalue_loc (loc, tem);
9769 return fold_convert_loc (loc, type, tem);
9772 if (TREE_CODE_CLASS (code) == tcc_binary
9773 || TREE_CODE_CLASS (code) == tcc_comparison)
9775 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9777 tem = fold_build2_loc (loc, code, type,
9778 fold_convert_loc (loc, TREE_TYPE (op0),
9779 TREE_OPERAND (arg0, 1)), op1);
9780 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9781 tem);
9783 if (TREE_CODE (arg1) == COMPOUND_EXPR
9784 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9786 tem = fold_build2_loc (loc, code, type, op0,
9787 fold_convert_loc (loc, TREE_TYPE (op1),
9788 TREE_OPERAND (arg1, 1)));
9789 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9790 tem);
9793 if (TREE_CODE (arg0) == COND_EXPR
9794 || TREE_CODE (arg0) == VEC_COND_EXPR
9795 || COMPARISON_CLASS_P (arg0))
9797 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9798 arg0, arg1,
9799 /*cond_first_p=*/1);
9800 if (tem != NULL_TREE)
9801 return tem;
9804 if (TREE_CODE (arg1) == COND_EXPR
9805 || TREE_CODE (arg1) == VEC_COND_EXPR
9806 || COMPARISON_CLASS_P (arg1))
9808 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9809 arg1, arg0,
9810 /*cond_first_p=*/0);
9811 if (tem != NULL_TREE)
9812 return tem;
9816 switch (code)
9818 case MEM_REF:
9819 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9820 if (TREE_CODE (arg0) == ADDR_EXPR
9821 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9823 tree iref = TREE_OPERAND (arg0, 0);
9824 return fold_build2 (MEM_REF, type,
9825 TREE_OPERAND (iref, 0),
9826 int_const_binop (PLUS_EXPR, arg1,
9827 TREE_OPERAND (iref, 1)));
9830 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9831 if (TREE_CODE (arg0) == ADDR_EXPR
9832 && handled_component_p (TREE_OPERAND (arg0, 0)))
9834 tree base;
9835 HOST_WIDE_INT coffset;
9836 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9837 &coffset);
9838 if (!base)
9839 return NULL_TREE;
9840 return fold_build2 (MEM_REF, type,
9841 build_fold_addr_expr (base),
9842 int_const_binop (PLUS_EXPR, arg1,
9843 size_int (coffset)));
9846 return NULL_TREE;
9848 case POINTER_PLUS_EXPR:
9849 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9850 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9851 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9852 return fold_convert_loc (loc, type,
9853 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9854 fold_convert_loc (loc, sizetype,
9855 arg1),
9856 fold_convert_loc (loc, sizetype,
9857 arg0)));
9859 /* PTR_CST +p CST -> CST1 */
9860 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9861 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9862 fold_convert_loc (loc, type, arg1));
9864 return NULL_TREE;
9866 case PLUS_EXPR:
9867 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9869 /* X + (X / CST) * -CST is X % CST. */
9870 if (TREE_CODE (arg1) == MULT_EXPR
9871 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9872 && operand_equal_p (arg0,
9873 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9875 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9876 tree cst1 = TREE_OPERAND (arg1, 1);
9877 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9878 cst1, cst0);
9879 if (sum && integer_zerop (sum))
9880 return fold_convert_loc (loc, type,
9881 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9882 TREE_TYPE (arg0), arg0,
9883 cst0));
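/* Illustrative example: "x + (x / 16) * -16" folds to "x % 16",
   because the two constants sum to zero.  */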
9887 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9888 one of them being 1. Make sure the type is not saturating and has the signedness of
9889 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9890 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9891 if ((TREE_CODE (arg0) == MULT_EXPR
9892 || TREE_CODE (arg1) == MULT_EXPR)
9893 && !TYPE_SATURATING (type)
9894 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9895 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9896 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9898 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9899 if (tem)
9900 return tem;
9903 if (! FLOAT_TYPE_P (type))
9905 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9906 with a constant, and the two constants have no bits in common,
9907 we should treat this as a BIT_IOR_EXPR since this may produce more
9908 simplifications. */
9909 if (TREE_CODE (arg0) == BIT_AND_EXPR
9910 && TREE_CODE (arg1) == BIT_AND_EXPR
9911 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9912 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9913 && wi::bit_and (TREE_OPERAND (arg0, 1),
9914 TREE_OPERAND (arg1, 1)) == 0)
9916 code = BIT_IOR_EXPR;
9917 goto bit_ior;
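/* Illustrative example: in "(x & 0xF0) + (y & 0x0F)" the two mask
   constants share no bits, so no bit position can carry and the
   addition is retried as "(x & 0xF0) | (y & 0x0F)".  */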
9920 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9921 (plus (plus (mult) (mult)) (foo)) so that we can
9922 take advantage of the factoring cases below. */
9923 if (TYPE_OVERFLOW_WRAPS (type)
9924 && (((TREE_CODE (arg0) == PLUS_EXPR
9925 || TREE_CODE (arg0) == MINUS_EXPR)
9926 && TREE_CODE (arg1) == MULT_EXPR)
9927 || ((TREE_CODE (arg1) == PLUS_EXPR
9928 || TREE_CODE (arg1) == MINUS_EXPR)
9929 && TREE_CODE (arg0) == MULT_EXPR)))
9931 tree parg0, parg1, parg, marg;
9932 enum tree_code pcode;
9934 if (TREE_CODE (arg1) == MULT_EXPR)
9935 parg = arg0, marg = arg1;
9936 else
9937 parg = arg1, marg = arg0;
9938 pcode = TREE_CODE (parg);
9939 parg0 = TREE_OPERAND (parg, 0);
9940 parg1 = TREE_OPERAND (parg, 1);
9941 STRIP_NOPS (parg0);
9942 STRIP_NOPS (parg1);
9944 if (TREE_CODE (parg0) == MULT_EXPR
9945 && TREE_CODE (parg1) != MULT_EXPR)
9946 return fold_build2_loc (loc, pcode, type,
9947 fold_build2_loc (loc, PLUS_EXPR, type,
9948 fold_convert_loc (loc, type,
9949 parg0),
9950 fold_convert_loc (loc, type,
9951 marg)),
9952 fold_convert_loc (loc, type, parg1));
9953 if (TREE_CODE (parg0) != MULT_EXPR
9954 && TREE_CODE (parg1) == MULT_EXPR)
9955 return
9956 fold_build2_loc (loc, PLUS_EXPR, type,
9957 fold_convert_loc (loc, type, parg0),
9958 fold_build2_loc (loc, pcode, type,
9959 fold_convert_loc (loc, type, marg),
9960 fold_convert_loc (loc, type,
9961 parg1)));
9964 else
9966 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9967 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9968 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9970 /* Likewise if the operands are reversed. */
9971 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9972 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9974 /* Convert X + -C into X - C. */
9975 if (TREE_CODE (arg1) == REAL_CST
9976 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9978 tem = fold_negate_const (arg1, type);
9979 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9980 return fold_build2_loc (loc, MINUS_EXPR, type,
9981 fold_convert_loc (loc, type, arg0),
9982 fold_convert_loc (loc, type, tem));
9985 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9986 to __complex__ ( x, y ). This is not the same for SNaNs or
9987 if signed zeros are involved. */
9988 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9989 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9990 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9992 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9993 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9994 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9995 bool arg0rz = false, arg0iz = false;
9996 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9997 || (arg0i && (arg0iz = real_zerop (arg0i))))
9999 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10000 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10001 if (arg0rz && arg1i && real_zerop (arg1i))
10003 tree rp = arg1r ? arg1r
10004 : build1 (REALPART_EXPR, rtype, arg1);
10005 tree ip = arg0i ? arg0i
10006 : build1 (IMAGPART_EXPR, rtype, arg0);
10007 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10009 else if (arg0iz && arg1r && real_zerop (arg1r))
10011 tree rp = arg0r ? arg0r
10012 : build1 (REALPART_EXPR, rtype, arg0);
10013 tree ip = arg1i ? arg1i
10014 : build1 (IMAGPART_EXPR, rtype, arg1);
10015 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10020 if (flag_unsafe_math_optimizations
10021 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10022 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10023 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10024 return tem;
10026 /* Convert x+x into x*2.0. */
10027 if (operand_equal_p (arg0, arg1, 0)
10028 && SCALAR_FLOAT_TYPE_P (type))
10029 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10030 build_real (type, dconst2));
10032 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10033 We associate floats only if the user has specified
10034 -fassociative-math. */
10035 if (flag_associative_math
10036 && TREE_CODE (arg1) == PLUS_EXPR
10037 && TREE_CODE (arg0) != MULT_EXPR)
10039 tree tree10 = TREE_OPERAND (arg1, 0);
10040 tree tree11 = TREE_OPERAND (arg1, 1);
10041 if (TREE_CODE (tree11) == MULT_EXPR
10042 && TREE_CODE (tree10) == MULT_EXPR)
10044 tree tree0;
10045 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10046 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10049 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10050 We associate floats only if the user has specified
10051 -fassociative-math. */
10052 if (flag_associative_math
10053 && TREE_CODE (arg0) == PLUS_EXPR
10054 && TREE_CODE (arg1) != MULT_EXPR)
10056 tree tree00 = TREE_OPERAND (arg0, 0);
10057 tree tree01 = TREE_OPERAND (arg0, 1);
10058 if (TREE_CODE (tree01) == MULT_EXPR
10059 && TREE_CODE (tree00) == MULT_EXPR)
10061 tree tree0;
10062 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10063 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10068 bit_rotate:
10069 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10070 is a rotate of A by C1 bits. */
10071 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10072 is a rotate of A by B bits. */
10074 enum tree_code code0, code1;
10075 tree rtype;
10076 code0 = TREE_CODE (arg0);
10077 code1 = TREE_CODE (arg1);
10078 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10079 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10080 && operand_equal_p (TREE_OPERAND (arg0, 0),
10081 TREE_OPERAND (arg1, 0), 0)
10082 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10083 TYPE_UNSIGNED (rtype))
10084 /* Only create rotates in complete modes. Other cases are not
10085 expanded properly. */
10086 && (element_precision (rtype)
10087 == element_precision (TYPE_MODE (rtype))))
10089 tree tree01, tree11;
10090 enum tree_code code01, code11;
10092 tree01 = TREE_OPERAND (arg0, 1);
10093 tree11 = TREE_OPERAND (arg1, 1);
10094 STRIP_NOPS (tree01);
10095 STRIP_NOPS (tree11);
10096 code01 = TREE_CODE (tree01);
10097 code11 = TREE_CODE (tree11);
10098 if (code01 == INTEGER_CST
10099 && code11 == INTEGER_CST
10100 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10101 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10103 tem = build2_loc (loc, LROTATE_EXPR,
10104 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10105 TREE_OPERAND (arg0, 0),
10106 code0 == LSHIFT_EXPR ? tree01 : tree11);
10107 return fold_convert_loc (loc, type, tem);
10109 else if (code11 == MINUS_EXPR)
10111 tree tree110, tree111;
10112 tree110 = TREE_OPERAND (tree11, 0);
10113 tree111 = TREE_OPERAND (tree11, 1);
10114 STRIP_NOPS (tree110);
10115 STRIP_NOPS (tree111);
10116 if (TREE_CODE (tree110) == INTEGER_CST
10117 && 0 == compare_tree_int (tree110,
10118 element_precision
10119 (TREE_TYPE (TREE_OPERAND
10120 (arg0, 0))))
10121 && operand_equal_p (tree01, tree111, 0))
10122 return
10123 fold_convert_loc (loc, type,
10124 build2 ((code0 == LSHIFT_EXPR
10125 ? LROTATE_EXPR
10126 : RROTATE_EXPR),
10127 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10128 TREE_OPERAND (arg0, 0), tree01));
10130 else if (code01 == MINUS_EXPR)
10132 tree tree010, tree011;
10133 tree010 = TREE_OPERAND (tree01, 0);
10134 tree011 = TREE_OPERAND (tree01, 1);
10135 STRIP_NOPS (tree010);
10136 STRIP_NOPS (tree011);
10137 if (TREE_CODE (tree010) == INTEGER_CST
10138 && 0 == compare_tree_int (tree010,
10139 element_precision
10140 (TREE_TYPE (TREE_OPERAND
10141 (arg0, 0))))
10142 && operand_equal_p (tree11, tree011, 0))
10143 return fold_convert_loc
10144 (loc, type,
10145 build2 ((code0 != LSHIFT_EXPR
10146 ? LROTATE_EXPR
10147 : RROTATE_EXPR),
10148 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10149 TREE_OPERAND (arg0, 0), tree11));
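/* Illustrative example (assuming 32-bit unsigned x):
   "(x << 3) + (x >> 29)" satisfies 3 + 29 == 32 and becomes a left
   rotate of x by 3; likewise "(x << n) + (x >> (32 - n))" matches
   the MINUS_EXPR form and becomes a left rotate of x by n.  */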
10154 associate:
10155 /* In most languages, we can't associate operations on floats through
10156 parentheses. Rather than remember where the parentheses were, we
10157 don't associate floats at all, unless the user has specified
10158 -fassociative-math.
10159 And, we need to make sure type is not saturating. */
10161 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10162 && !TYPE_SATURATING (type))
10164 tree var0, con0, lit0, minus_lit0;
10165 tree var1, con1, lit1, minus_lit1;
10166 tree atype = type;
10167 bool ok = true;
10169 /* Split both trees into variables, constants, and literals. Then
10170 associate each group together, the constants with literals,
10171 then the result with variables. This increases the chances of
10172 literals being recombined later and of generating relocatable
10173 expressions for the sum of a constant and literal. */
10174 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10175 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10176 code == MINUS_EXPR);
10178 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10179 if (code == MINUS_EXPR)
10180 code = PLUS_EXPR;
10182 /* With undefined overflow prefer doing association in a type
10183 which wraps on overflow, if that is one of the operand types. */
10184 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10185 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10187 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10188 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10189 atype = TREE_TYPE (arg0);
10190 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10191 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10192 atype = TREE_TYPE (arg1);
10193 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10196 /* With undefined overflow we can only associate constants with one
10197 variable, and constants whose association doesn't overflow. */
10198 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10199 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10201 if (var0 && var1)
10203 tree tmp0 = var0;
10204 tree tmp1 = var1;
10206 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10207 tmp0 = TREE_OPERAND (tmp0, 0);
10208 if (CONVERT_EXPR_P (tmp0)
10209 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10210 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10211 <= TYPE_PRECISION (atype)))
10212 tmp0 = TREE_OPERAND (tmp0, 0);
10213 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10214 tmp1 = TREE_OPERAND (tmp1, 0);
10215 if (CONVERT_EXPR_P (tmp1)
10216 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10217 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10218 <= TYPE_PRECISION (atype)))
10219 tmp1 = TREE_OPERAND (tmp1, 0);
10220 /* The only case we can still associate with two variables
10221 is if they are the same, modulo negation and bit-pattern
10222 preserving conversions. */
10223 if (!operand_equal_p (tmp0, tmp1, 0))
10224 ok = false;
10228 /* Only do something if we found more than two objects. Otherwise,
10229 nothing has changed and we risk infinite recursion. */
10230 if (ok
10231 && (2 < ((var0 != 0) + (var1 != 0)
10232 + (con0 != 0) + (con1 != 0)
10233 + (lit0 != 0) + (lit1 != 0)
10234 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10236 bool any_overflows = false;
10237 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10238 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10239 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10240 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10241 var0 = associate_trees (loc, var0, var1, code, atype);
10242 con0 = associate_trees (loc, con0, con1, code, atype);
10243 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10244 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10245 code, atype);
10247 /* Preserve the MINUS_EXPR if the negative part of the literal is
10248 greater than the positive part. Otherwise, the multiplicative
10249 folding code (i.e. extract_muldiv) may be fooled in case
10250 unsigned constants are subtracted, as in the following
10251 example: ((X*2 + 4) - 8U)/2. */
10252 if (minus_lit0 && lit0)
10254 if (TREE_CODE (lit0) == INTEGER_CST
10255 && TREE_CODE (minus_lit0) == INTEGER_CST
10256 && tree_int_cst_lt (lit0, minus_lit0))
10258 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10259 MINUS_EXPR, atype);
10260 lit0 = 0;
10262 else
10264 lit0 = associate_trees (loc, lit0, minus_lit0,
10265 MINUS_EXPR, atype);
10266 minus_lit0 = 0;
10270 /* Don't introduce overflows through reassociation. */
10271 if (!any_overflows
10272 && ((lit0 && TREE_OVERFLOW (lit0))
10273 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10274 return NULL_TREE;
10276 if (minus_lit0)
10278 if (con0 == 0)
10279 return
10280 fold_convert_loc (loc, type,
10281 associate_trees (loc, var0, minus_lit0,
10282 MINUS_EXPR, atype));
10283 else
10285 con0 = associate_trees (loc, con0, minus_lit0,
10286 MINUS_EXPR, atype);
10287 return
10288 fold_convert_loc (loc, type,
10289 associate_trees (loc, var0, con0,
10290 PLUS_EXPR, atype));
10294 con0 = associate_trees (loc, con0, lit0, code, atype);
10295 return
10296 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10297 code, atype));
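/* Illustrative example (unsigned x, y, so overflow wraps and two
   distinct variables may be associated): "(x + 3) + (y + 5)" is
   split into variables x, y and literals 3, 5, then rebuilt as
   "(x + y) + 8" with the literals combined.  */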
10301 return NULL_TREE;
10303 case MINUS_EXPR:
10304 /* Pointer simplifications for subtraction, simple reassociations. */
10305 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10307 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10308 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10309 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10311 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10312 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10313 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10314 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10315 return fold_build2_loc (loc, PLUS_EXPR, type,
10316 fold_build2_loc (loc, MINUS_EXPR, type,
10317 arg00, arg10),
10318 fold_build2_loc (loc, MINUS_EXPR, type,
10319 arg01, arg11));
10321 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10322 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10324 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10325 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10326 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10327 fold_convert_loc (loc, type, arg1));
10328 if (tmp)
10329 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10331 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10332 simplifies. */
10333 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10335 tree arg10 = fold_convert_loc (loc, type,
10336 TREE_OPERAND (arg1, 0));
10337 tree arg11 = fold_convert_loc (loc, type,
10338 TREE_OPERAND (arg1, 1));
10339 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10340 fold_convert_loc (loc, type, arg0),
10341 arg10);
10342 if (tmp)
10343 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10346 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10347 if (TREE_CODE (arg0) == NEGATE_EXPR
10348 && negate_expr_p (arg1)
10349 && reorder_operands_p (arg0, arg1))
10350 return fold_build2_loc (loc, MINUS_EXPR, type,
10351 fold_convert_loc (loc, type,
10352 negate_expr (arg1)),
10353 fold_convert_loc (loc, type,
10354 TREE_OPERAND (arg0, 0)));
10355 /* Convert -A - 1 to ~A. */
10356 if (TREE_CODE (arg0) == NEGATE_EXPR
10357 && integer_each_onep (arg1)
10358 && !TYPE_OVERFLOW_TRAPS (type))
10359 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10360 fold_convert_loc (loc, type,
10361 TREE_OPERAND (arg0, 0)));
10363 /* Convert -1 - A to ~A. */
10364 if (TREE_CODE (type) != COMPLEX_TYPE
10365 && integer_all_onesp (arg0))
10366 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10369 /* X - (X / Y) * Y is X % Y. */
10370 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10371 && TREE_CODE (arg1) == MULT_EXPR
10372 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10373 && operand_equal_p (arg0,
10374 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10375 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10376 TREE_OPERAND (arg1, 1), 0))
10377 return
10378 fold_convert_loc (loc, type,
10379 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10380 arg0, TREE_OPERAND (arg1, 1)));
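/* Illustrative example: "x - (x / 8) * 8" folds to "x % 8", for
   vector as well as scalar integer types.  */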
10382 if (! FLOAT_TYPE_P (type))
10384 if (integer_zerop (arg0))
10385 return negate_expr (fold_convert_loc (loc, type, arg1));
10387 /* Fold A - (A & B) into ~B & A. */
10388 if (!TREE_SIDE_EFFECTS (arg0)
10389 && TREE_CODE (arg1) == BIT_AND_EXPR)
10391 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10393 tree arg10 = fold_convert_loc (loc, type,
10394 TREE_OPERAND (arg1, 0));
10395 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10396 fold_build1_loc (loc, BIT_NOT_EXPR,
10397 type, arg10),
10398 fold_convert_loc (loc, type, arg0));
10400 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10402 tree arg11 = fold_convert_loc (loc,
10403 type, TREE_OPERAND (arg1, 1));
10404 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10405 fold_build1_loc (loc, BIT_NOT_EXPR,
10406 type, arg11),
10407 fold_convert_loc (loc, type, arg0));
10411 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10412 any power of 2 minus 1. */
10413 if (TREE_CODE (arg0) == BIT_AND_EXPR
10414 && TREE_CODE (arg1) == BIT_AND_EXPR
10415 && operand_equal_p (TREE_OPERAND (arg0, 0),
10416 TREE_OPERAND (arg1, 0), 0))
10418 tree mask0 = TREE_OPERAND (arg0, 1);
10419 tree mask1 = TREE_OPERAND (arg1, 1);
10420 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10422 if (operand_equal_p (tem, mask1, 0))
10424 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10425 TREE_OPERAND (arg0, 0), mask1);
10426 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
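/* Illustrative example with B == 7: "(a & ~7) - (a & 7)" folds to
   "(a ^ 7) - 7"; both compute a - 2 * (a & 7).  */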
10431 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10432 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10433 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10435 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10436 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10437 (-ARG1 + ARG0) reduces to -ARG1. */
10438 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10439 return negate_expr (fold_convert_loc (loc, type, arg1));
10441 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10442 __complex__ ( x, -y ). This is not the same for SNaNs or if
10443 signed zeros are involved. */
10444 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10445 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10446 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10448 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10449 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10450 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10451 bool arg0rz = false, arg0iz = false;
10452 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10453 || (arg0i && (arg0iz = real_zerop (arg0i))))
10455 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10456 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10457 if (arg0rz && arg1i && real_zerop (arg1i))
10459 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10460 arg1r ? arg1r
10461 : build1 (REALPART_EXPR, rtype, arg1));
10462 tree ip = arg0i ? arg0i
10463 : build1 (IMAGPART_EXPR, rtype, arg0);
10464 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10466 else if (arg0iz && arg1r && real_zerop (arg1r))
10468 tree rp = arg0r ? arg0r
10469 : build1 (REALPART_EXPR, rtype, arg0);
10470 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10471 arg1i ? arg1i
10472 : build1 (IMAGPART_EXPR, rtype, arg1));
10473 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10478 /* A - B -> A + (-B) if B is easily negatable. */
10479 if (negate_expr_p (arg1)
10480 && !TYPE_OVERFLOW_SANITIZED (type)
10481 && ((FLOAT_TYPE_P (type)
10482 /* Avoid this transformation if B is a positive REAL_CST. */
10483 && (TREE_CODE (arg1) != REAL_CST
10484 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10485 || INTEGRAL_TYPE_P (type)))
10486 return fold_build2_loc (loc, PLUS_EXPR, type,
10487 fold_convert_loc (loc, type, arg0),
10488 fold_convert_loc (loc, type,
10489 negate_expr (arg1)));
10491 /* Try folding difference of addresses. */
10493 HOST_WIDE_INT diff;
10495 if ((TREE_CODE (arg0) == ADDR_EXPR
10496 || TREE_CODE (arg1) == ADDR_EXPR)
10497 && ptr_difference_const (arg0, arg1, &diff))
10498 return build_int_cst_type (type, diff);
10501 /* Fold &a[i] - &a[j] to i-j. */
10502 if (TREE_CODE (arg0) == ADDR_EXPR
10503 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10504 && TREE_CODE (arg1) == ADDR_EXPR
10505 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10507 tree tem = fold_addr_of_array_ref_difference (loc, type,
10508 TREE_OPERAND (arg0, 0),
10509 TREE_OPERAND (arg1, 0));
10510 if (tem)
10511 return tem;
10514 if (FLOAT_TYPE_P (type)
10515 && flag_unsafe_math_optimizations
10516 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10517 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10518 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10519 return tem;
10521 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10522 one of them being 1. Make sure the type is not saturating and has the signedness of
10523 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10524 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10525 if ((TREE_CODE (arg0) == MULT_EXPR
10526 || TREE_CODE (arg1) == MULT_EXPR)
10527 && !TYPE_SATURATING (type)
10528 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10529 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10530 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10532 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10533 if (tem)
10534 return tem;
10537 goto associate;
10539 case MULT_EXPR:
10540 /* (-A) * (-B) -> A * B */
10541 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10542 return fold_build2_loc (loc, MULT_EXPR, type,
10543 fold_convert_loc (loc, type,
10544 TREE_OPERAND (arg0, 0)),
10545 fold_convert_loc (loc, type,
10546 negate_expr (arg1)));
10547 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10548 return fold_build2_loc (loc, MULT_EXPR, type,
10549 fold_convert_loc (loc, type,
10550 negate_expr (arg0)),
10551 fold_convert_loc (loc, type,
10552 TREE_OPERAND (arg1, 0)));
10554 if (! FLOAT_TYPE_P (type))
10556 /* Transform x * -1 into -x. Make sure to do the negation
10557 on the original operand with conversions not stripped
10558 because we can only strip non-sign-changing conversions. */
10559 if (integer_minus_onep (arg1))
10560 return fold_convert_loc (loc, type, negate_expr (op0));
10561 /* Transform x * -C into -x * C if x is easily negatable. */
10562 if (TREE_CODE (arg1) == INTEGER_CST
10563 && tree_int_cst_sgn (arg1) == -1
10564 && negate_expr_p (arg0)
10565 && (tem = negate_expr (arg1)) != arg1
10566 && !TREE_OVERFLOW (tem))
10567 return fold_build2_loc (loc, MULT_EXPR, type,
10568 fold_convert_loc (loc, type,
10569 negate_expr (arg0)),
10570 tem);
10572 /* (a * (1 << b)) is (a << b) */
10573 if (TREE_CODE (arg1) == LSHIFT_EXPR
10574 && integer_onep (TREE_OPERAND (arg1, 0)))
10575 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10576 TREE_OPERAND (arg1, 1));
10577 if (TREE_CODE (arg0) == LSHIFT_EXPR
10578 && integer_onep (TREE_OPERAND (arg0, 0)))
10579 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10580 TREE_OPERAND (arg0, 1));
10582 /* (A + A) * C -> A * 2 * C */
10583 if (TREE_CODE (arg0) == PLUS_EXPR
10584 && TREE_CODE (arg1) == INTEGER_CST
10585 && operand_equal_p (TREE_OPERAND (arg0, 0),
10586 TREE_OPERAND (arg0, 1), 0))
10587 return fold_build2_loc (loc, MULT_EXPR, type,
10588 omit_one_operand_loc (loc, type,
10589 TREE_OPERAND (arg0, 0),
10590 TREE_OPERAND (arg0, 1)),
10591 fold_build2_loc (loc, MULT_EXPR, type,
10592 build_int_cst (type, 2) , arg1));
10594 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10595 sign-changing only. */
10596 if (TREE_CODE (arg1) == INTEGER_CST
10597 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10598 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10599 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10601 strict_overflow_p = false;
10602 if (TREE_CODE (arg1) == INTEGER_CST
10603 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10604 &strict_overflow_p)))
10606 if (strict_overflow_p)
10607 fold_overflow_warning (("assuming signed overflow does not "
10608 "occur when simplifying "
10609 "multiplication"),
10610 WARN_STRICT_OVERFLOW_MISC);
10611 return fold_convert_loc (loc, type, tem);
10614 /* Optimize z * conj(z) for integer complex numbers. */
10615 if (TREE_CODE (arg0) == CONJ_EXPR
10616 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10617 return fold_mult_zconjz (loc, type, arg1);
10618 if (TREE_CODE (arg1) == CONJ_EXPR
10619 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10620 return fold_mult_zconjz (loc, type, arg0);
10622 else
10624 /* Maybe fold x * 0 to 0. The expressions aren't the same
10625 when x is NaN, since x * 0 is also NaN. Nor are they the
10626 same in modes with signed zeros, since multiplying a
10627 negative value by 0 gives -0, not +0. */
10628 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10629 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10630 && real_zerop (arg1))
10631 return omit_one_operand_loc (loc, type, arg1, arg0);
10632 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
10633 Likewise for complex arithmetic with signed zeros. */
10634 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10635 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10636 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10637 && real_onep (arg1))
10638 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10640 /* Transform x * -1.0 into -x. */
10641 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10642 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10643 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10644 && real_minus_onep (arg1))
10645 return fold_convert_loc (loc, type, negate_expr (arg0));
10647 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10648 the result for floating-point types due to rounding, so it is applied
10649 only if -fassociative-math was specified. */
10650 if (flag_associative_math
10651 && TREE_CODE (arg0) == RDIV_EXPR
10652 && TREE_CODE (arg1) == REAL_CST
10653 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10655 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10656 arg1);
10657 if (tem)
10658 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10659 TREE_OPERAND (arg0, 1));
10662 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10663 if (operand_equal_p (arg0, arg1, 0))
10665 tree tem = fold_strip_sign_ops (arg0);
10666 if (tem != NULL_TREE)
10668 tem = fold_convert_loc (loc, type, tem);
10669 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10673 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10674 This is not the same for NaNs or if signed zeros are
10675 involved. */
10676 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10677 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10678 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10679 && TREE_CODE (arg1) == COMPLEX_CST
10680 && real_zerop (TREE_REALPART (arg1)))
10682 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10683 if (real_onep (TREE_IMAGPART (arg1)))
10684 return
10685 fold_build2_loc (loc, COMPLEX_EXPR, type,
10686 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10687 rtype, arg0)),
10688 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10689 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10690 return
10691 fold_build2_loc (loc, COMPLEX_EXPR, type,
10692 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10693 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10694 rtype, arg0)));
10697 /* Optimize z * conj(z) for floating point complex numbers.
10698 Guarded by flag_unsafe_math_optimizations as non-finite
10699 imaginary components don't produce scalar results. */
10700 if (flag_unsafe_math_optimizations
10701 && TREE_CODE (arg0) == CONJ_EXPR
10702 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10703 return fold_mult_zconjz (loc, type, arg1);
10704 if (flag_unsafe_math_optimizations
10705 && TREE_CODE (arg1) == CONJ_EXPR
10706 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10707 return fold_mult_zconjz (loc, type, arg0);
10709 if (flag_unsafe_math_optimizations)
10711 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10712 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10714 /* Optimizations of root(...)*root(...). */
10715 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10717 tree rootfn, arg;
10718 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10719 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10721 /* Optimize sqrt(x)*sqrt(x) as x. */
10722 if (BUILTIN_SQRT_P (fcode0)
10723 && operand_equal_p (arg00, arg10, 0)
10724 && ! HONOR_SNANS (TYPE_MODE (type)))
10725 return arg00;
10727 /* Optimize root(x)*root(y) as root(x*y). */
10728 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10729 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10730 return build_call_expr_loc (loc, rootfn, 1, arg);
10733 /* Optimize expN(x)*expN(y) as expN(x+y). */
10734 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10736 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10737 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10738 CALL_EXPR_ARG (arg0, 0),
10739 CALL_EXPR_ARG (arg1, 0));
10740 return build_call_expr_loc (loc, expfn, 1, arg);
10743 /* Optimizations of pow(...)*pow(...). */
10744 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10745 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10746 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10748 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10749 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10750 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10751 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10753 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10754 if (operand_equal_p (arg01, arg11, 0))
10756 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10757 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10758 arg00, arg10);
10759 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10762 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10763 if (operand_equal_p (arg00, arg10, 0))
10765 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10766 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10767 arg01, arg11);
10768 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10772 /* Optimize tan(x)*cos(x) as sin(x). */
10773 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10774 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10775 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10776 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10777 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10778 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10779 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10780 CALL_EXPR_ARG (arg1, 0), 0))
10782 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10784 if (sinfn != NULL_TREE)
10785 return build_call_expr_loc (loc, sinfn, 1,
10786 CALL_EXPR_ARG (arg0, 0));
10789 /* Optimize x*pow(x,c) as pow(x,c+1). */
10790 if (fcode1 == BUILT_IN_POW
10791 || fcode1 == BUILT_IN_POWF
10792 || fcode1 == BUILT_IN_POWL)
10794 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10795 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10796 if (TREE_CODE (arg11) == REAL_CST
10797 && !TREE_OVERFLOW (arg11)
10798 && operand_equal_p (arg0, arg10, 0))
10800 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10801 REAL_VALUE_TYPE c;
10802 tree arg;
10804 c = TREE_REAL_CST (arg11);
10805 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10806 arg = build_real (type, c);
10807 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
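/* For instance, x * pow (x, 3.0) folds to pow (x, 4.0); the mirrored
   fold just below handles pow (x, 3.0) * x the same way.  */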
10811 /* Optimize pow(x,c)*x as pow(x,c+1). */
10812 if (fcode0 == BUILT_IN_POW
10813 || fcode0 == BUILT_IN_POWF
10814 || fcode0 == BUILT_IN_POWL)
10816 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10817 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10818 if (TREE_CODE (arg01) == REAL_CST
10819 && !TREE_OVERFLOW (arg01)
10820 && operand_equal_p (arg1, arg00, 0))
10822 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10823 REAL_VALUE_TYPE c;
10824 tree arg;
10826 c = TREE_REAL_CST (arg01);
10827 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10828 arg = build_real (type, c);
10829 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10833 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10834 if (!in_gimple_form
10835 && optimize
10836 && operand_equal_p (arg0, arg1, 0))
10838 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10840 if (powfn)
10842 tree arg = build_real (type, dconst2);
10843 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10848 goto associate;
10850 case BIT_IOR_EXPR:
10851 bit_ior:
10852 /* ~X | X is -1. */
10853 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10854 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10856 t1 = build_zero_cst (type);
10857 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10858 return omit_one_operand_loc (loc, type, t1, arg1);
10861 /* X | ~X is -1. */
10862 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10863 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10865 t1 = build_zero_cst (type);
10866 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10867 return omit_one_operand_loc (loc, type, t1, arg0);
10870 /* Canonicalize (X & C1) | C2. */
10871 if (TREE_CODE (arg0) == BIT_AND_EXPR
10872 && TREE_CODE (arg1) == INTEGER_CST
10873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10875 int width = TYPE_PRECISION (type), w;
10876 wide_int c1 = TREE_OPERAND (arg0, 1);
10877 wide_int c2 = arg1;
10879 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10880 if ((c1 & c2) == c1)
10881 return omit_one_operand_loc (loc, type, arg1,
10882 TREE_OPERAND (arg0, 0));
10884 wide_int msk = wi::mask (width, false,
10885 TYPE_PRECISION (TREE_TYPE (arg1)));
10887 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10888 if (msk.and_not (c1 | c2) == 0)
10889 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10890 TREE_OPERAND (arg0, 0), arg1);
10892 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10893 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10894 mode which allows further optimizations. */
10895 c1 &= msk;
10896 c2 &= msk;
10897 wide_int c3 = c1.and_not (c2);
10898 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10900 wide_int mask = wi::mask (w, false,
10901 TYPE_PRECISION (type));
10902 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10904 c3 = mask;
10905 break;
10909 if (c3 != c1)
10910 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10911 fold_build2_loc (loc, BIT_AND_EXPR, type,
10912 TREE_OPERAND (arg0, 0),
10913 wide_int_to_tree (type,
10914 c3)),
10915 arg1);
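/* Worked example: (X & 0x3f) | 0x0f becomes (X & 0x30) | 0x0f, since
   the low four bits of the AND mask are forced on by the OR anyway.
   By contrast (X & 0xff) | 0x0f is left alone: 0xff is a byte-mode
   mask, which the loop above deliberately preserves.  */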
10918 /* (X & ~Y) | (~X & Y) is X ^ Y */
10919 if (TREE_CODE (arg0) == BIT_AND_EXPR
10920 && TREE_CODE (arg1) == BIT_AND_EXPR)
10922 tree a0, a1, l0, l1, n0, n1;
10924 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10925 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10927 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10928 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10930 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10931 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10933 if ((operand_equal_p (n0, a0, 0)
10934 && operand_equal_p (n1, a1, 0))
10935 || (operand_equal_p (n0, a1, 0)
10936 && operand_equal_p (n1, a0, 0)))
10937 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10940 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10941 if (t1 != NULL_TREE)
10942 return t1;
10944 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10946 This results in more efficient code for machines without a NAND
10947 instruction. Combine will canonicalize to the first form
10948 which will allow use of NAND instructions provided by the
10949 backend if they exist. */
10950 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10951 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10953 return
10954 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10955 build2 (BIT_AND_EXPR, type,
10956 fold_convert_loc (loc, type,
10957 TREE_OPERAND (arg0, 0)),
10958 fold_convert_loc (loc, type,
10959 TREE_OPERAND (arg1, 0))));
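/* For instance, ~a | ~b becomes ~(a & b) by De Morgan, a single NAND
   on targets that provide one.  */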
10962 /* See if this can be simplified into a rotate first. If that
10963 is unsuccessful continue in the association code. */
10964 goto bit_rotate;
10966 case BIT_XOR_EXPR:
10967 /* ~X ^ X is -1. */
10968 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10969 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10971 t1 = build_zero_cst (type);
10972 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10973 return omit_one_operand_loc (loc, type, t1, arg1);
10976 /* X ^ ~X is -1. */
10977 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10978 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10980 t1 = build_zero_cst (type);
10981 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10982 return omit_one_operand_loc (loc, type, t1, arg0);
10985 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10986 with a constant, and the two constants have no bits in common,
10987 we should treat this as a BIT_IOR_EXPR since this may produce more
10988 simplifications. */
10989 if (TREE_CODE (arg0) == BIT_AND_EXPR
10990 && TREE_CODE (arg1) == BIT_AND_EXPR
10991 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10992 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10993 && wi::bit_and (TREE_OPERAND (arg0, 1),
10994 TREE_OPERAND (arg1, 1)) == 0)
10996 code = BIT_IOR_EXPR;
10997 goto bit_ior;
11000 /* (X | Y) ^ X -> Y & ~X.  */
11001 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11002 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11004 tree t2 = TREE_OPERAND (arg0, 1);
11005 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11006 arg1);
11007 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11008 fold_convert_loc (loc, type, t2),
11009 fold_convert_loc (loc, type, t1));
11010 return t1;
11013 /* (Y | X) ^ X -> Y & ~X.  */
11014 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11015 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11017 tree t2 = TREE_OPERAND (arg0, 0);
11018 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11019 arg1);
11020 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11021 fold_convert_loc (loc, type, t2),
11022 fold_convert_loc (loc, type, t1));
11023 return t1;
11026 /* X ^ (X | Y) -> Y & ~X.  */
11027 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11028 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11030 tree t2 = TREE_OPERAND (arg1, 1);
11031 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11032 arg0);
11033 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11034 fold_convert_loc (loc, type, t2),
11035 fold_convert_loc (loc, type, t1));
11036 return t1;
11039 /* X ^ (Y | X) -> Y & ~X.  */
11040 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11041 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11043 tree t2 = TREE_OPERAND (arg1, 0);
11044 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11045 arg0);
11046 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11047 fold_convert_loc (loc, type, t2),
11048 fold_convert_loc (loc, type, t1));
11049 return t1;
11052 /* Convert ~X ^ ~Y to X ^ Y. */
11053 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11054 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11055 return fold_build2_loc (loc, code, type,
11056 fold_convert_loc (loc, type,
11057 TREE_OPERAND (arg0, 0)),
11058 fold_convert_loc (loc, type,
11059 TREE_OPERAND (arg1, 0)));
11061 /* Convert ~X ^ C to X ^ ~C. */
11062 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11063 && TREE_CODE (arg1) == INTEGER_CST)
11064 return fold_build2_loc (loc, code, type,
11065 fold_convert_loc (loc, type,
11066 TREE_OPERAND (arg0, 0)),
11067 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11069 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11070 if (TREE_CODE (arg0) == BIT_AND_EXPR
11071 && INTEGRAL_TYPE_P (type)
11072 && integer_onep (TREE_OPERAND (arg0, 1))
11073 && integer_onep (arg1))
11074 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11075 build_zero_cst (TREE_TYPE (arg0)));
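/* Both sides here are 0/1 values: (x & 1) ^ 1 is 1 exactly when the
   low bit of x is clear, which is what (x & 1) == 0 tests.  */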
11077 /* Fold (X & Y) ^ Y as ~X & Y. */
11078 if (TREE_CODE (arg0) == BIT_AND_EXPR
11079 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11081 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11082 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11083 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11084 fold_convert_loc (loc, type, arg1));
11086 /* Fold (X & Y) ^ X as ~Y & X. */
11087 if (TREE_CODE (arg0) == BIT_AND_EXPR
11088 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11089 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11091 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11092 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11093 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11094 fold_convert_loc (loc, type, arg1));
11096 /* Fold X ^ (X & Y) as X & ~Y. */
11097 if (TREE_CODE (arg1) == BIT_AND_EXPR
11098 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11100 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11101 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11102 fold_convert_loc (loc, type, arg0),
11103 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11105 /* Fold X ^ (Y & X) as ~Y & X. */
11106 if (TREE_CODE (arg1) == BIT_AND_EXPR
11107 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11108 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11110 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11111 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11112 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11113 fold_convert_loc (loc, type, arg0));
11116 /* See if this can be simplified into a rotate first. If that
11117 is unsuccessful continue in the association code. */
11118 goto bit_rotate;
11120 case BIT_AND_EXPR:
11121 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11122 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11123 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11124 || (TREE_CODE (arg0) == EQ_EXPR
11125 && integer_zerop (TREE_OPERAND (arg0, 1))))
11126 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11127 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11129 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11130 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11131 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11132 || (TREE_CODE (arg1) == EQ_EXPR
11133 && integer_zerop (TREE_OPERAND (arg1, 1))))
11134 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11135 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11137 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11138 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11139 && INTEGRAL_TYPE_P (type)
11140 && integer_onep (TREE_OPERAND (arg0, 1))
11141 && integer_onep (arg1))
11143 tree tem2;
11144 tem = TREE_OPERAND (arg0, 0);
11145 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11146 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11147 tem, tem2);
11148 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11149 build_zero_cst (TREE_TYPE (tem)));
11151 /* Fold ~X & 1 as (X & 1) == 0. */
11152 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11153 && INTEGRAL_TYPE_P (type)
11154 && integer_onep (arg1))
11156 tree tem2;
11157 tem = TREE_OPERAND (arg0, 0);
11158 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11159 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11160 tem, tem2);
11161 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11162 build_zero_cst (TREE_TYPE (tem)));
11164 /* Fold !X & 1 as X == 0. */
11165 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11166 && integer_onep (arg1))
11168 tem = TREE_OPERAND (arg0, 0);
11169 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11170 build_zero_cst (TREE_TYPE (tem)));
11173 /* Fold (X ^ Y) & Y as ~X & Y. */
11174 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11175 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11177 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11178 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11179 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11180 fold_convert_loc (loc, type, arg1));
11182 /* Fold (X ^ Y) & X as ~Y & X. */
11183 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11184 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11185 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11187 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11188 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11189 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11190 fold_convert_loc (loc, type, arg1));
11192 /* Fold X & (X ^ Y) as X & ~Y. */
11193 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11194 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11196 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11197 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11198 fold_convert_loc (loc, type, arg0),
11199 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11201 /* Fold X & (Y ^ X) as ~Y & X. */
11202 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11203 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11204 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11206 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11207 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11208 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11209 fold_convert_loc (loc, type, arg0));
11212 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11213 multiple of 1 << CST. */
11214 if (TREE_CODE (arg1) == INTEGER_CST)
11216 wide_int cst1 = arg1;
11217 wide_int ncst1 = -cst1;
11218 if ((cst1 & ncst1) == ncst1
11219 && multiple_of_p (type, arg0,
11220 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11221 return fold_convert_loc (loc, type, arg0);
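/* For instance, (x * 12) & -4 folds to x * 12: -4 == -(1 << 2), and
   x * 12 is a multiple of 4, so its two low bits are already zero and
   the AND is a no-op.  */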
11224 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11225 bits from CST2. */
11226 if (TREE_CODE (arg1) == INTEGER_CST
11227 && TREE_CODE (arg0) == MULT_EXPR
11228 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11230 wide_int warg1 = arg1;
11231 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11233 if (masked == 0)
11234 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11235 arg0, arg1);
11236 else if (masked != warg1)
11238 /* Avoid the transform if arg1 is a mask of some
11239 mode which allows further optimizations. */
11240 int pop = wi::popcount (warg1);
11241 if (!(pop >= BITS_PER_UNIT
11242 && exact_log2 (pop) != -1
11243 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11244 return fold_build2_loc (loc, code, type, op0,
11245 wide_int_to_tree (type, masked));
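/* For instance, (x * 4) & 3 folds to 0 because the product has its
   two low bits clear, while (x * 4) & 0xb drops the known-zero bits
   and becomes (x * 4) & 8.  */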
11249 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11250 ((A & N) + B) & M -> (A + B) & M
11251 Similarly if (N & M) == 0,
11252 ((A | N) + B) & M -> (A + B) & M
11253 and for - instead of + (or unary - instead of +)
11254 and/or ^ instead of |.
11255 If B is constant and (B & M) == 0, fold into A & M. */
11256 if (TREE_CODE (arg1) == INTEGER_CST)
11258 wide_int cst1 = arg1;
11259 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11260 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11261 && (TREE_CODE (arg0) == PLUS_EXPR
11262 || TREE_CODE (arg0) == MINUS_EXPR
11263 || TREE_CODE (arg0) == NEGATE_EXPR)
11264 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11265 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11267 tree pmop[2];
11268 int which = 0;
11269 wide_int cst0;
11271 /* Now we know that arg0 is (C + D) or (C - D) or
11272 -C and arg1 (M) == (1LL << cst) - 1.
11273 Store C into PMOP[0] and D into PMOP[1]. */
11274 pmop[0] = TREE_OPERAND (arg0, 0);
11275 pmop[1] = NULL;
11276 if (TREE_CODE (arg0) != NEGATE_EXPR)
11278 pmop[1] = TREE_OPERAND (arg0, 1);
11279 which = 1;
11282 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11283 which = -1;
11285 for (; which >= 0; which--)
11286 switch (TREE_CODE (pmop[which]))
11288 case BIT_AND_EXPR:
11289 case BIT_IOR_EXPR:
11290 case BIT_XOR_EXPR:
11291 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11292 != INTEGER_CST)
11293 break;
11294 cst0 = TREE_OPERAND (pmop[which], 1);
11295 cst0 &= cst1;
11296 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11298 if (cst0 != cst1)
11299 break;
11301 else if (cst0 != 0)
11302 break;
11303 /* If C or D is of the form (A & N) where
11304 (N & M) == M, or of the form (A | N) or
11305 (A ^ N) where (N & M) == 0, replace it with A. */
11306 pmop[which] = TREE_OPERAND (pmop[which], 0);
11307 break;
11308 case INTEGER_CST:
11309 /* If C or D is a N where (N & M) == 0, it can be
11310 omitted (assumed 0). */
11311 if ((TREE_CODE (arg0) == PLUS_EXPR
11312 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11313 && (cst1 & pmop[which]) == 0)
11314 pmop[which] = NULL;
11315 break;
11316 default:
11317 break;
11320 /* Only build anything new if we optimized one or both arguments
11321 above. */
11322 if (pmop[0] != TREE_OPERAND (arg0, 0)
11323 || (TREE_CODE (arg0) != NEGATE_EXPR
11324 && pmop[1] != TREE_OPERAND (arg0, 1)))
11326 tree utype = TREE_TYPE (arg0);
11327 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11329 /* Perform the operations in a type that has defined
11330 overflow behavior. */
11331 utype = unsigned_type_for (TREE_TYPE (arg0));
11332 if (pmop[0] != NULL)
11333 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11334 if (pmop[1] != NULL)
11335 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11338 if (TREE_CODE (arg0) == NEGATE_EXPR)
11339 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11340 else if (TREE_CODE (arg0) == PLUS_EXPR)
11342 if (pmop[0] != NULL && pmop[1] != NULL)
11343 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11344 pmop[0], pmop[1]);
11345 else if (pmop[0] != NULL)
11346 tem = pmop[0];
11347 else if (pmop[1] != NULL)
11348 tem = pmop[1];
11349 else
11350 return build_int_cst (type, 0);
11352 else if (pmop[0] == NULL)
11353 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11354 else
11355 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11356 pmop[0], pmop[1]);
11357 /* TEM is now the new binary +, - or unary - replacement. */
11358 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11359 fold_convert_loc (loc, utype, arg1));
11360 return fold_convert_loc (loc, type, tem);
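/* Worked example with M == 0xff: ((a & 0x1ff) + b) & 0xff becomes
   (a + b) & 0xff since 0x1ff covers all of M's bits, and
   ((a | 0x100) + b) & 0xff does too since 0x100 & 0xff == 0.  The
   arithmetic is rebuilt in an unsigned type when overflow is not
   known to wrap.  */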
11365 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11366 if (t1 != NULL_TREE)
11367 return t1;
11368 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11369 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11370 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11372 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11374 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11375 if (mask == -1)
11376 return
11377 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
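/* For instance, with c an unsigned char, (int) c & 0377 folds to
   (int) c: the zero-extended value already fits in the low eight
   bits, so the mask is a no-op.  */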
11380 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11382 This results in more efficient code for machines without a NOR
11383 instruction. Combine will canonicalize to the first form
11384 which will allow use of NOR instructions provided by the
11385 backend if they exist. */
11386 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11387 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11389 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11390 build2 (BIT_IOR_EXPR, type,
11391 fold_convert_loc (loc, type,
11392 TREE_OPERAND (arg0, 0)),
11393 fold_convert_loc (loc, type,
11394 TREE_OPERAND (arg1, 0))));
11397 /* If arg0 is derived from the address of an object or function, we may
11398 be able to fold this expression using the object or function's
11399 alignment. */
11400 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11402 unsigned HOST_WIDE_INT modulus, residue;
11403 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11405 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11406 integer_onep (arg1));
11408 /* This works because modulus is a power of 2. If this weren't the
11409 case, we'd have to replace it by its greatest power-of-2
11410 divisor: modulus & -modulus. */
11411 if (low < modulus)
11412 return build_int_cst (type, residue & low);
11415 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11416 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11417 if the new mask might be further optimized. */
11418 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11419 || TREE_CODE (arg0) == RSHIFT_EXPR)
11420 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11421 && TREE_CODE (arg1) == INTEGER_CST
11422 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11423 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11424 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11425 < TYPE_PRECISION (TREE_TYPE (arg0))))
11427 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11428 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11429 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11430 tree shift_type = TREE_TYPE (arg0);
11432 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11433 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11434 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11435 && TYPE_PRECISION (TREE_TYPE (arg0))
11436 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11438 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11439 tree arg00 = TREE_OPERAND (arg0, 0);
11440 /* See if more bits can be proven as zero because of
11441 zero extension. */
11442 if (TREE_CODE (arg00) == NOP_EXPR
11443 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11445 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11446 if (TYPE_PRECISION (inner_type)
11447 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11448 && TYPE_PRECISION (inner_type) < prec)
11450 prec = TYPE_PRECISION (inner_type);
11451 /* See if we can shorten the right shift. */
11452 if (shiftc < prec)
11453 shift_type = inner_type;
11454 /* Otherwise X >> C1 is all zeros, so we'll optimize
11455 it into (X, 0) later on by making sure zerobits
11456 is all ones. */
11459 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11460 if (shiftc < prec)
11462 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11463 zerobits <<= prec - shiftc;
11465 /* For an arithmetic shift, if the sign bit could be set, zerobits
11466 can actually contain sign bits, so no transformation is
11467 possible unless MASK masks them all away. In that
11468 case the shift needs to be converted into a logical shift. */
11469 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11470 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11472 if ((mask & zerobits) == 0)
11473 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11474 else
11475 zerobits = 0;
11479 /* ((X << 16) & 0xff00) is (X, 0). */
11480 if ((mask & zerobits) == mask)
11481 return omit_one_operand_loc (loc, type,
11482 build_int_cst (type, 0), arg0);
11484 newmask = mask | zerobits;
11485 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11487 /* Only do the transformation if NEWMASK is some integer
11488 mode's mask. */
11489 for (prec = BITS_PER_UNIT;
11490 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11491 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11492 break;
11493 if (prec < HOST_BITS_PER_WIDE_INT
11494 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11496 tree newmaskt;
11498 if (shift_type != TREE_TYPE (arg0))
11500 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11501 fold_convert_loc (loc, shift_type,
11502 TREE_OPERAND (arg0, 0)),
11503 TREE_OPERAND (arg0, 1));
11504 tem = fold_convert_loc (loc, type, tem);
11506 else
11507 tem = op0;
11508 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11509 if (!tree_int_cst_equal (newmaskt, arg1))
11510 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
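/* For instance, on a 32-bit type (x << 4) & 0xf0 becomes
   (x << 4) & 0xff, because the low four bits are known zero and
   0xff is a byte-mode mask that later passes can often drop.  */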
11515 goto associate;
11517 case RDIV_EXPR:
11518 /* Don't touch a floating-point divide by zero unless the mode
11519 of the constant can represent infinity. */
11520 if (TREE_CODE (arg1) == REAL_CST
11521 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11522 && real_zerop (arg1))
11523 return NULL_TREE;
11525 /* Optimize A / A to 1.0 if we don't care about
11526 NaNs or Infinities. Skip the transformation
11527 for non-real operands. */
11528 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11529 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11530 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11531 && operand_equal_p (arg0, arg1, 0))
11533 tree r = build_real (TREE_TYPE (arg0), dconst1);
11535 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11538 /* The complex version of the above A / A optimization. */
11539 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11540 && operand_equal_p (arg0, arg1, 0))
11542 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11543 if (! HONOR_NANS (TYPE_MODE (elem_type))
11544 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11546 tree r = build_real (elem_type, dconst1);
11547 /* omit_two_operands will call fold_convert for us. */
11548 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11552 /* (-A) / (-B) -> A / B */
11553 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11554 return fold_build2_loc (loc, RDIV_EXPR, type,
11555 TREE_OPERAND (arg0, 0),
11556 negate_expr (arg1));
11557 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11558 return fold_build2_loc (loc, RDIV_EXPR, type,
11559 negate_expr (arg0),
11560 TREE_OPERAND (arg1, 0));
11562 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11563 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11564 && real_onep (arg1))
11565 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11567 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11568 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11569 && real_minus_onep (arg1))
11570 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11571 negate_expr (arg0)));
11573 /* If ARG1 is a constant, we can convert this to a multiply by the
11574 reciprocal. This does not have the same rounding properties,
11575 so only do this if -freciprocal-math. We can actually
11576 always safely do it if ARG1 is a power of two, but it's hard to
11577 tell portably whether it is. */
11578 if (optimize
11579 && (TREE_CODE (arg1) == REAL_CST
11580 || (TREE_CODE (arg1) == COMPLEX_CST
11581 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11582 || (TREE_CODE (arg1) == VECTOR_CST
11583 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11585 if (flag_reciprocal_math
11586 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11587 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11588 /* Find the reciprocal if optimizing and the result is exact.
11589 TODO: Complex reciprocal not implemented. */
11590 if (TREE_CODE (arg1) != COMPLEX_CST)
11592 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11594 if (inverse)
11595 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
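/* For instance, x / 5.0 becomes x * 0.2 under -freciprocal-math,
   while x / 4.0 becomes x * 0.25 even without it (when optimizing),
   since the reciprocal of a power of two is exact.  */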
11598 /* Convert A/B/C to A/(B*C). */
11599 if (flag_reciprocal_math
11600 && TREE_CODE (arg0) == RDIV_EXPR)
11601 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11602 fold_build2_loc (loc, MULT_EXPR, type,
11603 TREE_OPERAND (arg0, 1), arg1));
11605 /* Convert A/(B/C) to (A/B)*C. */
11606 if (flag_reciprocal_math
11607 && TREE_CODE (arg1) == RDIV_EXPR)
11608 return fold_build2_loc (loc, MULT_EXPR, type,
11609 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11610 TREE_OPERAND (arg1, 0)),
11611 TREE_OPERAND (arg1, 1));
11613 /* Convert C1/(X*C2) into (C1/C2)/X. */
11614 if (flag_reciprocal_math
11615 && TREE_CODE (arg1) == MULT_EXPR
11616 && TREE_CODE (arg0) == REAL_CST
11617 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11619 tree tem = const_binop (RDIV_EXPR, arg0,
11620 TREE_OPERAND (arg1, 1));
11621 if (tem)
11622 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11623 TREE_OPERAND (arg1, 0));
11626 if (flag_unsafe_math_optimizations)
11628 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11629 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11631 /* Optimize sin(x)/cos(x) as tan(x). */
11632 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11633 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11634 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11635 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11636 CALL_EXPR_ARG (arg1, 0), 0))
11638 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11640 if (tanfn != NULL_TREE)
11641 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11644 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11645 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11646 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11647 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11648 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11649 CALL_EXPR_ARG (arg1, 0), 0))
11651 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11653 if (tanfn != NULL_TREE)
11655 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11656 CALL_EXPR_ARG (arg0, 0));
11657 return fold_build2_loc (loc, RDIV_EXPR, type,
11658 build_real (type, dconst1), tmp);
11662 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11663 NaNs or Infinities. */
11664 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11665 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11666 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11668 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11669 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11671 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11672 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11673 && operand_equal_p (arg00, arg01, 0))
11675 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11677 if (cosfn != NULL_TREE)
11678 return build_call_expr_loc (loc, cosfn, 1, arg00);
11682 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11683 NaNs or Infinities. */
11684 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11685 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11686 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11688 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11689 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11691 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11692 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11693 && operand_equal_p (arg00, arg01, 0))
11695 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11697 if (cosfn != NULL_TREE)
11699 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11700 return fold_build2_loc (loc, RDIV_EXPR, type,
11701 build_real (type, dconst1),
11702 tmp);
11707 /* Optimize pow(x,c)/x as pow(x,c-1). */
11708 if (fcode0 == BUILT_IN_POW
11709 || fcode0 == BUILT_IN_POWF
11710 || fcode0 == BUILT_IN_POWL)
11712 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11713 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11714 if (TREE_CODE (arg01) == REAL_CST
11715 && !TREE_OVERFLOW (arg01)
11716 && operand_equal_p (arg1, arg00, 0))
11718 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11719 REAL_VALUE_TYPE c;
11720 tree arg;
11722 c = TREE_REAL_CST (arg01);
11723 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11724 arg = build_real (type, c);
11725 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11729 /* Optimize a/root(b/c) into a*root(c/b). */
11730 if (BUILTIN_ROOT_P (fcode1))
11732 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11734 if (TREE_CODE (rootarg) == RDIV_EXPR)
11736 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11737 tree b = TREE_OPERAND (rootarg, 0);
11738 tree c = TREE_OPERAND (rootarg, 1);
11740 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11742 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11743 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11747 /* Optimize x/expN(y) into x*expN(-y). */
11748 if (BUILTIN_EXPONENT_P (fcode1))
11750 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11751 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11752 arg1 = build_call_expr_loc (loc,
11753 expfn, 1,
11754 fold_convert_loc (loc, type, arg));
11755 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11758 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11759 if (fcode1 == BUILT_IN_POW
11760 || fcode1 == BUILT_IN_POWF
11761 || fcode1 == BUILT_IN_POWL)
11763 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11764 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11765 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11766 tree neg11 = fold_convert_loc (loc, type,
11767 negate_expr (arg11));
11768 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11769 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11772 return NULL_TREE;
11774 case TRUNC_DIV_EXPR:
11775 /* Optimize (X & (-A)) / A where A is a power of 2,
11776 to X >> log2(A) */
11777 if (TREE_CODE (arg0) == BIT_AND_EXPR
11778 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11779 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11781 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11782 arg1, TREE_OPERAND (arg0, 1));
11783 if (sum && integer_zerop (sum)) {
11784 tree pow2 = build_int_cst (integer_type_node,
11785 wi::exact_log2 (arg1));
11786 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11787 TREE_OPERAND (arg0, 0), pow2);
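/* For instance, (x & -8) / 8 with signed x folds to x >> 3: the AND
   clears the three low bits, so the truncating division is exactly an
   arithmetic shift.  */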
11791 /* Fall through */
11793 case FLOOR_DIV_EXPR:
11794 /* Simplify A / (B << N) where A and B are positive and B is
11795 a power of 2, to A >> (N + log2(B)). */
11796 strict_overflow_p = false;
11797 if (TREE_CODE (arg1) == LSHIFT_EXPR
11798 && (TYPE_UNSIGNED (type)
11799 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11801 tree sval = TREE_OPERAND (arg1, 0);
11802 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11804 tree sh_cnt = TREE_OPERAND (arg1, 1);
11805 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11806 wi::exact_log2 (sval));
11808 if (strict_overflow_p)
11809 fold_overflow_warning (("assuming signed overflow does not "
11810 "occur when simplifying A / (B << N)"),
11811 WARN_STRICT_OVERFLOW_MISC);
11813 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11814 sh_cnt, pow2);
11815 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11816 fold_convert_loc (loc, type, arg0), sh_cnt);
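/* For instance, with unsigned x, x / (4 << n) folds to x >> (n + 2),
   since log2 (4) == 2.  */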
11820 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11821 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11822 if (INTEGRAL_TYPE_P (type)
11823 && TYPE_UNSIGNED (type)
11824 && code == FLOOR_DIV_EXPR)
11825 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11827 /* Fall through */
11829 case ROUND_DIV_EXPR:
11830 case CEIL_DIV_EXPR:
11831 case EXACT_DIV_EXPR:
11832 if (integer_zerop (arg1))
11833 return NULL_TREE;
11834 /* X / -1 is -X. */
11835 if (!TYPE_UNSIGNED (type)
11836 && TREE_CODE (arg1) == INTEGER_CST
11837 && wi::eq_p (arg1, -1))
11838 return fold_convert_loc (loc, type, negate_expr (arg0));
11840 /* Convert -A / -B to A / B when the type is signed and overflow is
11841 undefined. */
11842 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11843 && TREE_CODE (arg0) == NEGATE_EXPR
11844 && negate_expr_p (arg1))
11846 if (INTEGRAL_TYPE_P (type))
11847 fold_overflow_warning (("assuming signed overflow does not occur "
11848 "when distributing negation across "
11849 "division"),
11850 WARN_STRICT_OVERFLOW_MISC);
11851 return fold_build2_loc (loc, code, type,
11852 fold_convert_loc (loc, type,
11853 TREE_OPERAND (arg0, 0)),
11854 fold_convert_loc (loc, type,
11855 negate_expr (arg1)));
11857 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11858 && TREE_CODE (arg1) == NEGATE_EXPR
11859 && negate_expr_p (arg0))
11861 if (INTEGRAL_TYPE_P (type))
11862 fold_overflow_warning (("assuming signed overflow does not occur "
11863 "when distributing negation across "
11864 "division"),
11865 WARN_STRICT_OVERFLOW_MISC);
11866 return fold_build2_loc (loc, code, type,
11867 fold_convert_loc (loc, type,
11868 negate_expr (arg0)),
11869 fold_convert_loc (loc, type,
11870 TREE_OPERAND (arg1, 0)));
11873 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11874 operation, EXACT_DIV_EXPR.
11876 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11877 At one time others generated faster code; it's not clear whether they
11878 still do after the last round of changes to the DIV code in expmed.c. */
11879 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11880 && multiple_of_p (type, arg0, arg1))
11881 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11883 strict_overflow_p = false;
11884 if (TREE_CODE (arg1) == INTEGER_CST
11885 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11886 &strict_overflow_p)))
11888 if (strict_overflow_p)
11889 fold_overflow_warning (("assuming signed overflow does not occur "
11890 "when simplifying division"),
11891 WARN_STRICT_OVERFLOW_MISC);
11892 return fold_convert_loc (loc, type, tem);
11895 return NULL_TREE;
11897 case CEIL_MOD_EXPR:
11898 case FLOOR_MOD_EXPR:
11899 case ROUND_MOD_EXPR:
11900 case TRUNC_MOD_EXPR:
11901 /* X % -1 is zero. */
11902 if (!TYPE_UNSIGNED (type)
11903 && TREE_CODE (arg1) == INTEGER_CST
11904 && wi::eq_p (arg1, -1))
11905 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11907 /* X % -C is the same as X % C. */
11908 if (code == TRUNC_MOD_EXPR
11909 && TYPE_SIGN (type) == SIGNED
11910 && TREE_CODE (arg1) == INTEGER_CST
11911 && !TREE_OVERFLOW (arg1)
11912 && wi::neg_p (arg1)
11913 && !TYPE_OVERFLOW_TRAPS (type)
11914 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11915 && !sign_bit_p (arg1, arg1))
11916 return fold_build2_loc (loc, code, type,
11917 fold_convert_loc (loc, type, arg0),
11918 fold_convert_loc (loc, type,
11919 negate_expr (arg1)));
11921 /* X % -Y is the same as X % Y. */
11922 if (code == TRUNC_MOD_EXPR
11923 && !TYPE_UNSIGNED (type)
11924 && TREE_CODE (arg1) == NEGATE_EXPR
11925 && !TYPE_OVERFLOW_TRAPS (type))
11926 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11927 fold_convert_loc (loc, type,
11928 TREE_OPERAND (arg1, 0)));
11930 strict_overflow_p = false;
11931 if (TREE_CODE (arg1) == INTEGER_CST
11932 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11933 &strict_overflow_p)))
11935 if (strict_overflow_p)
11936 fold_overflow_warning (("assuming signed overflow does not occur "
11937 "when simplifying modulus"),
11938 WARN_STRICT_OVERFLOW_MISC);
11939 return fold_convert_loc (loc, type, tem);
11942 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11943 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11944 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11945 && (TYPE_UNSIGNED (type)
11946 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11948 tree c = arg1;
11949 /* Also optimize A % (C << N) where C is a power of 2,
11950 to A & ((C << N) - 1). */
11951 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11952 c = TREE_OPERAND (arg1, 0);
11954 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11956 tree mask
11957 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11958 build_int_cst (TREE_TYPE (arg1), 1));
11959 if (strict_overflow_p)
11960 fold_overflow_warning (("assuming signed overflow does not "
11961 "occur when simplifying "
11962 "X % (power of two)"),
11963 WARN_STRICT_OVERFLOW_MISC);
11964 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11965 fold_convert_loc (loc, type, arg0),
11966 fold_convert_loc (loc, type, mask));
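/* For instance, with unsigned x, x % 16 folds to x & 15, and
   x % (2 << n) folds to x & ((2 << n) - 1).  */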
11970 return NULL_TREE;
11972 case LROTATE_EXPR:
11973 case RROTATE_EXPR:
11974 if (integer_all_onesp (arg0))
11975 return omit_one_operand_loc (loc, type, arg0, arg1);
11976 goto shift;
11978 case RSHIFT_EXPR:
11979 /* Optimize -1 >> x for arithmetic right shifts. */
11980 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11981 && tree_expr_nonnegative_p (arg1))
11982 return omit_one_operand_loc (loc, type, arg0, arg1);
11983 /* ... fall through ... */
11985 case LSHIFT_EXPR:
11986 shift:
11987 if (integer_zerop (arg1))
11988 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11989 if (integer_zerop (arg0))
11990 return omit_one_operand_loc (loc, type, arg0, arg1);
11992 /* Prefer vector1 << scalar to vector1 << vector2
11993 if vector2 is uniform. */
11994 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
11995 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
11996 return fold_build2_loc (loc, code, type, op0, tem);
11998 /* Since a negative shift count is not well-defined,
11999 don't try to compute it in the compiler. */
12000 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12001 return NULL_TREE;
12003 prec = element_precision (type);
12005 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12006 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12007 && tree_to_uhwi (arg1) < prec
12008 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12009 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12011 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12012 + tree_to_uhwi (arg1));
12014 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12015 being well defined. */
12016 if (low >= prec)
12018 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12019 low = low % prec;
12020 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12021 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12022 TREE_OPERAND (arg0, 0));
12023 else
12024 low = prec - 1;
12027 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12028 build_int_cst (TREE_TYPE (arg1), low));
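/* For instance, (x >> 3) >> 5 folds to x >> 8.  If the combined count
   reaches the precision, say (x << 20) << 20 on a 32-bit type, the
   result folds to 0 for left or unsigned shifts, while an arithmetic
   right shift is clamped to prec - 1.  */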
12031 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12032 into x & ((unsigned)-1 >> c) for unsigned types. */
12033 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12034 || (TYPE_UNSIGNED (type)
12035 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12036 && tree_fits_uhwi_p (arg1)
12037 && tree_to_uhwi (arg1) < prec
12038 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12039 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12041 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12042 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12043 tree lshift;
12044 tree arg00;
12046 if (low0 == low1)
12048 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12050 lshift = build_minus_one_cst (type);
12051 lshift = const_binop (code, lshift, arg1);
12053 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
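/* For instance, on a 32-bit type (x >> 4) << 4 folds to
   x & 0xfffffff0, i.e. -1 << 4 used as the mask.  */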
12057 /* Rewrite an LROTATE_EXPR by a constant into an
12058 RROTATE_EXPR by a new constant. */
12059 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12061 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12062 tem = const_binop (MINUS_EXPR, tem, arg1);
12063 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12066 /* If we have a rotate of a bit operation with the rotate count and
12067 the second operand of the bit operation both constant,
12068 permute the two operations. */
12069 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12070 && (TREE_CODE (arg0) == BIT_AND_EXPR
12071 || TREE_CODE (arg0) == BIT_IOR_EXPR
12072 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12074 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12075 fold_build2_loc (loc, code, type,
12076 TREE_OPERAND (arg0, 0), arg1),
12077 fold_build2_loc (loc, code, type,
12078 TREE_OPERAND (arg0, 1), arg1));
12080 /* Two consecutive rotates adding up to some integer
12081 multiple of the precision of the type can be ignored. */
12082 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12083 && TREE_CODE (arg0) == RROTATE_EXPR
12084 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12085 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12086 prec) == 0)
12087 return TREE_OPERAND (arg0, 0);
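/* For instance, rotating 32-bit x right by 10 and then by 22 yields
   x again, since 10 + 22 is a full rotation.  */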
12089 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12090 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12091 if the latter can be further optimized. */
12092 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12093 && TREE_CODE (arg0) == BIT_AND_EXPR
12094 && TREE_CODE (arg1) == INTEGER_CST
12095 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12097 tree mask = fold_build2_loc (loc, code, type,
12098 fold_convert_loc (loc, type,
12099 TREE_OPERAND (arg0, 1)),
12100 arg1);
12101 tree shift = fold_build2_loc (loc, code, type,
12102 fold_convert_loc (loc, type,
12103 TREE_OPERAND (arg0, 0)),
12104 arg1);
12105 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12106 if (tem)
12107 return tem;
12110 return NULL_TREE;
12112 case MIN_EXPR:
12113 if (operand_equal_p (arg0, arg1, 0))
12114 return omit_one_operand_loc (loc, type, arg0, arg1);
12115 if (INTEGRAL_TYPE_P (type)
12116 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12117 return omit_one_operand_loc (loc, type, arg1, arg0);
12118 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12119 if (tem)
12120 return tem;
12121 goto associate;
12123 case MAX_EXPR:
12124 if (operand_equal_p (arg0, arg1, 0))
12125 return omit_one_operand_loc (loc, type, arg0, arg1);
12126 if (INTEGRAL_TYPE_P (type)
12127 && TYPE_MAX_VALUE (type)
12128 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12129 return omit_one_operand_loc (loc, type, arg1, arg0);
12130 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12131 if (tem)
12132 return tem;
12133 goto associate;
12135 case TRUTH_ANDIF_EXPR:
12136 /* Note that the operands of this must be ints
12137 and their values must be 0 or 1.
12138 ("true" is a fixed value perhaps depending on the language.) */
12139 /* If first arg is constant zero, return it. */
12140 if (integer_zerop (arg0))
12141 return fold_convert_loc (loc, type, arg0);
12142 case TRUTH_AND_EXPR:
12143 /* If either arg is constant true, drop it. */
12144 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12145 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12146 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12147 /* Preserve sequence points. */
12148 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12149 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12150 /* If second arg is constant zero, result is zero, but first arg
12151 must be evaluated. */
12152 if (integer_zerop (arg1))
12153 return omit_one_operand_loc (loc, type, arg1, arg0);
12154 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12155 case will be handled here. */
12156 if (integer_zerop (arg0))
12157 return omit_one_operand_loc (loc, type, arg0, arg1);
12159 /* !X && X is always false. */
12160 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12161 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12162 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12163 /* X && !X is always false. */
12164 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12165 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12166 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12168 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12169 means A >= Y && A != MAX, but in this case we know that
12170 A < X <= MAX. */
12172 if (!TREE_SIDE_EFFECTS (arg0)
12173 && !TREE_SIDE_EFFECTS (arg1))
12175 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12176 if (tem && !operand_equal_p (tem, arg0, 0))
12177 return fold_build2_loc (loc, code, type, tem, arg1);
12179 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12180 if (tem && !operand_equal_p (tem, arg1, 0))
12181 return fold_build2_loc (loc, code, type, arg0, tem);
12184 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12185 != NULL_TREE)
12186 return tem;
12188 return NULL_TREE;
12190 case TRUTH_ORIF_EXPR:
12191 /* Note that the operands of this must be ints
12192 and their values must be 0 or true.
12193 ("true" is a fixed value perhaps depending on the language.) */
12194 /* If first arg is constant true, return it. */
12195 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12196 return fold_convert_loc (loc, type, arg0);
12197 case TRUTH_OR_EXPR:
12198 /* If either arg is constant zero, drop it. */
12199 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12200 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12201 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12202 /* Preserve sequence points. */
12203 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12204 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12205 /* If second arg is constant true, result is true, but we must
12206 evaluate first arg. */
12207 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12208 return omit_one_operand_loc (loc, type, arg1, arg0);
12209 /* Likewise for first arg, but note this only occurs here for
12210 TRUTH_OR_EXPR. */
12211 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12212 return omit_one_operand_loc (loc, type, arg0, arg1);
12214 /* !X || X is always true. */
12215 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12216 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12217 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12218 /* X || !X is always true. */
12219 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12220 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12221 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12223 /* (X && !Y) || (!X && Y) is X ^ Y */
12224 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12225 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12227 tree a0, a1, l0, l1, n0, n1;
12229 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12230 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12232 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12233 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12235 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12236 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12238 if ((operand_equal_p (n0, a0, 0)
12239 && operand_equal_p (n1, a1, 0))
12240 || (operand_equal_p (n0, a1, 0)
12241 && operand_equal_p (n1, a0, 0)))
12242 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12245 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12246 != NULL_TREE)
12247 return tem;
12249 return NULL_TREE;
12251 case TRUTH_XOR_EXPR:
12252 /* If the second arg is constant zero, drop it. */
12253 if (integer_zerop (arg1))
12254 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12255 /* If the second arg is constant true, this is a logical inversion. */
12256 if (integer_onep (arg1))
12258 tem = invert_truthvalue_loc (loc, arg0);
12259 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12261 /* Identical arguments cancel to zero. */
12262 if (operand_equal_p (arg0, arg1, 0))
12263 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12265 /* !X ^ X is always true. */
12266 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12267 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12268 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12270 /* X ^ !X is always true. */
12271 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12272 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12273 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12275 return NULL_TREE;
12277 case EQ_EXPR:
12278 case NE_EXPR:
12279 STRIP_NOPS (arg0);
12280 STRIP_NOPS (arg1);
12282 tem = fold_comparison (loc, code, type, op0, op1);
12283 if (tem != NULL_TREE)
12284 return tem;
12286 /* bool_var != 0 becomes bool_var. */
12287 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12288 && code == NE_EXPR)
12289 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12291 /* bool_var == 1 becomes bool_var. */
12292 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12293 && code == EQ_EXPR)
12294 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12296 /* bool_var != 1 becomes !bool_var. */
12297 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12298 && code == NE_EXPR)
12299 return fold_convert_loc (loc, type,
12300 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12301 TREE_TYPE (arg0), arg0));
12303 /* bool_var == 0 becomes !bool_var. */
12304 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12305 && code == EQ_EXPR)
12306 return fold_convert_loc (loc, type,
12307 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12308 TREE_TYPE (arg0), arg0));
12310 /* !exp != 0 becomes !exp.  */
12311 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12312 && code == NE_EXPR)
12313 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12315 /* If this is an equality comparison of the address of two non-weak,
12316 unaliased symbols, neither of which is extern (since we do not
12317 have access to attributes for externs), then we know the result. */
12318 if (TREE_CODE (arg0) == ADDR_EXPR
12319 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12320 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12321 && ! lookup_attribute ("alias",
12322 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12323 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12324 && TREE_CODE (arg1) == ADDR_EXPR
12325 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12326 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12327 && ! lookup_attribute ("alias",
12328 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12329 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12331 /* We know that we're looking at the address of two
12332 non-weak, unaliased, static _DECL nodes.
12334 It is both wasteful and incorrect to call operand_equal_p
12335 to compare the two ADDR_EXPR nodes. It is wasteful in that
12336 all we need to do is test pointer equality for the arguments
12337 to the two ADDR_EXPR nodes. It is incorrect to use
12338 operand_equal_p as that function is NOT equivalent to a
12339 C equality test. It can in fact return false for two
12340 objects which would test as equal using the C equality
12341 operator. */
12342 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12343 return constant_boolean_node (equal
12344 ? code == EQ_EXPR : code != EQ_EXPR,
12345 type);
12348 /* Similarly for a NEGATE_EXPR. */
12349 if (TREE_CODE (arg0) == NEGATE_EXPR
12350 && TREE_CODE (arg1) == INTEGER_CST
12351 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12352 arg1)))
12353 && TREE_CODE (tem) == INTEGER_CST
12354 && !TREE_OVERFLOW (tem))
12355 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
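/* E.g. -X == 5 becomes X == -5; the constant is negated at compile
   time and the fold is rejected if that negation would overflow.  */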
12357 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12358 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12359 && TREE_CODE (arg1) == INTEGER_CST
12360 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12361 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12362 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12363 fold_convert_loc (loc,
12364 TREE_TYPE (arg0),
12365 arg1),
12366 TREE_OPERAND (arg0, 1)));
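/* E.g. (X ^ 5) == 3 becomes X == 6, since XOR by a constant is an
   involution and 5 ^ 3 == 6.  */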
12368 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12369 if ((TREE_CODE (arg0) == PLUS_EXPR
12370 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12371 || TREE_CODE (arg0) == MINUS_EXPR)
12372 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12373 0)),
12374 arg1, 0)
12375 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12376 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12378 tree val = TREE_OPERAND (arg0, 1);
12379 return omit_two_operands_loc (loc, type,
12380 fold_build2_loc (loc, code, type,
12381 val,
12382 build_int_cst (TREE_TYPE (val),
12383 0)),
12384 TREE_OPERAND (arg0, 0), arg1);
12387 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12388 if (TREE_CODE (arg0) == MINUS_EXPR
12389 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12390 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12391 1)),
12392 arg1, 0)
12393 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12395 return omit_two_operands_loc (loc, type,
12396 code == NE_EXPR
12397 ? boolean_true_node : boolean_false_node,
12398 TREE_OPERAND (arg0, 1), arg1);
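/* E.g. 7 - X == X would require X + X == 7, but twice an integer is
   always even (also modulo 2^prec), so for odd C the == form folds
   to false and the != form to true.  */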
12401 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12402 if (TREE_CODE (arg0) == ABS_EXPR
12403 && (integer_zerop (arg1) || real_zerop (arg1)))
12404 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12406 /* If this is an EQ or NE comparison with zero and ARG0 is
12407 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12408 two operations, but the latter can be done in one less insn
12409 on machines that have only two-operand insns or on which a
12410 constant cannot be the first operand. */
12411 if (TREE_CODE (arg0) == BIT_AND_EXPR
12412 && integer_zerop (arg1))
12414 tree arg00 = TREE_OPERAND (arg0, 0);
12415 tree arg01 = TREE_OPERAND (arg0, 1);
12416 if (TREE_CODE (arg00) == LSHIFT_EXPR
12417 && integer_onep (TREE_OPERAND (arg00, 0)))
12419 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12420 arg01, TREE_OPERAND (arg00, 1));
12421 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12422 build_int_cst (TREE_TYPE (arg0), 1));
12423 return fold_build2_loc (loc, code, type,
12424 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12425 arg1);
12427 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12428 && integer_onep (TREE_OPERAND (arg01, 0)))
12430 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12431 arg00, TREE_OPERAND (arg01, 1));
12432 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12433 build_int_cst (TREE_TYPE (arg0), 1));
12434 return fold_build2_loc (loc, code, type,
12435 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12436 arg1);
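/* E.g. ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0:
   both test bit n of flags, but the latter is cheaper on the
   two-operand machines described above.  */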
12440 /* If this is an NE or EQ comparison of zero against the result of a
12441 signed MOD operation whose second operand is a power of 2, make
12442 the MOD operation unsigned since it is simpler and equivalent. */
12443 if (integer_zerop (arg1)
12444 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12445 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12446 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12447 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12448 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12449 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12451 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12452 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12453 fold_convert_loc (loc, newtype,
12454 TREE_OPERAND (arg0, 0)),
12455 fold_convert_loc (loc, newtype,
12456 TREE_OPERAND (arg0, 1)));
12458 return fold_build2_loc (loc, code, type, newmod,
12459 fold_convert_loc (loc, newtype, arg1));
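/* E.g. for signed X, X % 8 == 0 becomes (unsigned) X % 8 == 0: a
   power-of-two modulus compared against zero only inspects the low
   bits, which are the same in two's complement whatever the sign.  */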
12462 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12463 C1 is a valid shift constant, and C2 is a power of two, i.e.
12464 a single bit. */
12465 if (TREE_CODE (arg0) == BIT_AND_EXPR
12466 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12467 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12468 == INTEGER_CST
12469 && integer_pow2p (TREE_OPERAND (arg0, 1))
12470 && integer_zerop (arg1))
12472 tree itype = TREE_TYPE (arg0);
12473 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12474 prec = TYPE_PRECISION (itype);
12476 /* Check for a valid shift count. */
12477 if (wi::ltu_p (arg001, prec))
12479 tree arg01 = TREE_OPERAND (arg0, 1);
12480 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12481 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12482 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12483 can be rewritten as (X & (C2 << C1)) != 0. */
12484 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12486 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12487 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12488 return fold_build2_loc (loc, code, type, tem,
12489 fold_convert_loc (loc, itype, arg1));
12491 /* Otherwise, for signed (arithmetic) shifts,
12492 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12493 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12494 else if (!TYPE_UNSIGNED (itype))
12495 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12496 arg000, build_int_cst (itype, 0));
12497 /* Otherwise, for unsigned (logical) shifts,
12498 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12499 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12500 else
12501 return omit_one_operand_loc (loc, type,
12502 code == EQ_EXPR ? integer_one_node
12503 : integer_zero_node,
12504 arg000);
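/* E.g. assuming 32-bit X, ((X >> 3) & 4) != 0 becomes (X & 32) != 0
   since 4 << 3 still fits in the precision, while ((X >> 30) & 4)
   would overflow the shift, so for signed X it becomes X < 0.  */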
12508 /* If we have (A & C) == C where C is a power of 2, convert this into
12509 (A & C) != 0. Similarly for NE_EXPR. */
12510 if (TREE_CODE (arg0) == BIT_AND_EXPR
12511 && integer_pow2p (TREE_OPERAND (arg0, 1))
12512 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12513 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12514 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12515 integer_zero_node));
12517 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12518 bit, then fold the expression into A < 0 or A >= 0. */
12519 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12520 if (tem)
12521 return tem;
12523 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12524 Similarly for NE_EXPR. */
12525 if (TREE_CODE (arg0) == BIT_AND_EXPR
12526 && TREE_CODE (arg1) == INTEGER_CST
12527 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12529 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12530 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12531 TREE_OPERAND (arg0, 1));
12532 tree dandnotc
12533 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12534 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12535 notc);
12536 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12537 if (integer_nonzerop (dandnotc))
12538 return omit_one_operand_loc (loc, type, rslt, arg0);
12541 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12542 Similarly for NE_EXPR. */
12543 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12544 && TREE_CODE (arg1) == INTEGER_CST
12545 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12547 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12548 tree candnotd
12549 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12550 TREE_OPERAND (arg0, 1),
12551 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12552 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12553 if (integer_nonzerop (candnotd))
12554 return omit_one_operand_loc (loc, type, rslt, arg0);
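/* E.g. (A & 3) == 4 and (A | 4) == 3 can never hold: in each case
   4 & ~3 is nonzero, so the comparison folds to constant 0 for ==
   and constant 1 for !=.  */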
12557 /* If this is a comparison of a field, we may be able to simplify it. */
12558 if ((TREE_CODE (arg0) == COMPONENT_REF
12559 || TREE_CODE (arg0) == BIT_FIELD_REF)
12560 /* Handle the constant case even without -O
12561 to make sure the warnings are given. */
12562 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12564 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12565 if (t1)
12566 return t1;
12569 /* Optimize comparisons of strlen vs zero to a compare of the
12570 first character of the string vs zero. To wit,
12571 strlen(ptr) == 0 => *ptr == 0
12572 strlen(ptr) != 0 => *ptr != 0
12573 Other cases should reduce to one of these two (or a constant)
12574 due to the return value of strlen being unsigned. */
12575 if (TREE_CODE (arg0) == CALL_EXPR
12576 && integer_zerop (arg1))
12578 tree fndecl = get_callee_fndecl (arg0);
12580 if (fndecl
12581 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12582 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12583 && call_expr_nargs (arg0) == 1
12584 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12586 tree iref = build_fold_indirect_ref_loc (loc,
12587 CALL_EXPR_ARG (arg0, 0));
12588 return fold_build2_loc (loc, code, type, iref,
12589 build_int_cst (TREE_TYPE (iref), 0));
12593 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12594 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12595 if (TREE_CODE (arg0) == RSHIFT_EXPR
12596 && integer_zerop (arg1)
12597 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12599 tree arg00 = TREE_OPERAND (arg0, 0);
12600 tree arg01 = TREE_OPERAND (arg0, 1);
12601 tree itype = TREE_TYPE (arg00);
12602 if (wi::eq_p (arg01, element_precision (itype) - 1))
12604 if (TYPE_UNSIGNED (itype))
12606 itype = signed_type_for (itype);
12607 arg00 = fold_convert_loc (loc, itype, arg00);
12609 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12610 type, arg00, build_zero_cst (itype));
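/* E.g. assuming 32-bit X, X >> 31 is nonzero exactly when the sign
   bit is set, so X >> 31 != 0 becomes X < 0 (converting X to a
   signed type first if it was unsigned).  */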
12614 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12615 if (integer_zerop (arg1)
12616 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12617 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12618 TREE_OPERAND (arg0, 1));
12620 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12621 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12622 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12623 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12624 build_zero_cst (TREE_TYPE (arg0)));
12625 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12626 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12627 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12628 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12629 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12630 build_zero_cst (TREE_TYPE (arg0)));
12632 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12633 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12634 && TREE_CODE (arg1) == INTEGER_CST
12635 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12636 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12637 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12638 TREE_OPERAND (arg0, 1), arg1));
12640 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12641 (X & C) == 0 when C is a single bit. */
12642 if (TREE_CODE (arg0) == BIT_AND_EXPR
12643 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12644 && integer_zerop (arg1)
12645 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12647 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12648 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12649 TREE_OPERAND (arg0, 1));
12650 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12651 type, tem,
12652 fold_convert_loc (loc, TREE_TYPE (arg0),
12653 arg1));
12656 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12657 constant C is a power of two, i.e. a single bit. */
12658 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12659 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12660 && integer_zerop (arg1)
12661 && integer_pow2p (TREE_OPERAND (arg0, 1))
12662 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12663 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12665 tree arg00 = TREE_OPERAND (arg0, 0);
12666 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12667 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12670 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12671 when C is a power of two, i.e. a single bit. */
12672 if (TREE_CODE (arg0) == BIT_AND_EXPR
12673 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12674 && integer_zerop (arg1)
12675 && integer_pow2p (TREE_OPERAND (arg0, 1))
12676 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12677 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12679 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12680 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12681 arg000, TREE_OPERAND (arg0, 1));
12682 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12683 tem, build_int_cst (TREE_TYPE (tem), 0));
12686 if (integer_zerop (arg1)
12687 && tree_expr_nonzero_p (arg0))
12689 tree res = constant_boolean_node (code == NE_EXPR, type);
12690 return omit_one_operand_loc (loc, type, res, arg0);
12693 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12694 if (TREE_CODE (arg0) == NEGATE_EXPR
12695 && TREE_CODE (arg1) == NEGATE_EXPR)
12696 return fold_build2_loc (loc, code, type,
12697 TREE_OPERAND (arg0, 0),
12698 fold_convert_loc (loc, TREE_TYPE (arg0),
12699 TREE_OPERAND (arg1, 0)));
12701 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
12702 if (TREE_CODE (arg0) == BIT_AND_EXPR
12703 && TREE_CODE (arg1) == BIT_AND_EXPR)
12705 tree arg00 = TREE_OPERAND (arg0, 0);
12706 tree arg01 = TREE_OPERAND (arg0, 1);
12707 tree arg10 = TREE_OPERAND (arg1, 0);
12708 tree arg11 = TREE_OPERAND (arg1, 1);
12709 tree itype = TREE_TYPE (arg0);
12711 if (operand_equal_p (arg01, arg11, 0))
12712 return fold_build2_loc (loc, code, type,
12713 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12714 fold_build2_loc (loc,
12715 BIT_XOR_EXPR, itype,
12716 arg00, arg10),
12717 arg01),
12718 build_zero_cst (itype));
12720 if (operand_equal_p (arg01, arg10, 0))
12721 return fold_build2_loc (loc, code, type,
12722 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12723 fold_build2_loc (loc,
12724 BIT_XOR_EXPR, itype,
12725 arg00, arg11),
12726 arg01),
12727 build_zero_cst (itype));
12729 if (operand_equal_p (arg00, arg11, 0))
12730 return fold_build2_loc (loc, code, type,
12731 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12732 fold_build2_loc (loc,
12733 BIT_XOR_EXPR, itype,
12734 arg01, arg10),
12735 arg00),
12736 build_zero_cst (itype));
12738 if (operand_equal_p (arg00, arg10, 0))
12739 return fold_build2_loc (loc, code, type,
12740 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12741 fold_build2_loc (loc,
12742 BIT_XOR_EXPR, itype,
12743 arg01, arg11),
12744 arg00),
12745 build_zero_cst (itype));
12748 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12749 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12751 tree arg00 = TREE_OPERAND (arg0, 0);
12752 tree arg01 = TREE_OPERAND (arg0, 1);
12753 tree arg10 = TREE_OPERAND (arg1, 0);
12754 tree arg11 = TREE_OPERAND (arg1, 1);
12755 tree itype = TREE_TYPE (arg0);
12757 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12758 operand_equal_p guarantees no side-effects so we don't need
12759 to use omit_one_operand on Z. */
12760 if (operand_equal_p (arg01, arg11, 0))
12761 return fold_build2_loc (loc, code, type, arg00,
12762 fold_convert_loc (loc, TREE_TYPE (arg00),
12763 arg10));
12764 if (operand_equal_p (arg01, arg10, 0))
12765 return fold_build2_loc (loc, code, type, arg00,
12766 fold_convert_loc (loc, TREE_TYPE (arg00),
12767 arg11));
12768 if (operand_equal_p (arg00, arg11, 0))
12769 return fold_build2_loc (loc, code, type, arg01,
12770 fold_convert_loc (loc, TREE_TYPE (arg01),
12771 arg10));
12772 if (operand_equal_p (arg00, arg10, 0))
12773 return fold_build2_loc (loc, code, type, arg01,
12774 fold_convert_loc (loc, TREE_TYPE (arg01),
12775 arg11));
12777 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12778 if (TREE_CODE (arg01) == INTEGER_CST
12779 && TREE_CODE (arg11) == INTEGER_CST)
12781 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12782 fold_convert_loc (loc, itype, arg11));
12783 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12784 return fold_build2_loc (loc, code, type, tem,
12785 fold_convert_loc (loc, itype, arg10));
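/* E.g. (X ^ 1) == (Y ^ 2) becomes (X ^ 3) == Y: XOR-ing both sides
   by the constant 2 moves it across, and 1 ^ 2 == 3.  */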
12789 /* Attempt to simplify equality/inequality comparisons of complex
12790 values. Only lower the comparison if the result is known or
12791 can be simplified to a single scalar comparison. */
12792 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12793 || TREE_CODE (arg0) == COMPLEX_CST)
12794 && (TREE_CODE (arg1) == COMPLEX_EXPR
12795 || TREE_CODE (arg1) == COMPLEX_CST))
12797 tree real0, imag0, real1, imag1;
12798 tree rcond, icond;
12800 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12802 real0 = TREE_OPERAND (arg0, 0);
12803 imag0 = TREE_OPERAND (arg0, 1);
12805 else
12807 real0 = TREE_REALPART (arg0);
12808 imag0 = TREE_IMAGPART (arg0);
12811 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12813 real1 = TREE_OPERAND (arg1, 0);
12814 imag1 = TREE_OPERAND (arg1, 1);
12816 else
12818 real1 = TREE_REALPART (arg1);
12819 imag1 = TREE_IMAGPART (arg1);
12822 rcond = fold_binary_loc (loc, code, type, real0, real1);
12823 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12825 if (integer_zerop (rcond))
12827 if (code == EQ_EXPR)
12828 return omit_two_operands_loc (loc, type, boolean_false_node,
12829 imag0, imag1);
12830 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12832 else
12834 if (code == NE_EXPR)
12835 return omit_two_operands_loc (loc, type, boolean_true_node,
12836 imag0, imag1);
12837 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12841 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12842 if (icond && TREE_CODE (icond) == INTEGER_CST)
12844 if (integer_zerop (icond))
12846 if (code == EQ_EXPR)
12847 return omit_two_operands_loc (loc, type, boolean_false_node,
12848 real0, real1);
12849 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12851 else
12853 if (code == NE_EXPR)
12854 return omit_two_operands_loc (loc, type, boolean_true_node,
12855 real0, real1);
12856 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12861 return NULL_TREE;
12863 case LT_EXPR:
12864 case GT_EXPR:
12865 case LE_EXPR:
12866 case GE_EXPR:
12867 tem = fold_comparison (loc, code, type, op0, op1);
12868 if (tem != NULL_TREE)
12869 return tem;
12871 /* Transform comparisons of the form X +- C CMP X. */
12872 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12873 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12874 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12875 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12876 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12877 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12879 tree arg01 = TREE_OPERAND (arg0, 1);
12880 enum tree_code code0 = TREE_CODE (arg0);
12881 int is_positive;
12883 if (TREE_CODE (arg01) == REAL_CST)
12884 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12885 else
12886 is_positive = tree_int_cst_sgn (arg01);
12888 /* (X - c) > X becomes false. */
12889 if (code == GT_EXPR
12890 && ((code0 == MINUS_EXPR && is_positive >= 0)
12891 || (code0 == PLUS_EXPR && is_positive <= 0)))
12893 if (TREE_CODE (arg01) == INTEGER_CST
12894 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12895 fold_overflow_warning (("assuming signed overflow does not "
12896 "occur when assuming that (X - c) > X "
12897 "is always false"),
12898 WARN_STRICT_OVERFLOW_ALL);
12899 return constant_boolean_node (0, type);
12902 /* Likewise (X + c) < X becomes false. */
12903 if (code == LT_EXPR
12904 && ((code0 == PLUS_EXPR && is_positive >= 0)
12905 || (code0 == MINUS_EXPR && is_positive <= 0)))
12907 if (TREE_CODE (arg01) == INTEGER_CST
12908 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12909 fold_overflow_warning (("assuming signed overflow does not "
12910 "occur when assuming that "
12911 "(X + c) < X is always false"),
12912 WARN_STRICT_OVERFLOW_ALL);
12913 return constant_boolean_node (0, type);
12916 /* Convert (X - c) <= X to true. */
12917 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12918 && code == LE_EXPR
12919 && ((code0 == MINUS_EXPR && is_positive >= 0)
12920 || (code0 == PLUS_EXPR && is_positive <= 0)))
12922 if (TREE_CODE (arg01) == INTEGER_CST
12923 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12924 fold_overflow_warning (("assuming signed overflow does not "
12925 "occur when assuming that "
12926 "(X - c) <= X is always true"),
12927 WARN_STRICT_OVERFLOW_ALL);
12928 return constant_boolean_node (1, type);
12931 /* Convert (X + c) >= X to true. */
12932 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12933 && code == GE_EXPR
12934 && ((code0 == PLUS_EXPR && is_positive >= 0)
12935 || (code0 == MINUS_EXPR && is_positive <= 0)))
12937 if (TREE_CODE (arg01) == INTEGER_CST
12938 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12939 fold_overflow_warning (("assuming signed overflow does not "
12940 "occur when assuming that "
12941 "(X + c) >= X is always true"),
12942 WARN_STRICT_OVERFLOW_ALL);
12943 return constant_boolean_node (1, type);
12946 if (TREE_CODE (arg01) == INTEGER_CST)
12948 /* Convert X + c > X and X - c < X to true for integers. */
12949 if (code == GT_EXPR
12950 && ((code0 == PLUS_EXPR && is_positive > 0)
12951 || (code0 == MINUS_EXPR && is_positive < 0)))
12953 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12954 fold_overflow_warning (("assuming signed overflow does "
12955 "not occur when assuming that "
12956 "(X + c) > X is always true"),
12957 WARN_STRICT_OVERFLOW_ALL);
12958 return constant_boolean_node (1, type);
12961 if (code == LT_EXPR
12962 && ((code0 == MINUS_EXPR && is_positive > 0)
12963 || (code0 == PLUS_EXPR && is_positive < 0)))
12965 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12966 fold_overflow_warning (("assuming signed overflow does "
12967 "not occur when assuming that "
12968 "(X - c) < X is always true"),
12969 WARN_STRICT_OVERFLOW_ALL);
12970 return constant_boolean_node (1, type);
12973 /* Convert X + c <= X and X - c >= X to false for integers. */
12974 if (code == LE_EXPR
12975 && ((code0 == PLUS_EXPR && is_positive > 0)
12976 || (code0 == MINUS_EXPR && is_positive < 0)))
12978 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12979 fold_overflow_warning (("assuming signed overflow does "
12980 "not occur when assuming that "
12981 "(X + c) <= X is always false"),
12982 WARN_STRICT_OVERFLOW_ALL);
12983 return constant_boolean_node (0, type);
12986 if (code == GE_EXPR
12987 && ((code0 == MINUS_EXPR && is_positive > 0)
12988 || (code0 == PLUS_EXPR && is_positive < 0)))
12990 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12991 fold_overflow_warning (("assuming signed overflow does "
12992 "not occur when assuming that "
12993 "(X - c) >= X is always false"),
12994 WARN_STRICT_OVERFLOW_ALL);
12995 return constant_boolean_node (0, type);
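/* E.g. with signed overflow undefined, X + 1 > X folds to true and
   X + 1 <= X folds to false; the warnings above record the
   strict-overflow assumption being made.  */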
13000 /* Comparisons with the highest or lowest possible integer of
13001 the specified precision will have known values. */
13003 tree arg1_type = TREE_TYPE (arg1);
13004 unsigned int prec = TYPE_PRECISION (arg1_type);
13006 if (TREE_CODE (arg1) == INTEGER_CST
13007 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13009 wide_int max = wi::max_value (arg1_type);
13010 wide_int signed_max = wi::max_value (prec, SIGNED);
13011 wide_int min = wi::min_value (arg1_type);
13013 if (wi::eq_p (arg1, max))
13014 switch (code)
13016 case GT_EXPR:
13017 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13019 case GE_EXPR:
13020 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13022 case LE_EXPR:
13023 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13025 case LT_EXPR:
13026 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13028 /* The GE_EXPR and LT_EXPR cases above are not normally
13029 reached because of previous transformations. */
13031 default:
13032 break;
13034 else if (wi::eq_p (arg1, max - 1))
13035 switch (code)
13037 case GT_EXPR:
13038 arg1 = const_binop (PLUS_EXPR, arg1,
13039 build_int_cst (TREE_TYPE (arg1), 1));
13040 return fold_build2_loc (loc, EQ_EXPR, type,
13041 fold_convert_loc (loc,
13042 TREE_TYPE (arg1), arg0),
13043 arg1);
13044 case LE_EXPR:
13045 arg1 = const_binop (PLUS_EXPR, arg1,
13046 build_int_cst (TREE_TYPE (arg1), 1));
13047 return fold_build2_loc (loc, NE_EXPR, type,
13048 fold_convert_loc (loc, TREE_TYPE (arg1),
13049 arg0),
13050 arg1);
13051 default:
13052 break;
13054 else if (wi::eq_p (arg1, min))
13055 switch (code)
13057 case LT_EXPR:
13058 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13060 case LE_EXPR:
13061 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13063 case GE_EXPR:
13064 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13066 case GT_EXPR:
13067 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13069 default:
13070 break;
13072 else if (wi::eq_p (arg1, min + 1))
13073 switch (code)
13075 case GE_EXPR:
13076 arg1 = const_binop (MINUS_EXPR, arg1,
13077 build_int_cst (TREE_TYPE (arg1), 1));
13078 return fold_build2_loc (loc, NE_EXPR, type,
13079 fold_convert_loc (loc,
13080 TREE_TYPE (arg1), arg0),
13081 arg1);
13082 case LT_EXPR:
13083 arg1 = const_binop (MINUS_EXPR, arg1,
13084 build_int_cst (TREE_TYPE (arg1), 1));
13085 return fold_build2_loc (loc, EQ_EXPR, type,
13086 fold_convert_loc (loc, TREE_TYPE (arg1),
13087 arg0),
13088 arg1);
13089 default:
13090 break;
13093 else if (wi::eq_p (arg1, signed_max)
13094 && TYPE_UNSIGNED (arg1_type)
13095 /* We will flip the signedness of the comparison operator
13096 associated with the mode of arg1, so the sign bit is
13097 specified by this mode. Check that arg1 is the signed
13098 max associated with this sign bit. */
13099 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13100 /* signed_type does not work on pointer types. */
13101 && INTEGRAL_TYPE_P (arg1_type))
13103 /* The following case also applies to X < signed_max+1
13104 and X >= signed_max+1 because of previous transformations. */
13105 if (code == LE_EXPR || code == GT_EXPR)
13107 tree st = signed_type_for (arg1_type);
13108 return fold_build2_loc (loc,
13109 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13110 type, fold_convert_loc (loc, st, arg0),
13111 build_int_cst (st, 0));
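/* E.g. for unsigned char X (assuming the usual 8-bit char): X > 255
   folds to false, X >= 255 to X == 255, and X <= 127 flips
   signedness to (signed char) X >= 0.  */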
13117 /* If we are comparing an ABS_EXPR with a constant, we can
13118 convert all the cases into explicit comparisons, but they may
13119 well not be faster than doing the ABS and one comparison.
13120 But ABS (X) <= C is a range comparison, which becomes a subtraction
13121 and a comparison, and is probably faster. */
13122 if (code == LE_EXPR
13123 && TREE_CODE (arg1) == INTEGER_CST
13124 && TREE_CODE (arg0) == ABS_EXPR
13125 && ! TREE_SIDE_EFFECTS (arg0)
13126 && (0 != (tem = negate_expr (arg1)))
13127 && TREE_CODE (tem) == INTEGER_CST
13128 && !TREE_OVERFLOW (tem))
13129 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13130 build2 (GE_EXPR, type,
13131 TREE_OPERAND (arg0, 0), tem),
13132 build2 (LE_EXPR, type,
13133 TREE_OPERAND (arg0, 0), arg1));
13135 /* Convert ABS_EXPR<x> >= 0 to true. */
13136 strict_overflow_p = false;
13137 if (code == GE_EXPR
13138 && (integer_zerop (arg1)
13139 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13140 && real_zerop (arg1)))
13141 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13143 if (strict_overflow_p)
13144 fold_overflow_warning (("assuming signed overflow does not occur "
13145 "when simplifying comparison of "
13146 "absolute value and zero"),
13147 WARN_STRICT_OVERFLOW_CONDITIONAL);
13148 return omit_one_operand_loc (loc, type,
13149 constant_boolean_node (true, type),
13150 arg0);
13153 /* Convert ABS_EXPR<x> < 0 to false. */
13154 strict_overflow_p = false;
13155 if (code == LT_EXPR
13156 && (integer_zerop (arg1) || real_zerop (arg1))
13157 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13159 if (strict_overflow_p)
13160 fold_overflow_warning (("assuming signed overflow does not occur "
13161 "when simplifying comparison of "
13162 "absolute value and zero"),
13163 WARN_STRICT_OVERFLOW_CONDITIONAL);
13164 return omit_one_operand_loc (loc, type,
13165 constant_boolean_node (false, type),
13166 arg0);
13169 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13170 and similarly for >= into !=. */
13171 if ((code == LT_EXPR || code == GE_EXPR)
13172 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13173 && TREE_CODE (arg1) == LSHIFT_EXPR
13174 && integer_onep (TREE_OPERAND (arg1, 0)))
13175 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13176 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13177 TREE_OPERAND (arg1, 1)),
13178 build_zero_cst (TREE_TYPE (arg0)));
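/* E.g. for unsigned X, X < (1 << Y) holds exactly when no bit at
   position Y or above is set, which is the same as X >> Y == 0.  */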
13180 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13181 otherwise Y might be >= # of bits in X's type and thus e.g.
13182 (unsigned char) (1 << Y) for Y 15 might be 0.
13183 If the cast is widening, then 1 << Y should have unsigned type,
13184 otherwise if Y is number of bits in the signed shift type minus 1,
13185 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13186 31 might be 0xffffffff80000000. */
13187 if ((code == LT_EXPR || code == GE_EXPR)
13188 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13189 && CONVERT_EXPR_P (arg1)
13190 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13191 && (TYPE_PRECISION (TREE_TYPE (arg1))
13192 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13193 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13194 || (TYPE_PRECISION (TREE_TYPE (arg1))
13195 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13196 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13198 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13199 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13200 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13201 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13202 build_zero_cst (TREE_TYPE (arg0)));
13205 return NULL_TREE;
13207 case UNORDERED_EXPR:
13208 case ORDERED_EXPR:
13209 case UNLT_EXPR:
13210 case UNLE_EXPR:
13211 case UNGT_EXPR:
13212 case UNGE_EXPR:
13213 case UNEQ_EXPR:
13214 case LTGT_EXPR:
13215 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13217 t1 = fold_relational_const (code, type, arg0, arg1);
13218 if (t1 != NULL_TREE)
13219 return t1;
13222 /* If the first operand is NaN, the result is constant. */
13223 if (TREE_CODE (arg0) == REAL_CST
13224 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13225 && (code != LTGT_EXPR || ! flag_trapping_math))
13227 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13228 ? integer_zero_node
13229 : integer_one_node;
13230 return omit_one_operand_loc (loc, type, t1, arg1);
13233 /* If the second operand is NaN, the result is constant. */
13234 if (TREE_CODE (arg1) == REAL_CST
13235 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13236 && (code != LTGT_EXPR || ! flag_trapping_math))
13238 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13239 ? integer_zero_node
13240 : integer_one_node;
13241 return omit_one_operand_loc (loc, type, t1, arg0);
13244 /* Simplify unordered comparison of something with itself. */
13245 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13246 && operand_equal_p (arg0, arg1, 0))
13247 return constant_boolean_node (1, type);
13249 if (code == LTGT_EXPR
13250 && !flag_trapping_math
13251 && operand_equal_p (arg0, arg1, 0))
13252 return constant_boolean_node (0, type);
13254 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13256 tree targ0 = strip_float_extensions (arg0);
13257 tree targ1 = strip_float_extensions (arg1);
13258 tree newtype = TREE_TYPE (targ0);
13260 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13261 newtype = TREE_TYPE (targ1);
13263 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13264 return fold_build2_loc (loc, code, type,
13265 fold_convert_loc (loc, newtype, targ0),
13266 fold_convert_loc (loc, newtype, targ1));
13269 return NULL_TREE;
13271 case COMPOUND_EXPR:
13272 /* When pedantic, a compound expression can be neither an lvalue
13273 nor an integer constant expression. */
13274 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13275 return NULL_TREE;
13276 /* Don't let (0, 0) be a null pointer constant. */
13277 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13278 : fold_convert_loc (loc, type, arg1);
13279 return pedantic_non_lvalue_loc (loc, tem);
13281 case COMPLEX_EXPR:
13282 if ((TREE_CODE (arg0) == REAL_CST
13283 && TREE_CODE (arg1) == REAL_CST)
13284 || (TREE_CODE (arg0) == INTEGER_CST
13285 && TREE_CODE (arg1) == INTEGER_CST))
13286 return build_complex (type, arg0, arg1);
13287 return NULL_TREE;
13289 case ASSERT_EXPR:
13290 /* An ASSERT_EXPR should never be passed to fold_binary. */
13291 gcc_unreachable ();
13293 case VEC_PACK_TRUNC_EXPR:
13294 case VEC_PACK_FIX_TRUNC_EXPR:
13296 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13297 tree *elts;
13299 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13300 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13301 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13302 return NULL_TREE;
13304 elts = XALLOCAVEC (tree, nelts);
13305 if (!vec_cst_ctor_to_array (arg0, elts)
13306 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13307 return NULL_TREE;
13309 for (i = 0; i < nelts; i++)
13311 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13312 ? NOP_EXPR : FIX_TRUNC_EXPR,
13313 TREE_TYPE (type), elts[i]);
13314 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13315 return NULL_TREE;
13318 return build_vector (type, elts);
13321 case VEC_WIDEN_MULT_LO_EXPR:
13322 case VEC_WIDEN_MULT_HI_EXPR:
13323 case VEC_WIDEN_MULT_EVEN_EXPR:
13324 case VEC_WIDEN_MULT_ODD_EXPR:
13326 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13327 unsigned int out, ofs, scale;
13328 tree *elts;
13330 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13331 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13332 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13333 return NULL_TREE;
13335 elts = XALLOCAVEC (tree, nelts * 4);
13336 if (!vec_cst_ctor_to_array (arg0, elts)
13337 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13338 return NULL_TREE;
13340 if (code == VEC_WIDEN_MULT_LO_EXPR)
13341 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13342 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13343 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13344 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13345 scale = 1, ofs = 0;
13346 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13347 scale = 1, ofs = 1;
13349 for (out = 0; out < nelts; out++)
13351 unsigned int in1 = (out << scale) + ofs;
13352 unsigned int in2 = in1 + nelts * 2;
13353 tree t1, t2;
13355 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13356 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13358 if (t1 == NULL_TREE || t2 == NULL_TREE)
13359 return NULL_TREE;
13360 elts[out] = const_binop (MULT_EXPR, t1, t2);
13361 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13362 return NULL_TREE;
13365 return build_vector (type, elts);
13368 default:
13369 return NULL_TREE;
13370 } /* switch (code) */
13373 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13374 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13375 of GOTO_EXPR. */
13377 static tree
13378 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13380 switch (TREE_CODE (*tp))
13382 case LABEL_EXPR:
13383 return *tp;
13385 case GOTO_EXPR:
13386 *walk_subtrees = 0;
13388 /* ... fall through ... */
13390 default:
13391 return NULL_TREE;
13395 /* Return whether the sub-tree ST contains a label which is accessible from
13396 outside the sub-tree. */
13398 static bool
13399 contains_label_p (tree st)
13401 return
13402 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13405 /* Fold a ternary expression of code CODE and type TYPE with operands
13406 OP0, OP1, and OP2. Return the folded expression if folding is
13407 successful. Otherwise, return NULL_TREE. */
13409 tree
13410 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13411 tree op0, tree op1, tree op2)
13413 tree tem;
13414 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13415 enum tree_code_class kind = TREE_CODE_CLASS (code);
13417 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13418 && TREE_CODE_LENGTH (code) == 3);
13420 /* If this is a commutative operation, and OP0 is a constant, move it
13421 to OP1 to reduce the number of tests below. */
13422 if (commutative_ternary_tree_code (code)
13423 && tree_swap_operands_p (op0, op1, true))
13424 return fold_build3_loc (loc, code, type, op1, op0, op2);
13426 tem = generic_simplify (loc, code, type, op0, op1, op2);
13427 if (tem)
13428 return tem;
13430 /* Strip any conversions that don't change the mode. This is safe
13431 for every expression, except for a comparison expression because
13432 its signedness is derived from its operands. So, in the latter
13433 case, only strip conversions that don't change the signedness.
13435 Note that this is done as an internal manipulation within the
13436 constant folder, in order to find the simplest representation of
13437 the arguments so that their form can be studied. In any cases,
13438 the appropriate type conversions should be put back in the tree
13439 that will get out of the constant folder. */
13440 if (op0)
13442 arg0 = op0;
13443 STRIP_NOPS (arg0);
13446 if (op1)
13448 arg1 = op1;
13449 STRIP_NOPS (arg1);
13452 if (op2)
13454 arg2 = op2;
13455 STRIP_NOPS (arg2);
13458 switch (code)
13460 case COMPONENT_REF:
13461 if (TREE_CODE (arg0) == CONSTRUCTOR
13462 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13464 unsigned HOST_WIDE_INT idx;
13465 tree field, value;
13466 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13467 if (field == arg1)
13468 return value;
13470 return NULL_TREE;
13472 case COND_EXPR:
13473 case VEC_COND_EXPR:
13474 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13475 so all simple results must be passed through pedantic_non_lvalue. */
13476 if (TREE_CODE (arg0) == INTEGER_CST)
13478 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13479 tem = integer_zerop (arg0) ? op2 : op1;
13480 /* Only optimize constant conditions when the selected branch
13481 has the same type as the COND_EXPR. This avoids optimizing
13482 away "c ? x : throw", where the throw has a void type.
13483 Also avoid throwing away an operand that contains a label. */
13484 if ((!TREE_SIDE_EFFECTS (unused_op)
13485 || !contains_label_p (unused_op))
13486 && (! VOID_TYPE_P (TREE_TYPE (tem))
13487 || VOID_TYPE_P (type)))
13488 return pedantic_non_lvalue_loc (loc, tem);
13489 return NULL_TREE;
13491 else if (TREE_CODE (arg0) == VECTOR_CST)
13493 if ((TREE_CODE (arg1) == VECTOR_CST
13494 || TREE_CODE (arg1) == CONSTRUCTOR)
13495 && (TREE_CODE (arg2) == VECTOR_CST
13496 || TREE_CODE (arg2) == CONSTRUCTOR))
13498 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13499 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13500 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13501 for (i = 0; i < nelts; i++)
13503 tree val = VECTOR_CST_ELT (arg0, i);
13504 if (integer_all_onesp (val))
13505 sel[i] = i;
13506 else if (integer_zerop (val))
13507 sel[i] = nelts + i;
13508 else /* Currently unreachable. */
13509 return NULL_TREE;
13511 tree t = fold_vec_perm (type, arg1, arg2, sel);
13512 if (t != NULL_TREE)
13513 return t;
13517 /* If we have A op B ? A : C, we may be able to convert this to a
13518 simpler expression, depending on the operation and the values
13519 of B and C. Signed zeros prevent all of these transformations,
13520 for reasons given above each one.
13522 Also try swapping the arguments and inverting the conditional. */
13523 if (COMPARISON_CLASS_P (arg0)
13524 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13525 arg1, TREE_OPERAND (arg0, 1))
13526 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13528 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13529 if (tem)
13530 return tem;
13533 if (COMPARISON_CLASS_P (arg0)
13534 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13535 op2,
13536 TREE_OPERAND (arg0, 1))
13537 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13539 location_t loc0 = expr_location_or (arg0, loc);
13540 tem = fold_invert_truthvalue (loc0, arg0);
13541 if (tem && COMPARISON_CLASS_P (tem))
13543 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13544 if (tem)
13545 return tem;
13549 /* If the second operand is simpler than the third, swap them
13550 since that produces better jump optimization results. */
13551 if (truth_value_p (TREE_CODE (arg0))
13552 && tree_swap_operands_p (op1, op2, false))
13554 location_t loc0 = expr_location_or (arg0, loc);
13555 /* See if this can be inverted. If it can't, possibly because
13556 it was a floating-point inequality comparison, don't do
13557 anything. */
13558 tem = fold_invert_truthvalue (loc0, arg0);
13559 if (tem)
13560 return fold_build3_loc (loc, code, type, tem, op2, op1);
13563 /* Convert A ? 1 : 0 to simply A. */
13564 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13565 : (integer_onep (op1)
13566 && !VECTOR_TYPE_P (type)))
13567 && integer_zerop (op2)
13568 /* If we try to convert OP0 to our type, the
13569 call to fold will try to move the conversion inside
13570 a COND, which will recurse. In that case, the COND_EXPR
13571 is probably the best choice, so leave it alone. */
13572 && type == TREE_TYPE (arg0))
13573 return pedantic_non_lvalue_loc (loc, arg0);
13575 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13576 over COND_EXPR in cases such as floating point comparisons. */
13577 if (integer_zerop (op1)
13578 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13579 : (integer_onep (op2)
13580 && !VECTOR_TYPE_P (type)))
13581 && truth_value_p (TREE_CODE (arg0)))
13582 return pedantic_non_lvalue_loc (loc,
13583 fold_convert_loc (loc, type,
13584 invert_truthvalue_loc (loc,
13585 arg0)));
13587 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13588 if (TREE_CODE (arg0) == LT_EXPR
13589 && integer_zerop (TREE_OPERAND (arg0, 1))
13590 && integer_zerop (op2)
13591 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13593 /* sign_bit_p looks through both zero and sign extensions,
13594 but for this optimization only sign extensions are
13595 usable. */
13596 tree tem2 = TREE_OPERAND (arg0, 0);
13597 while (tem != tem2)
13599 if (TREE_CODE (tem2) != NOP_EXPR
13600 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13602 tem = NULL_TREE;
13603 break;
13605 tem2 = TREE_OPERAND (tem2, 0);
13607 /* sign_bit_p only checks ARG1 bits within A's precision.
13608 If <sign bit of A> has wider type than A, bits outside
13609 of A's precision in <sign bit of A> need to be checked.
13610 If they are all 0, this optimization needs to be done
13611 in unsigned A's type; if they are all 1, in signed A's type;
13612 otherwise this can't be done. */
13613 if (tem
13614 && TYPE_PRECISION (TREE_TYPE (tem))
13615 < TYPE_PRECISION (TREE_TYPE (arg1))
13616 && TYPE_PRECISION (TREE_TYPE (tem))
13617 < TYPE_PRECISION (type))
13619 int inner_width, outer_width;
13620 tree tem_type;
13622 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13623 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13624 if (outer_width > TYPE_PRECISION (type))
13625 outer_width = TYPE_PRECISION (type);
13627 wide_int mask = wi::shifted_mask
13628 (inner_width, outer_width - inner_width, false,
13629 TYPE_PRECISION (TREE_TYPE (arg1)));
13631 wide_int common = mask & arg1;
13632 if (common == mask)
13634 tem_type = signed_type_for (TREE_TYPE (tem));
13635 tem = fold_convert_loc (loc, tem_type, tem);
13637 else if (common == 0)
13639 tem_type = unsigned_type_for (TREE_TYPE (tem));
13640 tem = fold_convert_loc (loc, tem_type, tem);
13642 else
13643 tem = NULL;
13646 if (tem)
13647 return
13648 fold_convert_loc (loc, type,
13649 fold_build2_loc (loc, BIT_AND_EXPR,
13650 TREE_TYPE (tem), tem,
13651 fold_convert_loc (loc,
13652 TREE_TYPE (tem),
13653 arg1)));
13656 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13657 already handled above. */
13658 if (TREE_CODE (arg0) == BIT_AND_EXPR
13659 && integer_onep (TREE_OPERAND (arg0, 1))
13660 && integer_zerop (op2)
13661 && integer_pow2p (arg1))
13663 tree tem = TREE_OPERAND (arg0, 0);
13664 STRIP_NOPS (tem);
13665 if (TREE_CODE (tem) == RSHIFT_EXPR
13666 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13667 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13668 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13669 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13670 TREE_OPERAND (tem, 0), arg1);
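/* E.g. ((A >> 3) & 1) ? 8 : 0 is simply A & 8, since the selected
   value 8 == 1 << 3 reproduces exactly the bit being tested.  */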
13673 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13674 is probably obsolete because the first operand should be a
13675 truth value (that's why we have the two cases above), but let's
13676 leave it in until we can confirm this for all front-ends. */
13677 if (integer_zerop (op2)
13678 && TREE_CODE (arg0) == NE_EXPR
13679 && integer_zerop (TREE_OPERAND (arg0, 1))
13680 && integer_pow2p (arg1)
13681 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13682 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13683 arg1, OEP_ONLY_CONST))
13684 return pedantic_non_lvalue_loc (loc,
13685 fold_convert_loc (loc, type,
13686 TREE_OPERAND (arg0, 0)));
13688 /* Disable the transformations below for vectors, since
13689 fold_binary_op_with_conditional_arg may undo them immediately,
13690 yielding an infinite loop. */
13691 if (code == VEC_COND_EXPR)
13692 return NULL_TREE;
13694 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13695 if (integer_zerop (op2)
13696 && truth_value_p (TREE_CODE (arg0))
13697 && truth_value_p (TREE_CODE (arg1))
13698 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13699 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13700 : TRUTH_ANDIF_EXPR,
13701 type, fold_convert_loc (loc, type, arg0), arg1);
13703 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13704 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13705 && truth_value_p (TREE_CODE (arg0))
13706 && truth_value_p (TREE_CODE (arg1))
13707 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13709 location_t loc0 = expr_location_or (arg0, loc);
13710 /* Only perform transformation if ARG0 is easily inverted. */
13711 tem = fold_invert_truthvalue (loc0, arg0);
13712 if (tem)
13713 return fold_build2_loc (loc, code == VEC_COND_EXPR
13714 ? BIT_IOR_EXPR
13715 : TRUTH_ORIF_EXPR,
13716 type, fold_convert_loc (loc, type, tem),
13717 arg1);
13720 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13721 if (integer_zerop (arg1)
13722 && truth_value_p (TREE_CODE (arg0))
13723 && truth_value_p (TREE_CODE (op2))
13724 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13726 location_t loc0 = expr_location_or (arg0, loc);
13727 /* Only perform transformation if ARG0 is easily inverted. */
13728 tem = fold_invert_truthvalue (loc0, arg0);
13729 if (tem)
13730 return fold_build2_loc (loc, code == VEC_COND_EXPR
13731 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13732 type, fold_convert_loc (loc, type, tem),
13733 op2);
13736 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13737 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13738 && truth_value_p (TREE_CODE (arg0))
13739 && truth_value_p (TREE_CODE (op2))
13740 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13741 return fold_build2_loc (loc, code == VEC_COND_EXPR
13742 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13743 type, fold_convert_loc (loc, type, arg0), op2);
13745 return NULL_TREE;
13747 case CALL_EXPR:
13748 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13749 of fold_ternary on them. */
13750 gcc_unreachable ();
13752 case BIT_FIELD_REF:
13753 if ((TREE_CODE (arg0) == VECTOR_CST
13754 || (TREE_CODE (arg0) == CONSTRUCTOR
13755 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13756 && (type == TREE_TYPE (TREE_TYPE (arg0))
13757 || (TREE_CODE (type) == VECTOR_TYPE
13758 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13760 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13761 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13762 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13763 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13765 if (n != 0
13766 && (idx % width) == 0
13767 && (n % width) == 0
13768 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13770 idx = idx / width;
13771 n = n / width;
13773 if (TREE_CODE (arg0) == VECTOR_CST)
13775 if (n == 1)
13776 return VECTOR_CST_ELT (arg0, idx);
13778 tree *vals = XALLOCAVEC (tree, n);
13779 for (unsigned i = 0; i < n; ++i)
13780 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13781 return build_vector (type, vals);
13784 /* Constructor elements can be subvectors. */
13785 unsigned HOST_WIDE_INT k = 1;
13786 if (CONSTRUCTOR_NELTS (arg0) != 0)
13788 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13789 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13790 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13793 /* We keep an exact subset of the constructor elements. */
13794 if ((idx % k) == 0 && (n % k) == 0)
13796 if (CONSTRUCTOR_NELTS (arg0) == 0)
13797 return build_constructor (type, NULL);
13798 idx /= k;
13799 n /= k;
13800 if (n == 1)
13802 if (idx < CONSTRUCTOR_NELTS (arg0))
13803 return CONSTRUCTOR_ELT (arg0, idx)->value;
13804 return build_zero_cst (type);
13807 vec<constructor_elt, va_gc> *vals;
13808 vec_alloc (vals, n);
13809 for (unsigned i = 0;
13810 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13811 ++i)
13812 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13813 CONSTRUCTOR_ELT
13814 (arg0, idx + i)->value);
13815 return build_constructor (type, vals);
13817 /* The bitfield references a single constructor element. */
13818 else if (idx + n <= (idx / k + 1) * k)
13820 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13821 return build_zero_cst (type);
13822 else if (n == k)
13823 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13824 else
13825 return fold_build3_loc (loc, code, type,
13826 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13827 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13832 /* A bit-field-ref that referenced the full argument can be stripped. */
13833 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13834 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13835 && integer_zerop (op2))
13836 return fold_convert_loc (loc, type, arg0);
13838 /* On constants we can use native encode/interpret to constant
13839 fold (nearly) all BIT_FIELD_REFs. */
13840 if (CONSTANT_CLASS_P (arg0)
13841 && can_native_interpret_type_p (type)
13842 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13843 /* This limitation should not be necessary; we just need to
13844 round this up to mode size. */
13845 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13846 /* Need bit-shifting of the buffer to relax the following. */
13847 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13849 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13850 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13851 unsigned HOST_WIDE_INT clen;
13852 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13853 /* ??? We cannot tell native_encode_expr to start at
13854 an arbitrary byte. So limit ourselves to a reasonable
13855 amount of work. */
13856 if (clen <= 4096)
13858 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13859 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13860 if (len > 0
13861 && len * BITS_PER_UNIT >= bitpos + bitsize)
13863 tree v = native_interpret_expr (type,
13864 b + bitpos / BITS_PER_UNIT,
13865 bitsize / BITS_PER_UNIT);
13866 if (v)
13867 return v;
13872 return NULL_TREE;
13874 case FMA_EXPR:
13875 /* For integers we can decompose the FMA if possible. */
13876 if (TREE_CODE (arg0) == INTEGER_CST
13877 && TREE_CODE (arg1) == INTEGER_CST)
13878 return fold_build2_loc (loc, PLUS_EXPR, type,
13879 const_binop (MULT_EXPR, arg0, arg1), arg2);
13880 if (integer_zerop (arg2))
13881 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13883 return fold_fma (loc, type, arg0, arg1, arg2);
13885 case VEC_PERM_EXPR:
13886 if (TREE_CODE (arg2) == VECTOR_CST)
13888 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13889 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13890 unsigned char *sel2 = sel + nelts;
13891 bool need_mask_canon = false;
13892 bool need_mask_canon2 = false;
13893 bool all_in_vec0 = true;
13894 bool all_in_vec1 = true;
13895 bool maybe_identity = true;
13896 bool single_arg = (op0 == op1);
13897 bool changed = false;
13899 mask2 = 2 * nelts - 1;
13900 mask = single_arg ? (nelts - 1) : mask2;
13901 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13902 for (i = 0; i < nelts; i++)
13904 tree val = VECTOR_CST_ELT (arg2, i);
13905 if (TREE_CODE (val) != INTEGER_CST)
13906 return NULL_TREE;
13908 /* Make sure that the perm value is in an acceptable
13909 range. */
13910 wide_int t = val;
13911 need_mask_canon |= wi::gtu_p (t, mask);
13912 need_mask_canon2 |= wi::gtu_p (t, mask2);
13913 sel[i] = t.to_uhwi () & mask;
13914 sel2[i] = t.to_uhwi () & mask2;
13916 if (sel[i] < nelts)
13917 all_in_vec1 = false;
13918 else
13919 all_in_vec0 = false;
13921 if ((sel[i] & (nelts-1)) != i)
13922 maybe_identity = false;
13925 if (maybe_identity)
13927 if (all_in_vec0)
13928 return op0;
13929 if (all_in_vec1)
13930 return op1;
13933 if (all_in_vec0)
13934 op1 = op0;
13935 else if (all_in_vec1)
13937 op0 = op1;
13938 for (i = 0; i < nelts; i++)
13939 sel[i] -= nelts;
13940 need_mask_canon = true;
13943 if ((TREE_CODE (op0) == VECTOR_CST
13944 || TREE_CODE (op0) == CONSTRUCTOR)
13945 && (TREE_CODE (op1) == VECTOR_CST
13946 || TREE_CODE (op1) == CONSTRUCTOR))
13948 tree t = fold_vec_perm (type, op0, op1, sel);
13949 if (t != NULL_TREE)
13950 return t;
13953 if (op0 == op1 && !single_arg)
13954 changed = true;
13956 /* Some targets are deficient and fail to expand a single
13957 argument permutation while still allowing an equivalent
13958 2-argument version. */
13959 if (need_mask_canon && arg2 == op2
13960 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13961 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13963 need_mask_canon = need_mask_canon2;
13964 sel = sel2;
13967 if (need_mask_canon && arg2 == op2)
13969 tree *tsel = XALLOCAVEC (tree, nelts);
13970 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13971 for (i = 0; i < nelts; i++)
13972 tsel[i] = build_int_cst (eltype, sel[i]);
13973 op2 = build_vector (TREE_TYPE (arg2), tsel);
13974 changed = true;
13977 if (changed)
13978 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13980 return NULL_TREE;
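/* Illustrative example of the constant folding above: with V4SI
   operands and the constant selector { 0, 4, 1, 5 },
   VEC_PERM_EXPR <{1,2,3,4}, {5,6,7,8}, {0,4,1,5}> folds via
   fold_vec_perm to the constant vector {1,5,2,6}.  */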
13982 default:
13983 return NULL_TREE;
13984 } /* switch (code) */
13987 /* Perform constant folding and related simplification of EXPR.
13988 The related simplifications include x*1 => x, x*0 => 0, etc.,
13989 and application of the associative law.
13990 NOP_EXPR conversions may be removed freely (as long as we
13991 are careful not to change the type of the overall expression).
13992 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13993 but we can constant-fold them if they have constant operands. */
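/* A minimal usage sketch (X is a hypothetical INTEGER_TYPE tree;
   shown for illustration only):

     tree sum = build2 (PLUS_EXPR, integer_type_node, x,
                        integer_zero_node);
     tree folded = fold (sum);

   FOLDED is then X itself, i.e. the x+0 => x simplification
   described above.  */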
13995 #ifdef ENABLE_FOLD_CHECKING
13996 # define fold(x) fold_1 (x)
13997 static tree fold_1 (tree);
13998 static
13999 #endif
14000 tree
14001 fold (tree expr)
14003 const tree t = expr;
14004 enum tree_code code = TREE_CODE (t);
14005 enum tree_code_class kind = TREE_CODE_CLASS (code);
14006 tree tem;
14007 location_t loc = EXPR_LOCATION (expr);
14009 /* Return right away if a constant. */
14010 if (kind == tcc_constant)
14011 return t;
14013 /* CALL_EXPR-like objects with variable numbers of operands are
14014 treated specially. */
14015 if (kind == tcc_vl_exp)
14017 if (code == CALL_EXPR)
14019 tem = fold_call_expr (loc, expr, false);
14020 return tem ? tem : expr;
14022 return expr;
14025 if (IS_EXPR_CODE_CLASS (kind))
14027 tree type = TREE_TYPE (t);
14028 tree op0, op1, op2;
14030 switch (TREE_CODE_LENGTH (code))
14032 case 1:
14033 op0 = TREE_OPERAND (t, 0);
14034 tem = fold_unary_loc (loc, code, type, op0);
14035 return tem ? tem : expr;
14036 case 2:
14037 op0 = TREE_OPERAND (t, 0);
14038 op1 = TREE_OPERAND (t, 1);
14039 tem = fold_binary_loc (loc, code, type, op0, op1);
14040 return tem ? tem : expr;
14041 case 3:
14042 op0 = TREE_OPERAND (t, 0);
14043 op1 = TREE_OPERAND (t, 1);
14044 op2 = TREE_OPERAND (t, 2);
14045 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14046 return tem ? tem : expr;
14047 default:
14048 break;
14052 switch (code)
14054 case ARRAY_REF:
14056 tree op0 = TREE_OPERAND (t, 0);
14057 tree op1 = TREE_OPERAND (t, 1);
14059 if (TREE_CODE (op1) == INTEGER_CST
14060 && TREE_CODE (op0) == CONSTRUCTOR
14061 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14063 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14064 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14065 unsigned HOST_WIDE_INT begin = 0;
14067 /* Find a matching index by means of a binary search. */
14068 while (begin != end)
14070 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14071 tree index = (*elts)[middle].index;
14073 if (TREE_CODE (index) == INTEGER_CST
14074 && tree_int_cst_lt (index, op1))
14075 begin = middle + 1;
14076 else if (TREE_CODE (index) == INTEGER_CST
14077 && tree_int_cst_lt (op1, index))
14078 end = middle;
14079 else if (TREE_CODE (index) == RANGE_EXPR
14080 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14081 begin = middle + 1;
14082 else if (TREE_CODE (index) == RANGE_EXPR
14083 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14084 end = middle;
14085 else
14086 return (*elts)[middle].value;
14090 return t;
14093 /* Return a VECTOR_CST if possible. */
14094 case CONSTRUCTOR:
14096 tree type = TREE_TYPE (t);
14097 if (TREE_CODE (type) != VECTOR_TYPE)
14098 return t;
14100 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14101 unsigned HOST_WIDE_INT idx, pos = 0;
14102 tree value;
14104 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14106 if (!CONSTANT_CLASS_P (value))
14107 return t;
14108 if (TREE_CODE (value) == VECTOR_CST)
14110 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14111 vec[pos++] = VECTOR_CST_ELT (value, i);
14113 else
14114 vec[pos++] = value;
14116 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14117 vec[pos] = build_zero_cst (TREE_TYPE (type));
14119 return build_vector (type, vec);
14122 case CONST_DECL:
14123 return fold (DECL_INITIAL (t));
14125 default:
14126 return t;
14127 } /* switch (code) */
14130 #ifdef ENABLE_FOLD_CHECKING
14131 #undef fold
14133 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14134 hash_table<pointer_hash<const tree_node> > *);
14135 static void fold_check_failed (const_tree, const_tree);
14136 void print_fold_checksum (const_tree);
14138 /* When --enable-checking=fold, compute a digest of EXPR before
14139 and after the actual fold call to verify that fold did not
14140 accidentally change the original expr. */
14142 tree
14143 fold (tree expr)
14145 tree ret;
14146 struct md5_ctx ctx;
14147 unsigned char checksum_before[16], checksum_after[16];
14148 hash_table<pointer_hash<const tree_node> > ht (32);
14150 md5_init_ctx (&ctx);
14151 fold_checksum_tree (expr, &ctx, &ht);
14152 md5_finish_ctx (&ctx, checksum_before);
14153 ht.empty ();
14155 ret = fold_1 (expr);
14157 md5_init_ctx (&ctx);
14158 fold_checksum_tree (expr, &ctx, &ht);
14159 md5_finish_ctx (&ctx, checksum_after);
14161 if (memcmp (checksum_before, checksum_after, 16))
14162 fold_check_failed (expr, ret);
14164 return ret;
14167 void
14168 print_fold_checksum (const_tree expr)
14170 struct md5_ctx ctx;
14171 unsigned char checksum[16], cnt;
14172 hash_table<pointer_hash<const tree_node> > ht (32);
14174 md5_init_ctx (&ctx);
14175 fold_checksum_tree (expr, &ctx, &ht);
14176 md5_finish_ctx (&ctx, checksum);
14177 for (cnt = 0; cnt < 16; ++cnt)
14178 fprintf (stderr, "%02x", checksum[cnt]);
14179 putc ('\n', stderr);
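/* For instance, from within gdb one might do (the digest shown is
   made up for illustration):

     (gdb) call print_fold_checksum (expr)
     9e107d9d372bb6826bd81d3542a419d6  */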
14182 static void
14183 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14185 internal_error ("fold check: original tree changed by fold");
14188 static void
14189 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14190 hash_table<pointer_hash <const tree_node> > *ht)
14192 const tree_node **slot;
14193 enum tree_code code;
14194 union tree_node buf;
14195 int i, len;
14197 recursive_label:
14198 if (expr == NULL)
14199 return;
14200 slot = ht->find_slot (expr, INSERT);
14201 if (*slot != NULL)
14202 return;
14203 *slot = expr;
14204 code = TREE_CODE (expr);
14205 if (TREE_CODE_CLASS (code) == tcc_declaration
14206 && DECL_ASSEMBLER_NAME_SET_P (expr))
14208 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14209 memcpy ((char *) &buf, expr, tree_size (expr));
14210 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14211 expr = (tree) &buf;
14213 else if (TREE_CODE_CLASS (code) == tcc_type
14214 && (TYPE_POINTER_TO (expr)
14215 || TYPE_REFERENCE_TO (expr)
14216 || TYPE_CACHED_VALUES_P (expr)
14217 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14218 || TYPE_NEXT_VARIANT (expr)))
14220 /* Allow these fields to be modified. */
14221 tree tmp;
14222 memcpy ((char *) &buf, expr, tree_size (expr));
14223 expr = tmp = (tree) &buf;
14224 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14225 TYPE_POINTER_TO (tmp) = NULL;
14226 TYPE_REFERENCE_TO (tmp) = NULL;
14227 TYPE_NEXT_VARIANT (tmp) = NULL;
14228 if (TYPE_CACHED_VALUES_P (tmp))
14230 TYPE_CACHED_VALUES_P (tmp) = 0;
14231 TYPE_CACHED_VALUES (tmp) = NULL;
14234 md5_process_bytes (expr, tree_size (expr), ctx);
14235 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14236 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14237 if (TREE_CODE_CLASS (code) != tcc_type
14238 && TREE_CODE_CLASS (code) != tcc_declaration
14239 && code != TREE_LIST
14240 && code != SSA_NAME
14241 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14242 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14243 switch (TREE_CODE_CLASS (code))
14245 case tcc_constant:
14246 switch (code)
14248 case STRING_CST:
14249 md5_process_bytes (TREE_STRING_POINTER (expr),
14250 TREE_STRING_LENGTH (expr), ctx);
14251 break;
14252 case COMPLEX_CST:
14253 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14254 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14255 break;
14256 case VECTOR_CST:
14257 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14258 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14259 break;
14260 default:
14261 break;
14263 break;
14264 case tcc_exceptional:
14265 switch (code)
14267 case TREE_LIST:
14268 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14269 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14270 expr = TREE_CHAIN (expr);
14271 goto recursive_label;
14272 break;
14273 case TREE_VEC:
14274 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14275 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14276 break;
14277 default:
14278 break;
14280 break;
14281 case tcc_expression:
14282 case tcc_reference:
14283 case tcc_comparison:
14284 case tcc_unary:
14285 case tcc_binary:
14286 case tcc_statement:
14287 case tcc_vl_exp:
14288 len = TREE_OPERAND_LENGTH (expr);
14289 for (i = 0; i < len; ++i)
14290 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14291 break;
14292 case tcc_declaration:
14293 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14294 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14295 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14297 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14298 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14299 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14300 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14301 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14304 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14306 if (TREE_CODE (expr) == FUNCTION_DECL)
14308 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14309 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14311 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14313 break;
14314 case tcc_type:
14315 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14316 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14317 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14318 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14319 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14320 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14321 if (INTEGRAL_TYPE_P (expr)
14322 || SCALAR_FLOAT_TYPE_P (expr))
14324 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14325 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14327 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14328 if (TREE_CODE (expr) == RECORD_TYPE
14329 || TREE_CODE (expr) == UNION_TYPE
14330 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14331 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14332 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14333 break;
14334 default:
14335 break;
14339 /* Helper function for outputting the checksum of a tree T. When
14340 debugging with gdb, you can "define mynext" to be "next" followed
14341 by "call debug_fold_checksum (op0)", then just trace down till the
14342 outputs differ. */
14344 DEBUG_FUNCTION void
14345 debug_fold_checksum (const_tree t)
14347 int i;
14348 unsigned char checksum[16];
14349 struct md5_ctx ctx;
14350 hash_table<pointer_hash<const tree_node> > ht (32);
14352 md5_init_ctx (&ctx);
14353 fold_checksum_tree (t, &ctx, &ht);
14354 md5_finish_ctx (&ctx, checksum);
14355 ht.empty ();
14357 for (i = 0; i < 16; i++)
14358 fprintf (stderr, "%d ", checksum[i]);
14360 fprintf (stderr, "\n");
14363 #endif
14365 /* Fold a unary tree expression with code CODE of type TYPE with an
14366 operand OP0. LOC is the location of the resulting expression.
14367 Return a folded expression if successful. Otherwise, return a tree
14368 expression with code CODE of type TYPE with an operand OP0. */
14370 tree
14371 fold_build1_stat_loc (location_t loc,
14372 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14374 tree tem;
14375 #ifdef ENABLE_FOLD_CHECKING
14376 unsigned char checksum_before[16], checksum_after[16];
14377 struct md5_ctx ctx;
14378 hash_table<pointer_hash<const tree_node> > ht (32);
14380 md5_init_ctx (&ctx);
14381 fold_checksum_tree (op0, &ctx, &ht);
14382 md5_finish_ctx (&ctx, checksum_before);
14383 ht.empty ();
14384 #endif
14386 tem = fold_unary_loc (loc, code, type, op0);
14387 if (!tem)
14388 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14390 #ifdef ENABLE_FOLD_CHECKING
14391 md5_init_ctx (&ctx);
14392 fold_checksum_tree (op0, &ctx, &ht);
14393 md5_finish_ctx (&ctx, checksum_after);
14395 if (memcmp (checksum_before, checksum_after, 16))
14396 fold_check_failed (op0, tem);
14397 #endif
14398 return tem;
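/* Illustrative call (OP is a hypothetical operand): the usual entry
   point is the fold_build1_loc macro wrapping this function, e.g.

     tree neg = fold_build1_loc (loc, NEGATE_EXPR, type, op);

   which returns either the folded negation or a freshly built
   NEGATE_EXPR.  */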
14401 /* Fold a binary tree expression with code CODE of type TYPE with
14402 operands OP0 and OP1. LOC is the location of the resulting
14403 expression. Return a folded expression if successful. Otherwise,
14404 return a tree expression with code CODE of type TYPE with operands
14405 OP0 and OP1. */
14407 tree
14408 fold_build2_stat_loc (location_t loc,
14409 enum tree_code code, tree type, tree op0, tree op1
14410 MEM_STAT_DECL)
14412 tree tem;
14413 #ifdef ENABLE_FOLD_CHECKING
14414 unsigned char checksum_before_op0[16],
14415 checksum_before_op1[16],
14416 checksum_after_op0[16],
14417 checksum_after_op1[16];
14418 struct md5_ctx ctx;
14419 hash_table<pointer_hash<const tree_node> > ht (32);
14421 md5_init_ctx (&ctx);
14422 fold_checksum_tree (op0, &ctx, &ht);
14423 md5_finish_ctx (&ctx, checksum_before_op0);
14424 ht.empty ();
14426 md5_init_ctx (&ctx);
14427 fold_checksum_tree (op1, &ctx, &ht);
14428 md5_finish_ctx (&ctx, checksum_before_op1);
14429 ht.empty ();
14430 #endif
14432 tem = fold_binary_loc (loc, code, type, op0, op1);
14433 if (!tem)
14434 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14436 #ifdef ENABLE_FOLD_CHECKING
14437 md5_init_ctx (&ctx);
14438 fold_checksum_tree (op0, &ctx, &ht);
14439 md5_finish_ctx (&ctx, checksum_after_op0);
14440 ht.empty ();
14442 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14443 fold_check_failed (op0, tem);
14445 md5_init_ctx (&ctx);
14446 fold_checksum_tree (op1, &ctx, &ht);
14447 md5_finish_ctx (&ctx, checksum_after_op1);
14449 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14450 fold_check_failed (op1, tem);
14451 #endif
14452 return tem;
14455 /* Fold a ternary tree expression with code CODE of type TYPE with
14456 operands OP0, OP1, and OP2. Return a folded expression if
14457 successful. Otherwise, return a tree expression with code CODE of
14458 type TYPE with operands OP0, OP1, and OP2. */
14460 tree
14461 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14462 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14464 tree tem;
14465 #ifdef ENABLE_FOLD_CHECKING
14466 unsigned char checksum_before_op0[16],
14467 checksum_before_op1[16],
14468 checksum_before_op2[16],
14469 checksum_after_op0[16],
14470 checksum_after_op1[16],
14471 checksum_after_op2[16];
14472 struct md5_ctx ctx;
14473 hash_table<pointer_hash<const tree_node> > ht (32);
14475 md5_init_ctx (&ctx);
14476 fold_checksum_tree (op0, &ctx, &ht);
14477 md5_finish_ctx (&ctx, checksum_before_op0);
14478 ht.empty ();
14480 md5_init_ctx (&ctx);
14481 fold_checksum_tree (op1, &ctx, &ht);
14482 md5_finish_ctx (&ctx, checksum_before_op1);
14483 ht.empty ();
14485 md5_init_ctx (&ctx);
14486 fold_checksum_tree (op2, &ctx, &ht);
14487 md5_finish_ctx (&ctx, checksum_before_op2);
14488 ht.empty ();
14489 #endif
14491 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14492 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14493 if (!tem)
14494 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14496 #ifdef ENABLE_FOLD_CHECKING
14497 md5_init_ctx (&ctx);
14498 fold_checksum_tree (op0, &ctx, &ht);
14499 md5_finish_ctx (&ctx, checksum_after_op0);
14500 ht.empty ();
14502 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14503 fold_check_failed (op0, tem);
14505 md5_init_ctx (&ctx);
14506 fold_checksum_tree (op1, &ctx, &ht);
14507 md5_finish_ctx (&ctx, checksum_after_op1);
14508 ht.empty ();
14510 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14511 fold_check_failed (op1, tem);
14513 md5_init_ctx (&ctx);
14514 fold_checksum_tree (op2, &ctx, &ht);
14515 md5_finish_ctx (&ctx, checksum_after_op2);
14517 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14518 fold_check_failed (op2, tem);
14519 #endif
14520 return tem;
14523 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
14524 arguments in ARGARRAY, and a null static chain.
14525 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14526 of type TYPE from the given operands as constructed by build_call_array. */
14528 tree
14529 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14530 int nargs, tree *argarray)
14532 tree tem;
14533 #ifdef ENABLE_FOLD_CHECKING
14534 unsigned char checksum_before_fn[16],
14535 checksum_before_arglist[16],
14536 checksum_after_fn[16],
14537 checksum_after_arglist[16];
14538 struct md5_ctx ctx;
14539 hash_table<pointer_hash<const tree_node> > ht (32);
14540 int i;
14542 md5_init_ctx (&ctx);
14543 fold_checksum_tree (fn, &ctx, &ht);
14544 md5_finish_ctx (&ctx, checksum_before_fn);
14545 ht.empty ();
14547 md5_init_ctx (&ctx);
14548 for (i = 0; i < nargs; i++)
14549 fold_checksum_tree (argarray[i], &ctx, &ht);
14550 md5_finish_ctx (&ctx, checksum_before_arglist);
14551 ht.empty ();
14552 #endif
14554 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14556 #ifdef ENABLE_FOLD_CHECKING
14557 md5_init_ctx (&ctx);
14558 fold_checksum_tree (fn, &ctx, &ht);
14559 md5_finish_ctx (&ctx, checksum_after_fn);
14560 ht.empty ();
14562 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14563 fold_check_failed (fn, tem);
14565 md5_init_ctx (&ctx);
14566 for (i = 0; i < nargs; i++)
14567 fold_checksum_tree (argarray[i], &ctx, &ht);
14568 md5_finish_ctx (&ctx, checksum_after_arglist);
14570 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14571 fold_check_failed (NULL_TREE, tem);
14572 #endif
14573 return tem;
14576 /* Perform constant folding and related simplification of initializer
14577 expression EXPR. These functions behave identically to "fold_buildN" but ignore
14578 potential run-time traps and exceptions that fold must preserve. */
14580 #define START_FOLD_INIT \
14581 int saved_signaling_nans = flag_signaling_nans;\
14582 int saved_trapping_math = flag_trapping_math;\
14583 int saved_rounding_math = flag_rounding_math;\
14584 int saved_trapv = flag_trapv;\
14585 int saved_folding_initializer = folding_initializer;\
14586 flag_signaling_nans = 0;\
14587 flag_trapping_math = 0;\
14588 flag_rounding_math = 0;\
14589 flag_trapv = 0;\
14590 folding_initializer = 1;
14592 #define END_FOLD_INIT \
14593 flag_signaling_nans = saved_signaling_nans;\
14594 flag_trapping_math = saved_trapping_math;\
14595 flag_rounding_math = saved_rounding_math;\
14596 flag_trapv = saved_trapv;\
14597 folding_initializer = saved_folding_initializer;
14599 tree
14600 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14601 tree type, tree op)
14603 tree result;
14604 START_FOLD_INIT;
14606 result = fold_build1_loc (loc, code, type, op);
14608 END_FOLD_INIT;
14609 return result;
14612 tree
14613 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14614 tree type, tree op0, tree op1)
14616 tree result;
14617 START_FOLD_INIT;
14619 result = fold_build2_loc (loc, code, type, op0, op1);
14621 END_FOLD_INIT;
14622 return result;
14625 tree
14626 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14627 int nargs, tree *argarray)
14629 tree result;
14630 START_FOLD_INIT;
14632 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14634 END_FOLD_INIT;
14635 return result;
14638 #undef START_FOLD_INIT
14639 #undef END_FOLD_INIT
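/* Illustration (hypothetical operands): when folding a static
   initializer such as

     static double d = 1.0 / 3.0;

   a front end can call

     fold_build2_initializer_loc (loc, RDIV_EXPR, double_type_node,
                                  arg0, arg1);

   and the division folds to a constant even when -frounding-math or
   -ftrapping-math would otherwise make fold preserve the run-time
   operation.  */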
14641 /* Determine whether the first argument is a multiple of the second argument.
14642 Return 0 if it is not, or if we cannot easily determine it to be.
14644 An example of the sort of thing we care about (at this point; this routine
14645 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14646 fold cases do now) is discovering that
14648 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14650 is a multiple of
14652 SAVE_EXPR (J * 8)
14654 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14656 This code also handles discovering that
14658 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14660 is a multiple of 8 so we don't have to worry about dealing with a
14661 possible remainder.
14663 Note that we *look* inside a SAVE_EXPR only to determine how it was
14664 calculated; it is not safe for fold to do much of anything else with the
14665 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14666 at run time. For example, the latter example above *cannot* be implemented
14667 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14668 evaluation time of the original SAVE_EXPR is not necessarily the same at
14669 the time the new expression is evaluated. The only optimization of this
14670 sort that would be valid is changing
14672 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14674 divided by 8 to
14676 SAVE_EXPR (I) * SAVE_EXPR (J)
14678 (where the same SAVE_EXPR (J) is used in the original and the
14679 transformed version). */
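/* Concretely (illustrative): multiple_of_p (sizetype, J * 8 + 16, 8)
   returns 1 through the PLUS_EXPR and MULT_EXPR cases below, while
   multiple_of_p (sizetype, J * 8 + 4, 8) returns 0.  */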
14681 int
14682 multiple_of_p (tree type, const_tree top, const_tree bottom)
14684 if (operand_equal_p (top, bottom, 0))
14685 return 1;
14687 if (TREE_CODE (type) != INTEGER_TYPE)
14688 return 0;
14690 switch (TREE_CODE (top))
14692 case BIT_AND_EXPR:
14693 /* Bitwise and provides a power of two multiple. If the mask is
14694 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14695 if (!integer_pow2p (bottom))
14696 return 0;
14697 /* FALLTHRU */
14699 case MULT_EXPR:
14700 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14701 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14703 case PLUS_EXPR:
14704 case MINUS_EXPR:
14705 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14706 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14708 case LSHIFT_EXPR:
14709 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14711 tree op1, t1;
14713 op1 = TREE_OPERAND (top, 1);
14714 /* const_binop may not detect overflow correctly,
14715 so check for it explicitly here. */
14716 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14717 && 0 != (t1 = fold_convert (type,
14718 const_binop (LSHIFT_EXPR,
14719 size_one_node,
14720 op1)))
14721 && !TREE_OVERFLOW (t1))
14722 return multiple_of_p (type, t1, bottom);
14724 return 0;
14726 case NOP_EXPR:
14727 /* Can't handle conversions from non-integral or wider integral type. */
14728 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14729 || (TYPE_PRECISION (type)
14730 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14731 return 0;
14733 /* ... fall through ... */
14735 case SAVE_EXPR:
14736 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14738 case COND_EXPR:
14739 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14740 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14742 case INTEGER_CST:
14743 if (TREE_CODE (bottom) != INTEGER_CST
14744 || integer_zerop (bottom)
14745 || (TYPE_UNSIGNED (type)
14746 && (tree_int_cst_sgn (top) < 0
14747 || tree_int_cst_sgn (bottom) < 0)))
14748 return 0;
14749 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14750 SIGNED);
14752 default:
14753 return 0;
14757 /* Return true if CODE or TYPE is known to be non-negative. */
14759 static bool
14760 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14762 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14763 && truth_value_p (code))
14764 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14765 have a signed:1 type (where the values are -1 and 0). */
14766 return true;
14767 return false;
14770 /* Return true if (CODE OP0) is known to be non-negative. If the return
14771 value is based on the assumption that signed overflow is undefined,
14772 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14773 *STRICT_OVERFLOW_P. */
14775 bool
14776 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14777 bool *strict_overflow_p)
14779 if (TYPE_UNSIGNED (type))
14780 return true;
14782 switch (code)
14784 case ABS_EXPR:
14785 /* We can't return true if flag_wrapv is set because
14786 ABS_EXPR<INT_MIN> = INT_MIN. */
14787 if (!INTEGRAL_TYPE_P (type))
14788 return true;
14789 if (TYPE_OVERFLOW_UNDEFINED (type))
14791 *strict_overflow_p = true;
14792 return true;
14794 break;
14796 case NON_LVALUE_EXPR:
14797 case FLOAT_EXPR:
14798 case FIX_TRUNC_EXPR:
14799 return tree_expr_nonnegative_warnv_p (op0,
14800 strict_overflow_p);
14802 case NOP_EXPR:
14804 tree inner_type = TREE_TYPE (op0);
14805 tree outer_type = type;
14807 if (TREE_CODE (outer_type) == REAL_TYPE)
14809 if (TREE_CODE (inner_type) == REAL_TYPE)
14810 return tree_expr_nonnegative_warnv_p (op0,
14811 strict_overflow_p);
14812 if (INTEGRAL_TYPE_P (inner_type))
14814 if (TYPE_UNSIGNED (inner_type))
14815 return true;
14816 return tree_expr_nonnegative_warnv_p (op0,
14817 strict_overflow_p);
14820 else if (INTEGRAL_TYPE_P (outer_type))
14822 if (TREE_CODE (inner_type) == REAL_TYPE)
14823 return tree_expr_nonnegative_warnv_p (op0,
14824 strict_overflow_p);
14825 if (INTEGRAL_TYPE_P (inner_type))
14826 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14827 && TYPE_UNSIGNED (inner_type);
14830 break;
14832 default:
14833 return tree_simple_nonnegative_warnv_p (code, type);
14836 /* We don't know sign of `t', so be conservative and return false. */
14837 return false;
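/* Example (illustrative): for a signed X, ABS_EXPR <X> is reported
   nonnegative above only because signed overflow is assumed to be
   undefined (ABS_EXPR <INT_MIN> would wrap back to INT_MIN), which
   is why *STRICT_OVERFLOW_P is set in that case.  */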
14840 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14841 value is based on the assumption that signed overflow is undefined,
14842 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14843 *STRICT_OVERFLOW_P. */
14845 bool
14846 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14847 tree op1, bool *strict_overflow_p)
14849 if (TYPE_UNSIGNED (type))
14850 return true;
14852 switch (code)
14854 case POINTER_PLUS_EXPR:
14855 case PLUS_EXPR:
14856 if (FLOAT_TYPE_P (type))
14857 return (tree_expr_nonnegative_warnv_p (op0,
14858 strict_overflow_p)
14859 && tree_expr_nonnegative_warnv_p (op1,
14860 strict_overflow_p));
14862 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14863 both unsigned and at least 2 bits shorter than the result. */
14864 if (TREE_CODE (type) == INTEGER_TYPE
14865 && TREE_CODE (op0) == NOP_EXPR
14866 && TREE_CODE (op1) == NOP_EXPR)
14868 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14869 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14870 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14871 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14873 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14874 TYPE_PRECISION (inner2)) + 1;
14875 return prec < TYPE_PRECISION (type);
14878 break;
14880 case MULT_EXPR:
14881 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14883 /* x * x is always non-negative for floating point x
14884 or without overflow. */
14885 if (operand_equal_p (op0, op1, 0)
14886 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14887 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14889 if (TYPE_OVERFLOW_UNDEFINED (type))
14890 *strict_overflow_p = true;
14891 return true;
14895 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14896 both unsigned and their total width is shorter than the result's. */
14897 if (TREE_CODE (type) == INTEGER_TYPE
14898 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14899 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14901 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14902 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14903 : TREE_TYPE (op0);
14904 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14905 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14906 : TREE_TYPE (op1);
14908 bool unsigned0 = TYPE_UNSIGNED (inner0);
14909 bool unsigned1 = TYPE_UNSIGNED (inner1);
14911 if (TREE_CODE (op0) == INTEGER_CST)
14912 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14914 if (TREE_CODE (op1) == INTEGER_CST)
14915 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14917 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14918 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14920 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14921 ? tree_int_cst_min_precision (op0, UNSIGNED)
14922 : TYPE_PRECISION (inner0);
14924 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14925 ? tree_int_cst_min_precision (op1, UNSIGNED)
14926 : TYPE_PRECISION (inner1);
14928 return precision0 + precision1 < TYPE_PRECISION (type);
14931 return false;
14933 case BIT_AND_EXPR:
14934 case MAX_EXPR:
14935 return (tree_expr_nonnegative_warnv_p (op0,
14936 strict_overflow_p)
14937 || tree_expr_nonnegative_warnv_p (op1,
14938 strict_overflow_p));
14940 case BIT_IOR_EXPR:
14941 case BIT_XOR_EXPR:
14942 case MIN_EXPR:
14943 case RDIV_EXPR:
14944 case TRUNC_DIV_EXPR:
14945 case CEIL_DIV_EXPR:
14946 case FLOOR_DIV_EXPR:
14947 case ROUND_DIV_EXPR:
14948 return (tree_expr_nonnegative_warnv_p (op0,
14949 strict_overflow_p)
14950 && tree_expr_nonnegative_warnv_p (op1,
14951 strict_overflow_p));
14953 case TRUNC_MOD_EXPR:
14954 case CEIL_MOD_EXPR:
14955 case FLOOR_MOD_EXPR:
14956 case ROUND_MOD_EXPR:
14957 return tree_expr_nonnegative_warnv_p (op0,
14958 strict_overflow_p);
14959 default:
14960 return tree_simple_nonnegative_warnv_p (code, type);
14963 /* We don't know sign of `t', so be conservative and return false. */
14964 return false;
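/* Example (illustrative): with unsigned char operands promoted to a
   32-bit int, (int) uc1 * (int) uc2 is known nonnegative by the
   MULT_EXPR logic above, since 8 + 8 < 32 bits of precision.  */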
14967 /* Return true if T is known to be non-negative. If the return
14968 value is based on the assumption that signed overflow is undefined,
14969 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14970 *STRICT_OVERFLOW_P. */
14972 bool
14973 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14975 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14976 return true;
14978 switch (TREE_CODE (t))
14980 case INTEGER_CST:
14981 return tree_int_cst_sgn (t) >= 0;
14983 case REAL_CST:
14984 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14986 case FIXED_CST:
14987 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14989 case COND_EXPR:
14990 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14991 strict_overflow_p)
14992 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14993 strict_overflow_p));
14994 default:
14995 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14996 TREE_TYPE (t));
14998 /* We don't know sign of `t', so be conservative and return false. */
14999 return false;
15002 /* Return true if T is known to be non-negative. If the return
15003 value is based on the assumption that signed overflow is undefined,
15004 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15005 *STRICT_OVERFLOW_P. */
15007 bool
15008 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15009 tree arg0, tree arg1, bool *strict_overflow_p)
15011 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15012 switch (DECL_FUNCTION_CODE (fndecl))
15014 CASE_FLT_FN (BUILT_IN_ACOS):
15015 CASE_FLT_FN (BUILT_IN_ACOSH):
15016 CASE_FLT_FN (BUILT_IN_CABS):
15017 CASE_FLT_FN (BUILT_IN_COSH):
15018 CASE_FLT_FN (BUILT_IN_ERFC):
15019 CASE_FLT_FN (BUILT_IN_EXP):
15020 CASE_FLT_FN (BUILT_IN_EXP10):
15021 CASE_FLT_FN (BUILT_IN_EXP2):
15022 CASE_FLT_FN (BUILT_IN_FABS):
15023 CASE_FLT_FN (BUILT_IN_FDIM):
15024 CASE_FLT_FN (BUILT_IN_HYPOT):
15025 CASE_FLT_FN (BUILT_IN_POW10):
15026 CASE_INT_FN (BUILT_IN_FFS):
15027 CASE_INT_FN (BUILT_IN_PARITY):
15028 CASE_INT_FN (BUILT_IN_POPCOUNT):
15029 CASE_INT_FN (BUILT_IN_CLZ):
15030 CASE_INT_FN (BUILT_IN_CLRSB):
15031 case BUILT_IN_BSWAP32:
15032 case BUILT_IN_BSWAP64:
15033 /* Always true. */
15034 return true;
15036 CASE_FLT_FN (BUILT_IN_SQRT):
15037 /* sqrt(-0.0) is -0.0. */
15038 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15039 return true;
15040 return tree_expr_nonnegative_warnv_p (arg0,
15041 strict_overflow_p);
15043 CASE_FLT_FN (BUILT_IN_ASINH):
15044 CASE_FLT_FN (BUILT_IN_ATAN):
15045 CASE_FLT_FN (BUILT_IN_ATANH):
15046 CASE_FLT_FN (BUILT_IN_CBRT):
15047 CASE_FLT_FN (BUILT_IN_CEIL):
15048 CASE_FLT_FN (BUILT_IN_ERF):
15049 CASE_FLT_FN (BUILT_IN_EXPM1):
15050 CASE_FLT_FN (BUILT_IN_FLOOR):
15051 CASE_FLT_FN (BUILT_IN_FMOD):
15052 CASE_FLT_FN (BUILT_IN_FREXP):
15053 CASE_FLT_FN (BUILT_IN_ICEIL):
15054 CASE_FLT_FN (BUILT_IN_IFLOOR):
15055 CASE_FLT_FN (BUILT_IN_IRINT):
15056 CASE_FLT_FN (BUILT_IN_IROUND):
15057 CASE_FLT_FN (BUILT_IN_LCEIL):
15058 CASE_FLT_FN (BUILT_IN_LDEXP):
15059 CASE_FLT_FN (BUILT_IN_LFLOOR):
15060 CASE_FLT_FN (BUILT_IN_LLCEIL):
15061 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15062 CASE_FLT_FN (BUILT_IN_LLRINT):
15063 CASE_FLT_FN (BUILT_IN_LLROUND):
15064 CASE_FLT_FN (BUILT_IN_LRINT):
15065 CASE_FLT_FN (BUILT_IN_LROUND):
15066 CASE_FLT_FN (BUILT_IN_MODF):
15067 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15068 CASE_FLT_FN (BUILT_IN_RINT):
15069 CASE_FLT_FN (BUILT_IN_ROUND):
15070 CASE_FLT_FN (BUILT_IN_SCALB):
15071 CASE_FLT_FN (BUILT_IN_SCALBLN):
15072 CASE_FLT_FN (BUILT_IN_SCALBN):
15073 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15074 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15075 CASE_FLT_FN (BUILT_IN_SINH):
15076 CASE_FLT_FN (BUILT_IN_TANH):
15077 CASE_FLT_FN (BUILT_IN_TRUNC):
15078 /* True if the 1st argument is nonnegative. */
15079 return tree_expr_nonnegative_warnv_p (arg0,
15080 strict_overflow_p);
15082 CASE_FLT_FN (BUILT_IN_FMAX):
15083 /* True if either the 1st or the 2nd argument is nonnegative. */
15084 return (tree_expr_nonnegative_warnv_p (arg0,
15085 strict_overflow_p)
15086 || (tree_expr_nonnegative_warnv_p (arg1,
15087 strict_overflow_p)));
15089 CASE_FLT_FN (BUILT_IN_FMIN):
15090 /* True if both the 1st and the 2nd arguments are nonnegative. */
15091 return (tree_expr_nonnegative_warnv_p (arg0,
15092 strict_overflow_p)
15093 && (tree_expr_nonnegative_warnv_p (arg1,
15094 strict_overflow_p)));
15096 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15097 /* True if the 2nd argument is nonnegative. */
15098 return tree_expr_nonnegative_warnv_p (arg1,
15099 strict_overflow_p);
15101 CASE_FLT_FN (BUILT_IN_POWI):
15102 /* True if the 1st argument is nonnegative or the second
15103 argument is an even integer. */
15104 if (TREE_CODE (arg1) == INTEGER_CST
15105 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15106 return true;
15107 return tree_expr_nonnegative_warnv_p (arg0,
15108 strict_overflow_p);
15110 CASE_FLT_FN (BUILT_IN_POW):
15111 /* True if the 1st argument is nonnegative or the second
15112 argument is an even integer valued real. */
15113 if (TREE_CODE (arg1) == REAL_CST)
15115 REAL_VALUE_TYPE c;
15116 HOST_WIDE_INT n;
15118 c = TREE_REAL_CST (arg1);
15119 n = real_to_integer (&c);
15120 if ((n & 1) == 0)
15122 REAL_VALUE_TYPE cint;
15123 real_from_integer (&cint, VOIDmode, n, SIGNED);
15124 if (real_identical (&c, &cint))
15125 return true;
15128 return tree_expr_nonnegative_warnv_p (arg0,
15129 strict_overflow_p);
15131 default:
15132 break;
15134 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15135 type);
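/* Example (illustrative): pow (x, 2.0) is recognized as nonnegative
   for any X because 2.0 is an even integer valued real, whereas
   pow (x, 3.0) is nonnegative only if X itself is.  */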
15138 /* Return true if T is known to be non-negative. If the return
15139 value is based on the assumption that signed overflow is undefined,
15140 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15141 *STRICT_OVERFLOW_P. */
15143 static bool
15144 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15146 enum tree_code code = TREE_CODE (t);
15147 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15148 return true;
15150 switch (code)
15152 case TARGET_EXPR:
15154 tree temp = TARGET_EXPR_SLOT (t);
15155 t = TARGET_EXPR_INITIAL (t);
15157 /* If the initializer is non-void, then it's a normal expression
15158 that will be assigned to the slot. */
15159 if (!VOID_TYPE_P (t))
15160 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15162 /* Otherwise, the initializer sets the slot in some way. One common
15163 way is an assignment statement at the end of the initializer. */
15164 while (1)
15166 if (TREE_CODE (t) == BIND_EXPR)
15167 t = expr_last (BIND_EXPR_BODY (t));
15168 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15169 || TREE_CODE (t) == TRY_CATCH_EXPR)
15170 t = expr_last (TREE_OPERAND (t, 0));
15171 else if (TREE_CODE (t) == STATEMENT_LIST)
15172 t = expr_last (t);
15173 else
15174 break;
15176 if (TREE_CODE (t) == MODIFY_EXPR
15177 && TREE_OPERAND (t, 0) == temp)
15178 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15179 strict_overflow_p);
15181 return false;
15184 case CALL_EXPR:
15186 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15187 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15189 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15190 get_callee_fndecl (t),
15191 arg0,
15192 arg1,
15193 strict_overflow_p);
15195 case COMPOUND_EXPR:
15196 case MODIFY_EXPR:
15197 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15198 strict_overflow_p);
15199 case BIND_EXPR:
15200 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15201 strict_overflow_p);
15202 case SAVE_EXPR:
15203 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15204 strict_overflow_p);
15206 default:
15207 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15208 TREE_TYPE (t));
15211 /* We don't know sign of `t', so be conservative and return false. */
15212 return false;
15215 /* Return true if T is known to be non-negative. If the return
15216 value is based on the assumption that signed overflow is undefined,
15217 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15218 *STRICT_OVERFLOW_P. */
15220 bool
15221 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15223 enum tree_code code;
15224 if (t == error_mark_node)
15225 return false;
15227 code = TREE_CODE (t);
15228 switch (TREE_CODE_CLASS (code))
15230 case tcc_binary:
15231 case tcc_comparison:
15232 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15233 TREE_TYPE (t),
15234 TREE_OPERAND (t, 0),
15235 TREE_OPERAND (t, 1),
15236 strict_overflow_p);
15238 case tcc_unary:
15239 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15240 TREE_TYPE (t),
15241 TREE_OPERAND (t, 0),
15242 strict_overflow_p);
15244 case tcc_constant:
15245 case tcc_declaration:
15246 case tcc_reference:
15247 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15249 default:
15250 break;
15253 switch (code)
15255 case TRUTH_AND_EXPR:
15256 case TRUTH_OR_EXPR:
15257 case TRUTH_XOR_EXPR:
15258 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15259 TREE_TYPE (t),
15260 TREE_OPERAND (t, 0),
15261 TREE_OPERAND (t, 1),
15262 strict_overflow_p);
15263 case TRUTH_NOT_EXPR:
15264 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15265 TREE_TYPE (t),
15266 TREE_OPERAND (t, 0),
15267 strict_overflow_p);
15269 case COND_EXPR:
15270 case CONSTRUCTOR:
15271 case OBJ_TYPE_REF:
15272 case ASSERT_EXPR:
15273 case ADDR_EXPR:
15274 case WITH_SIZE_EXPR:
15275 case SSA_NAME:
15276 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15278 default:
15279 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15283 /* Return true if `t' is known to be non-negative. Handle warnings
15284 about undefined signed overflow. */
15286 bool
15287 tree_expr_nonnegative_p (tree t)
15289 bool ret, strict_overflow_p;
15291 strict_overflow_p = false;
15292 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15293 if (strict_overflow_p)
15294 fold_overflow_warning (("assuming signed overflow does not occur when "
15295 "determining that expression is always "
15296 "non-negative"),
15297 WARN_STRICT_OVERFLOW_MISC);
15298 return ret;
15302 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15303 For floating point we further ensure that T is not denormal.
15304 Similar logic is present in nonzero_address_p in rtlanal.c.
15306 If the return value is based on the assumption that signed overflow
15307 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15308 change *STRICT_OVERFLOW_P. */
15310 bool
15311 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15312 bool *strict_overflow_p)
15314 switch (code)
15316 case ABS_EXPR:
15317 return tree_expr_nonzero_warnv_p (op0,
15318 strict_overflow_p);
15320 case NOP_EXPR:
15322 tree inner_type = TREE_TYPE (op0);
15323 tree outer_type = type;
15325 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15326 && tree_expr_nonzero_warnv_p (op0,
15327 strict_overflow_p));
15329 break;
15331 case NON_LVALUE_EXPR:
15332 return tree_expr_nonzero_warnv_p (op0,
15333 strict_overflow_p);
15335 default:
15336 break;
15339 return false;
15342 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15343 For floating point we further ensure that T is not denormal.
15344 Similar logic is present in nonzero_address_p in rtlanal.c.
15346 If the return value is based on the assumption that signed overflow
15347 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15348 change *STRICT_OVERFLOW_P. */
15350 bool
15351 tree_binary_nonzero_warnv_p (enum tree_code code,
15352 tree type,
15353 tree op0,
15354 tree op1, bool *strict_overflow_p)
15356 bool sub_strict_overflow_p;
15357 switch (code)
15359 case POINTER_PLUS_EXPR:
15360 case PLUS_EXPR:
15361 if (TYPE_OVERFLOW_UNDEFINED (type))
15363 /* In the presence of negative values it is hard
15364 to say anything definite. */
15365 sub_strict_overflow_p = false;
15366 if (!tree_expr_nonnegative_warnv_p (op0,
15367 &sub_strict_overflow_p)
15368 || !tree_expr_nonnegative_warnv_p (op1,
15369 &sub_strict_overflow_p))
15370 return false;
15371 /* One of the operands must be positive and the other non-negative. */
15372 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15373 overflows, on a two's-complement machine the sum of two
15374 nonnegative numbers can never be zero. */
15375 return (tree_expr_nonzero_warnv_p (op0,
15376 strict_overflow_p)
15377 || tree_expr_nonzero_warnv_p (op1,
15378 strict_overflow_p));
15380 break;
15382 case MULT_EXPR:
15383 if (TYPE_OVERFLOW_UNDEFINED (type))
15385 if (tree_expr_nonzero_warnv_p (op0,
15386 strict_overflow_p)
15387 && tree_expr_nonzero_warnv_p (op1,
15388 strict_overflow_p))
15390 *strict_overflow_p = true;
15391 return true;
15394 break;
15396 case MIN_EXPR:
15397 sub_strict_overflow_p = false;
15398 if (tree_expr_nonzero_warnv_p (op0,
15399 &sub_strict_overflow_p)
15400 && tree_expr_nonzero_warnv_p (op1,
15401 &sub_strict_overflow_p))
15403 if (sub_strict_overflow_p)
15404 *strict_overflow_p = true;
15406 break;
15408 case MAX_EXPR:
15409 sub_strict_overflow_p = false;
15410 if (tree_expr_nonzero_warnv_p (op0,
15411 &sub_strict_overflow_p))
15413 if (sub_strict_overflow_p)
15414 *strict_overflow_p = true;
15416 /* When both operands are nonzero, then MAX must be too. */
15417 if (tree_expr_nonzero_warnv_p (op1,
15418 strict_overflow_p))
15419 return true;
15421 /* MAX where operand 0 is positive is positive. */
15422 return tree_expr_nonnegative_warnv_p (op0,
15423 strict_overflow_p);
15425 /* MAX where operand 1 is positive is positive. */
15426 else if (tree_expr_nonzero_warnv_p (op1,
15427 &sub_strict_overflow_p)
15428 && tree_expr_nonnegative_warnv_p (op1,
15429 &sub_strict_overflow_p))
15431 if (sub_strict_overflow_p)
15432 *strict_overflow_p = true;
15433 return true;
15435 break;
15437 case BIT_IOR_EXPR:
15438 return (tree_expr_nonzero_warnv_p (op1,
15439 strict_overflow_p)
15440 || tree_expr_nonzero_warnv_p (op0,
15441 strict_overflow_p));
15443 default:
15444 break;
15447 return false;
15450 /* Return true when T is an address and is known to be nonzero.
15451 For floating point we further ensure that T is not denormal.
15452 Similar logic is present in nonzero_address_p in rtlanal.c.
15454 If the return value is based on the assumption that signed overflow
15455 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15456 change *STRICT_OVERFLOW_P. */
15458 bool
15459 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15461 bool sub_strict_overflow_p;
15462 switch (TREE_CODE (t))
15464 case INTEGER_CST:
15465 return !integer_zerop (t);
15467 case ADDR_EXPR:
15469 tree base = TREE_OPERAND (t, 0);
15471 if (!DECL_P (base))
15472 base = get_base_address (base);
15474 if (!base)
15475 return false;
15477 /* For objects in the symbol table, check whether we know they are non-zero.
15478 Don't do anything for variables and functions before the symtab is built;
15479 it is quite possible that they will be declared weak later. */
15480 if (DECL_P (base) && decl_in_symtab_p (base))
15482 struct symtab_node *symbol;
15484 symbol = symtab_node::get_create (base);
15485 if (symbol)
15486 return symbol->nonzero_address ();
15487 else
15488 return false;
15491 /* Function local objects are never NULL. */
15492 if (DECL_P (base)
15493 && (DECL_CONTEXT (base)
15494 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15495 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15496 return true;
15498 /* Constants are never weak. */
15499 if (CONSTANT_CLASS_P (base))
15500 return true;
15502 return false;
15505 case COND_EXPR:
15506 sub_strict_overflow_p = false;
15507 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15508 &sub_strict_overflow_p)
15509 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15510 &sub_strict_overflow_p))
15512 if (sub_strict_overflow_p)
15513 *strict_overflow_p = true;
15514 return true;
15516 break;
15518 default:
15519 break;
15521 return false;
15524 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15525 attempt to fold the expression to a constant without modifying TYPE,
15526 OP0 or OP1.
15528 If the expression can be simplified to a constant, then return
15529 the constant. If the expression cannot be simplified to a
15530 constant, then return NULL_TREE. */
15532 tree
15533 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15535 tree tem = fold_binary (code, type, op0, op1);
15536 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15539 /* Given the components of a unary expression CODE, TYPE and OP0,
15540 attempt to fold the expression to a constant without modifying
15541 TYPE or OP0.
15543 If the expression can be simplified to a constant, then return
15544 the constant. If the expression cannot be simplified to a
15545 constant, then return NULL_TREE. */
15547 tree
15548 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15550 tree tem = fold_unary (code, type, op0);
15551 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
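/* Illustrative use (hypothetical operand):

     fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
                             build_int_cst (integer_type_node, 5))

   yields the INTEGER_CST -5, while a non-constant operand makes the
   function return NULL_TREE.  */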
15554 /* If EXP represents referencing an element in a constant string
15555 (either via pointer arithmetic or array indexing), return the
15556 tree representing the value accessed, otherwise return NULL. */
15558 tree
15559 fold_read_from_constant_string (tree exp)
15561 if ((TREE_CODE (exp) == INDIRECT_REF
15562 || TREE_CODE (exp) == ARRAY_REF)
15563 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15565 tree exp1 = TREE_OPERAND (exp, 0);
15566 tree index;
15567 tree string;
15568 location_t loc = EXPR_LOCATION (exp);
15570 if (TREE_CODE (exp) == INDIRECT_REF)
15571 string = string_constant (exp1, &index);
15572 else
15574 tree low_bound = array_ref_low_bound (exp);
15575 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15577 /* Optimize the special case of a zero lower bound.
15579 We convert the low_bound to sizetype to avoid some problems
15580 with constant folding. (E.g. suppose the lower bound is 1,
15581 and its mode is QI. Without the conversion, (ARRAY
15582 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15583 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15584 if (! integer_zerop (low_bound))
15585 index = size_diffop_loc (loc, index,
15586 fold_convert_loc (loc, sizetype, low_bound));
15588 string = exp1;
15591 if (string
15592 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15593 && TREE_CODE (string) == STRING_CST
15594 && TREE_CODE (index) == INTEGER_CST
15595 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15596 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15597 == MODE_INT)
15598 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15599 return build_int_cst_type (TREE_TYPE (exp),
15600 (TREE_STRING_POINTER (string)
15601 [TREE_INT_CST_LOW (index)]));
15603 return NULL;
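/* For instance (illustrative), given the GENERIC tree for "abc"[1],
   this returns the character constant 'b' as an INTEGER_CST of the
   element type.  */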
15606 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15607 an integer constant, real, or fixed-point constant.
15609 TYPE is the type of the result. */
15611 static tree
15612 fold_negate_const (tree arg0, tree type)
15614 tree t = NULL_TREE;
15616 switch (TREE_CODE (arg0))
15618 case INTEGER_CST:
15620 bool overflow;
15621 wide_int val = wi::neg (arg0, &overflow);
15622 t = force_fit_type (type, val, 1,
15623 (overflow | TREE_OVERFLOW (arg0))
15624 && !TYPE_UNSIGNED (type));
15625 break;
15628 case REAL_CST:
15629 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15630 break;
15632 case FIXED_CST:
15634 FIXED_VALUE_TYPE f;
15635 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15636 &(TREE_FIXED_CST (arg0)), NULL,
15637 TYPE_SATURATING (type));
15638 t = build_fixed (type, f);
15639 /* Propagate overflow flags. */
15640 if (overflow_p | TREE_OVERFLOW (arg0))
15641 TREE_OVERFLOW (t) = 1;
15642 break;
15645 default:
15646 gcc_unreachable ();
15649 return t;
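/* Example (illustrative): negating the INT_MIN constant of a signed
   type is not representable, so force_fit_type above marks the
   result with TREE_OVERFLOW.  */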
15652 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15653 an integer constant or real constant.
15655 TYPE is the type of the result. */
15657 tree
15658 fold_abs_const (tree arg0, tree type)
15660 tree t = NULL_TREE;
15662 switch (TREE_CODE (arg0))
15664 case INTEGER_CST:
15666 /* If the value is unsigned or non-negative, then the absolute value
15667 is the same as the ordinary value. */
15668 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15669 t = arg0;
15671 /* If the value is negative, then the absolute value is
15672 its negation. */
15673 else
15675 bool overflow;
15676 wide_int val = wi::neg (arg0, &overflow);
15677 t = force_fit_type (type, val, -1,
15678 overflow | TREE_OVERFLOW (arg0));
15681 break;
15683 case REAL_CST:
15684 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15685 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15686 else
15687 t = arg0;
15688 break;
15690 default:
15691 gcc_unreachable ();
15694 return t;
15697 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15698 constant. TYPE is the type of the result. */
15700 static tree
15701 fold_not_const (const_tree arg0, tree type)
15703 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15705 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15708 /* Given CODE, a relational operator, the target type, TYPE and two
15709 constant operands OP0 and OP1, return the result of the
15710 relational operation. If the result is not a compile time
15711 constant, then return NULL_TREE. */
15713 static tree
15714 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15716 int result, invert;
15718 /* From here on, the only cases we handle are when the result is
15719 known to be a constant. */
15721 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15723 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15724 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15726 /* Handle the cases where either operand is a NaN. */
15727 if (real_isnan (c0) || real_isnan (c1))
15729 switch (code)
15731 case EQ_EXPR:
15732 case ORDERED_EXPR:
15733 result = 0;
15734 break;
15736 case NE_EXPR:
15737 case UNORDERED_EXPR:
15738 case UNLT_EXPR:
15739 case UNLE_EXPR:
15740 case UNGT_EXPR:
15741 case UNGE_EXPR:
15742 case UNEQ_EXPR:
15743 result = 1;
15744 break;
15746 case LT_EXPR:
15747 case LE_EXPR:
15748 case GT_EXPR:
15749 case GE_EXPR:
15750 case LTGT_EXPR:
15751 if (flag_trapping_math)
15752 return NULL_TREE;
15753 result = 0;
15754 break;
15756 default:
15757 gcc_unreachable ();
15760 return constant_boolean_node (result, type);
15763 return constant_boolean_node (real_compare (code, c0, c1), type);
15766 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15768 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15769 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15770 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15773 /* Handle equality/inequality of complex constants. */
15774 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15776 tree rcond = fold_relational_const (code, type,
15777 TREE_REALPART (op0),
15778 TREE_REALPART (op1));
15779 tree icond = fold_relational_const (code, type,
15780 TREE_IMAGPART (op0),
15781 TREE_IMAGPART (op1));
15782 if (code == EQ_EXPR)
15783 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15784 else if (code == NE_EXPR)
15785 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15786 else
15787 return NULL_TREE;
15790 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15792 unsigned count = VECTOR_CST_NELTS (op0);
15793 tree *elts = XALLOCAVEC (tree, count);
15794 gcc_assert (VECTOR_CST_NELTS (op1) == count
15795 && TYPE_VECTOR_SUBPARTS (type) == count);
15797 for (unsigned i = 0; i < count; i++)
15799 tree elem_type = TREE_TYPE (type);
15800 tree elem0 = VECTOR_CST_ELT (op0, i);
15801 tree elem1 = VECTOR_CST_ELT (op1, i);
15803 tree tem = fold_relational_const (code, elem_type,
15804 elem0, elem1);
15806 if (tem == NULL_TREE)
15807 return NULL_TREE;
15809 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15812 return build_vector (type, elts);
15815 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15817 To compute GT, swap the arguments and do LT.
15818 To compute GE, do LT and invert the result.
15819 To compute LE, swap the arguments, do LT and invert the result.
15820 To compute NE, do EQ and invert the result.
15822 Therefore, the code below must handle only EQ and LT. */
15824 if (code == LE_EXPR || code == GT_EXPR)
15826 tree tem = op0;
15827 op0 = op1;
15828 op1 = tem;
15829 code = swap_tree_comparison (code);
15832 /* Note that it is safe to invert for real values here because we
15833 have already handled the one case where it matters. */
15835 invert = 0;
15836 if (code == NE_EXPR || code == GE_EXPR)
15838 invert = 1;
15839 code = invert_tree_comparison (code, false);
15842 /* Compute a result for LT or EQ if the arguments permit;
15843 otherwise return NULL_TREE. */
15844 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15846 if (code == EQ_EXPR)
15847 result = tree_int_cst_equal (op0, op1);
15848 else
15849 result = tree_int_cst_lt (op0, op1);
15851 else
15852 return NULL_TREE;
15854 if (invert)
15855 result ^= 1;
15856 return constant_boolean_node (result, type);
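/* Illustrative cases: comparing the INTEGER_CSTs 2 and 3 with LT_EXPR
   yields boolean true, while an LT_EXPR comparison against a NaN
   REAL_CST returns NULL_TREE when flag_trapping_math is set, as
   handled above.  */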
15859 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15860 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15861 itself. */
15863 tree
15864 fold_build_cleanup_point_expr (tree type, tree expr)
15866 /* If the expression does not have side effects then we don't have to wrap
15867 it with a cleanup point expression. */
15868 if (!TREE_SIDE_EFFECTS (expr))
15869 return expr;
15871 /* If the expression is a return, check whether the expression inside the
15872 return, or the right-hand side of the modify expression inside the
15873 return, has no side effects. If either has no side effects, we don't
15874 need to wrap the expression in a cleanup point expression. Note we don't
15875 check the left-hand side of the modify because it should always be a return decl. */
15876 if (TREE_CODE (expr) == RETURN_EXPR)
15878 tree op = TREE_OPERAND (expr, 0);
15879 if (!op || !TREE_SIDE_EFFECTS (op))
15880 return expr;
15881 op = TREE_OPERAND (op, 1);
15882 if (!TREE_SIDE_EFFECTS (op))
15883 return expr;
15886 return build1 (CLEANUP_POINT_EXPR, type, expr);
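
/* For instance (hypothetical trees, for illustration only): a RETURN_EXPR
   whose MODIFY_EXPR right-hand side is a plain variable read is returned
   unwrapped by the early-outs above, while one whose right-hand side calls
   a function comes back as CLEANUP_POINT_EXPR <RETURN_EXPR <...>>, so that
   temporaries are destroyed at the end of the full-expression.  */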

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p; make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_to_shwi (part_width) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
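
/* A worked case (types invented for illustration): with OP0 = &v for a
   variable v of a four-float vector type and TYPE = float, the ADDR_EXPR
   arm above folds the indirection *(float *)&v into
   BIT_FIELD_REF <v, 32, 0>, i.e. the first 32-bit element of the
   vector.  */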

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
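
/* Example (a hypothetical tree): for 'x + f ()' whose result is unused,
   the tcc_binary case keeps only the operand with side effects, so
   fold_ignored_result returns 'f ()'; an expression with no side effects
   at all is replaced by integer_zero_node up front.  */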

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val &= ~(divisor - 1);
	  val += divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
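
/* Worked example (numbers invented): rounding VALUE = 37 up to
   DIVISOR = 8 takes the power-of-two path:
   (37 + 7) & -8 = 44 & -8 = 40.  */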

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
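
/* Likewise a worked example (numbers invented): rounding 37 down to a
   multiple of 8 is simply 37 & -8 = 32 on the power-of-two path.  */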

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
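
/* For instance (a hypothetical aggregate, for illustration): for
   EXP = &s.f where field f starts 4 bytes into s, this returns &s and
   sets *PBITPOS = 32 (bits) with *POFFSET = NULL_TREE; a pointer that
   is not an ADDR_EXPR comes back unchanged with a zero offset.  */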

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
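
/* Example (hypothetical operands, assuming 4-byte int and 8-bit units):
   for E1 = &a[3] and E2 = &a[1] with 'int a[10]', both addresses share
   the core &a, the bit positions are 96 and 32, and *DIFF is set to
   (96 - 32) / BITS_PER_UNIT = 8 bytes, so the function returns true.  */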

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
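
/* For example (hypothetical expressions): applied to '-x * -y' this
   strips both negations and rebuilds 'x * y' (provided sign-dependent
   rounding is not honored); applied to a copysign call it returns the
   first argument, preserving any side effects of the second via
   omit_one_operand_loc.  */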