/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
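/* Illustrative sketch (editor's example, not part of the original file):
   a caller typically builds a tree and lets the folder simplify it, e.g.

     tree sum = fold_build2 (PLUS_EXPR, integer_type_node,
                             build_int_cst (integer_type_node, 2),
                             build_int_cst (integer_type_node, 3));

   yields the INTEGER_CST 5 directly instead of a PLUS_EXPR node.  */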
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "cgraph.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
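/* The encoding makes combining comparisons a bitwise matter: for
   instance COMPCODE_LE (3) == COMPCODE_LT (1) | COMPCODE_EQ (2), and
   COMPCODE_NE (13) == COMPCODE_UNORD (8) | COMPCODE_LT (1)
   | COMPCODE_GT (4), so "a < b || a == b" can be merged by OR-ing
   the codes.  */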
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
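/* Illustrative behavior: with INTEGER_CST arguments 12 and 4 this
   returns the constant 3; with arguments 13 and 4 it returns
   NULL_TREE, since the remainder is nonzero.  */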
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
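/* Illustrative usage sketch (hypothetical caller, editor's example):
   speculative folding brackets its work so a -Wstrict-overflow
   warning is only emitted if the folded result is actually kept:

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res != expr, stmt, 0);
*/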
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
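/* For example, sin is odd (-sin (x) == sin (-x)), so a negation can be
   moved into the argument.  rint is only treated as odd when
   -frounding-math is off: under a directed rounding mode, -rint (0.5)
   and rint (-0.5) can differ.  */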
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
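/* Illustrative: for a 32-bit signed int, only INT_MIN (just the sign
   bit set) fails this check, because -INT_MIN is not representable in
   the same type.  */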
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
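/* For example, for -(x + 5) this predicate returns true via the
   PLUS_EXPR case: the literal 5 can be negated, so fold_negate_expr
   below can rewrite the expression as (-5) - x.  */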
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
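/* Illustrative: for 32-bit int, -((int) x >> 31) evaluates to 0 or 1
   depending on the sign of x, so the RSHIFT_EXPR case above rewrites
   it as (int) ((unsigned) x >> 31), avoiding the negation entirely.  */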
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
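/* Illustrative decomposition: splitting IN = x + 3 with CODE ==
   PLUS_EXPR yields *litp = 3, *conp = NULL and returns x; splitting
   IN = x - 3 instead sets *minus_litp = 3, letting the caller
   re-associate the literal with other constants before rebuilding the
   expression with associate_trees below.  */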
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
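/* Shifts and rotates pass unconditionally above because their second
   operand is a shift count rather than a value of the result type;
   "x << n" is well-formed even when n has a different precision or
   signedness than x.  */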
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
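/* Illustrative: int_const_binop (PLUS_EXPR, ...) on INTEGER_CSTs 2 and
   3 yields the constant 5, while any of the division or modulus cases
   with a zero divisor returns NULL_TREE above, leaving the expression
   (and any run-time trap) intact.  */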
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;

          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, the compiler emits VEC_RSHIFT_EXPR always;
             for !BYTES_BIG_ENDIAN it picks the first vector element, but
             for BYTES_BIG_ENDIAN the last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;

          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
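/* Illustrative: const_binop (PLUS_EXPR, ...) on REAL_CSTs folds
   1.5 + 2.25 to 3.75, but RDIV_EXPR by a zero REAL_CST is refused
   under -ftrapping-math above so the division-by-zero exception can
   still be raised at run time.  */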
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
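/* Illustrative use (hypothetical caller, editor's example): layout
   code adds a 4-byte pad to a constant size with

     size = size_binop (PLUS_EXPR, size, size_int (4));

   which folds immediately through the INTEGER_CST fast path above.  */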
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
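/* Worked example: for sizetype constants arg0 = 4 and arg1 = 12, the
   difference 12 - 4 = 8 is computed in the unsigned type where it
   cannot overflow, converted to ssizetype, and negated, so the result
   is the ssizetype constant -8.  */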
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
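/* Illustrative: folding (int) 3.9e9 with 32-bit int saturates to
   INT_MAX = 2147483647 with TREE_OVERFLOW set, and converting a NaN
   folds to 0, per the Java-style rules described above.  */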
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Return true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
2074 gcc_unreachable ();
2076 fold_convert_exit:
2077 protected_set_expr_location_unshare (tem, loc);
2078 return tem;
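/* Illustrative example (not from the original source): converting an
   INTEGER_CST to a REAL_TYPE goes through fold_convert_const with
   FLOAT_EXPR, so
     fold_convert_loc (loc, double_type_node,
                       build_int_cst (integer_type_node, 2))
   should yield the REAL_CST 2.0; converting a scalar to a COMPLEX_TYPE
   instead builds a COMPLEX_EXPR whose imaginary part is a converted
   zero.  */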
2081 /* Return false if expr can be assumed not to be an lvalue, true
2082 otherwise. */
2084 static bool
2085 maybe_lvalue_p (const_tree x)
2087 /* We only need to wrap lvalue tree codes. */
2088 switch (TREE_CODE (x))
2090 case VAR_DECL:
2091 case PARM_DECL:
2092 case RESULT_DECL:
2093 case LABEL_DECL:
2094 case FUNCTION_DECL:
2095 case SSA_NAME:
2097 case COMPONENT_REF:
2098 case MEM_REF:
2099 case INDIRECT_REF:
2100 case ARRAY_REF:
2101 case ARRAY_RANGE_REF:
2102 case BIT_FIELD_REF:
2103 case OBJ_TYPE_REF:
2105 case REALPART_EXPR:
2106 case IMAGPART_EXPR:
2107 case PREINCREMENT_EXPR:
2108 case PREDECREMENT_EXPR:
2109 case SAVE_EXPR:
2110 case TRY_CATCH_EXPR:
2111 case WITH_CLEANUP_EXPR:
2112 case COMPOUND_EXPR:
2113 case MODIFY_EXPR:
2114 case TARGET_EXPR:
2115 case COND_EXPR:
2116 case BIND_EXPR:
2117 break;
2119 default:
2120 /* Assume the worst for front-end tree codes. */
2121 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2122 break;
2123 return false;
2126 return true;
2129 /* Return an expr equal to X but certainly not valid as an lvalue. */
2131 tree
2132 non_lvalue_loc (location_t loc, tree x)
2134 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2135 us. */
2136 if (in_gimple_form)
2137 return x;
2139 if (! maybe_lvalue_p (x))
2140 return x;
2141 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2144 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2145 Zero means allow extended lvalues. */
2147 int pedantic_lvalues;
2149 /* When pedantic, return an expr equal to X but certainly not valid as a
2150 pedantic lvalue. Otherwise, return X. */
2152 static tree
2153 pedantic_non_lvalue_loc (location_t loc, tree x)
2155 if (pedantic_lvalues)
2156 return non_lvalue_loc (loc, x);
2158 return protected_set_expr_location_unshare (x, loc);
2161 /* Given a tree comparison code, return the code that is the logical inverse.
2162 It is generally not safe to do this for floating-point comparisons, except
2163 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2164 ERROR_MARK in this case. */
2166 enum tree_code
2167 invert_tree_comparison (enum tree_code code, bool honor_nans)
2169 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2170 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2171 return ERROR_MARK;
2173 switch (code)
2175 case EQ_EXPR:
2176 return NE_EXPR;
2177 case NE_EXPR:
2178 return EQ_EXPR;
2179 case GT_EXPR:
2180 return honor_nans ? UNLE_EXPR : LE_EXPR;
2181 case GE_EXPR:
2182 return honor_nans ? UNLT_EXPR : LT_EXPR;
2183 case LT_EXPR:
2184 return honor_nans ? UNGE_EXPR : GE_EXPR;
2185 case LE_EXPR:
2186 return honor_nans ? UNGT_EXPR : GT_EXPR;
2187 case LTGT_EXPR:
2188 return UNEQ_EXPR;
2189 case UNEQ_EXPR:
2190 return LTGT_EXPR;
2191 case UNGT_EXPR:
2192 return LE_EXPR;
2193 case UNGE_EXPR:
2194 return LT_EXPR;
2195 case UNLT_EXPR:
2196 return GE_EXPR;
2197 case UNLE_EXPR:
2198 return GT_EXPR;
2199 case ORDERED_EXPR:
2200 return UNORDERED_EXPR;
2201 case UNORDERED_EXPR:
2202 return ORDERED_EXPR;
2203 default:
2204 gcc_unreachable ();
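/* Example: with NaNs honored, the inverse of LT_EXPR is UNGE_EXPR,
   because !(a < b) must also hold when a and b compare unordered;
   plain GE_EXPR would be wrong for NaN operands.  Under
   flag_trapping_math the rewrite LT -> UNGE would also drop the trap
   on unordered operands, which is why ERROR_MARK is returned for such
   codes instead.  */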
2208 /* Similar, but return the comparison that results if the operands are
2209 swapped. This is safe for floating-point. */
2211 enum tree_code
2212 swap_tree_comparison (enum tree_code code)
2214 switch (code)
2216 case EQ_EXPR:
2217 case NE_EXPR:
2218 case ORDERED_EXPR:
2219 case UNORDERED_EXPR:
2220 case LTGT_EXPR:
2221 case UNEQ_EXPR:
2222 return code;
2223 case GT_EXPR:
2224 return LT_EXPR;
2225 case GE_EXPR:
2226 return LE_EXPR;
2227 case LT_EXPR:
2228 return GT_EXPR;
2229 case LE_EXPR:
2230 return GE_EXPR;
2231 case UNGT_EXPR:
2232 return UNLT_EXPR;
2233 case UNGE_EXPR:
2234 return UNLE_EXPR;
2235 case UNLT_EXPR:
2236 return UNGT_EXPR;
2237 case UNLE_EXPR:
2238 return UNGE_EXPR;
2239 default:
2240 gcc_unreachable ();
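/* Example: a < b tests the same condition as b > a, so LT_EXPR maps
   to GT_EXPR, while symmetric codes such as EQ_EXPR, NE_EXPR,
   ORDERED_EXPR and UNEQ_EXPR map to themselves.  Unlike inversion,
   swapping never changes which operand pairs compare unordered, so it
   is safe for floating point.  */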
2245 /* Convert a comparison tree code from an enum tree_code representation
2246 into a compcode bit-based encoding. This function is the inverse of
2247 compcode_to_comparison. */
2249 static enum comparison_code
2250 comparison_to_compcode (enum tree_code code)
2252 switch (code)
2254 case LT_EXPR:
2255 return COMPCODE_LT;
2256 case EQ_EXPR:
2257 return COMPCODE_EQ;
2258 case LE_EXPR:
2259 return COMPCODE_LE;
2260 case GT_EXPR:
2261 return COMPCODE_GT;
2262 case NE_EXPR:
2263 return COMPCODE_NE;
2264 case GE_EXPR:
2265 return COMPCODE_GE;
2266 case ORDERED_EXPR:
2267 return COMPCODE_ORD;
2268 case UNORDERED_EXPR:
2269 return COMPCODE_UNORD;
2270 case UNLT_EXPR:
2271 return COMPCODE_UNLT;
2272 case UNEQ_EXPR:
2273 return COMPCODE_UNEQ;
2274 case UNLE_EXPR:
2275 return COMPCODE_UNLE;
2276 case UNGT_EXPR:
2277 return COMPCODE_UNGT;
2278 case LTGT_EXPR:
2279 return COMPCODE_LTGT;
2280 case UNGE_EXPR:
2281 return COMPCODE_UNGE;
2282 default:
2283 gcc_unreachable ();
2287 /* Convert a compcode bit-based encoding of a comparison operator back
2288 to GCC's enum tree_code representation. This function is the
2289 inverse of comparison_to_compcode. */
2291 static enum tree_code
2292 compcode_to_comparison (enum comparison_code code)
2294 switch (code)
2296 case COMPCODE_LT:
2297 return LT_EXPR;
2298 case COMPCODE_EQ:
2299 return EQ_EXPR;
2300 case COMPCODE_LE:
2301 return LE_EXPR;
2302 case COMPCODE_GT:
2303 return GT_EXPR;
2304 case COMPCODE_NE:
2305 return NE_EXPR;
2306 case COMPCODE_GE:
2307 return GE_EXPR;
2308 case COMPCODE_ORD:
2309 return ORDERED_EXPR;
2310 case COMPCODE_UNORD:
2311 return UNORDERED_EXPR;
2312 case COMPCODE_UNLT:
2313 return UNLT_EXPR;
2314 case COMPCODE_UNEQ:
2315 return UNEQ_EXPR;
2316 case COMPCODE_UNLE:
2317 return UNLE_EXPR;
2318 case COMPCODE_UNGT:
2319 return UNGT_EXPR;
2320 case COMPCODE_LTGT:
2321 return LTGT_EXPR;
2322 case COMPCODE_UNGE:
2323 return UNGE_EXPR;
2324 default:
2325 gcc_unreachable ();
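/* The four compcode bits act as a truth table over the mutually
   exclusive conditions LT, EQ, GT and UNORD.  For example,
   COMPCODE_LE (3) == COMPCODE_LT (1) | COMPCODE_EQ (2), and
   COMPCODE_NE (13) == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD, so
   ANDing or ORing two compcodes computes the conjunction or
   disjunction of the corresponding predicates bitwise.  */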
2329 /* Return a tree for the comparison which is the combination of
2330 doing the AND or OR (depending on CODE) of the two operations LCODE
2331 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2332 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2333 if this makes the transformation invalid. */
2335 tree
2336 combine_comparisons (location_t loc,
2337 enum tree_code code, enum tree_code lcode,
2338 enum tree_code rcode, tree truth_type,
2339 tree ll_arg, tree lr_arg)
2341 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2342 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2343 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2344 int compcode;
2346 switch (code)
2348 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2349 compcode = lcompcode & rcompcode;
2350 break;
2352 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2353 compcode = lcompcode | rcompcode;
2354 break;
2356 default:
2357 return NULL_TREE;
2360 if (!honor_nans)
2362 /* Eliminate unordered comparisons, as well as LTGT and ORD
2363 which are not used unless the mode has NaNs. */
2364 compcode &= ~COMPCODE_UNORD;
2365 if (compcode == COMPCODE_LTGT)
2366 compcode = COMPCODE_NE;
2367 else if (compcode == COMPCODE_ORD)
2368 compcode = COMPCODE_TRUE;
2370 else if (flag_trapping_math)
2372 /* Check that the original operation and the optimized ones will trap
2373 under the same condition. */
2374 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2375 && (lcompcode != COMPCODE_EQ)
2376 && (lcompcode != COMPCODE_ORD);
2377 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2378 && (rcompcode != COMPCODE_EQ)
2379 && (rcompcode != COMPCODE_ORD);
2380 bool trap = (compcode & COMPCODE_UNORD) == 0
2381 && (compcode != COMPCODE_EQ)
2382 && (compcode != COMPCODE_ORD);
2384 /* In a short-circuited boolean expression the LHS might be
2385 such that the RHS, if evaluated, will never trap. For
2386 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2387 if neither x nor y is NaN. (This is a mixed blessing: for
2388 example, the expression above will never trap, hence
2389 optimizing it to x < y would be invalid). */
2390 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2391 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2392 rtrap = false;
2394 /* If the comparison was short-circuited, and only the RHS
2395 trapped, we may now generate a spurious trap. */
2396 if (rtrap && !ltrap
2397 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2398 return NULL_TREE;
2400 /* If we changed the conditions that cause a trap, we lose. */
2401 if ((ltrap || rtrap) != trap)
2402 return NULL_TREE;
2405 if (compcode == COMPCODE_TRUE)
2406 return constant_boolean_node (true, truth_type);
2407 else if (compcode == COMPCODE_FALSE)
2408 return constant_boolean_node (false, truth_type);
2409 else
2411 enum tree_code tcode;
2413 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2414 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
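/* Worked example, assuming !HONOR_NANS: combining (x < y) || (x == y)
   gives COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, i.e. x <= y, while
   combining (x < y) && (x > y) gives COMPCODE_FALSE, i.e. a constant
   false node.  */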
2418 /* Return nonzero if two operands (typically of the same tree node)
2419 are necessarily equal. If either argument has side-effects this
2420 function returns zero. FLAGS modifies behavior as follows:
2422 If OEP_ONLY_CONST is set, only return nonzero for constants.
2423 This function tests whether the operands are indistinguishable;
2424 it does not test whether they are equal using C's == operation.
2425 The distinction is important for IEEE floating point, because
2426 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2427 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2429 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2430 even though it may hold multiple values during a function.
2431 This is because a GCC tree node guarantees that nothing else is
2432 executed between the evaluation of its "operands" (which may often
2433 be evaluated in arbitrary order). Hence if the operands themselves
2434 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2435 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2436 unset means assuming isochronic (or instantaneous) tree equivalence.
2437 Unless comparing arbitrary expression trees, such as from different
2438 statements, this flag can usually be left unset.
2440 If OEP_PURE_SAME is set, then pure functions with identical arguments
2441 are considered the same. It is used when the caller has other ways
2442 to ensure that global memory is unchanged in between. */
2444 int
2445 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2447 /* If either is ERROR_MARK, they aren't equal. */
2448 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2449 || TREE_TYPE (arg0) == error_mark_node
2450 || TREE_TYPE (arg1) == error_mark_node)
2451 return 0;
2453 /* Similar, if either does not have a type (like a released SSA name),
2454 they aren't equal. */
2455 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2456 return 0;
2458 /* Check equality of integer constants before bailing out due to
2459 precision differences. */
2460 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2461 return tree_int_cst_equal (arg0, arg1);
2463 /* If both types don't have the same signedness, then we can't consider
2464 them equal. We must check this before the STRIP_NOPS calls
2465 because they may change the signedness of the arguments. As pointers
2466 strictly don't have a signedness, require either two pointers or
2467 two non-pointers as well. */
2468 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2469 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2470 return 0;
2472 /* We cannot consider pointers to different address space equal. */
2473 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2474 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2475 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2476 return 0;
2478 /* If both types don't have the same precision, then it is not safe
2479 to strip NOPs. */
2480 if (element_precision (TREE_TYPE (arg0))
2481 != element_precision (TREE_TYPE (arg1)))
2482 return 0;
2484 STRIP_NOPS (arg0);
2485 STRIP_NOPS (arg1);
2487 /* In case both args are comparisons but with different comparison
2488 code, try to swap the comparison operands of one arg to produce
2489 a match and compare that variant. */
2490 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2491 && COMPARISON_CLASS_P (arg0)
2492 && COMPARISON_CLASS_P (arg1))
2494 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2496 if (TREE_CODE (arg0) == swap_code)
2497 return operand_equal_p (TREE_OPERAND (arg0, 0),
2498 TREE_OPERAND (arg1, 1), flags)
2499 && operand_equal_p (TREE_OPERAND (arg0, 1),
2500 TREE_OPERAND (arg1, 0), flags);
2503 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2504 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2505 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2506 return 0;
2508 /* This is needed for conversions and for COMPONENT_REF.
2509 Might as well play it safe and always test this. */
2510 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2511 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2512 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2513 return 0;
2515 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2516 We don't care about side effects in that case because the SAVE_EXPR
2517 takes care of that for us. In all other cases, two expressions are
2518 equal if they have no side effects. If we have two identical
2519 expressions with side effects that should be treated the same due
2520 to the only side effects being identical SAVE_EXPR's, that will
2521 be detected in the recursive calls below.
2522 If we are taking an invariant address of two identical objects
2523 they are necessarily equal as well. */
2524 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2525 && (TREE_CODE (arg0) == SAVE_EXPR
2526 || (flags & OEP_CONSTANT_ADDRESS_OF)
2527 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2528 return 1;
2530 /* Next handle constant cases, those for which we can return 1 even
2531 if ONLY_CONST is set. */
2532 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2533 switch (TREE_CODE (arg0))
2535 case INTEGER_CST:
2536 return tree_int_cst_equal (arg0, arg1);
2538 case FIXED_CST:
2539 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2540 TREE_FIXED_CST (arg1));
2542 case REAL_CST:
2543 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2544 TREE_REAL_CST (arg1)))
2545 return 1;
2548 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2550 /* If we do not distinguish between signed and unsigned zero,
2551 consider them equal. */
2552 if (real_zerop (arg0) && real_zerop (arg1))
2553 return 1;
2555 return 0;
2557 case VECTOR_CST:
2559 unsigned i;
2561 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2562 return 0;
2564 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2566 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2567 VECTOR_CST_ELT (arg1, i), flags))
2568 return 0;
2570 return 1;
2573 case COMPLEX_CST:
2574 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2575 flags)
2576 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2577 flags));
2579 case STRING_CST:
2580 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2581 && ! memcmp (TREE_STRING_POINTER (arg0),
2582 TREE_STRING_POINTER (arg1),
2583 TREE_STRING_LENGTH (arg0)));
2585 case ADDR_EXPR:
2586 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2587 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2588 ? OEP_CONSTANT_ADDRESS_OF : 0);
2589 default:
2590 break;
2593 if (flags & OEP_ONLY_CONST)
2594 return 0;
2596 /* Define macros to test an operand from arg0 and arg1 for equality and a
2597 variant that allows null and views null as being different from any
2598 non-null value. In the latter case, if either is null, they both
2599 must be; otherwise, do the normal comparison. */
2600 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2601 TREE_OPERAND (arg1, N), flags)
2603 #define OP_SAME_WITH_NULL(N) \
2604 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2605 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2607 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2609 case tcc_unary:
2610 /* Two conversions are equal only if signedness and modes match. */
2611 switch (TREE_CODE (arg0))
2613 CASE_CONVERT:
2614 case FIX_TRUNC_EXPR:
2615 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2616 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2617 return 0;
2618 break;
2619 default:
2620 break;
2623 return OP_SAME (0);
2626 case tcc_comparison:
2627 case tcc_binary:
2628 if (OP_SAME (0) && OP_SAME (1))
2629 return 1;
2631 /* For commutative ops, allow the other order. */
2632 return (commutative_tree_code (TREE_CODE (arg0))
2633 && operand_equal_p (TREE_OPERAND (arg0, 0),
2634 TREE_OPERAND (arg1, 1), flags)
2635 && operand_equal_p (TREE_OPERAND (arg0, 1),
2636 TREE_OPERAND (arg1, 0), flags));
2638 case tcc_reference:
2639 /* If either of the pointer (or reference) expressions we are
2640 dereferencing contain a side effect, these cannot be equal,
2641 but their addresses can be. */
2642 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2643 && (TREE_SIDE_EFFECTS (arg0)
2644 || TREE_SIDE_EFFECTS (arg1)))
2645 return 0;
2647 switch (TREE_CODE (arg0))
2649 case INDIRECT_REF:
2650 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2651 return OP_SAME (0);
2653 case REALPART_EXPR:
2654 case IMAGPART_EXPR:
2655 return OP_SAME (0);
2657 case TARGET_MEM_REF:
2658 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2659 /* Require equal extra operands and then fall through to MEM_REF
2660 handling of the two common operands. */
2661 if (!OP_SAME_WITH_NULL (2)
2662 || !OP_SAME_WITH_NULL (3)
2663 || !OP_SAME_WITH_NULL (4))
2664 return 0;
2665 /* Fallthru. */
2666 case MEM_REF:
2667 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2668 /* Require equal access sizes, and similar pointer types.
2669 We can have incomplete types for array references of
2670 variable-sized arrays from the Fortran frontend
2671 though. Also verify the types are compatible. */
2672 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2673 || (TYPE_SIZE (TREE_TYPE (arg0))
2674 && TYPE_SIZE (TREE_TYPE (arg1))
2675 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2676 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2677 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2678 && alias_ptr_types_compatible_p
2679 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2680 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2681 && OP_SAME (0) && OP_SAME (1));
2683 case ARRAY_REF:
2684 case ARRAY_RANGE_REF:
2685 /* Operands 2 and 3 may be null.
2686 Compare the array index by value if it is constant first as we
2687 may have different types but same value here. */
2688 if (!OP_SAME (0))
2689 return 0;
2690 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2691 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2692 TREE_OPERAND (arg1, 1))
2693 || OP_SAME (1))
2694 && OP_SAME_WITH_NULL (2)
2695 && OP_SAME_WITH_NULL (3));
2697 case COMPONENT_REF:
2698 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2699 may be NULL when we're called to compare MEM_EXPRs. */
2700 if (!OP_SAME_WITH_NULL (0)
2701 || !OP_SAME (1))
2702 return 0;
2703 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2704 return OP_SAME_WITH_NULL (2);
2706 case BIT_FIELD_REF:
2707 if (!OP_SAME (0))
2708 return 0;
2709 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2710 return OP_SAME (1) && OP_SAME (2);
2712 default:
2713 return 0;
2716 case tcc_expression:
2717 switch (TREE_CODE (arg0))
2719 case ADDR_EXPR:
2720 case TRUTH_NOT_EXPR:
2721 return OP_SAME (0);
2723 case TRUTH_ANDIF_EXPR:
2724 case TRUTH_ORIF_EXPR:
2725 return OP_SAME (0) && OP_SAME (1);
2727 case FMA_EXPR:
2728 case WIDEN_MULT_PLUS_EXPR:
2729 case WIDEN_MULT_MINUS_EXPR:
2730 if (!OP_SAME (2))
2731 return 0;
2732 /* The multiplication operands are commutative. */
2733 /* FALLTHRU */
2735 case TRUTH_AND_EXPR:
2736 case TRUTH_OR_EXPR:
2737 case TRUTH_XOR_EXPR:
2738 if (OP_SAME (0) && OP_SAME (1))
2739 return 1;
2741 /* Otherwise take into account this is a commutative operation. */
2742 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2743 TREE_OPERAND (arg1, 1), flags)
2744 && operand_equal_p (TREE_OPERAND (arg0, 1),
2745 TREE_OPERAND (arg1, 0), flags));
2747 case COND_EXPR:
2748 case VEC_COND_EXPR:
2749 case DOT_PROD_EXPR:
2750 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2752 default:
2753 return 0;
2756 case tcc_vl_exp:
2757 switch (TREE_CODE (arg0))
2759 case CALL_EXPR:
2760 /* If the CALL_EXPRs call different functions, then they
2761 clearly can not be equal. */
2762 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2763 flags))
2764 return 0;
2767 unsigned int cef = call_expr_flags (arg0);
2768 if (flags & OEP_PURE_SAME)
2769 cef &= ECF_CONST | ECF_PURE;
2770 else
2771 cef &= ECF_CONST;
2772 if (!cef)
2773 return 0;
2776 /* Now see if all the arguments are the same. */
2778 const_call_expr_arg_iterator iter0, iter1;
2779 const_tree a0, a1;
2780 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2781 a1 = first_const_call_expr_arg (arg1, &iter1);
2782 a0 && a1;
2783 a0 = next_const_call_expr_arg (&iter0),
2784 a1 = next_const_call_expr_arg (&iter1))
2785 if (! operand_equal_p (a0, a1, flags))
2786 return 0;
2788 /* If we get here and both argument lists are exhausted
2789 then the CALL_EXPRs are equal. */
2790 return ! (a0 || a1);
2792 default:
2793 return 0;
2796 case tcc_declaration:
2797 /* Consider __builtin_sqrt equal to sqrt. */
2798 return (TREE_CODE (arg0) == FUNCTION_DECL
2799 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2800 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2801 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2803 default:
2804 return 0;
2807 #undef OP_SAME
2808 #undef OP_SAME_WITH_NULL
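/* A minimal usage sketch with hypothetical operands: given two
   distinct trees t1 and t2 both built for a + b,
   operand_equal_p (t1, t2, 0) returns nonzero as long as neither
   operand has side effects, even though t1 != t2 as pointers; with
   OEP_ONLY_CONST it returns nonzero only for constants, e.g. two
   INTEGER_CSTs of equal value.  */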
2811 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2812 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2814 When in doubt, return 0. */
2816 static int
2817 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2819 int unsignedp1, unsignedpo;
2820 tree primarg0, primarg1, primother;
2821 unsigned int correct_width;
2823 if (operand_equal_p (arg0, arg1, 0))
2824 return 1;
2826 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2827 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2828 return 0;
2830 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2831 and see if the inner values are the same. This removes any
2832 signedness comparison, which doesn't matter here. */
2833 primarg0 = arg0, primarg1 = arg1;
2834 STRIP_NOPS (primarg0);
2835 STRIP_NOPS (primarg1);
2836 if (operand_equal_p (primarg0, primarg1, 0))
2837 return 1;
2839 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2840 actual comparison operand, ARG0.
2842 First throw away any conversions to wider types
2843 already present in the operands. */
2845 primarg1 = get_narrower (arg1, &unsignedp1);
2846 primother = get_narrower (other, &unsignedpo);
2848 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2849 if (unsignedp1 == unsignedpo
2850 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2851 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2853 tree type = TREE_TYPE (arg0);
2855 /* Make sure shorter operand is extended the right way
2856 to match the longer operand. */
2857 primarg1 = fold_convert (signed_or_unsigned_type_for
2858 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2860 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2861 return 1;
2864 return 0;
2867 /* See if ARG is an expression that is either a comparison or is performing
2868 arithmetic on comparisons. The comparisons must only be comparing
2869 two different values, which will be stored in *CVAL1 and *CVAL2; if
2870 they are nonzero it means that some operands have already been found.
2871 No variables may be used anywhere else in the expression except in the
2872 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2873 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2875 If this is true, return 1. Otherwise, return zero. */
2877 static int
2878 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2880 enum tree_code code = TREE_CODE (arg);
2881 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2883 /* We can handle some of the tcc_expression cases here. */
2884 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2885 tclass = tcc_unary;
2886 else if (tclass == tcc_expression
2887 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2888 || code == COMPOUND_EXPR))
2889 tclass = tcc_binary;
2891 else if (tclass == tcc_expression && code == SAVE_EXPR
2892 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2894 /* If we've already found a CVAL1 or CVAL2, this expression is
2895 too complex to handle. */
2896 if (*cval1 || *cval2)
2897 return 0;
2899 tclass = tcc_unary;
2900 *save_p = 1;
2903 switch (tclass)
2905 case tcc_unary:
2906 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2908 case tcc_binary:
2909 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2910 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2911 cval1, cval2, save_p));
2913 case tcc_constant:
2914 return 1;
2916 case tcc_expression:
2917 if (code == COND_EXPR)
2918 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2919 cval1, cval2, save_p)
2920 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2921 cval1, cval2, save_p)
2922 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2923 cval1, cval2, save_p));
2924 return 0;
2926 case tcc_comparison:
2927 /* First see if we can handle the first operand, then the second. For
2928 the second operand, we know *CVAL1 can't be zero. It must be that
2929 one side of the comparison is each of the values; test for the
2930 case where this isn't true by failing if the two operands
2931 are the same. */
2933 if (operand_equal_p (TREE_OPERAND (arg, 0),
2934 TREE_OPERAND (arg, 1), 0))
2935 return 0;
2937 if (*cval1 == 0)
2938 *cval1 = TREE_OPERAND (arg, 0);
2939 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2941 else if (*cval2 == 0)
2942 *cval2 = TREE_OPERAND (arg, 0);
2943 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2945 else
2946 return 0;
2948 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2950 else if (*cval2 == 0)
2951 *cval2 = TREE_OPERAND (arg, 1);
2952 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2954 else
2955 return 0;
2957 return 1;
2959 default:
2960 return 0;
2964 /* ARG is a tree that is known to contain just arithmetic operations and
2965 comparisons. Evaluate the operations in the tree substituting NEW0 for
2966 any occurrence of OLD0 as an operand of a comparison and likewise for
2967 NEW1 and OLD1. */
2969 static tree
2970 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2971 tree old1, tree new1)
2973 tree type = TREE_TYPE (arg);
2974 enum tree_code code = TREE_CODE (arg);
2975 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2977 /* We can handle some of the tcc_expression cases here. */
2978 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2979 tclass = tcc_unary;
2980 else if (tclass == tcc_expression
2981 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2982 tclass = tcc_binary;
2984 switch (tclass)
2986 case tcc_unary:
2987 return fold_build1_loc (loc, code, type,
2988 eval_subst (loc, TREE_OPERAND (arg, 0),
2989 old0, new0, old1, new1));
2991 case tcc_binary:
2992 return fold_build2_loc (loc, code, type,
2993 eval_subst (loc, TREE_OPERAND (arg, 0),
2994 old0, new0, old1, new1),
2995 eval_subst (loc, TREE_OPERAND (arg, 1),
2996 old0, new0, old1, new1));
2998 case tcc_expression:
2999 switch (code)
3001 case SAVE_EXPR:
3002 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3003 old1, new1);
3005 case COMPOUND_EXPR:
3006 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3007 old1, new1);
3009 case COND_EXPR:
3010 return fold_build3_loc (loc, code, type,
3011 eval_subst (loc, TREE_OPERAND (arg, 0),
3012 old0, new0, old1, new1),
3013 eval_subst (loc, TREE_OPERAND (arg, 1),
3014 old0, new0, old1, new1),
3015 eval_subst (loc, TREE_OPERAND (arg, 2),
3016 old0, new0, old1, new1));
3017 default:
3018 break;
3020 /* Fall through - ??? */
3022 case tcc_comparison:
3024 tree arg0 = TREE_OPERAND (arg, 0);
3025 tree arg1 = TREE_OPERAND (arg, 1);
3027 /* We need to check both for exact equality and tree equality. The
3028 former will be true if the operand has a side-effect. In that
3029 case, we know the operand occurred exactly once. */
3031 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3032 arg0 = new0;
3033 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3034 arg0 = new1;
3036 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3037 arg1 = new0;
3038 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3039 arg1 = new1;
3041 return fold_build2_loc (loc, code, type, arg0, arg1);
3044 default:
3045 return arg;
3049 /* Return a tree for the case when the result of an expression is RESULT
3050 converted to TYPE and OMITTED was previously an operand of the expression
3051 but is now not needed (e.g., we folded OMITTED * 0).
3053 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3054 the conversion of RESULT to TYPE. */
3056 tree
3057 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3059 tree t = fold_convert_loc (loc, type, result);
3061 /* If the resulting operand is an empty statement, just return the omitted
3062 statement casted to void. */
3063 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3064 return build1_loc (loc, NOP_EXPR, void_type_node,
3065 fold_ignored_result (omitted));
3067 if (TREE_SIDE_EFFECTS (omitted))
3068 return build2_loc (loc, COMPOUND_EXPR, type,
3069 fold_ignored_result (omitted), t);
3071 return non_lvalue_loc (loc, t);
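/* Example: folding x * 0 where x is a call with side effects cannot
   simply return 0; omit_one_operand_loc instead produces the
   COMPOUND_EXPR (x, 0) so the call is still evaluated, while for a
   side-effect-free x it returns plain 0 wrapped as a non-lvalue.  */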
3074 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3076 static tree
3077 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3078 tree omitted)
3080 tree t = fold_convert_loc (loc, type, result);
3082 /* If the resulting operand is an empty statement, just return the omitted
3083 statement casted to void. */
3084 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3085 return build1_loc (loc, NOP_EXPR, void_type_node,
3086 fold_ignored_result (omitted));
3088 if (TREE_SIDE_EFFECTS (omitted))
3089 return build2_loc (loc, COMPOUND_EXPR, type,
3090 fold_ignored_result (omitted), t);
3092 return pedantic_non_lvalue_loc (loc, t);
3095 /* Return a tree for the case when the result of an expression is RESULT
3096 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3097 of the expression but are now not needed.
3099 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3100 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3101 evaluated before OMITTED2. Otherwise, if neither has side effects,
3102 just do the conversion of RESULT to TYPE. */
3104 tree
3105 omit_two_operands_loc (location_t loc, tree type, tree result,
3106 tree omitted1, tree omitted2)
3108 tree t = fold_convert_loc (loc, type, result);
3110 if (TREE_SIDE_EFFECTS (omitted2))
3111 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3112 if (TREE_SIDE_EFFECTS (omitted1))
3113 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3115 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3119 /* Return a simplified tree node for the truth-negation of ARG. This
3120 never alters ARG itself. We assume that ARG is an operation that
3121 returns a truth value (0 or 1).
3123 FIXME: one would think we would fold the result, but it causes
3124 problems with the dominator optimizer. */
3126 static tree
3127 fold_truth_not_expr (location_t loc, tree arg)
3129 tree type = TREE_TYPE (arg);
3130 enum tree_code code = TREE_CODE (arg);
3131 location_t loc1, loc2;
3133 /* If this is a comparison, we can simply invert it, except for
3134 floating-point non-equality comparisons, in which case we just
3135 enclose a TRUTH_NOT_EXPR around what we have. */
3137 if (TREE_CODE_CLASS (code) == tcc_comparison)
3139 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3140 if (FLOAT_TYPE_P (op_type)
3141 && flag_trapping_math
3142 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3143 && code != NE_EXPR && code != EQ_EXPR)
3144 return NULL_TREE;
3146 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3147 if (code == ERROR_MARK)
3148 return NULL_TREE;
3150 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3151 TREE_OPERAND (arg, 1));
3154 switch (code)
3156 case INTEGER_CST:
3157 return constant_boolean_node (integer_zerop (arg), type);
3159 case TRUTH_AND_EXPR:
3160 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3161 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3162 return build2_loc (loc, TRUTH_OR_EXPR, type,
3163 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3164 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3166 case TRUTH_OR_EXPR:
3167 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3168 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3169 return build2_loc (loc, TRUTH_AND_EXPR, type,
3170 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3171 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3173 case TRUTH_XOR_EXPR:
3174 /* Here we can invert either operand. We invert the first operand
3175 unless the second operand is a TRUTH_NOT_EXPR in which case our
3176 result is the XOR of the first operand with the inside of the
3177 negation of the second operand. */
3179 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3180 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3181 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3182 else
3183 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3184 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3185 TREE_OPERAND (arg, 1));
3187 case TRUTH_ANDIF_EXPR:
3188 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3189 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3190 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3191 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3192 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3194 case TRUTH_ORIF_EXPR:
3195 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3196 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3197 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3198 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3199 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3201 case TRUTH_NOT_EXPR:
3202 return TREE_OPERAND (arg, 0);
3204 case COND_EXPR:
3206 tree arg1 = TREE_OPERAND (arg, 1);
3207 tree arg2 = TREE_OPERAND (arg, 2);
3209 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3210 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3212 /* A COND_EXPR may have a throw as one operand, which
3213 then has void type. Just leave void operands
3214 as they are. */
3215 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3216 VOID_TYPE_P (TREE_TYPE (arg1))
3217 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3218 VOID_TYPE_P (TREE_TYPE (arg2))
3219 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3222 case COMPOUND_EXPR:
3223 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3224 return build2_loc (loc, COMPOUND_EXPR, type,
3225 TREE_OPERAND (arg, 0),
3226 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3228 case NON_LVALUE_EXPR:
3229 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3230 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3232 CASE_CONVERT:
3233 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3234 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3236 /* ... fall through ... */
3238 case FLOAT_EXPR:
3239 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3240 return build1_loc (loc, TREE_CODE (arg), type,
3241 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3243 case BIT_AND_EXPR:
3244 if (!integer_onep (TREE_OPERAND (arg, 1)))
3245 return NULL_TREE;
3246 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3248 case SAVE_EXPR:
3249 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3251 case CLEANUP_POINT_EXPR:
3252 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3253 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3254 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3256 default:
3257 return NULL_TREE;
3261 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3262 assume that ARG is an operation that returns a truth value (0 or 1
3263 for scalars, 0 or -1 for vectors). Return the folded expression if
3264 folding is successful. Otherwise, return NULL_TREE. */
3266 static tree
3267 fold_invert_truthvalue (location_t loc, tree arg)
3269 tree type = TREE_TYPE (arg);
3270 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3271 ? BIT_NOT_EXPR
3272 : TRUTH_NOT_EXPR,
3273 type, arg);
3276 /* Return a simplified tree node for the truth-negation of ARG. This
3277 never alters ARG itself. We assume that ARG is an operation that
3278 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3280 tree
3281 invert_truthvalue_loc (location_t loc, tree arg)
3283 if (TREE_CODE (arg) == ERROR_MARK)
3284 return arg;
3286 tree type = TREE_TYPE (arg);
3287 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3288 ? BIT_NOT_EXPR
3289 : TRUTH_NOT_EXPR,
3290 type, arg);
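/* Example: the cases above implement De Morgan's laws, so !(a && b)
   becomes !a || !b and !(a || b) becomes !a && !b; a negated
   COND_EXPR a ? b : c becomes a ? !b : !c, with void-typed arms
   (e.g. a throw) left as they are.  */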
3293 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3294 operands are another bit-wise operation with a common input. If so,
3295 distribute the bit operations to save an operation and possibly two if
3296 constants are involved. For example, convert
3297 (A | B) & (A | C) into A | (B & C)
3298 Further simplification will occur if B and C are constants.
3300 If this optimization cannot be done, 0 will be returned. */
3302 static tree
3303 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3304 tree arg0, tree arg1)
3306 tree common;
3307 tree left, right;
3309 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3310 || TREE_CODE (arg0) == code
3311 || (TREE_CODE (arg0) != BIT_AND_EXPR
3312 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3313 return 0;
3315 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3317 common = TREE_OPERAND (arg0, 0);
3318 left = TREE_OPERAND (arg0, 1);
3319 right = TREE_OPERAND (arg1, 1);
3321 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3323 common = TREE_OPERAND (arg0, 0);
3324 left = TREE_OPERAND (arg0, 1);
3325 right = TREE_OPERAND (arg1, 0);
3327 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3329 common = TREE_OPERAND (arg0, 1);
3330 left = TREE_OPERAND (arg0, 0);
3331 right = TREE_OPERAND (arg1, 1);
3333 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3335 common = TREE_OPERAND (arg0, 1);
3336 left = TREE_OPERAND (arg0, 0);
3337 right = TREE_OPERAND (arg1, 0);
3339 else
3340 return 0;
3342 common = fold_convert_loc (loc, type, common);
3343 left = fold_convert_loc (loc, type, left);
3344 right = fold_convert_loc (loc, type, right);
3345 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3346 fold_build2_loc (loc, code, type, left, right));
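/* Example: for (x | 4) & (x | 1) the common operand is x, so the
   result is x | (4 & 1), which further folds to x | 0 and then to x;
   when no common operand exists, as in (x | y) & (z | w), 0 is
   returned and no transformation is made.  */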
3349 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3350 with code CODE. This optimization is unsafe. */
3351 static tree
3352 distribute_real_division (location_t loc, enum tree_code code, tree type,
3353 tree arg0, tree arg1)
3355 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3356 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3358 /* (A / C) +- (B / C) -> (A +- B) / C. */
3359 if (mul0 == mul1
3360 && operand_equal_p (TREE_OPERAND (arg0, 1),
3361 TREE_OPERAND (arg1, 1), 0))
3362 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3363 fold_build2_loc (loc, code, type,
3364 TREE_OPERAND (arg0, 0),
3365 TREE_OPERAND (arg1, 0)),
3366 TREE_OPERAND (arg0, 1));
3368 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3369 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3370 TREE_OPERAND (arg1, 0), 0)
3371 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3372 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3374 REAL_VALUE_TYPE r0, r1;
3375 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3376 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3377 if (!mul0)
3378 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3379 if (!mul1)
3380 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3381 real_arithmetic (&r0, code, &r0, &r1);
3382 return fold_build2_loc (loc, MULT_EXPR, type,
3383 TREE_OPERAND (arg0, 0),
3384 build_real (type, r0));
3387 return NULL_TREE;
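/* Example of the unsafety: rewriting x/3.0 + y/3.0 as (x + y)/3.0
   changes the intermediate roundings, and x/C1 + x/C2 ->
   x * (1/C1 + 1/C2) replaces exact divisions with a possibly inexact
   reciprocal, so callers are expected to guard this with
   flag_unsafe_math_optimizations.  */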
3390 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3391 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3393 static tree
3394 make_bit_field_ref (location_t loc, tree inner, tree type,
3395 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3397 tree result, bftype;
3399 if (bitpos == 0)
3401 tree size = TYPE_SIZE (TREE_TYPE (inner));
3402 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3403 || POINTER_TYPE_P (TREE_TYPE (inner)))
3404 && tree_fits_shwi_p (size)
3405 && tree_to_shwi (size) == bitsize)
3406 return fold_convert_loc (loc, type, inner);
3409 bftype = type;
3410 if (TYPE_PRECISION (bftype) != bitsize
3411 || TYPE_UNSIGNED (bftype) == !unsignedp)
3412 bftype = build_nonstandard_integer_type (bitsize, 0);
3414 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3415 size_int (bitsize), bitsize_int (bitpos));
3417 if (bftype != type)
3418 result = fold_convert_loc (loc, type, result);
3420 return result;
3423 /* Optimize a bit-field compare.
3425 There are two cases: First is a compare against a constant and the
3426 second is a comparison of two items where the fields are at the same
3427 bit position relative to the start of a chunk (byte, halfword, word)
3428 large enough to contain it. In these cases we can avoid the shift
3429 implicit in bitfield extractions.
3431 For constants, we emit a compare of the shifted constant with the
3432 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3433 compared. For two fields at the same position, we do the ANDs with the
3434 similar mask and compare the result of the ANDs.
3436 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3437 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3438 are the left and right operands of the comparison, respectively.
3440 If the optimization described above can be done, we return the resulting
3441 tree. Otherwise we return zero. */
3443 static tree
3444 optimize_bit_field_compare (location_t loc, enum tree_code code,
3445 tree compare_type, tree lhs, tree rhs)
3447 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3448 tree type = TREE_TYPE (lhs);
3449 tree unsigned_type;
3450 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3451 enum machine_mode lmode, rmode, nmode;
3452 int lunsignedp, runsignedp;
3453 int lvolatilep = 0, rvolatilep = 0;
3454 tree linner, rinner = NULL_TREE;
3455 tree mask;
3456 tree offset;
3458 /* Get all the information about the extractions being done. If the bit size
3459 is the same as the size of the underlying object, we aren't doing an
3460 extraction at all and so can do nothing. We also don't want to
3461 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3462 then will no longer be able to replace it. */
3463 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3464 &lunsignedp, &lvolatilep, false);
3465 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3466 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3467 return 0;
3469 if (!const_p)
3471 /* If this is not a constant, we can only do something if bit positions,
3472 sizes, and signedness are the same. */
3473 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3474 &runsignedp, &rvolatilep, false);
3476 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3477 || lunsignedp != runsignedp || offset != 0
3478 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3479 return 0;
3482 /* See if we can find a mode to refer to this field. We should be able to,
3483 but fail if we can't. */
3484 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3485 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3486 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3487 TYPE_ALIGN (TREE_TYPE (rinner))),
3488 word_mode, false);
3489 if (nmode == VOIDmode)
3490 return 0;
3492 /* Set signed and unsigned types of the precision of this mode for the
3493 shifts below. */
3494 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3496 /* Compute the bit position and size for the new reference and our offset
3497 within it. If the new reference is the same size as the original, we
3498 won't optimize anything, so return zero. */
3499 nbitsize = GET_MODE_BITSIZE (nmode);
3500 nbitpos = lbitpos & ~ (nbitsize - 1);
3501 lbitpos -= nbitpos;
3502 if (nbitsize == lbitsize)
3503 return 0;
3505 if (BYTES_BIG_ENDIAN)
3506 lbitpos = nbitsize - lbitsize - lbitpos;
3508 /* Make the mask to be used against the extracted field. */
3509 mask = build_int_cst_type (unsigned_type, -1);
3510 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3511 mask = const_binop (RSHIFT_EXPR, mask,
3512 size_int (nbitsize - lbitsize - lbitpos));
3514 if (! const_p)
3515 /* If not comparing with constant, just rework the comparison
3516 and return. */
3517 return fold_build2_loc (loc, code, compare_type,
3518 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3519 make_bit_field_ref (loc, linner,
3520 unsigned_type,
3521 nbitsize, nbitpos,
3522 1),
3523 mask),
3524 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3525 make_bit_field_ref (loc, rinner,
3526 unsigned_type,
3527 nbitsize, nbitpos,
3528 1),
3529 mask));
3531 /* Otherwise, we are handling the constant case. See if the constant is too
3532 big for the field. Warn and return a tree for 0 (false) if so. We do
3533 this not only for its own sake, but to avoid having to test for this
3534 error case below. If we didn't, we might generate wrong code.
3536 For unsigned fields, the constant shifted right by the field length should
3537 be all zero. For signed fields, the high-order bits should agree with
3538 the sign bit. */
3540 if (lunsignedp)
3542 if (wi::lrshift (rhs, lbitsize) != 0)
3544 warning (0, "comparison is always %d due to width of bit-field",
3545 code == NE_EXPR);
3546 return constant_boolean_node (code == NE_EXPR, compare_type);
3549 else
3551 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3552 if (tem != 0 && tem != -1)
3554 warning (0, "comparison is always %d due to width of bit-field",
3555 code == NE_EXPR);
3556 return constant_boolean_node (code == NE_EXPR, compare_type);
3560 /* Single-bit compares should always be against zero. */
3561 if (lbitsize == 1 && ! integer_zerop (rhs))
3563 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3564 rhs = build_int_cst (type, 0);
3567 /* Make a new bitfield reference, shift the constant over the
3568 appropriate number of bits and mask it with the computed mask
3569 (in case this was a signed field). If we changed it, make a new one. */
3570 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3572 rhs = const_binop (BIT_AND_EXPR,
3573 const_binop (LSHIFT_EXPR,
3574 fold_convert_loc (loc, unsigned_type, rhs),
3575 size_int (lbitpos)),
3576 mask);
3578 lhs = build2_loc (loc, code, compare_type,
3579 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3580 return lhs;
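/* Example: given struct { unsigned f : 3; } s, the test s.f == 5 can
   be done as (word & mask) == ((5 << lbitpos) & mask) on a mode-sized
   load, avoiding the shift a plain bit-field extraction would need;
   a test like s.f == 9 is folded to constant false (with a warning),
   since 9 cannot fit in a 3-bit field.  */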
3583 /* Subroutine for fold_truth_andor_1: decode a field reference.
3585 If EXP is a comparison reference, we return the innermost reference.
3587 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3588 set to the starting bit number.
3590 If the innermost field can be completely contained in a mode-sized
3591 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3593 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3594 otherwise it is not changed.
3596 *PUNSIGNEDP is set to the signedness of the field.
3598 *PMASK is set to the mask used. This is either contained in a
3599 BIT_AND_EXPR or derived from the width of the field.
3601 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3603 Return 0 if this is not a component reference or is one that we can't
3604 do anything with. */
3606 static tree
3607 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3608 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3609 int *punsignedp, int *pvolatilep,
3610 tree *pmask, tree *pand_mask)
3612 tree outer_type = 0;
3613 tree and_mask = 0;
3614 tree mask, inner, offset;
3615 tree unsigned_type;
3616 unsigned int precision;
3618 /* All the optimizations using this function assume integer fields.
3619 There are problems with FP fields since the type_for_size call
3620 below can fail for, e.g., XFmode. */
3621 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3622 return 0;
3624 /* We are interested in the bare arrangement of bits, so strip everything
3625 that doesn't affect the machine mode. However, record the type of the
3626 outermost expression if it may matter below. */
3627 if (CONVERT_EXPR_P (exp)
3628 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3629 outer_type = TREE_TYPE (exp);
3630 STRIP_NOPS (exp);
3632 if (TREE_CODE (exp) == BIT_AND_EXPR)
3634 and_mask = TREE_OPERAND (exp, 1);
3635 exp = TREE_OPERAND (exp, 0);
3636 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3637 if (TREE_CODE (and_mask) != INTEGER_CST)
3638 return 0;
3641 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3642 punsignedp, pvolatilep, false);
3643 if ((inner == exp && and_mask == 0)
3644 || *pbitsize < 0 || offset != 0
3645 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3646 return 0;
3648 /* If the number of bits in the reference is the same as the bitsize of
3649 the outer type, then the outer type gives the signedness. Otherwise
3650 (in case of a small bitfield) the signedness is unchanged. */
3651 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3652 *punsignedp = TYPE_UNSIGNED (outer_type);
3654 /* Compute the mask to access the bitfield. */
3655 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3656 precision = TYPE_PRECISION (unsigned_type);
3658 mask = build_int_cst_type (unsigned_type, -1);
3660 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3661 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3663 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3664 if (and_mask != 0)
3665 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3666 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3668 *pmask = mask;
3669 *pand_mask = and_mask;
3670 return inner;
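/* Illustrative case (hypothetical field layout): for a test on
   (unsigned char) (s.f & 0x0f) where s.f is a bit-field, this strips
   the conversion, records 0x0f as *PAND_MASK, returns the underlying
   object with *PBITSIZE and *PBITPOS taken from the field layout, and
   leaves in *PMASK the field-width mask merged with 0x0f.  */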
3673 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3674 bit positions and MASK is SIGNED. */
3676 static int
3677 all_ones_mask_p (const_tree mask, unsigned int size)
3679 tree type = TREE_TYPE (mask);
3680 unsigned int precision = TYPE_PRECISION (type);
3682 /* If this function returns true when the type of the mask is
3683 UNSIGNED, then there will be errors. In particular see
3684 gcc.c-torture/execute/990326-1.c. There does not appear to be
3685 any documentation paper trail as to why this is so. But the pre
3686 wide-int worked with that restriction and it has been preserved
3687 here. */
3688 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3689 return false;
3691 return wi::mask (size, false, precision) == mask;
3694 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3695 represents the sign bit of EXP's type. If EXP represents a sign
3696 or zero extension, also test VAL against the unextended type.
3697 The return value is the (sub)expression whose sign bit is VAL,
3698 or NULL_TREE otherwise. */
3700 static tree
3701 sign_bit_p (tree exp, const_tree val)
3703 int width;
3704 tree t;
3706 /* Tree EXP must have an integral type. */
3707 t = TREE_TYPE (exp);
3708 if (! INTEGRAL_TYPE_P (t))
3709 return NULL_TREE;
3711 /* Tree VAL must be an integer constant. */
3712 if (TREE_CODE (val) != INTEGER_CST
3713 || TREE_OVERFLOW (val))
3714 return NULL_TREE;
3716 width = TYPE_PRECISION (t);
3717 if (wi::only_sign_bit_p (val, width))
3718 return exp;
3720 /* Handle extension from a narrower type. */
3721 if (TREE_CODE (exp) == NOP_EXPR
3722 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3723 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3725 return NULL_TREE;
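/* Example: for a 32-bit int EXP, sign_bit_p returns EXP when VAL is
   0x80000000, the constant with only the sign bit set; for an
   extension such as (int) (signed char) x it also recurses and
   accepts the 8-bit sign bit 0x80, returning the narrower
   subexpression.  */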
3728 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3729 to be evaluated unconditionally. */
3731 static int
3732 simple_operand_p (const_tree exp)
3734 /* Strip any conversions that don't change the machine mode. */
3735 STRIP_NOPS (exp);
3737 return (CONSTANT_CLASS_P (exp)
3738 || TREE_CODE (exp) == SSA_NAME
3739 || (DECL_P (exp)
3740 && ! TREE_ADDRESSABLE (exp)
3741 && ! TREE_THIS_VOLATILE (exp)
3742 && ! DECL_NONLOCAL (exp)
3743 /* Don't regard global variables as simple. They may be
3744 allocated in ways unknown to the compiler (shared memory,
3745 #pragma weak, etc). */
3746 && ! TREE_PUBLIC (exp)
3747 && ! DECL_EXTERNAL (exp)
3748 /* Weakrefs are not safe to read, since they can be NULL.
3749 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3750 have the DECL_WEAK flag set. */
3751 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3752 /* Loading a static variable is unduly expensive, but global
3753 registers aren't expensive. */
3754 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3757 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3758 to be evaluated unconditionally.
3759 In addition to simple_operand_p, we assume that comparisons, conversions,
3760 and logic-not operations are simple if their operands are simple, too. */
3762 static bool
3763 simple_operand_p_2 (tree exp)
3765 enum tree_code code;
3767 if (TREE_SIDE_EFFECTS (exp)
3768 || tree_could_trap_p (exp))
3769 return false;
3771 while (CONVERT_EXPR_P (exp))
3772 exp = TREE_OPERAND (exp, 0);
3774 code = TREE_CODE (exp);
3776 if (TREE_CODE_CLASS (code) == tcc_comparison)
3777 return (simple_operand_p (TREE_OPERAND (exp, 0))
3778 && simple_operand_p (TREE_OPERAND (exp, 1)));
3780 if (code == TRUTH_NOT_EXPR)
3781 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3783 return simple_operand_p (exp);
3787 /* The following functions are subroutines to fold_range_test and allow it to
3788 try to change a logical combination of comparisons into a range test.
3790 For example, both
3791 X == 2 || X == 3 || X == 4 || X == 5
3792 and
3793 X >= 2 && X <= 5
3794 are converted to
3795 (unsigned) (X - 2) <= 3
3797 We describe each set of comparisons as being either inside or outside
3798 a range, using a variable named like IN_P, and then describe the
3799 range with a lower and upper bound. If one of the bounds is omitted,
3800 it represents either the highest or lowest value of the type.
3802 In the comments below, we represent a range by two numbers in brackets
3803 preceded by a "+" to designate being inside that range, or a "-" to
3804 designate being outside that range, so the condition can be inverted by
3805 flipping the prefix. An omitted bound is represented by a "-". For
3806 example, "- [-, 10]" means being outside the range starting at the lowest
3807 possible value and ending at 10, in other words, being greater than 10.
3808 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3809 always false.
3811 We set up things so that the missing bounds are handled in a consistent
3812 manner so neither a missing bound nor "true" and "false" need to be
3813 handled using a special case. */
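/* [Editorial sketch, not part of the original source.]  The example in
   the comment above can be checked exhaustively in plain C: both source
   forms agree with the folded unsigned test for every X, including
   values below 2, because the subtraction wraps around in unsigned
   arithmetic:  */
#if 0 /* illustrative example only */
#include <assert.h>

int
main (void)
{
  for (int x = -1000; x <= 1000; x++)
    {
      int range_form = (x >= 2 && x <= 5);
      int or_form = (x == 2 || x == 3 || x == 4 || x == 5);
      int folded = ((unsigned) (x - 2) <= 3u);
      assert (range_form == folded && or_form == folded);
    }
  return 0;
}
#endif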
3815 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3816 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3817 and UPPER1_P are nonzero if the respective argument is an upper bound
3818 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3819 must be specified for a comparison. ARG1 will be converted to ARG0's
3820 type if both are specified. */
3822 static tree
3823 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3824 tree arg1, int upper1_p)
3826 tree tem;
3827 int result;
3828 int sgn0, sgn1;
3830 /* If neither arg represents infinity, do the normal operation.
3831 Else, if not a comparison, return infinity. Else handle the special
3832 comparison rules. Note that most of the cases below won't occur, but
3833 are handled for consistency. */
3835 if (arg0 != 0 && arg1 != 0)
3837 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3838 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3839 STRIP_NOPS (tem);
3840 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3843 if (TREE_CODE_CLASS (code) != tcc_comparison)
3844 return 0;
3846 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3847 for neither. In real maths, we cannot assume open ended ranges are
3848 the same. But, this is computer arithmetic, where numbers are finite.
3849 We can therefore model any missing bound with a value Z, Z being
3850 greater than any representable number. This permits
3851 us to treat unbounded ranges as equal. */
3852 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3853 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3854 switch (code)
3856 case EQ_EXPR:
3857 result = sgn0 == sgn1;
3858 break;
3859 case NE_EXPR:
3860 result = sgn0 != sgn1;
3861 break;
3862 case LT_EXPR:
3863 result = sgn0 < sgn1;
3864 break;
3865 case LE_EXPR:
3866 result = sgn0 <= sgn1;
3867 break;
3868 case GT_EXPR:
3869 result = sgn0 > sgn1;
3870 break;
3871 case GE_EXPR:
3872 result = sgn0 >= sgn1;
3873 break;
3874 default:
3875 gcc_unreachable ();
3878 return constant_boolean_node (result, type);
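/* [Editorial sketch, not part of the original source.]  The SGN0/SGN1
   trick above maps a missing lower bound to -1, a missing upper bound to
   +1 and any finite value to 0, so ordinary integer comparison of the
   two codes answers the comparison between bounds.  A C model:  */
#if 0 /* illustrative example only */
#include <assert.h>

/* -1 stands for -infinity (missing lower bound), +1 for +infinity
   (missing upper bound), 0 for a finite value.  Comparing two finite
   values is not handled here; range_binop folds those directly.  */
static int
bound_lt (int sgn0, int sgn1)
{
  return sgn0 < sgn1;
}

int
main (void)
{
  assert (bound_lt (-1, 0));	/* -inf < any finite value */
  assert (bound_lt (0, 1));	/* any finite value < +inf */
  assert (!bound_lt (1, 1));	/* two missing upper bounds compare equal */
  return 0;
}
#endif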
3881 /* Helper routine for make_range. Perform one step for it, return
3882 new expression if the loop should continue or NULL_TREE if it should
3883 stop. */
3885 tree
3886 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3887 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3888 bool *strict_overflow_p)
3890 tree arg0_type = TREE_TYPE (arg0);
3891 tree n_low, n_high, low = *p_low, high = *p_high;
3892 int in_p = *p_in_p, n_in_p;
3894 switch (code)
3896 case TRUTH_NOT_EXPR:
3897 /* We can only do something if the range is testing for zero. */
3898 if (low == NULL_TREE || high == NULL_TREE
3899 || ! integer_zerop (low) || ! integer_zerop (high))
3900 return NULL_TREE;
3901 *p_in_p = ! in_p;
3902 return arg0;
3904 case EQ_EXPR: case NE_EXPR:
3905 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3906 /* We can only do something if the range is testing for zero
3907 and if the second operand is an integer constant. Note that
3908 saying something is "in" the range we make is done by
3909 complementing IN_P, since IN_P is set in the initial case of
3910 being not equal to zero; "out" is leaving it alone. */
3911 if (low == NULL_TREE || high == NULL_TREE
3912 || ! integer_zerop (low) || ! integer_zerop (high)
3913 || TREE_CODE (arg1) != INTEGER_CST)
3914 return NULL_TREE;
3916 switch (code)
3918 case NE_EXPR: /* - [c, c] */
3919 low = high = arg1;
3920 break;
3921 case EQ_EXPR: /* + [c, c] */
3922 in_p = ! in_p, low = high = arg1;
3923 break;
3924 case GT_EXPR: /* - [-, c] */
3925 low = 0, high = arg1;
3926 break;
3927 case GE_EXPR: /* + [c, -] */
3928 in_p = ! in_p, low = arg1, high = 0;
3929 break;
3930 case LT_EXPR: /* - [c, -] */
3931 low = arg1, high = 0;
3932 break;
3933 case LE_EXPR: /* + [-, c] */
3934 in_p = ! in_p, low = 0, high = arg1;
3935 break;
3936 default:
3937 gcc_unreachable ();
3940 /* If this is an unsigned comparison, we also know that EXP is
3941 greater than or equal to zero. We base the range tests we make
3942 on that fact, so we record it here so we can parse existing
3943 range tests. We test arg0_type since often the return type
3944 of, e.g. EQ_EXPR, is boolean. */
3945 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3947 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3948 in_p, low, high, 1,
3949 build_int_cst (arg0_type, 0),
3950 NULL_TREE))
3951 return NULL_TREE;
3953 in_p = n_in_p, low = n_low, high = n_high;
3955 /* If the high bound is missing, but we have a nonzero low
3956 bound, reverse the range so it goes from zero to the low bound
3957 minus 1. */
3958 if (high == 0 && low && ! integer_zerop (low))
3960 in_p = ! in_p;
3961 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3962 build_int_cst (TREE_TYPE (low), 1), 0);
3963 low = build_int_cst (arg0_type, 0);
3967 *p_low = low;
3968 *p_high = high;
3969 *p_in_p = in_p;
3970 return arg0;
3972 case NEGATE_EXPR:
3973 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3974 low and high are non-NULL, then normalize will DTRT. */
3975 if (!TYPE_UNSIGNED (arg0_type)
3976 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3978 if (low == NULL_TREE)
3979 low = TYPE_MIN_VALUE (arg0_type);
3980 if (high == NULL_TREE)
3981 high = TYPE_MAX_VALUE (arg0_type);
3984 /* (-x) IN [a,b] -> x in [-b, -a] */
3985 n_low = range_binop (MINUS_EXPR, exp_type,
3986 build_int_cst (exp_type, 0),
3987 0, high, 1);
3988 n_high = range_binop (MINUS_EXPR, exp_type,
3989 build_int_cst (exp_type, 0),
3990 0, low, 0);
3991 if (n_high != 0 && TREE_OVERFLOW (n_high))
3992 return NULL_TREE;
3993 goto normalize;
3995 case BIT_NOT_EXPR:
3996 /* ~ X -> -X - 1 */
3997 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3998 build_int_cst (exp_type, 1));
4000 case PLUS_EXPR:
4001 case MINUS_EXPR:
4002 if (TREE_CODE (arg1) != INTEGER_CST)
4003 return NULL_TREE;
4005 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4006 move a constant to the other side. */
4007 if (!TYPE_UNSIGNED (arg0_type)
4008 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4009 return NULL_TREE;
4011 /* If EXP is signed, any overflow in the computation is undefined,
4012 so we don't worry about it so long as our computations on
4013 the bounds don't overflow. For unsigned, overflow is defined
4014 and this is exactly the right thing. */
4015 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4016 arg0_type, low, 0, arg1, 0);
4017 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4018 arg0_type, high, 1, arg1, 0);
4019 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4020 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4021 return NULL_TREE;
4023 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4024 *strict_overflow_p = true;
4026 normalize:
4027 /* Check for an unsigned range which has wrapped around the maximum
4028 value thus making n_high < n_low, and normalize it. */
4029 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4031 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4032 build_int_cst (TREE_TYPE (n_high), 1), 0);
4033 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4034 build_int_cst (TREE_TYPE (n_low), 1), 0);
4036 /* If the range is of the form +/- [ x+1, x ], we won't
4037 be able to normalize it. But then, it represents the
4038 whole range or the empty set, so make it
4039 +/- [ -, - ]. */
4040 if (tree_int_cst_equal (n_low, low)
4041 && tree_int_cst_equal (n_high, high))
4042 low = high = 0;
4043 else
4044 in_p = ! in_p;
4046 else
4047 low = n_low, high = n_high;
4049 *p_low = low;
4050 *p_high = high;
4051 *p_in_p = in_p;
4052 return arg0;
4054 CASE_CONVERT:
4055 case NON_LVALUE_EXPR:
4056 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4057 return NULL_TREE;
4059 if (! INTEGRAL_TYPE_P (arg0_type)
4060 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4061 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4062 return NULL_TREE;
4064 n_low = low, n_high = high;
4066 if (n_low != 0)
4067 n_low = fold_convert_loc (loc, arg0_type, n_low);
4069 if (n_high != 0)
4070 n_high = fold_convert_loc (loc, arg0_type, n_high);
4072 /* If we're converting arg0 from an unsigned type to exp's
4073 signed type, we will be doing the comparison as unsigned.
4074 The tests above have already verified that LOW and HIGH
4075 are both positive.
4077 So we have to ensure that we will handle large unsigned
4078 values the same way that the current signed bounds treat
4079 negative values. */
4081 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4083 tree high_positive;
4084 tree equiv_type;
4085 /* For fixed-point modes, we need to pass the saturating flag
4086 as the 2nd parameter. */
4087 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4088 equiv_type
4089 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4090 TYPE_SATURATING (arg0_type));
4091 else
4092 equiv_type
4093 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4095 /* A range without an upper bound is, naturally, unbounded.
4096 Since convert would have cropped a very large value, use
4097 the max value for the destination type. */
4098 high_positive
4099 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4100 : TYPE_MAX_VALUE (arg0_type);
4102 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4103 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4104 fold_convert_loc (loc, arg0_type,
4105 high_positive),
4106 build_int_cst (arg0_type, 1));
4108 /* If the low bound is specified, "and" the range with the
4109 range for which the original unsigned value will be
4110 positive. */
4111 if (low != 0)
4113 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4114 1, fold_convert_loc (loc, arg0_type,
4115 integer_zero_node),
4116 high_positive))
4117 return NULL_TREE;
4119 in_p = (n_in_p == in_p);
4121 else
4123 /* Otherwise, "or" the range with the range of the input
4124 that will be interpreted as negative. */
4125 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4126 1, fold_convert_loc (loc, arg0_type,
4127 integer_zero_node),
4128 high_positive))
4129 return NULL_TREE;
4131 in_p = (in_p != n_in_p);
4135 *p_low = n_low;
4136 *p_high = n_high;
4137 *p_in_p = in_p;
4138 return arg0;
4140 default:
4141 return NULL_TREE;
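/* [Editorial sketch, not part of the original source.]  For the
   PLUS_EXPR case above: testing X + C against [LOW, HIGH] becomes
   testing X against [LOW - C, HIGH - C], and when the new unsigned
   bounds wrap around (n_high < n_low) the range is normalized by
   inverting IN_P.  Exhaustive check for an 8-bit unsigned type:  */
#if 0 /* illustrative example only */
#include <assert.h>

int
main (void)
{
  for (unsigned x = 0; x < 256; x++)
    {
      /* x + 5 in [3, 250] (mod 256) shifts to x in [254, 245], which
	 wraps; normalizing gives x NOT in [246, 253].  */
      unsigned t = (x + 5) & 0xffu;
      int orig = (t >= 3u && t <= 250u);
      int folded = !(x >= 246u && x <= 253u);
      assert (orig == folded);
    }
  return 0;
}
#endif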
4145 /* Given EXP, a logical expression, set the range it is testing into
4146 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4147 actually being tested. *PLOW and *PHIGH will be made of the same
4148 type as the returned expression. If EXP is not a comparison, we
4149 will most likely not be returning a useful value and range. Set
4150 *STRICT_OVERFLOW_P to true if the return value is only valid
4151 because signed overflow is undefined; otherwise, do not change
4152 *STRICT_OVERFLOW_P. */
4154 tree
4155 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4156 bool *strict_overflow_p)
4158 enum tree_code code;
4159 tree arg0, arg1 = NULL_TREE;
4160 tree exp_type, nexp;
4161 int in_p;
4162 tree low, high;
4163 location_t loc = EXPR_LOCATION (exp);
4165 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4166 and see if we can refine the range. Some of the cases below may not
4167 happen, but it doesn't seem worth worrying about this. We "continue"
4168 the outer loop when we've changed something; otherwise we "break"
4169 the switch, which will "break" the while. */
4171 in_p = 0;
4172 low = high = build_int_cst (TREE_TYPE (exp), 0);
4174 while (1)
4176 code = TREE_CODE (exp);
4177 exp_type = TREE_TYPE (exp);
4178 arg0 = NULL_TREE;
4180 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4182 if (TREE_OPERAND_LENGTH (exp) > 0)
4183 arg0 = TREE_OPERAND (exp, 0);
4184 if (TREE_CODE_CLASS (code) == tcc_binary
4185 || TREE_CODE_CLASS (code) == tcc_comparison
4186 || (TREE_CODE_CLASS (code) == tcc_expression
4187 && TREE_OPERAND_LENGTH (exp) > 1))
4188 arg1 = TREE_OPERAND (exp, 1);
4190 if (arg0 == NULL_TREE)
4191 break;
4193 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4194 &high, &in_p, strict_overflow_p);
4195 if (nexp == NULL_TREE)
4196 break;
4197 exp = nexp;
4200 /* If EXP is a constant, we can evaluate whether this is true or false. */
4201 if (TREE_CODE (exp) == INTEGER_CST)
4203 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4204 exp, 0, low, 0))
4205 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4206 exp, 1, high, 1)));
4207 low = high = 0;
4208 exp = 0;
4211 *pin_p = in_p, *plow = low, *phigh = high;
4212 return exp;
4215 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4216 type, TYPE, return an expression to test if EXP is in (or out of, depending
4217 on IN_P) the range. Return 0 if the test couldn't be created. */
4219 tree
4220 build_range_check (location_t loc, tree type, tree exp, int in_p,
4221 tree low, tree high)
4223 tree etype = TREE_TYPE (exp), value;
4225 #ifdef HAVE_canonicalize_funcptr_for_compare
4226 /* Disable this optimization for function pointer expressions
4227 on targets that require function pointer canonicalization. */
4228 if (HAVE_canonicalize_funcptr_for_compare
4229 && TREE_CODE (etype) == POINTER_TYPE
4230 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4231 return NULL_TREE;
4232 #endif
4234 if (! in_p)
4236 value = build_range_check (loc, type, exp, 1, low, high);
4237 if (value != 0)
4238 return invert_truthvalue_loc (loc, value);
4240 return 0;
4243 if (low == 0 && high == 0)
4244 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4246 if (low == 0)
4247 return fold_build2_loc (loc, LE_EXPR, type, exp,
4248 fold_convert_loc (loc, etype, high));
4250 if (high == 0)
4251 return fold_build2_loc (loc, GE_EXPR, type, exp,
4252 fold_convert_loc (loc, etype, low));
4254 if (operand_equal_p (low, high, 0))
4255 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4256 fold_convert_loc (loc, etype, low));
4258 if (integer_zerop (low))
4260 if (! TYPE_UNSIGNED (etype))
4262 etype = unsigned_type_for (etype);
4263 high = fold_convert_loc (loc, etype, high);
4264 exp = fold_convert_loc (loc, etype, exp);
4266 return build_range_check (loc, type, exp, 1, 0, high);
4269 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4270 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4272 int prec = TYPE_PRECISION (etype);
4274 if (wi::mask (prec - 1, false, prec) == high)
4276 if (TYPE_UNSIGNED (etype))
4278 tree signed_etype = signed_type_for (etype);
4279 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4280 etype
4281 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4282 else
4283 etype = signed_etype;
4284 exp = fold_convert_loc (loc, etype, exp);
4286 return fold_build2_loc (loc, GT_EXPR, type, exp,
4287 build_int_cst (etype, 0));
4291 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4292 This requires wrap-around arithmetic for the type of the expression.
4293 First make sure that arithmetic in this type is valid, then make sure
4294 that it wraps around. */
4295 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4296 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4297 TYPE_UNSIGNED (etype));
4299 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4301 tree utype, minv, maxv;
4303 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4304 for the type in question, as we rely on this here. */
4305 utype = unsigned_type_for (etype);
4306 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4307 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4308 build_int_cst (TREE_TYPE (maxv), 1), 1);
4309 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4311 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4312 minv, 1, maxv, 1)))
4313 etype = utype;
4314 else
4315 return 0;
4318 high = fold_convert_loc (loc, etype, high);
4319 low = fold_convert_loc (loc, etype, low);
4320 exp = fold_convert_loc (loc, etype, exp);
4322 value = const_binop (MINUS_EXPR, high, low);
4325 if (POINTER_TYPE_P (etype))
4327 if (value != 0 && !TREE_OVERFLOW (value))
4329 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4330 return build_range_check (loc, type,
4331 fold_build_pointer_plus_loc (loc, exp, low),
4332 1, build_int_cst (etype, 0), value);
4334 return 0;
4337 if (value != 0 && !TREE_OVERFLOW (value))
4338 return build_range_check (loc, type,
4339 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4340 1, build_int_cst (etype, 0), value);
4342 return 0;
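/* [Editorial sketch, not part of the original source.]  The special case
   above rewrites 1 <= c && c <= 127 on an 8-bit quantity as a single
   signed comparison, since [1, 127] is exactly the set of 8-bit values
   that are strictly positive when reinterpreted as signed char.
   Exhaustive check, assuming the usual 8-bit two's-complement
   signed char:  */
#if 0 /* illustrative example only */
#include <assert.h>

int
main (void)
{
  for (unsigned c = 0; c < 256; c++)
    {
      int orig = (c >= 1 && c <= 127);
      int folded = ((signed char) c > 0);
      assert (orig == folded);
    }
  return 0;
}
#endif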
4345 /* Return the predecessor of VAL in its type, handling the infinite case. */
4347 static tree
4348 range_predecessor (tree val)
4350 tree type = TREE_TYPE (val);
4352 if (INTEGRAL_TYPE_P (type)
4353 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4354 return 0;
4355 else
4356 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4357 build_int_cst (TREE_TYPE (val), 1), 0);
4360 /* Return the successor of VAL in its type, handling the infinite case. */
4362 static tree
4363 range_successor (tree val)
4365 tree type = TREE_TYPE (val);
4367 if (INTEGRAL_TYPE_P (type)
4368 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4369 return 0;
4370 else
4371 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4372 build_int_cst (TREE_TYPE (val), 1), 0);
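/* [Editorial sketch, not part of the original source.]  Both helpers
   above return 0 ("no such value") when VAL is already at the end of its
   type, and callers punt in that case.  A C model for unsigned char,
   using -1 as the "none" marker:  */
#if 0 /* illustrative example only */
#include <assert.h>

static int
successor_model (unsigned char val)	/* -1 means "no successor" */
{
  return val == 255 ? -1 : val + 1;
}

static int
predecessor_model (unsigned char val)	/* -1 means "no predecessor" */
{
  return val == 0 ? -1 : val - 1;
}

int
main (void)
{
  assert (successor_model (41) == 42);
  assert (successor_model (255) == -1);	/* TYPE_MAX_VALUE: punt */
  assert (predecessor_model (0) == -1);	/* TYPE_MIN_VALUE: punt */
  return 0;
}
#endif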
4375 /* Given two ranges, see if we can merge them into one. Return 1 if we
4376 can, 0 if we can't. Set the output range into the specified parameters. */
4378 bool
4379 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4380 tree high0, int in1_p, tree low1, tree high1)
4382 int no_overlap;
4383 int subset;
4384 int temp;
4385 tree tem;
4386 int in_p;
4387 tree low, high;
4388 int lowequal = ((low0 == 0 && low1 == 0)
4389 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4390 low0, 0, low1, 0)));
4391 int highequal = ((high0 == 0 && high1 == 0)
4392 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4393 high0, 1, high1, 1)));
4395 /* Make range 0 be the range that starts first, or ends last if they
4396 start at the same value. Swap them if necessary. */
4397 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4398 low0, 0, low1, 0))
4399 || (lowequal
4400 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4401 high1, 1, high0, 1))))
4403 temp = in0_p, in0_p = in1_p, in1_p = temp;
4404 tem = low0, low0 = low1, low1 = tem;
4405 tem = high0, high0 = high1, high1 = tem;
4408 /* Now flag two cases, whether the ranges are disjoint or whether the
4409 second range is totally subsumed in the first. Note that the tests
4410 below are simplified by the ones above. */
4411 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4412 high0, 1, low1, 0));
4413 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4414 high1, 1, high0, 1));
4416 /* We now have four cases, depending on whether we are including or
4417 excluding the two ranges. */
4418 if (in0_p && in1_p)
4420 /* If they don't overlap, the result is false. If the second range
4421 is a subset it is the result. Otherwise, the range is from the start
4422 of the second to the end of the first. */
4423 if (no_overlap)
4424 in_p = 0, low = high = 0;
4425 else if (subset)
4426 in_p = 1, low = low1, high = high1;
4427 else
4428 in_p = 1, low = low1, high = high0;
4431 else if (in0_p && ! in1_p)
4433 /* If they don't overlap, the result is the first range. If they are
4434 equal, the result is false. If the second range is a subset of the
4435 first, and the ranges begin at the same place, we go from just after
4436 the end of the second range to the end of the first. If the second
4437 range is not a subset of the first, or if it is a subset and both
4438 ranges end at the same place, the range starts at the start of the
4439 first range and ends just before the second range.
4440 Otherwise, we can't describe this as a single range. */
4441 if (no_overlap)
4442 in_p = 1, low = low0, high = high0;
4443 else if (lowequal && highequal)
4444 in_p = 0, low = high = 0;
4445 else if (subset && lowequal)
4447 low = range_successor (high1);
4448 high = high0;
4449 in_p = 1;
4450 if (low == 0)
4452 /* We are in the weird situation where high0 > high1 but
4453 high1 has no successor. Punt. */
4454 return 0;
4457 else if (! subset || highequal)
4459 low = low0;
4460 high = range_predecessor (low1);
4461 in_p = 1;
4462 if (high == 0)
4464 /* low0 < low1 but low1 has no predecessor. Punt. */
4465 return 0;
4468 else
4469 return 0;
4472 else if (! in0_p && in1_p)
4474 /* If they don't overlap, the result is the second range. If the second
4475 is a subset of the first, the result is false. Otherwise,
4476 the range starts just after the first range and ends at the
4477 end of the second. */
4478 if (no_overlap)
4479 in_p = 1, low = low1, high = high1;
4480 else if (subset || highequal)
4481 in_p = 0, low = high = 0;
4482 else
4484 low = range_successor (high0);
4485 high = high1;
4486 in_p = 1;
4487 if (low == 0)
4489 /* high1 > high0 but high0 has no successor. Punt. */
4490 return 0;
4495 else
4497 /* The case where we are excluding both ranges. Here the complex case
4498 is if they don't overlap. In that case, the only time we have a
4499 range is if they are adjacent. If the second is a subset of the
4500 first, the result is the first. Otherwise, the range to exclude
4501 starts at the beginning of the first range and ends at the end of the
4502 second. */
4503 if (no_overlap)
4505 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4506 range_successor (high0),
4507 1, low1, 0)))
4508 in_p = 0, low = low0, high = high1;
4509 else
4511 /* Canonicalize - [min, x] into - [-, x]. */
4512 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4513 switch (TREE_CODE (TREE_TYPE (low0)))
4515 case ENUMERAL_TYPE:
4516 if (TYPE_PRECISION (TREE_TYPE (low0))
4517 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4518 break;
4519 /* FALLTHROUGH */
4520 case INTEGER_TYPE:
4521 if (tree_int_cst_equal (low0,
4522 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4523 low0 = 0;
4524 break;
4525 case POINTER_TYPE:
4526 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4527 && integer_zerop (low0))
4528 low0 = 0;
4529 break;
4530 default:
4531 break;
4534 /* Canonicalize - [x, max] into - [x, -]. */
4535 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4536 switch (TREE_CODE (TREE_TYPE (high1)))
4538 case ENUMERAL_TYPE:
4539 if (TYPE_PRECISION (TREE_TYPE (high1))
4540 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4541 break;
4542 /* FALLTHROUGH */
4543 case INTEGER_TYPE:
4544 if (tree_int_cst_equal (high1,
4545 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4546 high1 = 0;
4547 break;
4548 case POINTER_TYPE:
4549 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4550 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4551 high1, 1,
4552 build_int_cst (TREE_TYPE (high1), 1),
4553 1)))
4554 high1 = 0;
4555 break;
4556 default:
4557 break;
4560 /* The ranges might also be adjacent between the maximum and
4561 minimum values of the given type. For
4562 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4563 return + [x + 1, y - 1]. */
4564 if (low0 == 0 && high1 == 0)
4566 low = range_successor (high0);
4567 high = range_predecessor (low1);
4568 if (low == 0 || high == 0)
4569 return 0;
4571 in_p = 1;
4573 else
4574 return 0;
4577 else if (subset)
4578 in_p = 0, low = low0, high = high0;
4579 else
4580 in_p = 0, low = low0, high = high1;
4583 *pin_p = in_p, *plow = low, *phigh = high;
4584 return 1;
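/* [Editorial sketch, not part of the original source.]  The
   "in0_p && in1_p" case above: two overlapping inclusive ranges combine
   into one running from the later start to the earlier end.  Exhaustive
   check that x in [2,10] && x in [5,20] is x in [5,10]:  */
#if 0 /* illustrative example only */
#include <assert.h>

int
main (void)
{
  for (int x = -50; x <= 50; x++)
    {
      int orig = (x >= 2 && x <= 10) && (x >= 5 && x <= 20);
      int merged = (x >= 5 && x <= 10);
      assert (orig == merged);
    }
  return 0;
}
#endif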
4588 /* Subroutine of fold, looking inside expressions of the form
4589 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4590 of the COND_EXPR. This function is also used to optimize
4591 A op B ? C : A, by reversing the comparison first.
4593 Return a folded expression whose code is not a COND_EXPR
4594 anymore, or NULL_TREE if no folding opportunity is found. */
4596 static tree
4597 fold_cond_expr_with_comparison (location_t loc, tree type,
4598 tree arg0, tree arg1, tree arg2)
4600 enum tree_code comp_code = TREE_CODE (arg0);
4601 tree arg00 = TREE_OPERAND (arg0, 0);
4602 tree arg01 = TREE_OPERAND (arg0, 1);
4603 tree arg1_type = TREE_TYPE (arg1);
4604 tree tem;
4606 STRIP_NOPS (arg1);
4607 STRIP_NOPS (arg2);
4609 /* If we have A op 0 ? A : -A, consider applying the following
4610 transformations:
4612 A == 0? A : -A same as -A
4613 A != 0? A : -A same as A
4614 A >= 0? A : -A same as abs (A)
4615 A > 0? A : -A same as abs (A)
4616 A <= 0? A : -A same as -abs (A)
4617 A < 0? A : -A same as -abs (A)
4619 None of these transformations work for modes with signed
4620 zeros. If A is +/-0, the first two transformations will
4621 change the sign of the result (from +0 to -0, or vice
4622 versa). The last four will fix the sign of the result,
4623 even though the original expressions could be positive or
4624 negative, depending on the sign of A.
4626 Note that all these transformations are correct if A is
4627 NaN, since the two alternatives (A and -A) are also NaNs. */
4628 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4629 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4630 ? real_zerop (arg01)
4631 : integer_zerop (arg01))
4632 && ((TREE_CODE (arg2) == NEGATE_EXPR
4633 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4634 /* In the case that A is of the form X-Y, '-A' (arg2) may
4635 have already been folded to Y-X, check for that. */
4636 || (TREE_CODE (arg1) == MINUS_EXPR
4637 && TREE_CODE (arg2) == MINUS_EXPR
4638 && operand_equal_p (TREE_OPERAND (arg1, 0),
4639 TREE_OPERAND (arg2, 1), 0)
4640 && operand_equal_p (TREE_OPERAND (arg1, 1),
4641 TREE_OPERAND (arg2, 0), 0))))
4642 switch (comp_code)
4644 case EQ_EXPR:
4645 case UNEQ_EXPR:
4646 tem = fold_convert_loc (loc, arg1_type, arg1);
4647 return pedantic_non_lvalue_loc (loc,
4648 fold_convert_loc (loc, type,
4649 negate_expr (tem)));
4650 case NE_EXPR:
4651 case LTGT_EXPR:
4652 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4653 case UNGE_EXPR:
4654 case UNGT_EXPR:
4655 if (flag_trapping_math)
4656 break;
4657 /* Fall through. */
4658 case GE_EXPR:
4659 case GT_EXPR:
4660 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4661 arg1 = fold_convert_loc (loc, signed_type_for
4662 (TREE_TYPE (arg1)), arg1);
4663 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4664 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4665 case UNLE_EXPR:
4666 case UNLT_EXPR:
4667 if (flag_trapping_math)
4668 break;
4669 case LE_EXPR:
4670 case LT_EXPR:
4671 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4672 arg1 = fold_convert_loc (loc, signed_type_for
4673 (TREE_TYPE (arg1)), arg1);
4674 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4675 return negate_expr (fold_convert_loc (loc, type, tem));
4676 default:
4677 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4678 break;
4681 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4682 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4683 both transformations are correct when A is NaN: A != 0
4684 is then true, and A == 0 is false. */
4686 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4687 && integer_zerop (arg01) && integer_zerop (arg2))
4689 if (comp_code == NE_EXPR)
4690 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4691 else if (comp_code == EQ_EXPR)
4692 return build_zero_cst (type);
4695 /* Try some transformations of A op B ? A : B.
4697 A == B? A : B same as B
4698 A != B? A : B same as A
4699 A >= B? A : B same as max (A, B)
4700 A > B? A : B same as max (B, A)
4701 A <= B? A : B same as min (A, B)
4702 A < B? A : B same as min (B, A)
4704 As above, these transformations don't work in the presence
4705 of signed zeros. For example, if A and B are zeros of
4706 opposite sign, the first two transformations will change
4707 the sign of the result. In the last four, the original
4708 expressions give different results for (A=+0, B=-0) and
4709 (A=-0, B=+0), but the transformed expressions do not.
4711 The first two transformations are correct if either A or B
4712 is a NaN. In the first transformation, the condition will
4713 be false, and B will indeed be chosen. In the case of the
4714 second transformation, the condition A != B will be true,
4715 and A will be chosen.
4717 The conversions to max() and min() are not correct if B is
4718 a number and A is not. The conditions in the original
4719 expressions will be false, so all four give B. The min()
4720 and max() versions would give a NaN instead. */
4721 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4722 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4723 /* Avoid these transformations if the COND_EXPR may be used
4724 as an lvalue in the C++ front-end. PR c++/19199. */
4725 && (in_gimple_form
4726 || VECTOR_TYPE_P (type)
4727 || (strcmp (lang_hooks.name, "GNU C++") != 0
4728 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4729 || ! maybe_lvalue_p (arg1)
4730 || ! maybe_lvalue_p (arg2)))
4732 tree comp_op0 = arg00;
4733 tree comp_op1 = arg01;
4734 tree comp_type = TREE_TYPE (comp_op0);
4736 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4737 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4739 comp_type = type;
4740 comp_op0 = arg1;
4741 comp_op1 = arg2;
4744 switch (comp_code)
4746 case EQ_EXPR:
4747 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4748 case NE_EXPR:
4749 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4750 case LE_EXPR:
4751 case LT_EXPR:
4752 case UNLE_EXPR:
4753 case UNLT_EXPR:
4754 /* In C++ a ?: expression can be an lvalue, so put the
4755 operand which will be used if they are equal first
4756 so that we can convert this back to the
4757 corresponding COND_EXPR. */
4758 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4760 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4761 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4762 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4763 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4764 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4765 comp_op1, comp_op0);
4766 return pedantic_non_lvalue_loc (loc,
4767 fold_convert_loc (loc, type, tem));
4769 break;
4770 case GE_EXPR:
4771 case GT_EXPR:
4772 case UNGE_EXPR:
4773 case UNGT_EXPR:
4774 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4776 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4777 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4778 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4779 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4780 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4781 comp_op1, comp_op0);
4782 return pedantic_non_lvalue_loc (loc,
4783 fold_convert_loc (loc, type, tem));
4785 break;
4786 case UNEQ_EXPR:
4787 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4788 return pedantic_non_lvalue_loc (loc,
4789 fold_convert_loc (loc, type, arg2));
4790 break;
4791 case LTGT_EXPR:
4792 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4793 return pedantic_non_lvalue_loc (loc,
4794 fold_convert_loc (loc, type, arg1));
4795 break;
4796 default:
4797 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4798 break;
4802 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4803 we might still be able to simplify this. For example,
4804 if C1 is one less or one more than C2, this might have started
4805 out as a MIN or MAX and been transformed by this function.
4806 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4808 if (INTEGRAL_TYPE_P (type)
4809 && TREE_CODE (arg01) == INTEGER_CST
4810 && TREE_CODE (arg2) == INTEGER_CST)
4811 switch (comp_code)
4813 case EQ_EXPR:
4814 if (TREE_CODE (arg1) == INTEGER_CST)
4815 break;
4816 /* We can replace A with C1 in this case. */
4817 arg1 = fold_convert_loc (loc, type, arg01);
4818 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4820 case LT_EXPR:
4821 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4822 MIN_EXPR, to preserve the signedness of the comparison. */
4823 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4824 OEP_ONLY_CONST)
4825 && operand_equal_p (arg01,
4826 const_binop (PLUS_EXPR, arg2,
4827 build_int_cst (type, 1)),
4828 OEP_ONLY_CONST))
4830 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4831 fold_convert_loc (loc, TREE_TYPE (arg00),
4832 arg2));
4833 return pedantic_non_lvalue_loc (loc,
4834 fold_convert_loc (loc, type, tem));
4836 break;
4838 case LE_EXPR:
4839 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4840 as above. */
4841 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4842 OEP_ONLY_CONST)
4843 && operand_equal_p (arg01,
4844 const_binop (MINUS_EXPR, arg2,
4845 build_int_cst (type, 1)),
4846 OEP_ONLY_CONST))
4848 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4849 fold_convert_loc (loc, TREE_TYPE (arg00),
4850 arg2));
4851 return pedantic_non_lvalue_loc (loc,
4852 fold_convert_loc (loc, type, tem));
4854 break;
4856 case GT_EXPR:
4857 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4858 MAX_EXPR, to preserve the signedness of the comparison. */
4859 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4860 OEP_ONLY_CONST)
4861 && operand_equal_p (arg01,
4862 const_binop (MINUS_EXPR, arg2,
4863 build_int_cst (type, 1)),
4864 OEP_ONLY_CONST))
4866 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4867 fold_convert_loc (loc, TREE_TYPE (arg00),
4868 arg2));
4869 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4871 break;
4873 case GE_EXPR:
4874 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4875 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4876 OEP_ONLY_CONST)
4877 && operand_equal_p (arg01,
4878 const_binop (PLUS_EXPR, arg2,
4879 build_int_cst (type, 1)),
4880 OEP_ONLY_CONST))
4882 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4883 fold_convert_loc (loc, TREE_TYPE (arg00),
4884 arg2));
4885 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4887 break;
4888 case NE_EXPR:
4889 break;
4890 default:
4891 gcc_unreachable ();
4894 return NULL_TREE;
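/* [Editorial sketch, not part of the original source.]  The
   "A op 0 ? A : -A" table from the start of the function, checked for
   integers.  Signed zeros do not exist here, so all six rows hold;
   INT_MIN is avoided because negating it overflows:  */
#if 0 /* illustrative example only */
#include <assert.h>
#include <stdlib.h>

int
main (void)
{
  for (int a = -1000; a <= 1000; a++)
    {
      assert ((a == 0 ? a : -a) == -a);
      assert ((a != 0 ? a : -a) == a);
      assert ((a >= 0 ? a : -a) == abs (a));
      assert ((a > 0 ? a : -a) == abs (a));
      assert ((a <= 0 ? a : -a) == -abs (a));
      assert ((a < 0 ? a : -a) == -abs (a));
    }
  return 0;
}
#endif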
4899 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4900 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4901 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4902 false) >= 2)
4903 #endif
4905 /* EXP is some logical combination of boolean tests. See if we can
4906 merge it into some range test. Return the new tree if so. */
4908 static tree
4909 fold_range_test (location_t loc, enum tree_code code, tree type,
4910 tree op0, tree op1)
4912 int or_op = (code == TRUTH_ORIF_EXPR
4913 || code == TRUTH_OR_EXPR);
4914 int in0_p, in1_p, in_p;
4915 tree low0, low1, low, high0, high1, high;
4916 bool strict_overflow_p = false;
4917 tree tem, lhs, rhs;
4918 const char * const warnmsg = G_("assuming signed overflow does not occur "
4919 "when simplifying range test");
4921 if (!INTEGRAL_TYPE_P (type))
4922 return 0;
4924 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4925 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4927 /* If this is an OR operation, invert both sides; we will invert
4928 again at the end. */
4929 if (or_op)
4930 in0_p = ! in0_p, in1_p = ! in1_p;
4932 /* If both expressions are the same, if we can merge the ranges, and we
4933 can build the range test, return it or it inverted. If one of the
4934 ranges is always true or always false, consider it to be the same
4935 expression as the other. */
4936 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4937 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4938 in1_p, low1, high1)
4939 && 0 != (tem = (build_range_check (loc, type,
4940 lhs != 0 ? lhs
4941 : rhs != 0 ? rhs : integer_zero_node,
4942 in_p, low, high))))
4944 if (strict_overflow_p)
4945 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4946 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4949 /* On machines where the branch cost is expensive, if this is a
4950 short-circuited branch and the underlying object on both sides
4951 is the same, make a non-short-circuit operation. */
4952 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4953 && lhs != 0 && rhs != 0
4954 && (code == TRUTH_ANDIF_EXPR
4955 || code == TRUTH_ORIF_EXPR)
4956 && operand_equal_p (lhs, rhs, 0))
4958 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4959 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4960 which case we can't do this. */
4961 if (simple_operand_p (lhs))
4962 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4963 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4964 type, op0, op1);
4966 else if (!lang_hooks.decls.global_bindings_p ()
4967 && !CONTAINS_PLACEHOLDER_P (lhs))
4969 tree common = save_expr (lhs);
4971 if (0 != (lhs = build_range_check (loc, type, common,
4972 or_op ? ! in0_p : in0_p,
4973 low0, high0))
4974 && (0 != (rhs = build_range_check (loc, type, common,
4975 or_op ? ! in1_p : in1_p,
4976 low1, high1))))
4978 if (strict_overflow_p)
4979 fold_overflow_warning (warnmsg,
4980 WARN_STRICT_OVERFLOW_COMPARISON);
4981 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4982 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4983 type, lhs, rhs);
4988 return 0;
4991 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
4992 P-bit value. Arrange things so the extra bits will be set to zero if and
4993 only if C is sign-extended to its full width. If MASK is nonzero,
4994 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4996 static tree
4997 unextend (tree c, int p, int unsignedp, tree mask)
4999 tree type = TREE_TYPE (c);
5000 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5001 tree temp;
5003 if (p == modesize || unsignedp)
5004 return c;
5006 /* We work by getting just the sign bit into the low-order bit, then
5007 into the high-order bit, then sign-extend. We then XOR that value
5008 with C. */
5009 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5011 /* We must use a signed type in order to get an arithmetic right shift.
5012 However, we must also avoid introducing accidental overflows, so that
5013 a subsequent call to integer_zerop will work. Hence we must
5014 do the type conversion here. At this point, the constant is either
5015 zero or one, and the conversion to a signed type can never overflow.
5016 We could get an overflow if this conversion is done anywhere else. */
5017 if (TYPE_UNSIGNED (type))
5018 temp = fold_convert (signed_type_for (type), temp);
5020 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5021 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5022 if (mask != 0)
5023 temp = const_binop (BIT_AND_EXPR, temp,
5024 fold_convert (TREE_TYPE (c), mask));
5025 /* If necessary, convert the type back to match the type of C. */
5026 if (TYPE_UNSIGNED (type))
5027 temp = fold_convert (type, temp);
5029 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
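/* [Editorial sketch, not part of the original source.]  unextend works
   by smearing the sign bit of the P-bit value across the upper bits and
   XOR-ing it back in.  The same smearing underlies the classic portable
   sign-extension idiom, shown here for a P-bit field held in a uint32_t
   (assumes two's complement, as GCC targets do):  */
#if 0 /* illustrative example only */
#include <assert.h>
#include <stdint.h>

static int32_t
sign_extend (uint32_t x, unsigned int p)	/* 1 <= p <= 31 */
{
  uint32_t m = (uint32_t) 1 << (p - 1);	/* sign bit of the p-bit field */
  return (int32_t) ((x ^ m) - m);	/* upper bits become copies of it */
}

int
main (void)
{
  assert (sign_extend (0x7f, 8) == 127);
  assert (sign_extend (0x80, 8) == -128);
  assert (sign_extend (0xff, 8) == -1);
  return 0;
}
#endif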
5032 /* For an expression that has the form
5033 (A && B) || ~B
5034 or
5035 (A || B) && ~B,
5036 we can drop one of the inner expressions and simplify to
5037 A || ~B
5038 or
5039 A && ~B
5040 LOC is the location of the resulting expression. OP is the inner
5041 logical operation; the left-hand side in the examples above, while CMPOP
5042 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5043 removing a condition that guards another, as in
5044 (A != NULL && A->...) || A == NULL
5045 which we must not transform. If RHS_ONLY is true, only eliminate the
5046 right-most operand of the inner logical operation. */
5048 static tree
5049 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5050 bool rhs_only)
5052 tree type = TREE_TYPE (cmpop);
5053 enum tree_code code = TREE_CODE (cmpop);
5054 enum tree_code truthop_code = TREE_CODE (op);
5055 tree lhs = TREE_OPERAND (op, 0);
5056 tree rhs = TREE_OPERAND (op, 1);
5057 tree orig_lhs = lhs, orig_rhs = rhs;
5058 enum tree_code rhs_code = TREE_CODE (rhs);
5059 enum tree_code lhs_code = TREE_CODE (lhs);
5060 enum tree_code inv_code;
5062 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5063 return NULL_TREE;
5065 if (TREE_CODE_CLASS (code) != tcc_comparison)
5066 return NULL_TREE;
5068 if (rhs_code == truthop_code)
5070 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5071 if (newrhs != NULL_TREE)
5073 rhs = newrhs;
5074 rhs_code = TREE_CODE (rhs);
5077 if (lhs_code == truthop_code && !rhs_only)
5079 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5080 if (newlhs != NULL_TREE)
5082 lhs = newlhs;
5083 lhs_code = TREE_CODE (lhs);
5087 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5088 if (inv_code == rhs_code
5089 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5090 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5091 return lhs;
5092 if (!rhs_only && inv_code == lhs_code
5093 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5094 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5095 return rhs;
5096 if (rhs != orig_rhs || lhs != orig_lhs)
5097 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5098 lhs, rhs);
5099 return NULL_TREE;
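/* [Editorial sketch, not part of the original source.]  A truth-table
   check of the simplification implemented above, writing the comparison
   as b != 0 and its inverse as b == 0:  */
#if 0 /* illustrative example only */
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
	/* (A && B) || !B  ==  A || !B  */
	assert (((a && b != 0) || b == 0) == (a || b == 0));
	/* (A || B) && !B  ==  A && !B  */
	assert (((a || b != 0) && b == 0) == (a && b == 0));
      }
  return 0;
}
#endif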
5102 /* Find ways of folding logical expressions of LHS and RHS:
5103 Try to merge two comparisons to the same innermost item.
5104 Look for range tests like "ch >= '0' && ch <= '9'".
5105 Look for combinations of simple terms on machines with expensive branches
5106 and evaluate the RHS unconditionally.
5108 For example, if we have p->a == 2 && p->b == 4 and we can make an
5109 object large enough to span both A and B, we can do this with a comparison
5110 against the object ANDed with the a mask.
5112 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5113 operations to do this with one comparison.
5115 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5116 function and the one above.
5118 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5119 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5121 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5122 two operands.
5124 We return the simplified tree or 0 if no optimization is possible. */
5126 static tree
5127 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5128 tree lhs, tree rhs)
5130 /* If this is the "or" of two comparisons, we can do something if
5131 the comparisons are NE_EXPR. If this is the "and", we can do something
5132 if the comparisons are EQ_EXPR. I.e.,
5133 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5135 WANTED_CODE is this operation code. For single bit fields, we can
5136 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5137 comparison for one-bit fields. */
5139 enum tree_code wanted_code;
5140 enum tree_code lcode, rcode;
5141 tree ll_arg, lr_arg, rl_arg, rr_arg;
5142 tree ll_inner, lr_inner, rl_inner, rr_inner;
5143 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5144 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5145 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5146 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5147 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5148 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5149 enum machine_mode lnmode, rnmode;
5150 tree ll_mask, lr_mask, rl_mask, rr_mask;
5151 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5152 tree l_const, r_const;
5153 tree lntype, rntype, result;
5154 HOST_WIDE_INT first_bit, end_bit;
5155 int volatilep;
5157 /* Start by getting the comparison codes. Fail if anything is volatile.
5158 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5159 it were surrounded with a NE_EXPR. */
5161 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5162 return 0;
5164 lcode = TREE_CODE (lhs);
5165 rcode = TREE_CODE (rhs);
5167 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5169 lhs = build2 (NE_EXPR, truth_type, lhs,
5170 build_int_cst (TREE_TYPE (lhs), 0));
5171 lcode = NE_EXPR;
5174 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5176 rhs = build2 (NE_EXPR, truth_type, rhs,
5177 build_int_cst (TREE_TYPE (rhs), 0));
5178 rcode = NE_EXPR;
5181 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5182 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5183 return 0;
5185 ll_arg = TREE_OPERAND (lhs, 0);
5186 lr_arg = TREE_OPERAND (lhs, 1);
5187 rl_arg = TREE_OPERAND (rhs, 0);
5188 rr_arg = TREE_OPERAND (rhs, 1);
5190 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5191 if (simple_operand_p (ll_arg)
5192 && simple_operand_p (lr_arg))
5194 if (operand_equal_p (ll_arg, rl_arg, 0)
5195 && operand_equal_p (lr_arg, rr_arg, 0))
5197 result = combine_comparisons (loc, code, lcode, rcode,
5198 truth_type, ll_arg, lr_arg);
5199 if (result)
5200 return result;
5202 else if (operand_equal_p (ll_arg, rr_arg, 0)
5203 && operand_equal_p (lr_arg, rl_arg, 0))
5205 result = combine_comparisons (loc, code, lcode,
5206 swap_tree_comparison (rcode),
5207 truth_type, ll_arg, lr_arg);
5208 if (result)
5209 return result;
5213 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5214 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5216 /* If the RHS can be evaluated unconditionally and its operands are
5217 simple, it wins to evaluate the RHS unconditionally on machines
5218 with expensive branches. In this case, this isn't a comparison
5219 that can be merged. */
5221 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5222 false) >= 2
5223 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5224 && simple_operand_p (rl_arg)
5225 && simple_operand_p (rr_arg))
5227 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5228 if (code == TRUTH_OR_EXPR
5229 && lcode == NE_EXPR && integer_zerop (lr_arg)
5230 && rcode == NE_EXPR && integer_zerop (rr_arg)
5231 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5232 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5233 return build2_loc (loc, NE_EXPR, truth_type,
5234 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5235 ll_arg, rl_arg),
5236 build_int_cst (TREE_TYPE (ll_arg), 0));
5238 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5239 if (code == TRUTH_AND_EXPR
5240 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5241 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5242 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5243 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5244 return build2_loc (loc, EQ_EXPR, truth_type,
5245 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5246 ll_arg, rl_arg),
5247 build_int_cst (TREE_TYPE (ll_arg), 0));
5250 /* See if the comparisons can be merged. Then get all the parameters for
5251 each side. */
5253 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5254 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5255 return 0;
5257 volatilep = 0;
5258 ll_inner = decode_field_reference (loc, ll_arg,
5259 &ll_bitsize, &ll_bitpos, &ll_mode,
5260 &ll_unsignedp, &volatilep, &ll_mask,
5261 &ll_and_mask);
5262 lr_inner = decode_field_reference (loc, lr_arg,
5263 &lr_bitsize, &lr_bitpos, &lr_mode,
5264 &lr_unsignedp, &volatilep, &lr_mask,
5265 &lr_and_mask);
5266 rl_inner = decode_field_reference (loc, rl_arg,
5267 &rl_bitsize, &rl_bitpos, &rl_mode,
5268 &rl_unsignedp, &volatilep, &rl_mask,
5269 &rl_and_mask);
5270 rr_inner = decode_field_reference (loc, rr_arg,
5271 &rr_bitsize, &rr_bitpos, &rr_mode,
5272 &rr_unsignedp, &volatilep, &rr_mask,
5273 &rr_and_mask);
5275 /* The inner operation on the lhs of each comparison must be the
5276 same if we are to be able to do anything.
5277 Then see if we have constants. If not, the same must be true for
5278 the rhs's. */
5279 if (volatilep || ll_inner == 0 || rl_inner == 0
5280 || ! operand_equal_p (ll_inner, rl_inner, 0))
5281 return 0;
5283 if (TREE_CODE (lr_arg) == INTEGER_CST
5284 && TREE_CODE (rr_arg) == INTEGER_CST)
5285 l_const = lr_arg, r_const = rr_arg;
5286 else if (lr_inner == 0 || rr_inner == 0
5287 || ! operand_equal_p (lr_inner, rr_inner, 0))
5288 return 0;
5289 else
5290 l_const = r_const = 0;
5292 /* If either comparison code is not correct for our logical operation,
5293 fail. However, we can convert a one-bit comparison against zero into
5294 the opposite comparison against that bit being set in the field. */
5296 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5297 if (lcode != wanted_code)
5299 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5301 /* Make the left operand unsigned, since we are only interested
5302 in the value of one bit. Otherwise we are doing the wrong
5303 thing below. */
5304 ll_unsignedp = 1;
5305 l_const = ll_mask;
5307 else
5308 return 0;
5311 /* This is analogous to the code for l_const above. */
5312 if (rcode != wanted_code)
5314 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5316 rl_unsignedp = 1;
5317 r_const = rl_mask;
5319 else
5320 return 0;
5323 /* See if we can find a mode that contains both fields being compared on
5324 the left. If we can't, fail. Otherwise, update all constants and masks
5325 to be relative to a field of that size. */
5326 first_bit = MIN (ll_bitpos, rl_bitpos);
5327 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5328 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5329 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5330 volatilep);
5331 if (lnmode == VOIDmode)
5332 return 0;
5334 lnbitsize = GET_MODE_BITSIZE (lnmode);
5335 lnbitpos = first_bit & ~ (lnbitsize - 1);
5336 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5337 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5339 if (BYTES_BIG_ENDIAN)
5341 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5342 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5345 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5346 size_int (xll_bitpos));
5347 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5348 size_int (xrl_bitpos));
5350 if (l_const)
5352 l_const = fold_convert_loc (loc, lntype, l_const);
5353 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5354 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5355 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5356 fold_build1_loc (loc, BIT_NOT_EXPR,
5357 lntype, ll_mask))))
5359 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5361 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5364 if (r_const)
5366 r_const = fold_convert_loc (loc, lntype, r_const);
5367 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5368 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5369 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5370 fold_build1_loc (loc, BIT_NOT_EXPR,
5371 lntype, rl_mask))))
5373 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5375 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5379 /* If the right sides are not constant, do the same for them. Also,
5380 disallow this optimization if a size or signedness mismatch occurs
5381 between the left and right sides. */
5382 if (l_const == 0)
5384 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5385 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5386 /* Make sure the two fields on the right
5387 correspond to the left without being swapped. */
5388 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5389 return 0;
5391 first_bit = MIN (lr_bitpos, rr_bitpos);
5392 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5393 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5394 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5395 volatilep);
5396 if (rnmode == VOIDmode)
5397 return 0;
5399 rnbitsize = GET_MODE_BITSIZE (rnmode);
5400 rnbitpos = first_bit & ~ (rnbitsize - 1);
5401 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5402 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5404 if (BYTES_BIG_ENDIAN)
5406 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5407 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5410 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5411 rntype, lr_mask),
5412 size_int (xlr_bitpos));
5413 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5414 rntype, rr_mask),
5415 size_int (xrr_bitpos));
5417 /* Make a mask that corresponds to both fields being compared.
5418 Do this for both items being compared. If the operands are the
5419 same size and the bits being compared are in the same position
5420 then we can do this by masking both and comparing the masked
5421 results. */
5422 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5423 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5424 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5426 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5427 ll_unsignedp || rl_unsignedp);
5428 if (! all_ones_mask_p (ll_mask, lnbitsize))
5429 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5431 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5432 lr_unsignedp || rr_unsignedp);
5433 if (! all_ones_mask_p (lr_mask, rnbitsize))
5434 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5436 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5439 /* There is still another way we can do something: If both pairs of
5440 fields being compared are adjacent, we may be able to make a wider
5441 field containing them both.
5443 Note that we still must mask the lhs/rhs expressions. Furthermore,
5444 the mask must be shifted to account for the shift done by
5445 make_bit_field_ref. */
5446 if ((ll_bitsize + ll_bitpos == rl_bitpos
5447 && lr_bitsize + lr_bitpos == rr_bitpos)
5448 || (ll_bitpos == rl_bitpos + rl_bitsize
5449 && lr_bitpos == rr_bitpos + rr_bitsize))
5451 tree type;
5453 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5454 ll_bitsize + rl_bitsize,
5455 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5456 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5457 lr_bitsize + rr_bitsize,
5458 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5460 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5461 size_int (MIN (xll_bitpos, xrl_bitpos)));
5462 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5463 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5465 /* Convert to the smaller type before masking out unwanted bits. */
5466 type = lntype;
5467 if (lntype != rntype)
5469 if (lnbitsize > rnbitsize)
5471 lhs = fold_convert_loc (loc, rntype, lhs);
5472 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5473 type = rntype;
5475 else if (lnbitsize < rnbitsize)
5477 rhs = fold_convert_loc (loc, lntype, rhs);
5478 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5479 type = lntype;
5483 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5484 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5486 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5487 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5489 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5492 return 0;
5495 /* Handle the case of comparisons with constants. If there is something in
5496 common between the masks, those bits of the constants must be the same.
5497 If not, the condition is always false. Test for this to avoid generating
5498 incorrect code below. */
5499 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5500 if (! integer_zerop (result)
5501 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5502 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5504 if (wanted_code == NE_EXPR)
5506 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5507 return constant_boolean_node (true, truth_type);
5509 else
5511 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5512 return constant_boolean_node (false, truth_type);
5516 /* Construct the expression we will return. First get the component
5517 reference we will make. Unless the mask is all ones the width of
5518 that field, perform the mask operation. Then compare with the
5519 merged constant. */
5520 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5521 ll_unsignedp || rl_unsignedp);
5523 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5524 if (! all_ones_mask_p (ll_mask, lnbitsize))
5525 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5527 return build2_loc (loc, wanted_code, truth_type, result,
5528 const_binop (BIT_IOR_EXPR, l_const, r_const));
5531 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5532 constant. */
5534 static tree
5535 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5536 tree op0, tree op1)
5538 tree arg0 = op0;
5539 enum tree_code op_code;
5540 tree comp_const;
5541 tree minmax_const;
5542 int consts_equal, consts_lt;
5543 tree inner;
5545 STRIP_SIGN_NOPS (arg0);
5547 op_code = TREE_CODE (arg0);
5548 minmax_const = TREE_OPERAND (arg0, 1);
5549 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5550 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5551 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5552 inner = TREE_OPERAND (arg0, 0);
5554 /* If something does not permit us to optimize, return the original tree. */
5555 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5556 || TREE_CODE (comp_const) != INTEGER_CST
5557 || TREE_OVERFLOW (comp_const)
5558 || TREE_CODE (minmax_const) != INTEGER_CST
5559 || TREE_OVERFLOW (minmax_const))
5560 return NULL_TREE;
5562 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5563 and GT_EXPR, doing the rest with recursive calls using logical
5564 simplifications. */
5565 switch (code)
5567 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5569 tree tem
5570 = optimize_minmax_comparison (loc,
5571 invert_tree_comparison (code, false),
5572 type, op0, op1);
5573 if (tem)
5574 return invert_truthvalue_loc (loc, tem);
5575 return NULL_TREE;
5578 case GE_EXPR:
5579 return
5580 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5581 optimize_minmax_comparison
5582 (loc, EQ_EXPR, type, arg0, comp_const),
5583 optimize_minmax_comparison
5584 (loc, GT_EXPR, type, arg0, comp_const));
5586 case EQ_EXPR:
5587 if (op_code == MAX_EXPR && consts_equal)
5588 /* MAX (X, 0) == 0 -> X <= 0 */
5589 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5591 else if (op_code == MAX_EXPR && consts_lt)
5592 /* MAX (X, 0) == 5 -> X == 5 */
5593 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5595 else if (op_code == MAX_EXPR)
5596 /* MAX (X, 0) == -1 -> false */
5597 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5599 else if (consts_equal)
5600 /* MIN (X, 0) == 0 -> X >= 0 */
5601 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5603 else if (consts_lt)
5604 /* MIN (X, 0) == 5 -> false */
5605 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5607 else
5608 /* MIN (X, 0) == -1 -> X == -1 */
5609 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5611 case GT_EXPR:
5612 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5613 /* MAX (X, 0) > 0 -> X > 0
5614 MAX (X, 0) > 5 -> X > 5 */
5615 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5617 else if (op_code == MAX_EXPR)
5618 /* MAX (X, 0) > -1 -> true */
5619 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5621 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5622 /* MIN (X, 0) > 0 -> false
5623 MIN (X, 0) > 5 -> false */
5624 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5626 else
5627 /* MIN (X, 0) > -1 -> X > -1 */
5628 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5630 default:
5631 return NULL_TREE;
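/* Illustrative sketch (standalone, not part of this file; names are
   ours): the source-level equivalences that the EQ_EXPR and GT_EXPR
   cases above exploit, written out for a MIN/MAX against 0.  */
#include <assert.h>

static void
minmax_compare_examples (int x)
{
  int max0 = x > 0 ? x : 0;		/* MAX (X, 0) */
  int min0 = x < 0 ? x : 0;		/* MIN (X, 0) */

  assert ((max0 == 0) == (x <= 0));	/* MAX (X, 0) == 0  ->  X <= 0 */
  assert ((max0 == 5) == (x == 5));	/* MAX (X, 0) == 5  ->  X == 5 */
  assert ((max0 > -1) == 1);		/* MAX (X, 0) > -1  ->  true */
  assert ((min0 > 5) == 0);		/* MIN (X, 0) > 5   ->  false */
}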
5635 /* T is an integer expression that is being multiplied, divided, or taken a
5636 modulus (CODE says which and what kind of divide or modulus) by a
5637 constant C. See if we can eliminate that operation by folding it with
5638 other operations already in T. WIDE_TYPE, if non-null, is a type that
5639 should be used for the computation if wider than our type.
5641 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5642 (X * 2) + (Y * 4). We must, however, be assured that either the original
5643 expression would not overflow or that overflow is undefined for the type
5644 in the language in question.
5646 If we return a non-null expression, it is an equivalent form of the
5647 original computation, but need not be in the original type.
5649 We set *STRICT_OVERFLOW_P to true if the return value depends on
5650 signed overflow being undefined. Otherwise we do not change
5651 *STRICT_OVERFLOW_P. */
5653 static tree
5654 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5655 bool *strict_overflow_p)
5657 /* To avoid exponential search depth, refuse to allow recursion past
5658 three levels. Beyond that (1) it's highly unlikely that we'll find
5659 something interesting and (2) we've probably processed it before
5660 when we built the inner expression. */
5662 static int depth;
5663 tree ret;
5665 if (depth > 3)
5666 return NULL;
5668 depth++;
5669 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5670 depth--;
5672 return ret;
5675 static tree
5676 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5677 bool *strict_overflow_p)
5679 tree type = TREE_TYPE (t);
5680 enum tree_code tcode = TREE_CODE (t);
5681 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5682 > GET_MODE_SIZE (TYPE_MODE (type)))
5683 ? wide_type : type);
5684 tree t1, t2;
5685 int same_p = tcode == code;
5686 tree op0 = NULL_TREE, op1 = NULL_TREE;
5687 bool sub_strict_overflow_p;
5689 /* Don't deal with constants of zero here; they confuse the code below. */
5690 if (integer_zerop (c))
5691 return NULL_TREE;
5693 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5694 op0 = TREE_OPERAND (t, 0);
5696 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5697 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5699 /* Note that we need not handle conditional operations here since fold
5700 already handles those cases. So just do arithmetic here. */
5701 switch (tcode)
5703 case INTEGER_CST:
5704 /* For a constant, we can always simplify if we are a multiply
5705 or (for divide and modulus) if it is a multiple of our constant. */
5706 if (code == MULT_EXPR
5707 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5708 return const_binop (code, fold_convert (ctype, t),
5709 fold_convert (ctype, c));
5710 break;
5712 CASE_CONVERT: case NON_LVALUE_EXPR:
5713 /* If op0 is an expression ... */
5714 if ((COMPARISON_CLASS_P (op0)
5715 || UNARY_CLASS_P (op0)
5716 || BINARY_CLASS_P (op0)
5717 || VL_EXP_CLASS_P (op0)
5718 || EXPRESSION_CLASS_P (op0))
5719 /* ... and has wrapping overflow, and its type is smaller
5720 than ctype, then we cannot pass through as widening. */
5721 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5722 && (TYPE_PRECISION (ctype)
5723 > TYPE_PRECISION (TREE_TYPE (op0))))
5724 /* ... or this is a truncation (t is narrower than op0),
5725 then we cannot pass through this narrowing. */
5726 || (TYPE_PRECISION (type)
5727 < TYPE_PRECISION (TREE_TYPE (op0)))
5728 /* ... or signedness changes for division or modulus,
5729 then we cannot pass through this conversion. */
5730 || (code != MULT_EXPR
5731 && (TYPE_UNSIGNED (ctype)
5732 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5733 /* ... or has undefined overflow while the converted to
5734 type has not, we cannot do the operation in the inner type
5735 as that would introduce undefined overflow. */
5736 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5737 && !TYPE_OVERFLOW_UNDEFINED (type))))
5738 break;
5740 /* Pass the constant down and see if we can make a simplification. If
5741 we can, replace this expression with the inner simplification for
5742 possible later conversion to our or some other type. */
5743 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5744 && TREE_CODE (t2) == INTEGER_CST
5745 && !TREE_OVERFLOW (t2)
5746 && (0 != (t1 = extract_muldiv (op0, t2, code,
5747 code == MULT_EXPR
5748 ? ctype : NULL_TREE,
5749 strict_overflow_p))))
5750 return t1;
5751 break;
5753 case ABS_EXPR:
5754 /* If widening the type changes it from signed to unsigned, then we
5755 must avoid building ABS_EXPR itself as unsigned. */
5756 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5758 tree cstype = (*signed_type_for) (ctype);
5759 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5760 != 0)
5762 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5763 return fold_convert (ctype, t1);
5765 break;
5767 /* If the constant is negative, we cannot simplify this. */
5768 if (tree_int_cst_sgn (c) == -1)
5769 break;
5770 /* FALLTHROUGH */
5771 case NEGATE_EXPR:
5772 /* For division and modulus, type can't be unsigned, as e.g.
5773 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5774 For signed types, even with wrapping overflow, this is fine. */
5775 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5776 break;
5777 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5778 != 0)
5779 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5780 break;
5782 case MIN_EXPR: case MAX_EXPR:
5783 /* If widening the type changes the signedness, then we can't perform
5784 this optimization as that changes the result. */
5785 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5786 break;
5788 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5789 sub_strict_overflow_p = false;
5790 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5791 &sub_strict_overflow_p)) != 0
5792 && (t2 = extract_muldiv (op1, c, code, wide_type,
5793 &sub_strict_overflow_p)) != 0)
5795 if (tree_int_cst_sgn (c) < 0)
5796 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5797 if (sub_strict_overflow_p)
5798 *strict_overflow_p = true;
5799 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5800 fold_convert (ctype, t2));
5802 break;
5804 case LSHIFT_EXPR: case RSHIFT_EXPR:
5805 /* If the second operand is constant, this is a multiplication
5806 or floor division, by a power of two, so we can treat it that
5807 way unless the multiplier or divisor overflows. Signed
5808 left-shift overflow is implementation-defined rather than
5809 undefined in C90, so do not convert signed left shift into
5810 multiplication. */
5811 if (TREE_CODE (op1) == INTEGER_CST
5812 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5813 /* const_binop may not detect overflow correctly,
5814 so check for it explicitly here. */
5815 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5816 && 0 != (t1 = fold_convert (ctype,
5817 const_binop (LSHIFT_EXPR,
5818 size_one_node,
5819 op1)))
5820 && !TREE_OVERFLOW (t1))
5821 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5822 ? MULT_EXPR : FLOOR_DIV_EXPR,
5823 ctype,
5824 fold_convert (ctype, op0),
5825 t1),
5826 c, code, wide_type, strict_overflow_p);
5827 break;
5829 case PLUS_EXPR: case MINUS_EXPR:
5830 /* See if we can eliminate the operation on both sides. If we can, we
5831 can return a new PLUS or MINUS. If we can't, the only remaining
5832 cases where we can do anything are if the second operand is a
5833 constant. */
5834 sub_strict_overflow_p = false;
5835 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5836 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5837 if (t1 != 0 && t2 != 0
5838 && (code == MULT_EXPR
5839 /* If not multiplication, we can only do this if both operands
5840 are divisible by c. */
5841 || (multiple_of_p (ctype, op0, c)
5842 && multiple_of_p (ctype, op1, c))))
5844 if (sub_strict_overflow_p)
5845 *strict_overflow_p = true;
5846 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5847 fold_convert (ctype, t2));
5850 /* If this was a subtraction, negate OP1 and set it to be an addition.
5851 This simplifies the logic below. */
5852 if (tcode == MINUS_EXPR)
5854 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5855 /* If OP1 was not easily negatable, the constant may be OP0. */
5856 if (TREE_CODE (op0) == INTEGER_CST)
5858 tree tem = op0;
5859 op0 = op1;
5860 op1 = tem;
5861 tem = t1;
5862 t1 = t2;
5863 t2 = tem;
5867 if (TREE_CODE (op1) != INTEGER_CST)
5868 break;
5870 /* If either OP1 or C is negative, this optimization is not safe for
5871 some of the division and remainder types while for others we need
5872 to change the code. */
5873 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5875 if (code == CEIL_DIV_EXPR)
5876 code = FLOOR_DIV_EXPR;
5877 else if (code == FLOOR_DIV_EXPR)
5878 code = CEIL_DIV_EXPR;
5879 else if (code != MULT_EXPR
5880 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5881 break;
5884 /* If it's a multiply or a division/modulus operation of a multiple
5885 of our constant, do the operation and verify it doesn't overflow. */
5886 if (code == MULT_EXPR
5887 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5889 op1 = const_binop (code, fold_convert (ctype, op1),
5890 fold_convert (ctype, c));
5891 /* We allow the constant to overflow with wrapping semantics. */
5892 if (op1 == 0
5893 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5894 break;
5896 else
5897 break;
5899 /* If we have an unsigned type, we cannot widen the operation since it
5900 will change the result if the original computation overflowed. */
5901 if (TYPE_UNSIGNED (ctype) && ctype != type)
5902 break;
5904 /* If we were able to eliminate our operation from the first side,
5905 apply our operation to the second side and reform the PLUS. */
5906 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5907 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5909 /* The last case is if we are a multiply. In that case, we can
5910 apply the distributive law to commute the multiply and addition
5911 if the multiplication of the constants doesn't overflow
5912 and overflow is defined. With undefined overflow
5913 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5914 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5915 return fold_build2 (tcode, ctype,
5916 fold_build2 (code, ctype,
5917 fold_convert (ctype, op0),
5918 fold_convert (ctype, c)),
5919 op1);
5921 break;
5923 case MULT_EXPR:
5924 /* We have a special case here if we are doing something like
5925 (C * 8) % 4 since we know that's zero. */
5926 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5927 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5928 /* If the multiplication can overflow we cannot optimize this. */
5929 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5930 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5931 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5933 *strict_overflow_p = true;
5934 return omit_one_operand (type, integer_zero_node, op0);
5937 /* ... fall through ... */
5939 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5940 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5941 /* If we can extract our operation from the LHS, do so and return a
5942 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5943 do something only if the second operand is a constant. */
5944 if (same_p
5945 && (t1 = extract_muldiv (op0, c, code, wide_type,
5946 strict_overflow_p)) != 0)
5947 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5948 fold_convert (ctype, op1));
5949 else if (tcode == MULT_EXPR && code == MULT_EXPR
5950 && (t1 = extract_muldiv (op1, c, code, wide_type,
5951 strict_overflow_p)) != 0)
5952 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5953 fold_convert (ctype, t1));
5954 else if (TREE_CODE (op1) != INTEGER_CST)
5955 return 0;
5957 /* If these are the same operation types, we can associate them
5958 assuming no overflow. */
5959 if (tcode == code)
5961 bool overflow_p = false;
5962 bool overflow_mul_p;
5963 signop sign = TYPE_SIGN (ctype);
5964 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5965 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5966 if (overflow_mul_p
5967 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5968 overflow_p = true;
5969 if (!overflow_p)
5970 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5971 wide_int_to_tree (ctype, mul));
5974 /* If these operations "cancel" each other, we have the main
5975 optimizations of this pass, which occur when either constant is a
5976 multiple of the other, in which case we replace this with either an
5977 operation of either CODE or TCODE.
5979 If we have an unsigned type, we cannot do this since it will change
5980 the result if the original computation overflowed. */
5981 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5982 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5983 || (tcode == MULT_EXPR
5984 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5985 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5986 && code != MULT_EXPR)))
5988 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5990 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5991 *strict_overflow_p = true;
5992 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5993 fold_convert (ctype,
5994 const_binop (TRUNC_DIV_EXPR,
5995 op1, c)));
5997 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
5999 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6000 *strict_overflow_p = true;
6001 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6002 fold_convert (ctype,
6003 const_binop (TRUNC_DIV_EXPR,
6004 c, op1)));
6007 break;
6009 default:
6010 break;
6013 return 0;
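/* Illustrative sketch (standalone, not part of this file; names are
   ours): the transformation extract_muldiv performs, at the source
   level.  Dividing (X * 8) + (Y * 16) by 4 distributes the division
   into the operands; this is only valid when the original computation
   cannot overflow, which is the no-overflow assumption made here.  */
#include <assert.h>

static void
extract_muldiv_example (int x, int y)
{
  /* Assumes x and y are small enough that no intermediate overflows.  */
  assert (((x * 8) + (y * 16)) / 4 == (x * 2) + (y * 4));
  /* Cancellation case: a multiply folded against an exact division.  */
  assert ((x * 8) / 4 == x * 2);
}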
6016 /* Return a node which has the indicated constant VALUE (either 0 or
6017 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6018 and is of the indicated TYPE. */
6020 tree
6021 constant_boolean_node (bool value, tree type)
6023 if (type == integer_type_node)
6024 return value ? integer_one_node : integer_zero_node;
6025 else if (type == boolean_type_node)
6026 return value ? boolean_true_node : boolean_false_node;
6027 else if (TREE_CODE (type) == VECTOR_TYPE)
6028 return build_vector_from_val (type,
6029 build_int_cst (TREE_TYPE (type),
6030 value ? -1 : 0));
6031 else
6032 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6036 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6037 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6038 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6039 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6040 COND is the first argument to CODE; otherwise (as in the example
6041 given here), it is the second argument. TYPE is the type of the
6042 original expression. Return NULL_TREE if no simplification is
6043 possible. */
6045 static tree
6046 fold_binary_op_with_conditional_arg (location_t loc,
6047 enum tree_code code,
6048 tree type, tree op0, tree op1,
6049 tree cond, tree arg, int cond_first_p)
6051 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6052 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6053 tree test, true_value, false_value;
6054 tree lhs = NULL_TREE;
6055 tree rhs = NULL_TREE;
6056 enum tree_code cond_code = COND_EXPR;
6058 if (TREE_CODE (cond) == COND_EXPR
6059 || TREE_CODE (cond) == VEC_COND_EXPR)
6061 test = TREE_OPERAND (cond, 0);
6062 true_value = TREE_OPERAND (cond, 1);
6063 false_value = TREE_OPERAND (cond, 2);
6064 /* If this arm is a void expression (such as a throw), then it makes
6065 no sense to try to perform a logical or arithmetic operation
6066 involving it. */
6067 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6068 lhs = true_value;
6069 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6070 rhs = false_value;
6072 else
6074 tree testtype = TREE_TYPE (cond);
6075 test = cond;
6076 true_value = constant_boolean_node (true, testtype);
6077 false_value = constant_boolean_node (false, testtype);
6080 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6081 cond_code = VEC_COND_EXPR;
6083 /* This transformation is only worthwhile if we don't have to wrap ARG
6084 in a SAVE_EXPR and the operation can be simplified without recursing
6085 on at least one of the branches once it's pushed inside the COND_EXPR. */
6086 if (!TREE_CONSTANT (arg)
6087 && (TREE_SIDE_EFFECTS (arg)
6088 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6089 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6090 return NULL_TREE;
6092 arg = fold_convert_loc (loc, arg_type, arg);
6093 if (lhs == 0)
6095 true_value = fold_convert_loc (loc, cond_type, true_value);
6096 if (cond_first_p)
6097 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6098 else
6099 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6101 if (rhs == 0)
6103 false_value = fold_convert_loc (loc, cond_type, false_value);
6104 if (cond_first_p)
6105 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6106 else
6107 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6110 /* Check that we have simplified at least one of the branches. */
6111 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6112 return NULL_TREE;
6114 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
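/* Illustrative sketch (standalone, not part of this file; names are
   ours): the shape of the rewrite above.  The binary operation is
   pushed into both arms of the conditional, which pays off when at
   least one arm then folds to a constant.  */
#include <assert.h>

static void
cond_arg_example (int b, int a)
{
  /* a + (b ? 3 : 4)  becomes  b ? (a + 3) : (a + 4).  */
  assert (a + (b ? 3 : 4) == (b ? (a + 3) : (a + 4)));
}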
6118 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6120 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6121 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6122 ADDEND is the same as X.
6124 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6125 and finite. The problematic cases are when X is zero, and its mode
6126 has signed zeros. In the case of rounding towards -infinity,
6127 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6128 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6130 bool
6131 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6133 if (!real_zerop (addend))
6134 return false;
6136 /* Don't allow the fold with -fsignaling-nans. */
6137 if (HONOR_SNANS (TYPE_MODE (type)))
6138 return false;
6140 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6141 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6142 return true;
6144 /* In a vector or complex, we would need to check the sign of all zeros. */
6145 if (TREE_CODE (addend) != REAL_CST)
6146 return false;
6148 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6149 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6150 negate = !negate;
6152 /* The mode has signed zeros, and we have to honor their sign.
6153 In this situation, there is only one case we can return true for.
6154 X - 0 is the same as X unless rounding towards -infinity is
6155 supported. */
6156 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
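/* Illustrative sketch (standalone, not part of this file; names are
   ours): why the sign of zero matters above.  Under the default
   round-to-nearest mode, -0.0 + 0.0 yields +0.0, so X + 0.0 cannot be
   folded to X when signed zeros are honored, while X - 0.0 preserves
   the sign of X and can be.  */
#include <assert.h>
#include <math.h>

static void
signed_zero_example (void)
{
  double x = -0.0;
  assert (!signbit (x + 0.0));	/* sign of X is lost: result is +0.0 */
  assert (signbit (x - 0.0));	/* sign of X survives: result is -0.0 */
}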
6159 /* Subroutine of fold() that checks comparisons of built-in math
6160 functions against real constants.
6162 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6163 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6164 is the type of the result and ARG0 and ARG1 are the operands of the
6165 comparison. ARG1 must be a TREE_REAL_CST.
6167 The function returns the constant folded tree if a simplification
6168 can be made, and NULL_TREE otherwise. */
6170 static tree
6171 fold_mathfn_compare (location_t loc,
6172 enum built_in_function fcode, enum tree_code code,
6173 tree type, tree arg0, tree arg1)
6175 REAL_VALUE_TYPE c;
6177 if (BUILTIN_SQRT_P (fcode))
6179 tree arg = CALL_EXPR_ARG (arg0, 0);
6180 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6182 c = TREE_REAL_CST (arg1);
6183 if (REAL_VALUE_NEGATIVE (c))
6185 /* sqrt(x) < y is always false, if y is negative. */
6186 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6187 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6189 /* sqrt(x) > y is always true, if y is negative and we
6190 don't care about NaNs, i.e. negative values of x. */
6191 if (code == NE_EXPR || !HONOR_NANS (mode))
6192 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6194 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6195 return fold_build2_loc (loc, GE_EXPR, type, arg,
6196 build_real (TREE_TYPE (arg), dconst0));
6198 else if (code == GT_EXPR || code == GE_EXPR)
6200 REAL_VALUE_TYPE c2;
6202 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6203 real_convert (&c2, mode, &c2);
6205 if (REAL_VALUE_ISINF (c2))
6207 /* sqrt(x) > y is x == +Inf, when y is very large. */
6208 if (HONOR_INFINITIES (mode))
6209 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6210 build_real (TREE_TYPE (arg), c2));
6212 /* sqrt(x) > y is always false, when y is very large
6213 and we don't care about infinities. */
6214 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6217 /* sqrt(x) > c is the same as x > c*c. */
6218 return fold_build2_loc (loc, code, type, arg,
6219 build_real (TREE_TYPE (arg), c2));
6221 else if (code == LT_EXPR || code == LE_EXPR)
6223 REAL_VALUE_TYPE c2;
6225 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6226 real_convert (&c2, mode, &c2);
6228 if (REAL_VALUE_ISINF (c2))
6230 /* sqrt(x) < y is always true, when y is a very large
6231 value and we don't care about NaNs or Infinities. */
6232 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6233 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6235 /* sqrt(x) < y is x != +Inf when y is very large and we
6236 don't care about NaNs. */
6237 if (! HONOR_NANS (mode))
6238 return fold_build2_loc (loc, NE_EXPR, type, arg,
6239 build_real (TREE_TYPE (arg), c2));
6241 /* sqrt(x) < y is x >= 0 when y is very large and we
6242 don't care about Infinities. */
6243 if (! HONOR_INFINITIES (mode))
6244 return fold_build2_loc (loc, GE_EXPR, type, arg,
6245 build_real (TREE_TYPE (arg), dconst0));
6247 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6248 arg = save_expr (arg);
6249 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6250 fold_build2_loc (loc, GE_EXPR, type, arg,
6251 build_real (TREE_TYPE (arg),
6252 dconst0)),
6253 fold_build2_loc (loc, NE_EXPR, type, arg,
6254 build_real (TREE_TYPE (arg),
6255 c2)));
6258 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6259 if (! HONOR_NANS (mode))
6260 return fold_build2_loc (loc, code, type, arg,
6261 build_real (TREE_TYPE (arg), c2));
6263 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6264 arg = save_expr (arg);
6265 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6266 fold_build2_loc (loc, GE_EXPR, type, arg,
6267 build_real (TREE_TYPE (arg),
6268 dconst0)),
6269 fold_build2_loc (loc, code, type, arg,
6270 build_real (TREE_TYPE (arg),
6271 c2)));
6275 return NULL_TREE;
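/* Illustrative sketch (standalone, not part of this file; names are
   ours): two of the sqrt comparisons folded above.  The constant 3.0
   is chosen so that c*c == 9.0 is exact; in general this fold is only
   enabled under unsafe math optimizations.  */
#include <assert.h>
#include <math.h>

static void
sqrt_compare_example (double x)
{
  if (x >= 0.0)
    {
      assert ((sqrt (x) > 3.0) == (x > 9.0));	/* sqrt(x) > c  ->  x > c*c */
      assert ((sqrt (x) < -1.0) == 0);		/* y negative: always false */
    }
}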
6278 /* Subroutine of fold() that optimizes comparisons against Infinities,
6279 either +Inf or -Inf.
6281 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6282 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6283 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6285 The function returns the constant folded tree if a simplification
6286 can be made, and NULL_TREE otherwise. */
6288 static tree
6289 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6290 tree arg0, tree arg1)
6292 enum machine_mode mode;
6293 REAL_VALUE_TYPE max;
6294 tree temp;
6295 bool neg;
6297 mode = TYPE_MODE (TREE_TYPE (arg0));
6299 /* For negative infinity swap the sense of the comparison. */
6300 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6301 if (neg)
6302 code = swap_tree_comparison (code);
6304 switch (code)
6306 case GT_EXPR:
6307 /* x > +Inf is always false, if we ignore sNaNs. */
6308 if (HONOR_SNANS (mode))
6309 return NULL_TREE;
6310 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6312 case LE_EXPR:
6313 /* x <= +Inf is always true, if we don't care about NaNs. */
6314 if (! HONOR_NANS (mode))
6315 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6317 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6318 arg0 = save_expr (arg0);
6319 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6321 case EQ_EXPR:
6322 case GE_EXPR:
6323 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6324 real_maxval (&max, neg, mode);
6325 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6326 arg0, build_real (TREE_TYPE (arg0), max));
6328 case LT_EXPR:
6329 /* x < +Inf is always equal to x <= DBL_MAX. */
6330 real_maxval (&max, neg, mode);
6331 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6332 arg0, build_real (TREE_TYPE (arg0), max));
6334 case NE_EXPR:
6335 /* x != +Inf is always equal to !(x > DBL_MAX). */
6336 real_maxval (&max, neg, mode);
6337 if (! HONOR_NANS (mode))
6338 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6339 arg0, build_real (TREE_TYPE (arg0), max));
6341 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6343 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6345 default:
6346 break;
6349 return NULL_TREE;
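/* Illustrative sketch (standalone, not part of this file; names are
   ours): the Inf comparisons rewritten above.  For any non-NaN double,
   comparing against +Inf is equivalent to comparing against DBL_MAX.  */
#include <assert.h>
#include <float.h>
#include <math.h>

static void
inf_compare_example (double x)
{
  if (!isnan (x))
    {
      assert ((x < INFINITY) == (x <= DBL_MAX));	/* LT_EXPR case */
      assert ((x >= INFINITY) == (x > DBL_MAX));	/* GE_EXPR case */
    }
}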
6352 /* Subroutine of fold() that optimizes comparisons of a division by
6353 a nonzero integer constant against an integer constant, i.e.
6354 X/C1 op C2.
6356 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6357 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6358 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6360 The function returns the constant folded tree if a simplification
6361 can be made, and NULL_TREE otherwise. */
6363 static tree
6364 fold_div_compare (location_t loc,
6365 enum tree_code code, tree type, tree arg0, tree arg1)
6367 tree prod, tmp, hi, lo;
6368 tree arg00 = TREE_OPERAND (arg0, 0);
6369 tree arg01 = TREE_OPERAND (arg0, 1);
6370 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6371 bool neg_overflow = false;
6372 bool overflow;
6374 /* We have to do this the hard way to detect unsigned overflow.
6375 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6376 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6377 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6378 neg_overflow = false;
6380 if (sign == UNSIGNED)
6382 tmp = int_const_binop (MINUS_EXPR, arg01,
6383 build_int_cst (TREE_TYPE (arg01), 1));
6384 lo = prod;
6386 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6387 val = wi::add (prod, tmp, sign, &overflow);
6388 hi = force_fit_type (TREE_TYPE (arg00), val,
6389 -1, overflow | TREE_OVERFLOW (prod));
6391 else if (tree_int_cst_sgn (arg01) >= 0)
6393 tmp = int_const_binop (MINUS_EXPR, arg01,
6394 build_int_cst (TREE_TYPE (arg01), 1));
6395 switch (tree_int_cst_sgn (arg1))
6397 case -1:
6398 neg_overflow = true;
6399 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6400 hi = prod;
6401 break;
6403 case 0:
6404 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6405 hi = tmp;
6406 break;
6408 case 1:
6409 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6410 lo = prod;
6411 break;
6413 default:
6414 gcc_unreachable ();
6417 else
6419 /* A negative divisor reverses the relational operators. */
6420 code = swap_tree_comparison (code);
6422 tmp = int_const_binop (PLUS_EXPR, arg01,
6423 build_int_cst (TREE_TYPE (arg01), 1));
6424 switch (tree_int_cst_sgn (arg1))
6426 case -1:
6427 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6428 lo = prod;
6429 break;
6431 case 0:
6432 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6433 lo = tmp;
6434 break;
6436 case 1:
6437 neg_overflow = true;
6438 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6439 hi = prod;
6440 break;
6442 default:
6443 gcc_unreachable ();
6447 switch (code)
6449 case EQ_EXPR:
6450 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6451 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6452 if (TREE_OVERFLOW (hi))
6453 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6454 if (TREE_OVERFLOW (lo))
6455 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6456 return build_range_check (loc, type, arg00, 1, lo, hi);
6458 case NE_EXPR:
6459 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6460 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6461 if (TREE_OVERFLOW (hi))
6462 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6463 if (TREE_OVERFLOW (lo))
6464 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6465 return build_range_check (loc, type, arg00, 0, lo, hi);
6467 case LT_EXPR:
6468 if (TREE_OVERFLOW (lo))
6470 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6471 return omit_one_operand_loc (loc, type, tmp, arg00);
6473 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6475 case LE_EXPR:
6476 if (TREE_OVERFLOW (hi))
6478 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6479 return omit_one_operand_loc (loc, type, tmp, arg00);
6481 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6483 case GT_EXPR:
6484 if (TREE_OVERFLOW (hi))
6486 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6487 return omit_one_operand_loc (loc, type, tmp, arg00);
6489 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6491 case GE_EXPR:
6492 if (TREE_OVERFLOW (lo))
6494 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6495 return omit_one_operand_loc (loc, type, tmp, arg00);
6497 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6499 default:
6500 break;
6503 return NULL_TREE;
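/* Illustrative sketch (standalone, not part of this file; names are
   ours): the range check fold_div_compare builds.  With C99 truncating
   division, X / 3 == 2 holds exactly for X in [6, 8], and a negative
   divisor mirrors the range.  */
#include <assert.h>

static void
div_compare_example (int x)
{
  assert ((x / 3 == 2) == (x >= 6 && x <= 8));
  assert ((x / -3 == 2) == (x <= -6 && x >= -8));
}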
6507 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6508 equality/inequality test, then return a simplified form of the test
6509 using a sign test. Otherwise return NULL. RESULT_TYPE is the
6510 desired result type. */
6512 static tree
6513 fold_single_bit_test_into_sign_test (location_t loc,
6514 enum tree_code code, tree arg0, tree arg1,
6515 tree result_type)
6517 /* If this is testing a single bit, we can optimize the test. */
6518 if ((code == NE_EXPR || code == EQ_EXPR)
6519 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6520 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6522 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6523 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6524 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6526 if (arg00 != NULL_TREE
6527 /* This is only a win if casting to a signed type is cheap,
6528 i.e. when arg00's type is not a partial mode. */
6529 && TYPE_PRECISION (TREE_TYPE (arg00))
6530 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6532 tree stype = signed_type_for (TREE_TYPE (arg00));
6533 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6534 result_type,
6535 fold_convert_loc (loc, stype, arg00),
6536 build_int_cst (stype, 0));
6540 return NULL_TREE;
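/* Illustrative sketch (standalone, not part of this file; names are
   ours): the sign test produced above.  When C is the sign bit of a
   32-bit value, (A & C) != 0 is the same as A < 0 with A viewed as
   signed, and (A & C) == 0 is A >= 0.  */
#include <assert.h>
#include <stdint.h>

static void
sign_bit_test_example (int32_t a)
{
  assert ((((uint32_t) a & 0x80000000u) != 0) == (a < 0));
  assert ((((uint32_t) a & 0x80000000u) == 0) == (a >= 0));
}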
6543 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6544 equality/inequality test, then return a simplified form of
6545 the test using shifts and logical operations. Otherwise return
6546 NULL. RESULT_TYPE is the desired result type. */
6548 tree
6549 fold_single_bit_test (location_t loc, enum tree_code code,
6550 tree arg0, tree arg1, tree result_type)
6552 /* If this is testing a single bit, we can optimize the test. */
6553 if ((code == NE_EXPR || code == EQ_EXPR)
6554 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6555 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6557 tree inner = TREE_OPERAND (arg0, 0);
6558 tree type = TREE_TYPE (arg0);
6559 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6560 enum machine_mode operand_mode = TYPE_MODE (type);
6561 int ops_unsigned;
6562 tree signed_type, unsigned_type, intermediate_type;
6563 tree tem, one;
6565 /* First, see if we can fold the single bit test into a sign-bit
6566 test. */
6567 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6568 result_type);
6569 if (tem)
6570 return tem;
6572 /* Otherwise we have (A & C) != 0 where C is a single bit,
6573 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6574 Similarly for (A & C) == 0. */
6576 /* If INNER is a right shift of a constant and it plus BITNUM does
6577 not overflow, adjust BITNUM and INNER. */
6578 if (TREE_CODE (inner) == RSHIFT_EXPR
6579 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6580 && bitnum < TYPE_PRECISION (type)
6581 && wi::ltu_p (TREE_OPERAND (inner, 1),
6582 TYPE_PRECISION (type) - bitnum))
6584 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6585 inner = TREE_OPERAND (inner, 0);
6588 /* If we are going to be able to omit the AND below, we must do our
6589 operations as unsigned. If we must use the AND, we have a choice.
6590 Normally unsigned is faster, but for some machines signed is. */
6591 #ifdef LOAD_EXTEND_OP
6592 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6593 && !flag_syntax_only) ? 0 : 1;
6594 #else
6595 ops_unsigned = 1;
6596 #endif
6598 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6599 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6600 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6601 inner = fold_convert_loc (loc, intermediate_type, inner);
6603 if (bitnum != 0)
6604 inner = build2 (RSHIFT_EXPR, intermediate_type,
6605 inner, size_int (bitnum));
6607 one = build_int_cst (intermediate_type, 1);
6609 if (code == EQ_EXPR)
6610 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6612 /* Put the AND last so it can combine with more things. */
6613 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6615 /* Make sure to return the proper type. */
6616 inner = fold_convert_loc (loc, result_type, inner);
6618 return inner;
6620 return NULL_TREE;
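/* Illustrative sketch (standalone, not part of this file; names are
   ours): the shift-and-mask form built above.  For C == 1 << 3,
   (A & C) != 0 becomes (A >> 3) & 1, and the EQ_EXPR case XORs the
   extracted bit with 1.  */
#include <assert.h>
#include <stdint.h>

static void
single_bit_test_example (uint32_t a)
{
  assert (((a & (1u << 3)) != 0) == ((a >> 3) & 1));
  assert (((a & (1u << 3)) == 0) == (((a >> 3) & 1) ^ 1));
}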
6623 /* Check whether we are allowed to reorder operands arg0 and arg1,
6624 such that the evaluation of arg1 occurs before arg0. */
6626 static bool
6627 reorder_operands_p (const_tree arg0, const_tree arg1)
6629 if (! flag_evaluation_order)
6630 return true;
6631 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6632 return true;
6633 return ! TREE_SIDE_EFFECTS (arg0)
6634 && ! TREE_SIDE_EFFECTS (arg1);
6637 /* Test whether it is preferable to swap two operands, ARG0 and
6638 ARG1, for example because ARG0 is an integer constant and ARG1
6639 isn't. If REORDER is true, only recommend swapping if we can
6640 evaluate the operands in reverse order. */
6642 bool
6643 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6645 STRIP_SIGN_NOPS (arg0);
6646 STRIP_SIGN_NOPS (arg1);
6648 if (TREE_CODE (arg1) == INTEGER_CST)
6649 return 0;
6650 if (TREE_CODE (arg0) == INTEGER_CST)
6651 return 1;
6653 if (TREE_CODE (arg1) == REAL_CST)
6654 return 0;
6655 if (TREE_CODE (arg0) == REAL_CST)
6656 return 1;
6658 if (TREE_CODE (arg1) == FIXED_CST)
6659 return 0;
6660 if (TREE_CODE (arg0) == FIXED_CST)
6661 return 1;
6663 if (TREE_CODE (arg1) == COMPLEX_CST)
6664 return 0;
6665 if (TREE_CODE (arg0) == COMPLEX_CST)
6666 return 1;
6668 if (TREE_CONSTANT (arg1))
6669 return 0;
6670 if (TREE_CONSTANT (arg0))
6671 return 1;
6673 if (optimize_function_for_size_p (cfun))
6674 return 0;
6676 if (reorder && flag_evaluation_order
6677 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6678 return 0;
6680 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6681 for commutative and comparison operators. Ensuring a canonical
6682 form allows the optimizers to find additional redundancies without
6683 having to explicitly check for both orderings. */
6684 if (TREE_CODE (arg0) == SSA_NAME
6685 && TREE_CODE (arg1) == SSA_NAME
6686 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6687 return 1;
6689 /* Put SSA_NAMEs last. */
6690 if (TREE_CODE (arg1) == SSA_NAME)
6691 return 0;
6692 if (TREE_CODE (arg0) == SSA_NAME)
6693 return 1;
6695 /* Put variables last. */
6696 if (DECL_P (arg1))
6697 return 0;
6698 if (DECL_P (arg0))
6699 return 1;
6701 return 0;
6704 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6705 ARG0 is extended to a wider type. */
6707 static tree
6708 fold_widened_comparison (location_t loc, enum tree_code code,
6709 tree type, tree arg0, tree arg1)
6711 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6712 tree arg1_unw;
6713 tree shorter_type, outer_type;
6714 tree min, max;
6715 bool above, below;
6717 if (arg0_unw == arg0)
6718 return NULL_TREE;
6719 shorter_type = TREE_TYPE (arg0_unw);
6721 #ifdef HAVE_canonicalize_funcptr_for_compare
6722 /* Disable this optimization if we're casting a function pointer
6723 type on targets that require function pointer canonicalization. */
6724 if (HAVE_canonicalize_funcptr_for_compare
6725 && TREE_CODE (shorter_type) == POINTER_TYPE
6726 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6727 return NULL_TREE;
6728 #endif
6730 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6731 return NULL_TREE;
6733 arg1_unw = get_unwidened (arg1, NULL_TREE);
6735 /* If possible, express the comparison in the shorter mode. */
6736 if ((code == EQ_EXPR || code == NE_EXPR
6737 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6738 && (TREE_TYPE (arg1_unw) == shorter_type
6739 || ((TYPE_PRECISION (shorter_type)
6740 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6741 && (TYPE_UNSIGNED (shorter_type)
6742 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6743 || (TREE_CODE (arg1_unw) == INTEGER_CST
6744 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6745 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6746 && int_fits_type_p (arg1_unw, shorter_type))))
6747 return fold_build2_loc (loc, code, type, arg0_unw,
6748 fold_convert_loc (loc, shorter_type, arg1_unw));
6750 if (TREE_CODE (arg1_unw) != INTEGER_CST
6751 || TREE_CODE (shorter_type) != INTEGER_TYPE
6752 || !int_fits_type_p (arg1_unw, shorter_type))
6753 return NULL_TREE;
6755 /* If we are comparing with an integer that does not fit into the range
6756 of the shorter type, the result is known. */
6757 outer_type = TREE_TYPE (arg1_unw);
6758 min = lower_bound_in_type (outer_type, shorter_type);
6759 max = upper_bound_in_type (outer_type, shorter_type);
6761 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6762 max, arg1_unw));
6763 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6764 arg1_unw, min));
6766 switch (code)
6768 case EQ_EXPR:
6769 if (above || below)
6770 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6771 break;
6773 case NE_EXPR:
6774 if (above || below)
6775 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6776 break;
6778 case LT_EXPR:
6779 case LE_EXPR:
6780 if (above)
6781 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6782 else if (below)
6783 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6785 case GT_EXPR:
6786 case GE_EXPR:
6787 if (above)
6788 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6789 else if (below)
6790 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6792 default:
6793 break;
6796 return NULL_TREE;
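/* Illustrative sketch (standalone, not part of this file; names are
   ours): the widened-comparison folds above.  When the constant cannot
   fit in the unwidened type the result is known outright; otherwise
   the comparison can be done in the narrower type.  */
#include <assert.h>
#include <stdint.h>

static void
widened_compare_example (int16_t s)
{
  assert (((int32_t) s == 100000) == 0);	/* above the range: false */
  assert (((int32_t) s < 100000) == 1);		/* above the range: true */
  assert (((int32_t) s == 42) == (s == 42));	/* fits: compare narrow */
}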
6799 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6800 ARG0 just the signedness is changed. */
6802 static tree
6803 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6804 tree arg0, tree arg1)
6806 tree arg0_inner;
6807 tree inner_type, outer_type;
6809 if (!CONVERT_EXPR_P (arg0))
6810 return NULL_TREE;
6812 outer_type = TREE_TYPE (arg0);
6813 arg0_inner = TREE_OPERAND (arg0, 0);
6814 inner_type = TREE_TYPE (arg0_inner);
6816 #ifdef HAVE_canonicalize_funcptr_for_compare
6817 /* Disable this optimization if we're casting a function pointer
6818 type on targets that require function pointer canonicalization. */
6819 if (HAVE_canonicalize_funcptr_for_compare
6820 && TREE_CODE (inner_type) == POINTER_TYPE
6821 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6822 return NULL_TREE;
6823 #endif
6825 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6826 return NULL_TREE;
6828 if (TREE_CODE (arg1) != INTEGER_CST
6829 && !(CONVERT_EXPR_P (arg1)
6830 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6831 return NULL_TREE;
6833 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6834 && code != NE_EXPR
6835 && code != EQ_EXPR)
6836 return NULL_TREE;
6838 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6839 return NULL_TREE;
6841 if (TREE_CODE (arg1) == INTEGER_CST)
6842 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6843 TREE_OVERFLOW (arg1));
6844 else
6845 arg1 = fold_convert_loc (loc, inner_type, arg1);
6847 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
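/* Illustrative sketch (standalone, not part of this file; names are
   ours): the sign-change fold above.  Between same-precision types,
   equality is unaffected by the signedness of the cast, so the
   conversion on ARG0 can be dropped after refitting the constant.  */
#include <assert.h>
#include <stdint.h>

static void
sign_changed_compare_example (int32_t x)
{
  assert (((uint32_t) x == 5u) == (x == 5));
  /* The constant -1 refits to 0xffffffff in the unsigned view.  */
  assert (((uint32_t) x == 0xffffffffu) == (x == -1));
}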
6850 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6851 the step of the array. Reconstructs s and delta in the case of s *
6852 delta being an integer constant (and thus already folded). ADDR is
6853 the address. OP1 is the multiplicative expression. If the
6854 function succeeds, the new address expression is returned.
6855 Otherwise NULL_TREE is returned. LOC is the location of the
6856 resulting expression. */
6858 static tree
6859 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6861 tree s, delta, step;
6862 tree ref = TREE_OPERAND (addr, 0), pref;
6863 tree ret, pos;
6864 tree itype;
6865 bool mdim = false;
6867 /* Strip the nops that might be added when converting op1 to sizetype. */
6868 STRIP_NOPS (op1);
6870 /* Canonicalize op1 into a possibly non-constant delta
6871 and an INTEGER_CST s. */
6872 if (TREE_CODE (op1) == MULT_EXPR)
6874 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6876 STRIP_NOPS (arg0);
6877 STRIP_NOPS (arg1);
6879 if (TREE_CODE (arg0) == INTEGER_CST)
6881 s = arg0;
6882 delta = arg1;
6884 else if (TREE_CODE (arg1) == INTEGER_CST)
6886 s = arg1;
6887 delta = arg0;
6889 else
6890 return NULL_TREE;
6892 else if (TREE_CODE (op1) == INTEGER_CST)
6894 delta = op1;
6895 s = NULL_TREE;
6897 else
6899 /* Pretend op1 is delta * 1. */
6900 delta = op1;
6901 s = integer_one_node;
6904 /* Handle &x.array the same as we would handle &x.array[0]. */
6905 if (TREE_CODE (ref) == COMPONENT_REF
6906 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6908 tree domain;
6910 /* Remember if this was a multi-dimensional array. */
6911 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6912 mdim = true;
6914 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6915 if (! domain)
6916 goto cont;
6917 itype = TREE_TYPE (domain);
6919 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6920 if (TREE_CODE (step) != INTEGER_CST)
6921 goto cont;
6923 if (s)
6925 if (! tree_int_cst_equal (step, s))
6926 goto cont;
6928 else
6930 /* Check whether delta is a multiple of step. */
6931 tree tmp = div_if_zero_remainder (op1, step);
6932 if (! tmp)
6933 goto cont;
6934 delta = tmp;
6937 /* Only fold here if we can verify we do not overflow one
6938 dimension of a multi-dimensional array. */
6939 if (mdim)
6941 tree tmp;
6943 if (!TYPE_MIN_VALUE (domain)
6944 || !TYPE_MAX_VALUE (domain)
6945 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6946 goto cont;
6948 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6949 fold_convert_loc (loc, itype,
6950 TYPE_MIN_VALUE (domain)),
6951 fold_convert_loc (loc, itype, delta));
6952 if (TREE_CODE (tmp) != INTEGER_CST
6953 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6954 goto cont;
6957 /* We found a suitable component reference. */
6959 pref = TREE_OPERAND (addr, 0);
6960 ret = copy_node (pref);
6961 SET_EXPR_LOCATION (ret, loc);
6963 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6964 fold_build2_loc
6965 (loc, PLUS_EXPR, itype,
6966 fold_convert_loc (loc, itype,
6967 TYPE_MIN_VALUE
6968 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6969 fold_convert_loc (loc, itype, delta)),
6970 NULL_TREE, NULL_TREE);
6971 return build_fold_addr_expr_loc (loc, ret);
6974 cont:
6976 for (;; ref = TREE_OPERAND (ref, 0))
6978 if (TREE_CODE (ref) == ARRAY_REF)
6980 tree domain;
6982 /* Remember if this was a multi-dimensional array. */
6983 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6984 mdim = true;
6986 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6987 if (! domain)
6988 continue;
6989 itype = TREE_TYPE (domain);
6991 step = array_ref_element_size (ref);
6992 if (TREE_CODE (step) != INTEGER_CST)
6993 continue;
6995 if (s)
6997 if (! tree_int_cst_equal (step, s))
6998 continue;
7000 else
7002 /* Check whether delta is a multiple of step. */
7003 tree tmp = div_if_zero_remainder (op1, step);
7004 if (! tmp)
7005 continue;
7006 delta = tmp;
7009 /* Only fold here if we can verify we do not overflow one
7010 dimension of a multi-dimensional array. */
7011 if (mdim)
7013 tree tmp;
7015 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7016 || !TYPE_MAX_VALUE (domain)
7017 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7018 continue;
7020 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7021 fold_convert_loc (loc, itype,
7022 TREE_OPERAND (ref, 1)),
7023 fold_convert_loc (loc, itype, delta));
7024 if (!tmp
7025 || TREE_CODE (tmp) != INTEGER_CST
7026 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7027 continue;
7030 break;
7032 else
7033 mdim = false;
7035 if (!handled_component_p (ref))
7036 return NULL_TREE;
7039 /* We found a suitable array reference. So copy everything up to it,
7040 and replace the index. */
7042 pref = TREE_OPERAND (addr, 0);
7043 ret = copy_node (pref);
7044 SET_EXPR_LOCATION (ret, loc);
7045 pos = ret;
7047 while (pref != ref)
7049 pref = TREE_OPERAND (pref, 0);
7050 TREE_OPERAND (pos, 0) = copy_node (pref);
7051 pos = TREE_OPERAND (pos, 0);
7054 TREE_OPERAND (pos, 1)
7055 = fold_build2_loc (loc, PLUS_EXPR, itype,
7056 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7057 fold_convert_loc (loc, itype, delta));
7058 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
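/* Illustrative sketch (standalone, not part of this file; names are
   ours): the address rewrite above.  POINTER_PLUS_EXPR offsets are in
   bytes, so &a[i] p+ d * sizeof (*a) names the same element as
   &a[i + d].  */
#include <assert.h>

static void
move_mult_to_index_example (int *a, int i, int d)
{
  /* Assumes 0 <= i and 0 <= d with i + d still inside the array.  */
  assert ((int *) ((char *) &a[i] + d * (int) sizeof (int)) == &a[i + d]);
}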
7062 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7063 means A >= Y && A != MAX, but in this case we know that
7064 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7066 static tree
7067 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7069 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7071 if (TREE_CODE (bound) == LT_EXPR)
7072 a = TREE_OPERAND (bound, 0);
7073 else if (TREE_CODE (bound) == GT_EXPR)
7074 a = TREE_OPERAND (bound, 1);
7075 else
7076 return NULL_TREE;
7078 typea = TREE_TYPE (a);
7079 if (!INTEGRAL_TYPE_P (typea)
7080 && !POINTER_TYPE_P (typea))
7081 return NULL_TREE;
7083 if (TREE_CODE (ineq) == LT_EXPR)
7085 a1 = TREE_OPERAND (ineq, 1);
7086 y = TREE_OPERAND (ineq, 0);
7088 else if (TREE_CODE (ineq) == GT_EXPR)
7090 a1 = TREE_OPERAND (ineq, 0);
7091 y = TREE_OPERAND (ineq, 1);
7093 else
7094 return NULL_TREE;
7096 if (TREE_TYPE (a1) != typea)
7097 return NULL_TREE;
7099 if (POINTER_TYPE_P (typea))
7101 /* Convert the pointer types into integer before taking the difference. */
7102 tree ta = fold_convert_loc (loc, ssizetype, a);
7103 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7104 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7106 else
7107 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7109 if (!diff || !integer_onep (diff))
7110 return NULL_TREE;
7112 return fold_build2_loc (loc, GE_EXPR, type, a, y);
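/* Illustrative sketch (standalone, not part of this file; names are
   ours): the non-sharp inequality fold above.  Given A < X, A cannot
   be the maximum value, so A + 1 does not wrap and A + 1 > Y is the
   same as A >= Y.  */
#include <assert.h>

static void
nonsharp_ineq_example (int a, int x, int y)
{
  if (a < x)
    assert ((a + 1 > y) == (a >= y));
}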
7115 /* Fold a sum or difference of at least one multiplication.
7116 Returns the folded tree or NULL if no simplification could be made. */
7118 static tree
7119 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7120 tree arg0, tree arg1)
7122 tree arg00, arg01, arg10, arg11;
7123 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7125 /* (A * C) +- (B * C) -> (A+-B) * C.
7126 (A * C) +- A -> A * (C+-1).
7127 We are most concerned about the case where C is a constant,
7128 but other combinations show up during loop reduction. Since
7129 it is not difficult, try all four possibilities. */
7131 if (TREE_CODE (arg0) == MULT_EXPR)
7133 arg00 = TREE_OPERAND (arg0, 0);
7134 arg01 = TREE_OPERAND (arg0, 1);
7136 else if (TREE_CODE (arg0) == INTEGER_CST)
7138 arg00 = build_one_cst (type);
7139 arg01 = arg0;
7141 else
7143 /* We cannot generate constant 1 for fract. */
7144 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7145 return NULL_TREE;
7146 arg00 = arg0;
7147 arg01 = build_one_cst (type);
7149 if (TREE_CODE (arg1) == MULT_EXPR)
7151 arg10 = TREE_OPERAND (arg1, 0);
7152 arg11 = TREE_OPERAND (arg1, 1);
7154 else if (TREE_CODE (arg1) == INTEGER_CST)
7156 arg10 = build_one_cst (type);
7157 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7158 the purpose of this canonicalization. */
7159 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7160 && negate_expr_p (arg1)
7161 && code == PLUS_EXPR)
7163 arg11 = negate_expr (arg1);
7164 code = MINUS_EXPR;
7166 else
7167 arg11 = arg1;
7169 else
7171 /* We cannot generate constant 1 for fract. */
7172 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7173 return NULL_TREE;
7174 arg10 = arg1;
7175 arg11 = build_one_cst (type);
7177 same = NULL_TREE;
7179 if (operand_equal_p (arg01, arg11, 0))
7180 same = arg01, alt0 = arg00, alt1 = arg10;
7181 else if (operand_equal_p (arg00, arg10, 0))
7182 same = arg00, alt0 = arg01, alt1 = arg11;
7183 else if (operand_equal_p (arg00, arg11, 0))
7184 same = arg00, alt0 = arg01, alt1 = arg10;
7185 else if (operand_equal_p (arg01, arg10, 0))
7186 same = arg01, alt0 = arg00, alt1 = arg11;
7188 /* No identical multiplicands; see if we can find a common
7189 power-of-two factor in non-power-of-two multiplies. This
7190 can help in multi-dimensional array access. */
7191 else if (tree_fits_shwi_p (arg01)
7192 && tree_fits_shwi_p (arg11))
7194 HOST_WIDE_INT int01, int11, tmp;
7195 bool swap = false;
7196 tree maybe_same;
7197 int01 = tree_to_shwi (arg01);
7198 int11 = tree_to_shwi (arg11);
7200 /* Move min of absolute values to int11. */
7201 if (absu_hwi (int01) < absu_hwi (int11))
7203 tmp = int01, int01 = int11, int11 = tmp;
7204 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7205 maybe_same = arg01;
7206 swap = true;
7208 else
7209 maybe_same = arg11;
7211 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7212 /* The remainder should not be a constant, otherwise we
7213 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7214 increased the number of multiplications necessary. */
7215 && TREE_CODE (arg10) != INTEGER_CST)
7217 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7218 build_int_cst (TREE_TYPE (arg00),
7219 int01 / int11));
7220 alt1 = arg10;
7221 same = maybe_same;
7222 if (swap)
7223 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7227 if (same)
7228 return fold_build2_loc (loc, MULT_EXPR, type,
7229 fold_build2_loc (loc, code, type,
7230 fold_convert_loc (loc, type, alt0),
7231 fold_convert_loc (loc, type, alt1)),
7232 fold_convert_loc (loc, type, same));
7234 return NULL_TREE;
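/* Illustrative sketch (standalone, not part of this file; names are
   ours): the factorings above, assuming no overflow in either form.
   Identical multiplicands are pulled out directly; otherwise a common
   power-of-two factor is extracted, as in the second line.  */
#include <assert.h>

static void
plusminus_mult_example (int a, int b, int c, int i, int j)
{
  assert (a * c + b * c == (a + b) * c);	/* (A*C) + (B*C) -> (A+B)*C */
  assert (i * 12 + j * 4 == (i * 3 + j) * 4);	/* common factor of 4 */
}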
7237 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7238 specified by EXPR into the buffer PTR of length LEN bytes.
7239 Return the number of bytes placed in the buffer, or zero
7240 upon failure. */
7242 static int
7243 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7245 tree type = TREE_TYPE (expr);
7246 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7247 int byte, offset, word, words;
7248 unsigned char value;
7250 if ((off == -1 && total_bytes > len)
7251 || off >= total_bytes)
7252 return 0;
7253 if (off == -1)
7254 off = 0;
7255 words = total_bytes / UNITS_PER_WORD;
7257 for (byte = 0; byte < total_bytes; byte++)
7259 int bitpos = byte * BITS_PER_UNIT;
7260 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7261 number of bytes. */
7262 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7264 if (total_bytes > UNITS_PER_WORD)
7266 word = byte / UNITS_PER_WORD;
7267 if (WORDS_BIG_ENDIAN)
7268 word = (words - 1) - word;
7269 offset = word * UNITS_PER_WORD;
7270 if (BYTES_BIG_ENDIAN)
7271 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7272 else
7273 offset += byte % UNITS_PER_WORD;
7275 else
7276 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7277 if (offset >= off
7278 && offset - off < len)
7279 ptr[offset - off] = value;
7281 return MIN (len, total_bytes - off);
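/* As an example of the layout this produces (target parameters
   assumed): on a little-endian target with 8-bit units and 4-byte
   words, encoding a 32-bit INTEGER_CST with value 0x01020304 and
   OFF == -1 stores ptr[0] = 0x04, ptr[1] = 0x03, ptr[2] = 0x02,
   ptr[3] = 0x01 and returns 4; a big-endian target stores the same
   bytes in the opposite order.  */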
7285 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7286 specified by EXPR into the buffer PTR of length LEN bytes.
7287 Return the number of bytes placed in the buffer, or zero
7288 upon failure. */
7290 static int
7291 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7293 tree type = TREE_TYPE (expr);
7294 enum machine_mode mode = TYPE_MODE (type);
7295 int total_bytes = GET_MODE_SIZE (mode);
7296 FIXED_VALUE_TYPE value;
7297 tree i_value, i_type;
7299 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7300 return 0;
7302 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7304 if (NULL_TREE == i_type
7305 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7306 return 0;
7308 value = TREE_FIXED_CST (expr);
7309 i_value = double_int_to_tree (i_type, value.data);
7311 return native_encode_int (i_value, ptr, len, off);
7315 /* Subroutine of native_encode_expr. Encode the REAL_CST
7316 specified by EXPR into the buffer PTR of length LEN bytes.
7317 Return the number of bytes placed in the buffer, or zero
7318 upon failure. */
7320 static int
7321 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7323 tree type = TREE_TYPE (expr);
7324 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7325 int byte, offset, word, words, bitpos;
7326 unsigned char value;
7328 /* There are always 32 bits in each long, no matter the size of
7329 the host's long. We handle floating point representations with
7330 up to 192 bits. */
7331 long tmp[6];
7333 if ((off == -1 && total_bytes > len)
7334 || off >= total_bytes)
7335 return 0;
7336 if (off == -1)
7337 off = 0;
7338 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7340 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7342 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7343 bitpos += BITS_PER_UNIT)
7345 byte = (bitpos / BITS_PER_UNIT) & 3;
7346 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7348 if (UNITS_PER_WORD < 4)
7350 word = byte / UNITS_PER_WORD;
7351 if (WORDS_BIG_ENDIAN)
7352 word = (words - 1) - word;
7353 offset = word * UNITS_PER_WORD;
7354 if (BYTES_BIG_ENDIAN)
7355 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7356 else
7357 offset += byte % UNITS_PER_WORD;
7359 else
7360 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7361 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7362 if (offset >= off
7363 && offset - off < len)
7364 ptr[offset - off] = value;
7366 return MIN (len, total_bytes - off);
7369 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7370 specified by EXPR into the buffer PTR of length LEN bytes.
7371 Return the number of bytes placed in the buffer, or zero
7372 upon failure. */
7374 static int
7375 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7377 int rsize, isize;
7378 tree part;
7380 part = TREE_REALPART (expr);
7381 rsize = native_encode_expr (part, ptr, len, off);
7382 if (off == -1
7383 && rsize == 0)
7384 return 0;
7385 part = TREE_IMAGPART (expr);
7386 if (off != -1)
7387 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7388 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7389 if (off == -1
7390 && isize != rsize)
7391 return 0;
7392 return rsize + isize;
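/* For instance, for a COMPLEX_CST of complex double on a target with
   an 8-byte double (an assumed layout), the real part is encoded
   into bytes 0..7 and the imaginary part into bytes 8..15, so a full
   encoding returns 16.  */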
7396 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7397 specified by EXPR into the buffer PTR of length LEN bytes.
7398 Return the number of bytes placed in the buffer, or zero
7399 upon failure. */
7401 static int
7402 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7404 unsigned i, count;
7405 int size, offset;
7406 tree itype, elem;
7408 offset = 0;
7409 count = VECTOR_CST_NELTS (expr);
7410 itype = TREE_TYPE (TREE_TYPE (expr));
7411 size = GET_MODE_SIZE (TYPE_MODE (itype));
7412 for (i = 0; i < count; i++)
7414 if (off >= size)
7416 off -= size;
7417 continue;
7419 elem = VECTOR_CST_ELT (expr, i);
7420 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7421 if ((off == -1 && res != size)
7422 || res == 0)
7423 return 0;
7424 offset += res;
7425 if (offset >= len)
7426 return offset;
7427 if (off != -1)
7428 off = 0;
7430 return offset;
7434 /* Subroutine of native_encode_expr. Encode the STRING_CST
7435 specified by EXPR into the buffer PTR of length LEN bytes.
7436 Return the number of bytes placed in the buffer, or zero
7437 upon failure. */
7439 static int
7440 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7442 tree type = TREE_TYPE (expr);
7443 HOST_WIDE_INT total_bytes;
7445 if (TREE_CODE (type) != ARRAY_TYPE
7446 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7447 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7448 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7449 return 0;
7450 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7451 if ((off == -1 && total_bytes > len)
7452 || off >= total_bytes)
7453 return 0;
7454 if (off == -1)
7455 off = 0;
7456 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7458 int written = 0;
7459 if (off < TREE_STRING_LENGTH (expr))
7461 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7462 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7464 memset (ptr + written, 0,
7465 MIN (total_bytes - written, len - written));
7467 else
7468 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7469 return MIN (total_bytes - off, len);
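/* For example, assuming the front end gave the STRING_CST "hi" the
   array type char[8] (as happens for char a[8] = "hi"), a full
   encoding copies the three string bytes, including the implicit
   terminating nul, zero-fills the remaining five, and returns 8.  */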
7473 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7474 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7475 buffer PTR of length LEN bytes. If OFF is not -1 then start
7476 the encoding at byte offset OFF and encode at most LEN bytes.
7477 Return the number of bytes placed in the buffer, or zero upon failure. */
7479 int
7480 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7482 switch (TREE_CODE (expr))
7484 case INTEGER_CST:
7485 return native_encode_int (expr, ptr, len, off);
7487 case REAL_CST:
7488 return native_encode_real (expr, ptr, len, off);
7490 case FIXED_CST:
7491 return native_encode_fixed (expr, ptr, len, off);
7493 case COMPLEX_CST:
7494 return native_encode_complex (expr, ptr, len, off);
7496 case VECTOR_CST:
7497 return native_encode_vector (expr, ptr, len, off);
7499 case STRING_CST:
7500 return native_encode_string (expr, ptr, len, off);
7502 default:
7503 return 0;
7508 /* Subroutine of native_interpret_expr. Interpret the contents of
7509 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7510 If the buffer cannot be interpreted, return NULL_TREE. */
7512 static tree
7513 native_interpret_int (tree type, const unsigned char *ptr, int len)
7515 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7517 if (total_bytes > len
7518 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7519 return NULL_TREE;
7521 wide_int result = wi::from_buffer (ptr, total_bytes);
7523 return wide_int_to_tree (type, result);
7527 /* Subroutine of native_interpret_expr. Interpret the contents of
7528 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7529 If the buffer cannot be interpreted, return NULL_TREE. */
7531 static tree
7532 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7534 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7535 double_int result;
7536 FIXED_VALUE_TYPE fixed_value;
7538 if (total_bytes > len
7539 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7540 return NULL_TREE;
7542 result = double_int::from_buffer (ptr, total_bytes);
7543 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7545 return build_fixed (type, fixed_value);
7549 /* Subroutine of native_interpret_expr. Interpret the contents of
7550 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7551 If the buffer cannot be interpreted, return NULL_TREE. */
7553 static tree
7554 native_interpret_real (tree type, const unsigned char *ptr, int len)
7556 enum machine_mode mode = TYPE_MODE (type);
7557 int total_bytes = GET_MODE_SIZE (mode);
7558 int byte, offset, word, words, bitpos;
7559 unsigned char value;
7560 /* There are always 32 bits in each long, no matter the size of
7561 the host's long. We handle floating point representations with
7562 up to 192 bits. */
7563 REAL_VALUE_TYPE r;
7564 long tmp[6];
7566 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7567 if (total_bytes > len || total_bytes > 24)
7568 return NULL_TREE;
7569 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7571 memset (tmp, 0, sizeof (tmp));
7572 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7573 bitpos += BITS_PER_UNIT)
7575 byte = (bitpos / BITS_PER_UNIT) & 3;
7576 if (UNITS_PER_WORD < 4)
7578 word = byte / UNITS_PER_WORD;
7579 if (WORDS_BIG_ENDIAN)
7580 word = (words - 1) - word;
7581 offset = word * UNITS_PER_WORD;
7582 if (BYTES_BIG_ENDIAN)
7583 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7584 else
7585 offset += byte % UNITS_PER_WORD;
7587 else
7588 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7589 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7591 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7594 real_from_target (&r, tmp, mode);
7595 return build_real (type, r);
7599 /* Subroutine of native_interpret_expr. Interpret the contents of
7600 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7601 If the buffer cannot be interpreted, return NULL_TREE. */
7603 static tree
7604 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7606 tree etype, rpart, ipart;
7607 int size;
7609 etype = TREE_TYPE (type);
7610 size = GET_MODE_SIZE (TYPE_MODE (etype));
7611 if (size * 2 > len)
7612 return NULL_TREE;
7613 rpart = native_interpret_expr (etype, ptr, size);
7614 if (!rpart)
7615 return NULL_TREE;
7616 ipart = native_interpret_expr (etype, ptr+size, size);
7617 if (!ipart)
7618 return NULL_TREE;
7619 return build_complex (type, rpart, ipart);
7623 /* Subroutine of native_interpret_expr. Interpret the contents of
7624 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7625 If the buffer cannot be interpreted, return NULL_TREE. */
7627 static tree
7628 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7630 tree etype, elem;
7631 int i, size, count;
7632 tree *elements;
7634 etype = TREE_TYPE (type);
7635 size = GET_MODE_SIZE (TYPE_MODE (etype));
7636 count = TYPE_VECTOR_SUBPARTS (type);
7637 if (size * count > len)
7638 return NULL_TREE;
7640 elements = XALLOCAVEC (tree, count);
7641 for (i = count - 1; i >= 0; i--)
7643 elem = native_interpret_expr (etype, ptr+(i*size), size);
7644 if (!elem)
7645 return NULL_TREE;
7646 elements[i] = elem;
7648 return build_vector (type, elements);
7652 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7653 the buffer PTR of length LEN as a constant of type TYPE. For
7654 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7655 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7656 return NULL_TREE. */
7658 tree
7659 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7661 switch (TREE_CODE (type))
7663 case INTEGER_TYPE:
7664 case ENUMERAL_TYPE:
7665 case BOOLEAN_TYPE:
7666 case POINTER_TYPE:
7667 case REFERENCE_TYPE:
7668 return native_interpret_int (type, ptr, len);
7670 case REAL_TYPE:
7671 return native_interpret_real (type, ptr, len);
7673 case FIXED_POINT_TYPE:
7674 return native_interpret_fixed (type, ptr, len);
7676 case COMPLEX_TYPE:
7677 return native_interpret_complex (type, ptr, len);
7679 case VECTOR_TYPE:
7680 return native_interpret_vector (type, ptr, len);
7682 default:
7683 return NULL_TREE;
7687 /* Returns true if we can interpret the contents of a native encoding
7688 as TYPE. */
7690 static bool
7691 can_native_interpret_type_p (tree type)
7693 switch (TREE_CODE (type))
7695 case INTEGER_TYPE:
7696 case ENUMERAL_TYPE:
7697 case BOOLEAN_TYPE:
7698 case POINTER_TYPE:
7699 case REFERENCE_TYPE:
7700 case FIXED_POINT_TYPE:
7701 case REAL_TYPE:
7702 case COMPLEX_TYPE:
7703 case VECTOR_TYPE:
7704 return true;
7705 default:
7706 return false;
7710 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7711 TYPE at compile-time. If we're unable to perform the conversion
7712 return NULL_TREE. */
7714 static tree
7715 fold_view_convert_expr (tree type, tree expr)
7717 /* We support up to 512-bit values (for V8DFmode). */
7718 unsigned char buffer[64];
7719 int len;
7721 /* Check that the host and target are sane. */
7722 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7723 return NULL_TREE;
7725 len = native_encode_expr (expr, buffer, sizeof (buffer));
7726 if (len == 0)
7727 return NULL_TREE;
7729 return native_interpret_expr (type, buffer, len);
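/* A minimal sketch of how the encode/interpret pair round-trips a
   constant, assuming an IEEE single-precision float and a 32-bit int
   (hypothetical caller, not from this file):

     tree f = build_real (float_type_node, dconst1);
     unsigned char buf[64];
     int n = native_encode_expr (f, buf, sizeof (buf), -1);
     tree i = native_interpret_expr (integer_type_node, buf, n);

   Here n is 4 and i is the INTEGER_CST 0x3f800000, the bit pattern
   of 1.0f, independent of host endianness since both routines work
   in target byte order.  */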
7732 /* Build an expression for the address of T. Folds away INDIRECT_REF
7733 to avoid confusing the gimplify process. */
7735 tree
7736 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7738 /* The size of the object is not relevant when talking about its address. */
7739 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7740 t = TREE_OPERAND (t, 0);
7742 if (TREE_CODE (t) == INDIRECT_REF)
7744 t = TREE_OPERAND (t, 0);
7746 if (TREE_TYPE (t) != ptrtype)
7747 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7749 else if (TREE_CODE (t) == MEM_REF
7750 && integer_zerop (TREE_OPERAND (t, 1)))
7751 return TREE_OPERAND (t, 0);
7752 else if (TREE_CODE (t) == MEM_REF
7753 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7754 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7755 TREE_OPERAND (t, 0),
7756 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7757 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7759 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7761 if (TREE_TYPE (t) != ptrtype)
7762 t = fold_convert_loc (loc, ptrtype, t);
7764 else
7765 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7767 return t;
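/* For example, taking the address of *p folds back to p (with a
   NOP_EXPR to PTRTYPE if needed), and the address of a zero-offset
   MEM_REF [p, 0] likewise folds to p.  */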
7770 /* Build an expression for the address of T. */
7772 tree
7773 build_fold_addr_expr_loc (location_t loc, tree t)
7775 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7777 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7780 static bool vec_cst_ctor_to_array (tree, tree *);
7782 /* Fold a unary expression of code CODE and type TYPE with operand
7783 OP0. Return the folded expression if folding is successful.
7784 Otherwise, return NULL_TREE. */
7786 tree
7787 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7789 tree tem;
7790 tree arg0;
7791 enum tree_code_class kind = TREE_CODE_CLASS (code);
7793 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7794 && TREE_CODE_LENGTH (code) == 1);
7796 arg0 = op0;
7797 if (arg0)
7799 if (CONVERT_EXPR_CODE_P (code)
7800 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7802 /* Don't use STRIP_NOPS, because signedness of argument type
7803 matters. */
7804 STRIP_SIGN_NOPS (arg0);
7806 else
7808 /* Strip any conversions that don't change the mode. This
7809 is safe for every expression, except for a comparison
7810 expression because its signedness is derived from its
7811 operands.
7813 Note that this is done as an internal manipulation within
7814 the constant folder, in order to find the simplest
7815 representation of the arguments so that their form can be
7816 studied. In any case, the appropriate type conversions
7817 should be put back in the tree that will get out of the
7818 constant folder. */
7819 STRIP_NOPS (arg0);
7823 if (TREE_CODE_CLASS (code) == tcc_unary)
7825 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7826 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7827 fold_build1_loc (loc, code, type,
7828 fold_convert_loc (loc, TREE_TYPE (op0),
7829 TREE_OPERAND (arg0, 1))));
7830 else if (TREE_CODE (arg0) == COND_EXPR)
7832 tree arg01 = TREE_OPERAND (arg0, 1);
7833 tree arg02 = TREE_OPERAND (arg0, 2);
7834 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7835 arg01 = fold_build1_loc (loc, code, type,
7836 fold_convert_loc (loc,
7837 TREE_TYPE (op0), arg01));
7838 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7839 arg02 = fold_build1_loc (loc, code, type,
7840 fold_convert_loc (loc,
7841 TREE_TYPE (op0), arg02));
7842 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7843 arg01, arg02);
7845 /* If this was a conversion, and all we did was to move it
7846 inside the COND_EXPR, bring it back out. But leave it if
7847 it is a conversion from integer to integer and the
7848 result precision is no wider than a word since such a
7849 conversion is cheap and may be optimized away by combine,
7850 while it couldn't if it were outside the COND_EXPR. Then return
7851 so we don't get into an infinite recursion loop taking the
7852 conversion out and then back in. */
7854 if ((CONVERT_EXPR_CODE_P (code)
7855 || code == NON_LVALUE_EXPR)
7856 && TREE_CODE (tem) == COND_EXPR
7857 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7858 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7859 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7860 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7861 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7862 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7863 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7864 && (INTEGRAL_TYPE_P
7865 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7866 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7867 || flag_syntax_only))
7868 tem = build1_loc (loc, code, type,
7869 build3 (COND_EXPR,
7870 TREE_TYPE (TREE_OPERAND
7871 (TREE_OPERAND (tem, 1), 0)),
7872 TREE_OPERAND (tem, 0),
7873 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7874 TREE_OPERAND (TREE_OPERAND (tem, 2),
7875 0)));
7876 return tem;
7880 switch (code)
7882 case PAREN_EXPR:
7883 /* Re-association barriers around constants and other re-association
7884 barriers can be removed. */
7885 if (CONSTANT_CLASS_P (op0)
7886 || TREE_CODE (op0) == PAREN_EXPR)
7887 return fold_convert_loc (loc, type, op0);
7888 return NULL_TREE;
7890 case NON_LVALUE_EXPR:
7891 if (!maybe_lvalue_p (op0))
7892 return fold_convert_loc (loc, type, op0);
7893 return NULL_TREE;
7895 CASE_CONVERT:
7896 case FLOAT_EXPR:
7897 case FIX_TRUNC_EXPR:
7898 if (TREE_TYPE (op0) == type)
7899 return op0;
7901 if (COMPARISON_CLASS_P (op0))
7903 /* If we have (type) (a CMP b) and type is an integral type, return
7904 new expression involving the new type. Canonicalize
7905 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7906 non-integral type.
7907 Do not fold the result as that would not simplify further; also,
7908 folding again results in infinite recursion. */
7909 if (TREE_CODE (type) == BOOLEAN_TYPE)
7910 return build2_loc (loc, TREE_CODE (op0), type,
7911 TREE_OPERAND (op0, 0),
7912 TREE_OPERAND (op0, 1));
7913 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7914 && TREE_CODE (type) != VECTOR_TYPE)
7915 return build3_loc (loc, COND_EXPR, type, op0,
7916 constant_boolean_node (true, type),
7917 constant_boolean_node (false, type));
7920 /* Handle cases of two conversions in a row. */
7921 if (CONVERT_EXPR_P (op0))
7923 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7924 tree inter_type = TREE_TYPE (op0);
7925 int inside_int = INTEGRAL_TYPE_P (inside_type);
7926 int inside_ptr = POINTER_TYPE_P (inside_type);
7927 int inside_float = FLOAT_TYPE_P (inside_type);
7928 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7929 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7930 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7931 int inter_int = INTEGRAL_TYPE_P (inter_type);
7932 int inter_ptr = POINTER_TYPE_P (inter_type);
7933 int inter_float = FLOAT_TYPE_P (inter_type);
7934 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7935 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7936 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7937 int final_int = INTEGRAL_TYPE_P (type);
7938 int final_ptr = POINTER_TYPE_P (type);
7939 int final_float = FLOAT_TYPE_P (type);
7940 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7941 unsigned int final_prec = TYPE_PRECISION (type);
7942 int final_unsignedp = TYPE_UNSIGNED (type);
7944 /* In addition to the cases of two conversions in a row
7945 handled below, if we are converting something to its own
7946 type via an object of identical or wider precision, neither
7947 conversion is needed. */
7948 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7949 && (((inter_int || inter_ptr) && final_int)
7950 || (inter_float && final_float))
7951 && inter_prec >= final_prec)
7952 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7954 /* Likewise, if the intermediate and initial types are either both
7955 float or both integer, we don't need the middle conversion if the
7956 former is wider than the latter and doesn't change the signedness
7957 (for integers). Avoid this if the final type is a pointer since
7958 then we sometimes need the middle conversion. Likewise if the
7959 final type has a precision not equal to the size of its mode. */
7960 if (((inter_int && inside_int)
7961 || (inter_float && inside_float)
7962 || (inter_vec && inside_vec))
7963 && inter_prec >= inside_prec
7964 && (inter_float || inter_vec
7965 || inter_unsignedp == inside_unsignedp)
7966 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7967 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7968 && ! final_ptr
7969 && (! final_vec || inter_prec == inside_prec))
7970 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7972 /* If we have a sign-extension of a zero-extended value, we can
7973 replace that by a single zero-extension. Likewise if the
7974 final conversion does not change precision we can drop the
7975 intermediate conversion. */
7976 if (inside_int && inter_int && final_int
7977 && ((inside_prec < inter_prec && inter_prec < final_prec
7978 && inside_unsignedp && !inter_unsignedp)
7979 || final_prec == inter_prec))
7980 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
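/* For instance (type sizes assumed), (int) (short) (unsigned char) x
   has inside_prec 8 < inter_prec 16 < final_prec 32 with an unsigned
   inside type and a signed intermediate one, so it folds to the
   single zero-extension (int) (unsigned char) x.  */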
7982 /* Two conversions in a row are not needed unless:
7983 - some conversion is floating-point (overstrict for now), or
7984 - some conversion is a vector (overstrict for now), or
7985 - the intermediate type is narrower than both initial and
7986 final, or
7987 - the intermediate type and innermost type differ in signedness,
7988 and the outermost type is wider than the intermediate, or
7989 - the initial type is a pointer type and the precisions of the
7990 intermediate and final types differ, or
7991 - the final type is a pointer type and the precisions of the
7992 initial and intermediate types differ. */
7993 if (! inside_float && ! inter_float && ! final_float
7994 && ! inside_vec && ! inter_vec && ! final_vec
7995 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7996 && ! (inside_int && inter_int
7997 && inter_unsignedp != inside_unsignedp
7998 && inter_prec < final_prec)
7999 && ((inter_unsignedp && inter_prec > inside_prec)
8000 == (final_unsignedp && final_prec > inter_prec))
8001 && ! (inside_ptr && inter_prec != final_prec)
8002 && ! (final_ptr && inside_prec != inter_prec)
8003 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8004 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8005 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8008 /* Handle (T *)&A.B.C for A being of type T and B and C
8009 living at offset zero. This occurs frequently in
8010 C++ upcasting and then accessing the base. */
8011 if (TREE_CODE (op0) == ADDR_EXPR
8012 && POINTER_TYPE_P (type)
8013 && handled_component_p (TREE_OPERAND (op0, 0)))
8015 HOST_WIDE_INT bitsize, bitpos;
8016 tree offset;
8017 enum machine_mode mode;
8018 int unsignedp, volatilep;
8019 tree base = TREE_OPERAND (op0, 0);
8020 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8021 &mode, &unsignedp, &volatilep, false);
8022 /* If the reference was to a (constant) zero offset, we can use
8023 the address of the base if it has the same base type
8024 as the result type and the pointer type is unqualified. */
8025 if (! offset && bitpos == 0
8026 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8027 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8028 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8029 return fold_convert_loc (loc, type,
8030 build_fold_addr_expr_loc (loc, base));
8033 if (TREE_CODE (op0) == MODIFY_EXPR
8034 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8035 /* Detect assigning a bitfield. */
8036 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8037 && DECL_BIT_FIELD
8038 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8040 /* Don't leave an assignment inside a conversion
8041 unless assigning a bitfield. */
8042 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8043 /* First do the assignment, then return converted constant. */
8044 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8045 TREE_NO_WARNING (tem) = 1;
8046 TREE_USED (tem) = 1;
8047 return tem;
8050 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8051 constants (if x has signed type, the sign bit cannot be set
8052 in c). This folds extension into the BIT_AND_EXPR.
8053 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8054 very likely don't have maximal range for their precision and this
8055 transformation effectively doesn't preserve non-maximal ranges. */
8056 if (TREE_CODE (type) == INTEGER_TYPE
8057 && TREE_CODE (op0) == BIT_AND_EXPR
8058 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8060 tree and_expr = op0;
8061 tree and0 = TREE_OPERAND (and_expr, 0);
8062 tree and1 = TREE_OPERAND (and_expr, 1);
8063 int change = 0;
8065 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8066 || (TYPE_PRECISION (type)
8067 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8068 change = 1;
8069 else if (TYPE_PRECISION (TREE_TYPE (and1))
8070 <= HOST_BITS_PER_WIDE_INT
8071 && tree_fits_uhwi_p (and1))
8073 unsigned HOST_WIDE_INT cst;
8075 cst = tree_to_uhwi (and1);
8076 cst &= HOST_WIDE_INT_M1U
8077 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8078 change = (cst == 0);
8079 #ifdef LOAD_EXTEND_OP
8080 if (change
8081 && !flag_syntax_only
8082 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8083 == ZERO_EXTEND))
8085 tree uns = unsigned_type_for (TREE_TYPE (and0));
8086 and0 = fold_convert_loc (loc, uns, and0);
8087 and1 = fold_convert_loc (loc, uns, and1);
8089 #endif
8091 if (change)
8093 tem = force_fit_type (type, wi::to_widest (and1), 0,
8094 TREE_OVERFLOW (and1));
8095 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8096 fold_convert_loc (loc, type, and0), tem);
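/* For instance (types assumed): with and0 of signed 8-bit type, a
   conversion of and0 & 0x7f to a 32-bit INTEGER_TYPE T becomes
   (T) and0 & 0x7f; the constant has the sign bit of the narrow type
   clear, so extending and0 first cannot change the masked value.  */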
8100 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8101 when one of the new casts will fold away. Conservatively we assume
8102 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8103 if (POINTER_TYPE_P (type)
8104 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8105 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8106 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8107 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8108 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8110 tree arg00 = TREE_OPERAND (arg0, 0);
8111 tree arg01 = TREE_OPERAND (arg0, 1);
8113 return fold_build_pointer_plus_loc
8114 (loc, fold_convert_loc (loc, type, arg00), arg01);
8117 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8118 of the same precision, and X is an integer type not narrower than
8119 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8120 if (INTEGRAL_TYPE_P (type)
8121 && TREE_CODE (op0) == BIT_NOT_EXPR
8122 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8123 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8124 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8126 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8127 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8128 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8129 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8130 fold_convert_loc (loc, type, tem));
8133 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8134 type of X and Y (integer types only). */
8135 if (INTEGRAL_TYPE_P (type)
8136 && TREE_CODE (op0) == MULT_EXPR
8137 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8138 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8140 /* Be careful not to introduce new overflows. */
8141 tree mult_type;
8142 if (TYPE_OVERFLOW_WRAPS (type))
8143 mult_type = type;
8144 else
8145 mult_type = unsigned_type_for (type);
8147 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8149 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8150 fold_convert_loc (loc, mult_type,
8151 TREE_OPERAND (op0, 0)),
8152 fold_convert_loc (loc, mult_type,
8153 TREE_OPERAND (op0, 1)));
8154 return fold_convert_loc (loc, type, tem);
8158 tem = fold_convert_const (code, type, arg0);
8159 return tem ? tem : NULL_TREE;
8161 case ADDR_SPACE_CONVERT_EXPR:
8162 if (integer_zerop (arg0))
8163 return fold_convert_const (code, type, arg0);
8164 return NULL_TREE;
8166 case FIXED_CONVERT_EXPR:
8167 tem = fold_convert_const (code, type, arg0);
8168 return tem ? tem : NULL_TREE;
8170 case VIEW_CONVERT_EXPR:
8171 if (TREE_TYPE (op0) == type)
8172 return op0;
8173 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8174 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8175 type, TREE_OPERAND (op0, 0));
8176 if (TREE_CODE (op0) == MEM_REF)
8177 return fold_build2_loc (loc, MEM_REF, type,
8178 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8180 /* For integral conversions with the same precision or pointer
8181 conversions use a NOP_EXPR instead. */
8182 if ((INTEGRAL_TYPE_P (type)
8183 || POINTER_TYPE_P (type))
8184 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8185 || POINTER_TYPE_P (TREE_TYPE (op0)))
8186 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8187 return fold_convert_loc (loc, type, op0);
8189 /* Strip inner integral conversions that do not change the precision. */
8190 if (CONVERT_EXPR_P (op0)
8191 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8192 || POINTER_TYPE_P (TREE_TYPE (op0)))
8193 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8194 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8195 && (TYPE_PRECISION (TREE_TYPE (op0))
8196 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8197 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8198 type, TREE_OPERAND (op0, 0));
8200 return fold_view_convert_expr (type, op0);
8202 case NEGATE_EXPR:
8203 tem = fold_negate_expr (loc, arg0);
8204 if (tem)
8205 return fold_convert_loc (loc, type, tem);
8206 return NULL_TREE;
8208 case ABS_EXPR:
8209 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8210 return fold_abs_const (arg0, type);
8211 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8212 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8213 /* Convert fabs((double)float) into (double)fabsf(float). */
8214 else if (TREE_CODE (arg0) == NOP_EXPR
8215 && TREE_CODE (type) == REAL_TYPE)
8217 tree targ0 = strip_float_extensions (arg0);
8218 if (targ0 != arg0)
8219 return fold_convert_loc (loc, type,
8220 fold_build1_loc (loc, ABS_EXPR,
8221 TREE_TYPE (targ0),
8222 targ0));
8224 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8225 else if (TREE_CODE (arg0) == ABS_EXPR)
8226 return arg0;
8227 else if (tree_expr_nonnegative_p (arg0))
8228 return arg0;
8230 /* Strip sign ops from argument. */
8231 if (TREE_CODE (type) == REAL_TYPE)
8233 tem = fold_strip_sign_ops (arg0);
8234 if (tem)
8235 return fold_build1_loc (loc, ABS_EXPR, type,
8236 fold_convert_loc (loc, type, tem));
8238 return NULL_TREE;
8240 case CONJ_EXPR:
8241 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8242 return fold_convert_loc (loc, type, arg0);
8243 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8245 tree itype = TREE_TYPE (type);
8246 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8247 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8248 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8249 negate_expr (ipart));
8251 if (TREE_CODE (arg0) == COMPLEX_CST)
8253 tree itype = TREE_TYPE (type);
8254 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8255 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8256 return build_complex (type, rpart, negate_expr (ipart));
8258 if (TREE_CODE (arg0) == CONJ_EXPR)
8259 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8260 return NULL_TREE;
8262 case BIT_NOT_EXPR:
8263 if (TREE_CODE (arg0) == INTEGER_CST)
8264 return fold_not_const (arg0, type);
8265 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8266 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8267 /* Convert ~ (-A) to A - 1. */
8268 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8269 return fold_build2_loc (loc, MINUS_EXPR, type,
8270 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8271 build_int_cst (type, 1));
8272 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8273 else if (INTEGRAL_TYPE_P (type)
8274 && ((TREE_CODE (arg0) == MINUS_EXPR
8275 && integer_onep (TREE_OPERAND (arg0, 1)))
8276 || (TREE_CODE (arg0) == PLUS_EXPR
8277 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8278 return fold_build1_loc (loc, NEGATE_EXPR, type,
8279 fold_convert_loc (loc, type,
8280 TREE_OPERAND (arg0, 0)));
8281 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8282 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8283 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8284 fold_convert_loc (loc, type,
8285 TREE_OPERAND (arg0, 0)))))
8286 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8287 fold_convert_loc (loc, type,
8288 TREE_OPERAND (arg0, 1)));
8289 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8290 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8291 fold_convert_loc (loc, type,
8292 TREE_OPERAND (arg0, 1)))))
8293 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8294 fold_convert_loc (loc, type,
8295 TREE_OPERAND (arg0, 0)), tem);
8296 /* Perform BIT_NOT_EXPR on each element individually. */
8297 else if (TREE_CODE (arg0) == VECTOR_CST)
8299 tree *elements;
8300 tree elem;
8301 unsigned count = VECTOR_CST_NELTS (arg0), i;
8303 elements = XALLOCAVEC (tree, count);
8304 for (i = 0; i < count; i++)
8306 elem = VECTOR_CST_ELT (arg0, i);
8307 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8308 if (elem == NULL_TREE)
8309 break;
8310 elements[i] = elem;
8312 if (i == count)
8313 return build_vector (type, elements);
8315 else if (COMPARISON_CLASS_P (arg0)
8316 && (VECTOR_TYPE_P (type)
8317 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8319 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8320 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8321 HONOR_NANS (TYPE_MODE (op_type)));
8322 if (subcode != ERROR_MARK)
8323 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8324 TREE_OPERAND (arg0, 1));
8328 return NULL_TREE;
8330 case TRUTH_NOT_EXPR:
8331 /* Note that the operand of this must be an int
8332 and its values must be 0 or 1.
8333 ("true" is a fixed value perhaps depending on the language,
8334 but we don't handle values other than 1 correctly yet.) */
8335 tem = fold_truth_not_expr (loc, arg0);
8336 if (!tem)
8337 return NULL_TREE;
8338 return fold_convert_loc (loc, type, tem);
8340 case REALPART_EXPR:
8341 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8342 return fold_convert_loc (loc, type, arg0);
8343 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8344 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8345 TREE_OPERAND (arg0, 1));
8346 if (TREE_CODE (arg0) == COMPLEX_CST)
8347 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8348 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8350 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8351 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8352 fold_build1_loc (loc, REALPART_EXPR, itype,
8353 TREE_OPERAND (arg0, 0)),
8354 fold_build1_loc (loc, REALPART_EXPR, itype,
8355 TREE_OPERAND (arg0, 1)));
8356 return fold_convert_loc (loc, type, tem);
8358 if (TREE_CODE (arg0) == CONJ_EXPR)
8360 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8361 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8362 TREE_OPERAND (arg0, 0));
8363 return fold_convert_loc (loc, type, tem);
8365 if (TREE_CODE (arg0) == CALL_EXPR)
8367 tree fn = get_callee_fndecl (arg0);
8368 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8369 switch (DECL_FUNCTION_CODE (fn))
8371 CASE_FLT_FN (BUILT_IN_CEXPI):
8372 fn = mathfn_built_in (type, BUILT_IN_COS);
8373 if (fn)
8374 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8375 break;
8377 default:
8378 break;
8381 return NULL_TREE;
8383 case IMAGPART_EXPR:
8384 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8385 return build_zero_cst (type);
8386 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8387 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8388 TREE_OPERAND (arg0, 0));
8389 if (TREE_CODE (arg0) == COMPLEX_CST)
8390 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8391 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8393 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8394 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8395 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8396 TREE_OPERAND (arg0, 0)),
8397 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8398 TREE_OPERAND (arg0, 1)));
8399 return fold_convert_loc (loc, type, tem);
8401 if (TREE_CODE (arg0) == CONJ_EXPR)
8403 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8404 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8405 return fold_convert_loc (loc, type, negate_expr (tem));
8407 if (TREE_CODE (arg0) == CALL_EXPR)
8409 tree fn = get_callee_fndecl (arg0);
8410 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8411 switch (DECL_FUNCTION_CODE (fn))
8413 CASE_FLT_FN (BUILT_IN_CEXPI):
8414 fn = mathfn_built_in (type, BUILT_IN_SIN);
8415 if (fn)
8416 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8417 break;
8419 default:
8420 break;
8423 return NULL_TREE;
8425 case INDIRECT_REF:
8426 /* Fold *&X to X if X is an lvalue. */
8427 if (TREE_CODE (op0) == ADDR_EXPR)
8429 tree op00 = TREE_OPERAND (op0, 0);
8430 if ((TREE_CODE (op00) == VAR_DECL
8431 || TREE_CODE (op00) == PARM_DECL
8432 || TREE_CODE (op00) == RESULT_DECL)
8433 && !TREE_READONLY (op00))
8434 return op00;
8436 return NULL_TREE;
8438 case VEC_UNPACK_LO_EXPR:
8439 case VEC_UNPACK_HI_EXPR:
8440 case VEC_UNPACK_FLOAT_LO_EXPR:
8441 case VEC_UNPACK_FLOAT_HI_EXPR:
8443 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8444 tree *elts;
8445 enum tree_code subcode;
8447 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8448 if (TREE_CODE (arg0) != VECTOR_CST)
8449 return NULL_TREE;
8451 elts = XALLOCAVEC (tree, nelts * 2);
8452 if (!vec_cst_ctor_to_array (arg0, elts))
8453 return NULL_TREE;
8455 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8456 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8457 elts += nelts;
8459 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8460 subcode = NOP_EXPR;
8461 else
8462 subcode = FLOAT_EXPR;
8464 for (i = 0; i < nelts; i++)
8466 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8467 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8468 return NULL_TREE;
8471 return build_vector (type, elts);
8474 case REDUC_MIN_EXPR:
8475 case REDUC_MAX_EXPR:
8476 case REDUC_PLUS_EXPR:
8478 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8479 tree *elts;
8480 enum tree_code subcode;
8482 if (TREE_CODE (op0) != VECTOR_CST)
8483 return NULL_TREE;
8485 elts = XALLOCAVEC (tree, nelts);
8486 if (!vec_cst_ctor_to_array (op0, elts))
8487 return NULL_TREE;
8489 switch (code)
8491 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8492 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8493 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8494 default: gcc_unreachable ();
8497 for (i = 1; i < nelts; i++)
8499 elts[0] = const_binop (subcode, elts[0], elts[i]);
8500 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8501 return NULL_TREE;
8502 elts[i] = build_zero_cst (TREE_TYPE (type));
8505 return build_vector (type, elts);
8508 default:
8509 return NULL_TREE;
8510 } /* switch (code) */
8514 /* If the operation was a conversion do _not_ mark a resulting constant
8515 with TREE_OVERFLOW if the original constant was not. These conversions
8516 have implementation defined behavior and retaining the TREE_OVERFLOW
8517 flag here would confuse later passes such as VRP. */
8518 tree
8519 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8520 tree type, tree op0)
8522 tree res = fold_unary_loc (loc, code, type, op0);
8523 if (res
8524 && TREE_CODE (res) == INTEGER_CST
8525 && TREE_CODE (op0) == INTEGER_CST
8526 && CONVERT_EXPR_CODE_P (code))
8527 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8529 return res;
8532 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8533 operands OP0 and OP1. LOC is the location of the resulting expression.
8534 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8535 Return the folded expression if folding is successful. Otherwise,
8536 return NULL_TREE. */
8537 static tree
8538 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8539 tree arg0, tree arg1, tree op0, tree op1)
8541 tree tem;
8543 /* We only do these simplifications if we are optimizing. */
8544 if (!optimize)
8545 return NULL_TREE;
8547 /* Check for things like (A || B) && (A || C). We can convert this
8548 to A || (B && C). Note that either operator can be any of the four
8549 truth and/or operations and the transformation will still be
8550 valid. Also note that we only care about order for the
8551 ANDIF and ORIF operators. If B contains side effects, this
8552 might change the truth-value of A. */
8553 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8554 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8555 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8556 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8557 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8558 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8560 tree a00 = TREE_OPERAND (arg0, 0);
8561 tree a01 = TREE_OPERAND (arg0, 1);
8562 tree a10 = TREE_OPERAND (arg1, 0);
8563 tree a11 = TREE_OPERAND (arg1, 1);
8564 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8565 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8566 && (code == TRUTH_AND_EXPR
8567 || code == TRUTH_OR_EXPR));
8569 if (operand_equal_p (a00, a10, 0))
8570 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8571 fold_build2_loc (loc, code, type, a01, a11));
8572 else if (commutative && operand_equal_p (a00, a11, 0))
8573 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8574 fold_build2_loc (loc, code, type, a01, a10));
8575 else if (commutative && operand_equal_p (a01, a10, 0))
8576 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8577 fold_build2_loc (loc, code, type, a00, a11));
8579 /* This case is tricky because we must either have commutative
8580 operators or else A10 must not have side-effects. */
8582 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8583 && operand_equal_p (a01, a11, 0))
8584 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8585 fold_build2_loc (loc, code, type, a00, a10),
8586 a01);
8589 /* See if we can build a range comparison. */
8590 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8591 return tem;
8593 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8594 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8596 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8597 if (tem)
8598 return fold_build2_loc (loc, code, type, tem, arg1);
8601 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8602 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8604 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8605 if (tem)
8606 return fold_build2_loc (loc, code, type, arg0, tem);
8609 /* Check for the possibility of merging component references. If our
8610 lhs is another similar operation, try to merge its rhs with our
8611 rhs. Then try to merge our lhs and rhs. */
8612 if (TREE_CODE (arg0) == code
8613 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8614 TREE_OPERAND (arg0, 1), arg1)))
8615 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8617 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8618 return tem;
8620 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8621 && (code == TRUTH_AND_EXPR
8622 || code == TRUTH_ANDIF_EXPR
8623 || code == TRUTH_OR_EXPR
8624 || code == TRUTH_ORIF_EXPR))
8626 enum tree_code ncode, icode;
8628 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8629 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8630 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8632 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8633 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8634 We don't want to pack more than two leaves into a non-IF AND/OR
8635 expression.
8636 If the tree code of the left-hand operand isn't an AND/OR-IF code
8637 and isn't equal to ICODE, then we don't want to add the right-hand
8638 operand. If the inner right-hand side of the left-hand operand
8639 has side effects, or isn't simple, then we can't add to it, as
8640 otherwise we might destroy the if-sequence.
8641 if (TREE_CODE (arg0) == icode
8642 && simple_operand_p_2 (arg1)
8643 /* Needed for sequence points to handle trapping operations
8644 and side effects. */
8645 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8647 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8648 arg1);
8649 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8650 tem);
8652 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8653 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8654 else if (TREE_CODE (arg1) == icode
8655 && simple_operand_p_2 (arg0)
8656 /* Needed for sequence points to handle trapping operations
8657 and side effects. */
8658 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8660 tem = fold_build2_loc (loc, ncode, type,
8661 arg0, TREE_OPERAND (arg1, 0));
8662 return fold_build2_loc (loc, icode, type, tem,
8663 TREE_OPERAND (arg1, 1));
8665 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8666 into (A OR B).
8667 For sequence point consistency, we need to check for trapping
8668 and side effects. */
8669 else if (code == icode && simple_operand_p_2 (arg0)
8670 && simple_operand_p_2 (arg1))
8671 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8674 return NULL_TREE;
8677 /* Fold a binary expression of code CODE and type TYPE with operands
8678 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8679 Return the folded expression if folding is successful. Otherwise,
8680 return NULL_TREE. */
8682 static tree
8683 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8685 enum tree_code compl_code;
8687 if (code == MIN_EXPR)
8688 compl_code = MAX_EXPR;
8689 else if (code == MAX_EXPR)
8690 compl_code = MIN_EXPR;
8691 else
8692 gcc_unreachable ();
8694 /* MIN (MAX (a, b), b) == b. */
8695 if (TREE_CODE (op0) == compl_code
8696 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8697 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8699 /* MIN (MAX (b, a), b) == b. */
8700 if (TREE_CODE (op0) == compl_code
8701 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8702 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8703 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8705 /* MIN (a, MAX (a, b)) == a. */
8706 if (TREE_CODE (op1) == compl_code
8707 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8708 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8709 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8711 /* MIN (a, MAX (b, a)) == a. */
8712 if (TREE_CODE (op1) == compl_code
8713 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8714 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8715 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8717 return NULL_TREE;
8720 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8721 by changing CODE to reduce the magnitude of constants involved in
8722 ARG0 of the comparison.
8723 Returns a canonicalized comparison tree if a simplification was
8724 possible, otherwise returns NULL_TREE.
8725 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8726 valid if signed overflow is undefined. */
8728 static tree
8729 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8730 tree arg0, tree arg1,
8731 bool *strict_overflow_p)
8733 enum tree_code code0 = TREE_CODE (arg0);
8734 tree t, cst0 = NULL_TREE;
8735 int sgn0;
8736 bool swap = false;
8738 /* Match A +- CST code arg1 and CST code arg1. We can change the
8739 first form only if overflow is undefined. */
8740 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8741 /* In principle pointers also have undefined overflow behavior,
8742 but that causes problems elsewhere. */
8743 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8744 && (code0 == MINUS_EXPR
8745 || code0 == PLUS_EXPR)
8746 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8747 || code0 == INTEGER_CST))
8748 return NULL_TREE;
8750 /* Identify the constant in arg0 and its sign. */
8751 if (code0 == INTEGER_CST)
8752 cst0 = arg0;
8753 else
8754 cst0 = TREE_OPERAND (arg0, 1);
8755 sgn0 = tree_int_cst_sgn (cst0);
8757 /* Overflowed constants and zero will cause problems. */
8758 if (integer_zerop (cst0)
8759 || TREE_OVERFLOW (cst0))
8760 return NULL_TREE;
8762 /* See if we can reduce the magnitude of the constant in
8763 arg0 by changing the comparison code. */
8764 if (code0 == INTEGER_CST)
8766 /* CST <= arg1 -> CST-1 < arg1. */
8767 if (code == LE_EXPR && sgn0 == 1)
8768 code = LT_EXPR;
8769 /* -CST < arg1 -> -CST-1 <= arg1. */
8770 else if (code == LT_EXPR && sgn0 == -1)
8771 code = LE_EXPR;
8772 /* CST > arg1 -> CST-1 >= arg1. */
8773 else if (code == GT_EXPR && sgn0 == 1)
8774 code = GE_EXPR;
8775 /* -CST >= arg1 -> -CST-1 > arg1. */
8776 else if (code == GE_EXPR && sgn0 == -1)
8777 code = GT_EXPR;
8778 else
8779 return NULL_TREE;
8780 /* arg1 code' CST' might be more canonical. */
8781 swap = true;
8783 else
8785 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8786 if (code == LT_EXPR
8787 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8788 code = LE_EXPR;
8789 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8790 else if (code == GT_EXPR
8791 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8792 code = GE_EXPR;
8793 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8794 else if (code == LE_EXPR
8795 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8796 code = LT_EXPR;
8797 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8798 else if (code == GE_EXPR
8799 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8800 code = GT_EXPR;
8801 else
8802 return NULL_TREE;
8803 *strict_overflow_p = true;
8806 /* Now build the constant reduced in magnitude. But not if that
8807 would produce one outside of its type's range. */
8808 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8809 && ((sgn0 == 1
8810 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8811 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8812 || (sgn0 == -1
8813 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8814 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8815 /* We cannot swap the comparison here as that would cause us to
8816 endlessly recurse. */
8817 return NULL_TREE;
8819 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8820 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8821 if (code0 != INTEGER_CST)
8822 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8823 t = fold_convert (TREE_TYPE (arg1), t);
8825 /* If swapping might yield a more canonical form, do so. */
8826 if (swap)
8827 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8828 else
8829 return fold_build2_loc (loc, code, type, t, arg1);
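/* Two assumed examples: a - 5 < b becomes a - 4 <= b, valid only when
   signed overflow is undefined (hence *STRICT_OVERFLOW_P), and the
   constant-only form 10 <= b becomes 9 < b, which the final swap
   turns into b > 9 so the constant ends up last.  */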
8832 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8833 overflow further. Try to decrease the magnitude of constants involved
8834 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8835 and put sole constants at the second argument position.
8836 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8838 static tree
8839 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8840 tree arg0, tree arg1)
8842 tree t;
8843 bool strict_overflow_p;
8844 const char * const warnmsg = G_("assuming signed overflow does not occur "
8845 "when reducing constant in comparison");
8847 /* Try canonicalization by simplifying arg0. */
8848 strict_overflow_p = false;
8849 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8850 &strict_overflow_p);
8851 if (t)
8853 if (strict_overflow_p)
8854 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8855 return t;
8858 /* Try canonicalization by simplifying arg1 using the swapped
8859 comparison. */
8860 code = swap_tree_comparison (code);
8861 strict_overflow_p = false;
8862 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8863 &strict_overflow_p);
8864 if (t && strict_overflow_p)
8865 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8866 return t;
8869 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8870 space. This is used to avoid issuing overflow warnings for
8871 expressions like &p->x, which cannot wrap. */
8873 static bool
8874 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8876 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8877 return true;
8879 if (bitpos < 0)
8880 return true;
8882 wide_int wi_offset;
8883 int precision = TYPE_PRECISION (TREE_TYPE (base));
8884 if (offset == NULL_TREE)
8885 wi_offset = wi::zero (precision);
8886 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8887 return true;
8888 else
8889 wi_offset = offset;
8891 bool overflow;
8892 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8893 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8894 if (overflow)
8895 return true;
8897 if (!wi::fits_uhwi_p (total))
8898 return true;
8900 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8901 if (size <= 0)
8902 return true;
8904 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8905 array. */
8906 if (TREE_CODE (base) == ADDR_EXPR)
8908 HOST_WIDE_INT base_size;
8910 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8911 if (base_size > 0 && size < base_size)
8912 size = base_size;
8915 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
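/* For example (8-bit units and sizes assumed): for BASE of type
   struct S * with sizeof (struct S) == 16, OFFSET == NULL_TREE and
   BITPOS == 64, the byte total is 8, which does not exceed 16, so we
   return false: such an &p->field expression cannot wrap.  */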
8918 /* Return the low HOST_BITS_PER_WIDE_INT bits of T, an INTEGER_CST
8919 of sizetype kind, making sure to properly sign-extend the
8920 constant. */
8922 static HOST_WIDE_INT
8923 size_low_cst (const_tree t)
8925 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8926 int prec = TYPE_PRECISION (TREE_TYPE (t));
8927 if (prec < HOST_BITS_PER_WIDE_INT)
8928 return sext_hwi (w, prec);
8929 return w;
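/* For instance, on a host with a 64-bit HOST_WIDE_INT (assumed), a
   sizetype constant of 32-bit precision whose low element is
   0xffffffff is returned as -1 rather than 4294967295, because the
   value is sign-extended from its 32-bit precision.  */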
8932 /* Subroutine of fold_binary. This routine performs all of the
8933 transformations that are common to the equality/inequality
8934 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8935 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8936 fold_binary should call fold_binary instead. Fold a comparison with
8937 tree code CODE and type TYPE with operands OP0 and OP1. Return
8938 the folded comparison or NULL_TREE. */
8940 static tree
8941 fold_comparison (location_t loc, enum tree_code code, tree type,
8942 tree op0, tree op1)
8944 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8945 tree arg0, arg1, tem;
8947 arg0 = op0;
8948 arg1 = op1;
8950 STRIP_SIGN_NOPS (arg0);
8951 STRIP_SIGN_NOPS (arg1);
8953 tem = fold_relational_const (code, type, arg0, arg1);
8954 if (tem != NULL_TREE)
8955 return tem;
8957 /* If one arg is a real or integer constant, put it last. */
8958 if (tree_swap_operands_p (arg0, arg1, true))
8959 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8961 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8962 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8963 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8964 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8965 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8966 && TREE_CODE (arg1) == INTEGER_CST
8967 && !TREE_OVERFLOW (arg1))
8969 const enum tree_code
8970 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8971 tree const1 = TREE_OPERAND (arg0, 1);
8972 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8973 tree variable = TREE_OPERAND (arg0, 0);
8974 tree new_const = int_const_binop (reverse_op, const2, const1);
8976 /* If the constant operation overflowed this can be
8977 simplified as a comparison against INT_MAX/INT_MIN. */
8978 if (TREE_OVERFLOW (new_const))
8980 int const1_sgn = tree_int_cst_sgn (const1);
8981 enum tree_code code2 = code;
8983 /* Get the sign of the constant on the lhs if the
8984 operation were VARIABLE + CONST1. */
8985 if (TREE_CODE (arg0) == MINUS_EXPR)
8986 const1_sgn = -const1_sgn;
8988 /* The sign of the constant determines if we overflowed
8989 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8990 Canonicalize to the INT_MIN overflow by swapping the comparison
8991 if necessary. */
8992 if (const1_sgn == -1)
8993 code2 = swap_tree_comparison (code);
8995 /* We now can look at the canonicalized case
8996 VARIABLE + 1 CODE2 INT_MIN
8997 and decide on the result. */
8998 switch (code2)
9000 case EQ_EXPR:
9001 case LT_EXPR:
9002 case LE_EXPR:
9003 return
9004 omit_one_operand_loc (loc, type, boolean_false_node, variable);
9006 case NE_EXPR:
9007 case GE_EXPR:
9008 case GT_EXPR:
9009 return
9010 omit_one_operand_loc (loc, type, boolean_true_node, variable);
9012 default:
9013 gcc_unreachable ();
9016 else
9018 if (!equality_code)
9019 fold_overflow_warning ("assuming signed overflow does not occur "
9020 "when changing X +- C1 cmp C2 to "
9021 "X cmp C2 -+ C1",
9022 WARN_STRICT_OVERFLOW_COMPARISON);
9023 return fold_build2_loc (loc, code, type, variable, new_const);
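/* For example, with a 32-bit signed int x, `x + 3 < 7' becomes
   `x < 4' under the assumption that the addition does not overflow.
   When the adjusted constant itself overflows, the comparison is
   decided outright: `x + 1 < INT_MIN' can never hold and folds to
   false, while `x + 1 >= INT_MIN' folds to true.  */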
9027 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
9028 if (TREE_CODE (arg0) == MINUS_EXPR
9029 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9030 && integer_zerop (arg1))
9032 if (!equality_code)
9033 fold_overflow_warning ("assuming signed overflow does not occur "
9034 "when changing X - Y cmp 0 to X cmp Y",
9035 WARN_STRICT_OVERFLOW_COMPARISON);
9036 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
9037 TREE_OPERAND (arg0, 1));
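/* E.g. `x - y > 0' becomes `x > y'; for signed operands this again
   relies on the subtraction not overflowing.  */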
9040 /* For comparisons of pointers we can decompose them into a compile
9041 time comparison of the base objects and the offsets into the object.
9042 This requires at least one operand being an ADDR_EXPR or a
9043 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9044 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9045 && (TREE_CODE (arg0) == ADDR_EXPR
9046 || TREE_CODE (arg1) == ADDR_EXPR
9047 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9048 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9050 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9051 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9052 enum machine_mode mode;
9053 int volatilep, unsignedp;
9054 bool indirect_base0 = false, indirect_base1 = false;
9056 /* Get base and offset for the access. Strip ADDR_EXPR for
9057 get_inner_reference, but put it back by stripping INDIRECT_REF
9058 off the base object if possible. indirect_baseN will be true
9059 if baseN is not an address but refers to the object itself. */
9060 base0 = arg0;
9061 if (TREE_CODE (arg0) == ADDR_EXPR)
9063 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9064 &bitsize, &bitpos0, &offset0, &mode,
9065 &unsignedp, &volatilep, false);
9066 if (TREE_CODE (base0) == INDIRECT_REF)
9067 base0 = TREE_OPERAND (base0, 0);
9068 else
9069 indirect_base0 = true;
9071 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9073 base0 = TREE_OPERAND (arg0, 0);
9074 STRIP_SIGN_NOPS (base0);
9075 if (TREE_CODE (base0) == ADDR_EXPR)
9077 base0 = TREE_OPERAND (base0, 0);
9078 indirect_base0 = true;
9080 offset0 = TREE_OPERAND (arg0, 1);
9081 if (tree_fits_shwi_p (offset0))
9083 HOST_WIDE_INT off = size_low_cst (offset0);
9084 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9085 * BITS_PER_UNIT)
9086 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9088 bitpos0 = off * BITS_PER_UNIT;
9089 offset0 = NULL_TREE;
9094 base1 = arg1;
9095 if (TREE_CODE (arg1) == ADDR_EXPR)
9097 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9098 &bitsize, &bitpos1, &offset1, &mode,
9099 &unsignedp, &volatilep, false);
9100 if (TREE_CODE (base1) == INDIRECT_REF)
9101 base1 = TREE_OPERAND (base1, 0);
9102 else
9103 indirect_base1 = true;
9105 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9107 base1 = TREE_OPERAND (arg1, 0);
9108 STRIP_SIGN_NOPS (base1);
9109 if (TREE_CODE (base1) == ADDR_EXPR)
9111 base1 = TREE_OPERAND (base1, 0);
9112 indirect_base1 = true;
9114 offset1 = TREE_OPERAND (arg1, 1);
9115 if (tree_fits_shwi_p (offset1))
9117 HOST_WIDE_INT off = size_low_cst (offset1);
9118 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9119 * BITS_PER_UNIT)
9120 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9122 bitpos1 = off * BITS_PER_UNIT;
9123 offset1 = NULL_TREE;
9128 /* A local variable can never be pointed to by
9129 the default SSA name of an incoming parameter. */
9130 if ((TREE_CODE (arg0) == ADDR_EXPR
9131 && indirect_base0
9132 && TREE_CODE (base0) == VAR_DECL
9133 && auto_var_in_fn_p (base0, current_function_decl)
9134 && !indirect_base1
9135 && TREE_CODE (base1) == SSA_NAME
9136 && SSA_NAME_IS_DEFAULT_DEF (base1)
9137 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9138 || (TREE_CODE (arg1) == ADDR_EXPR
9139 && indirect_base1
9140 && TREE_CODE (base1) == VAR_DECL
9141 && auto_var_in_fn_p (base1, current_function_decl)
9142 && !indirect_base0
9143 && TREE_CODE (base0) == SSA_NAME
9144 && SSA_NAME_IS_DEFAULT_DEF (base0)
9145 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9147 if (code == NE_EXPR)
9148 return constant_boolean_node (1, type);
9149 else if (code == EQ_EXPR)
9150 return constant_boolean_node (0, type);
9152 /* If we have equivalent bases we might be able to simplify. */
9153 else if (indirect_base0 == indirect_base1
9154 && operand_equal_p (base0, base1, 0))
9156 /* We can fold this expression to a constant if the non-constant
9157 offset parts are equal. */
9158 if ((offset0 == offset1
9159 || (offset0 && offset1
9160 && operand_equal_p (offset0, offset1, 0)))
9161 && (code == EQ_EXPR
9162 || code == NE_EXPR
9163 || (indirect_base0 && DECL_P (base0))
9164 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9167 if (!equality_code
9168 && bitpos0 != bitpos1
9169 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9170 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9171 fold_overflow_warning (("assuming pointer wraparound does not "
9172 "occur when comparing P +- C1 with "
9173 "P +- C2"),
9174 WARN_STRICT_OVERFLOW_CONDITIONAL);
9176 switch (code)
9178 case EQ_EXPR:
9179 return constant_boolean_node (bitpos0 == bitpos1, type);
9180 case NE_EXPR:
9181 return constant_boolean_node (bitpos0 != bitpos1, type);
9182 case LT_EXPR:
9183 return constant_boolean_node (bitpos0 < bitpos1, type);
9184 case LE_EXPR:
9185 return constant_boolean_node (bitpos0 <= bitpos1, type);
9186 case GE_EXPR:
9187 return constant_boolean_node (bitpos0 >= bitpos1, type);
9188 case GT_EXPR:
9189 return constant_boolean_node (bitpos0 > bitpos1, type);
9190 default:;
9193 /* We can simplify the comparison to a comparison of the variable
9194 offset parts if the constant offset parts are equal.
9195 Be careful to use signed sizetype here because otherwise we
9196 mess with array offsets in the wrong way. This is possible
9197 because pointer arithmetic is restricted to remain within an
9198 object and overflow on pointer differences is undefined as of
9199 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9200 else if (bitpos0 == bitpos1
9201 && (equality_code
9202 || (indirect_base0 && DECL_P (base0))
9203 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9205 /* By converting to signed sizetype we cover middle-end pointer
9206 arithmetic which operates on unsigned pointer types of size
9207 type size and ARRAY_REF offsets which are properly sign or
9208 zero extended from their type in case it is narrower than
9209 sizetype. */
9210 if (offset0 == NULL_TREE)
9211 offset0 = build_int_cst (ssizetype, 0);
9212 else
9213 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9214 if (offset1 == NULL_TREE)
9215 offset1 = build_int_cst (ssizetype, 0);
9216 else
9217 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9219 if (!equality_code
9220 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9221 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9222 fold_overflow_warning (("assuming pointer wraparound does not "
9223 "occur when comparing P +- C1 with "
9224 "P +- C2"),
9225 WARN_STRICT_OVERFLOW_COMPARISON);
9227 return fold_build2_loc (loc, code, type, offset0, offset1);
9230 /* For non-equal bases we can simplify if they are addresses
9231 of local binding decls or constants. */
9232 else if (indirect_base0 && indirect_base1
9233 /* We know that !operand_equal_p (base0, base1, 0)
9234 because the if condition was false. But make
9235 sure two decls are not the same. */
9236 && base0 != base1
9237 && TREE_CODE (arg0) == ADDR_EXPR
9238 && TREE_CODE (arg1) == ADDR_EXPR
9239 && (((TREE_CODE (base0) == VAR_DECL
9240 || TREE_CODE (base0) == PARM_DECL)
9241 && (targetm.binds_local_p (base0)
9242 || CONSTANT_CLASS_P (base1)))
9243 || CONSTANT_CLASS_P (base0))
9244 && (((TREE_CODE (base1) == VAR_DECL
9245 || TREE_CODE (base1) == PARM_DECL)
9246 && (targetm.binds_local_p (base1)
9247 || CONSTANT_CLASS_P (base0)))
9248 || CONSTANT_CLASS_P (base1)))
9250 if (code == EQ_EXPR)
9251 return omit_two_operands_loc (loc, type, boolean_false_node,
9252 arg0, arg1);
9253 else if (code == NE_EXPR)
9254 return omit_two_operands_loc (loc, type, boolean_true_node,
9255 arg0, arg1);
9257 /* For equal offsets we can simplify to a comparison of the
9258 base addresses. */
9259 else if (bitpos0 == bitpos1
9260 && (indirect_base0
9261 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9262 && (indirect_base1
9263 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9264 && ((offset0 == offset1)
9265 || (offset0 && offset1
9266 && operand_equal_p (offset0, offset1, 0))))
9268 if (indirect_base0)
9269 base0 = build_fold_addr_expr_loc (loc, base0);
9270 if (indirect_base1)
9271 base1 = build_fold_addr_expr_loc (loc, base1);
9272 return fold_build2_loc (loc, code, type, base0, base1);
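/* As a concrete case, for `int a[4]' the comparison `&a[1] < &a[3]'
   has equal bases and constant bit positions 32 and 96, so it folds
   to true at compile time, while `&a[i] == &a[j]' with variable
   indices simplifies to a comparison of the two offset trees.  */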
9276 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9277 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9278 the resulting offset is smaller in absolute value than the
9279 original one and has the same sign. */
9280 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9281 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9282 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9283 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9284 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9285 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9286 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9288 tree const1 = TREE_OPERAND (arg0, 1);
9289 tree const2 = TREE_OPERAND (arg1, 1);
9290 tree variable1 = TREE_OPERAND (arg0, 0);
9291 tree variable2 = TREE_OPERAND (arg1, 0);
9292 tree cst;
9293 const char * const warnmsg = G_("assuming signed overflow does not "
9294 "occur when combining constants around "
9295 "a comparison");
9297 /* Put the constant on the side where it doesn't overflow and is
9298 of lower absolute value than, and of the same sign as, before. */
9299 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9300 ? MINUS_EXPR : PLUS_EXPR,
9301 const2, const1);
9302 if (!TREE_OVERFLOW (cst)
9303 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9304 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9306 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9307 return fold_build2_loc (loc, code, type,
9308 variable1,
9309 fold_build2_loc (loc, TREE_CODE (arg1),
9310 TREE_TYPE (arg1),
9311 variable2, cst));
9314 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9315 ? MINUS_EXPR : PLUS_EXPR,
9316 const1, const2);
9317 if (!TREE_OVERFLOW (cst)
9318 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9319 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9321 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9322 return fold_build2_loc (loc, code, type,
9323 fold_build2_loc (loc, TREE_CODE (arg0),
9324 TREE_TYPE (arg0),
9325 variable1, cst),
9326 variable2);
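/* For instance, `x + 5 < y + 2' becomes `x + 3 < y': the combined
   constant 3 is smaller in absolute value than the original 5 and
   has the same sign, so no new overflow can be introduced.  */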
9330 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9331 signed arithmetic case. That form is created by the compiler
9332 often enough for folding it to be of value. One example is in
9333 computing loop trip counts after Operator Strength Reduction. */
9334 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9335 && TREE_CODE (arg0) == MULT_EXPR
9336 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9337 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9338 && integer_zerop (arg1))
9340 tree const1 = TREE_OPERAND (arg0, 1);
9341 tree const2 = arg1; /* zero */
9342 tree variable1 = TREE_OPERAND (arg0, 0);
9343 enum tree_code cmp_code = code;
9345 /* Handle unfolded multiplication by zero. */
9346 if (integer_zerop (const1))
9347 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9349 fold_overflow_warning (("assuming signed overflow does not occur when "
9350 "eliminating multiplication in comparison "
9351 "with zero"),
9352 WARN_STRICT_OVERFLOW_COMPARISON);
9354 /* If const1 is negative we swap the sense of the comparison. */
9355 if (tree_int_cst_sgn (const1) < 0)
9356 cmp_code = swap_tree_comparison (cmp_code);
9358 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
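/* For example, `x * 4 > 0' becomes `x > 0', and `x * -4 > 0'
   becomes `x < 0' because the negative factor flips the sense of
   the comparison.  Both rely on `x * C1' not overflowing.  */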
9361 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9362 if (tem)
9363 return tem;
9365 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9367 tree targ0 = strip_float_extensions (arg0);
9368 tree targ1 = strip_float_extensions (arg1);
9369 tree newtype = TREE_TYPE (targ0);
9371 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9372 newtype = TREE_TYPE (targ1);
9374 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9375 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9376 return fold_build2_loc (loc, code, type,
9377 fold_convert_loc (loc, newtype, targ0),
9378 fold_convert_loc (loc, newtype, targ1));
9380 /* (-a) CMP (-b) -> b CMP a */
9381 if (TREE_CODE (arg0) == NEGATE_EXPR
9382 && TREE_CODE (arg1) == NEGATE_EXPR)
9383 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9384 TREE_OPERAND (arg0, 0));
9386 if (TREE_CODE (arg1) == REAL_CST)
9388 REAL_VALUE_TYPE cst;
9389 cst = TREE_REAL_CST (arg1);
9391 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9392 if (TREE_CODE (arg0) == NEGATE_EXPR)
9393 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9394 TREE_OPERAND (arg0, 0),
9395 build_real (TREE_TYPE (arg1),
9396 real_value_negate (&cst)));
9398 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9399 /* a CMP (-0) -> a CMP 0 */
9400 if (REAL_VALUE_MINUS_ZERO (cst))
9401 return fold_build2_loc (loc, code, type, arg0,
9402 build_real (TREE_TYPE (arg1), dconst0));
9404 /* x != NaN is always true, other ops are always false. */
9405 if (REAL_VALUE_ISNAN (cst)
9406 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9408 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9409 return omit_one_operand_loc (loc, type, tem, arg0);
9412 /* Fold comparisons against infinity. */
9413 if (REAL_VALUE_ISINF (cst)
9414 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9416 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9417 if (tem != NULL_TREE)
9418 return tem;
9422 /* If this is a comparison of a real constant with a PLUS_EXPR
9423 or a MINUS_EXPR of a real constant, we can convert it into a
9424 comparison with a revised real constant, provided that unsafe
9425 math optimizations are enabled and no overflow occurs. */
9426 if (flag_unsafe_math_optimizations
9427 && TREE_CODE (arg1) == REAL_CST
9428 && (TREE_CODE (arg0) == PLUS_EXPR
9429 || TREE_CODE (arg0) == MINUS_EXPR)
9430 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9431 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9432 ? MINUS_EXPR : PLUS_EXPR,
9433 arg1, TREE_OPERAND (arg0, 1)))
9434 && !TREE_OVERFLOW (tem))
9435 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9437 /* Likewise, we can simplify a comparison of a real constant with
9438 a MINUS_EXPR whose first operand is also a real constant, i.e.
9439 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9440 floating-point types only if -fassociative-math is set. */
9441 if (flag_associative_math
9442 && TREE_CODE (arg1) == REAL_CST
9443 && TREE_CODE (arg0) == MINUS_EXPR
9444 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9445 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9446 arg1))
9447 && !TREE_OVERFLOW (tem))
9448 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9449 TREE_OPERAND (arg0, 1), tem);
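/* E.g. with -fassociative-math, `(10.0 - x) < 4.0' is rewritten as
   `x > 6.0', swapping the comparison because X appears negated.  */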
9451 /* Fold comparisons against built-in math functions. */
9452 if (TREE_CODE (arg1) == REAL_CST
9453 && flag_unsafe_math_optimizations
9454 && ! flag_errno_math)
9456 enum built_in_function fcode = builtin_mathfn_code (arg0);
9458 if (fcode != END_BUILTINS)
9460 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9461 if (tem != NULL_TREE)
9462 return tem;
9467 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9468 && CONVERT_EXPR_P (arg0))
9470 /* If we are widening one operand of an integer comparison,
9471 see if the other operand is similarly being widened. Perhaps we
9472 can do the comparison in the narrower type. */
9473 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9474 if (tem)
9475 return tem;
9477 /* Or if we are changing signedness. */
9478 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9479 if (tem)
9480 return tem;
9483 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9484 constant, we can simplify it. */
9485 if (TREE_CODE (arg1) == INTEGER_CST
9486 && (TREE_CODE (arg0) == MIN_EXPR
9487 || TREE_CODE (arg0) == MAX_EXPR)
9488 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9490 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9491 if (tem)
9492 return tem;
9495 /* Simplify comparison of something with itself. (For IEEE
9496 floating-point, we can only do some of these simplifications.) */
9497 if (operand_equal_p (arg0, arg1, 0))
9499 switch (code)
9501 case EQ_EXPR:
9502 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9503 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9504 return constant_boolean_node (1, type);
9505 break;
9507 case GE_EXPR:
9508 case LE_EXPR:
9509 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9510 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9511 return constant_boolean_node (1, type);
9512 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9514 case NE_EXPR:
9515 /* For NE, we can only do this simplification if the type is
9516 integral or we don't honor IEEE floating point NaNs. */
9517 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9518 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9519 break;
9520 /* ... fall through ... */
9521 case GT_EXPR:
9522 case LT_EXPR:
9523 return constant_boolean_node (0, type);
9524 default:
9525 gcc_unreachable ();
9529 /* If we are comparing an expression that just has comparisons
9530 of two integer values, arithmetic expressions of those comparisons,
9531 and constants, we can simplify it. There are only three cases
9532 to check: the two values can either be equal, the first can be
9533 greater, or the second can be greater. Fold the expression for
9534 those three values. Since each value must be 0 or 1, we have
9535 eight possibilities, each of which corresponds to the constant 0
9536 or 1 or one of the six possible comparisons.
9538 This handles common cases like (a > b) == 0 but also handles
9539 expressions like ((x > y) - (y > x)) > 0, which supposedly
9540 occur in macroized code. */
9542 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9544 tree cval1 = 0, cval2 = 0;
9545 int save_p = 0;
9547 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9548 /* Don't handle degenerate cases here; they should already
9549 have been handled anyway. */
9550 && cval1 != 0 && cval2 != 0
9551 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9552 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9553 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9554 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9555 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9556 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9557 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9559 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9560 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9562 /* We can't just pass T to eval_subst in case cval1 or cval2
9563 was the same as ARG1. */
9565 tree high_result
9566 = fold_build2_loc (loc, code, type,
9567 eval_subst (loc, arg0, cval1, maxval,
9568 cval2, minval),
9569 arg1);
9570 tree equal_result
9571 = fold_build2_loc (loc, code, type,
9572 eval_subst (loc, arg0, cval1, maxval,
9573 cval2, maxval),
9574 arg1);
9575 tree low_result
9576 = fold_build2_loc (loc, code, type,
9577 eval_subst (loc, arg0, cval1, minval,
9578 cval2, maxval),
9579 arg1);
9581 /* All three of these results should be 0 or 1. Confirm they are.
9582 Then use those values to select the proper code to use. */
9584 if (TREE_CODE (high_result) == INTEGER_CST
9585 && TREE_CODE (equal_result) == INTEGER_CST
9586 && TREE_CODE (low_result) == INTEGER_CST)
9588 /* Make a 3-bit mask with the high-order bit being the
9589 value for `>', the next for '=', and the low for '<'. */
9590 switch ((integer_onep (high_result) * 4)
9591 + (integer_onep (equal_result) * 2)
9592 + integer_onep (low_result))
9594 case 0:
9595 /* Always false. */
9596 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9597 case 1:
9598 code = LT_EXPR;
9599 break;
9600 case 2:
9601 code = EQ_EXPR;
9602 break;
9603 case 3:
9604 code = LE_EXPR;
9605 break;
9606 case 4:
9607 code = GT_EXPR;
9608 break;
9609 case 5:
9610 code = NE_EXPR;
9611 break;
9612 case 6:
9613 code = GE_EXPR;
9614 break;
9615 case 7:
9616 /* Always true. */
9617 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9620 if (save_p)
9622 tem = save_expr (build2 (code, type, cval1, cval2));
9623 SET_EXPR_LOCATION (tem, loc);
9624 return tem;
9626 return fold_build2_loc (loc, code, type, cval1, cval2);
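/* For instance, in `((x > y) - (y > x)) > 0' the three orderings of
   x and y yield the results 1, 0 and 0, i.e. mask 4, so the whole
   expression collapses to `x > y'.  */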
9631 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9632 into a single range test. */
9633 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9634 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9635 && TREE_CODE (arg1) == INTEGER_CST
9636 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9637 && !integer_zerop (TREE_OPERAND (arg0, 1))
9638 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9639 && !TREE_OVERFLOW (arg1))
9641 tem = fold_div_compare (loc, code, type, arg0, arg1);
9642 if (tem != NULL_TREE)
9643 return tem;
9646 /* Fold ~X op ~Y as Y op X. */
9647 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9648 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9650 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9651 return fold_build2_loc (loc, code, type,
9652 fold_convert_loc (loc, cmp_type,
9653 TREE_OPERAND (arg1, 0)),
9654 TREE_OPERAND (arg0, 0));
9657 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9658 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9659 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9661 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9662 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9663 TREE_OPERAND (arg0, 0),
9664 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9665 fold_convert_loc (loc, cmp_type, arg1)));
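/* For instance, `~x < 5' becomes `x > ~5', i.e. `x > -6' for signed
   x, because ~x == -x - 1 reverses the ordering of the operands.  */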
9668 return NULL_TREE;
9672 /* Subroutine of fold_binary. Optimize complex multiplications of the
9673 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9674 argument EXPR represents the expression "z" of type TYPE. */
9676 static tree
9677 fold_mult_zconjz (location_t loc, tree type, tree expr)
9679 tree itype = TREE_TYPE (type);
9680 tree rpart, ipart, tem;
9682 if (TREE_CODE (expr) == COMPLEX_EXPR)
9684 rpart = TREE_OPERAND (expr, 0);
9685 ipart = TREE_OPERAND (expr, 1);
9687 else if (TREE_CODE (expr) == COMPLEX_CST)
9689 rpart = TREE_REALPART (expr);
9690 ipart = TREE_IMAGPART (expr);
9692 else
9694 expr = save_expr (expr);
9695 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9696 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9699 rpart = save_expr (rpart);
9700 ipart = save_expr (ipart);
9701 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9702 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9703 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9704 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9705 build_zero_cst (itype));
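/* The underlying identity: for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, with a zero
   imaginary part, which is exactly the COMPLEX_EXPR built above.  */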
9709 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9710 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9711 guarantees that P and N have the same least significant log2(M) bits.
9712 N is not otherwise constrained. In particular, N is not normalized to
9713 0 <= N < M as is common. In general, the precise value of P is unknown.
9714 M is chosen as large as possible such that constant N can be determined.
9716 Returns M and sets *RESIDUE to N.
9718 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9719 account. This is not always possible due to PR 35705.
9722 static unsigned HOST_WIDE_INT
9723 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9724 bool allow_func_align)
9726 enum tree_code code;
9728 *residue = 0;
9730 code = TREE_CODE (expr);
9731 if (code == ADDR_EXPR)
9733 unsigned int bitalign;
9734 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9735 *residue /= BITS_PER_UNIT;
9736 return bitalign / BITS_PER_UNIT;
9738 else if (code == POINTER_PLUS_EXPR)
9740 tree op0, op1;
9741 unsigned HOST_WIDE_INT modulus;
9742 enum tree_code inner_code;
9744 op0 = TREE_OPERAND (expr, 0);
9745 STRIP_NOPS (op0);
9746 modulus = get_pointer_modulus_and_residue (op0, residue,
9747 allow_func_align);
9749 op1 = TREE_OPERAND (expr, 1);
9750 STRIP_NOPS (op1);
9751 inner_code = TREE_CODE (op1);
9752 if (inner_code == INTEGER_CST)
9754 *residue += TREE_INT_CST_LOW (op1);
9755 return modulus;
9757 else if (inner_code == MULT_EXPR)
9759 op1 = TREE_OPERAND (op1, 1);
9760 if (TREE_CODE (op1) == INTEGER_CST)
9762 unsigned HOST_WIDE_INT align;
9764 /* Compute the greatest power-of-2 divisor of op1. */
9765 align = TREE_INT_CST_LOW (op1);
9766 align &= -align;
9768 /* If align is non-zero and less than modulus, replace
9769 modulus with align. If align is 0, then either op1 is 0
9770 or the greatest power-of-2 divisor of op1 doesn't fit in an
9771 unsigned HOST_WIDE_INT. In either case, no additional
9772 constraint is imposed. */
9773 if (align)
9774 modulus = MIN (modulus, align);
9776 return modulus;
9781 /* If we get here, we were unable to determine anything useful about the
9782 expression. */
9783 return 1;
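/* As an illustration, for `&buf + n * 4' with `buf' known to be
   16-byte aligned, the ADDR_EXPR case yields modulus 16 and residue
   0, and the MULT_EXPR case lowers the modulus to MIN (16, 4) = 4:
   the pointer value is then known to be 0 modulo 4.  */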
9786 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9787 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9789 static bool
9790 vec_cst_ctor_to_array (tree arg, tree *elts)
9792 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9794 if (TREE_CODE (arg) == VECTOR_CST)
9796 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9797 elts[i] = VECTOR_CST_ELT (arg, i);
9799 else if (TREE_CODE (arg) == CONSTRUCTOR)
9801 constructor_elt *elt;
9803 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9804 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9805 return false;
9806 else
9807 elts[i] = elt->value;
9809 else
9810 return false;
9811 for (; i < nelts; i++)
9812 elts[i]
9813 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9814 return true;
9817 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9818 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9819 NULL_TREE otherwise. */
9821 static tree
9822 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9824 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9825 tree *elts;
9826 bool need_ctor = false;
9828 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9829 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9830 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9831 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9832 return NULL_TREE;
9834 elts = XALLOCAVEC (tree, nelts * 3);
9835 if (!vec_cst_ctor_to_array (arg0, elts)
9836 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9837 return NULL_TREE;
9839 for (i = 0; i < nelts; i++)
9841 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9842 need_ctor = true;
9843 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9846 if (need_ctor)
9848 vec<constructor_elt, va_gc> *v;
9849 vec_alloc (v, nelts);
9850 for (i = 0; i < nelts; i++)
9851 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9852 return build_constructor (type, v);
9854 else
9855 return build_vector (type, &elts[2 * nelts]);
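/* Example: with nelts == 4, arg0 = {0,1,2,3}, arg1 = {4,5,6,7} and
   sel = {0,4,1,5}, the result is {0,4,1,5}; selector indices below
   NELTS pick from ARG0 and the remaining ones from ARG1.  */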
9858 /* Try to fold a pointer difference of type TYPE between two address
9859 expressions of array references AREF0 and AREF1 using location LOC.
9860 Return a simplified expression for the difference or NULL_TREE. */
9862 static tree
9863 fold_addr_of_array_ref_difference (location_t loc, tree type,
9864 tree aref0, tree aref1)
9866 tree base0 = TREE_OPERAND (aref0, 0);
9867 tree base1 = TREE_OPERAND (aref1, 0);
9868 tree base_offset = build_int_cst (type, 0);
9870 /* If the bases are array references as well, recurse. If the bases
9871 are pointer indirections compute the difference of the pointers.
9872 If the bases are equal, we are set. */
9873 if ((TREE_CODE (base0) == ARRAY_REF
9874 && TREE_CODE (base1) == ARRAY_REF
9875 && (base_offset
9876 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9877 || (INDIRECT_REF_P (base0)
9878 && INDIRECT_REF_P (base1)
9879 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9880 TREE_OPERAND (base0, 0),
9881 TREE_OPERAND (base1, 0))))
9882 || operand_equal_p (base0, base1, 0))
9884 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9885 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9886 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9887 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9888 return fold_build2_loc (loc, PLUS_EXPR, type,
9889 base_offset,
9890 fold_build2_loc (loc, MULT_EXPR, type,
9891 diff, esz));
9893 return NULL_TREE;
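/* E.g. for `double a[8]', `&a[i] - &a[j]' becomes (i - j) * 8: the
   bases compare equal via operand_equal_p, so the result is the
   index difference scaled by the element size.  */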
9896 /* If the real or vector real constant CST of type TYPE has an exact
9897 inverse, return it, else return NULL. */
9899 static tree
9900 exact_inverse (tree type, tree cst)
9902 REAL_VALUE_TYPE r;
9903 tree unit_type, *elts;
9904 enum machine_mode mode;
9905 unsigned vec_nelts, i;
9907 switch (TREE_CODE (cst))
9909 case REAL_CST:
9910 r = TREE_REAL_CST (cst);
9912 if (exact_real_inverse (TYPE_MODE (type), &r))
9913 return build_real (type, r);
9915 return NULL_TREE;
9917 case VECTOR_CST:
9918 vec_nelts = VECTOR_CST_NELTS (cst);
9919 elts = XALLOCAVEC (tree, vec_nelts);
9920 unit_type = TREE_TYPE (type);
9921 mode = TYPE_MODE (unit_type);
9923 for (i = 0; i < vec_nelts; i++)
9925 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9926 if (!exact_real_inverse (mode, &r))
9927 return NULL_TREE;
9928 elts[i] = build_real (unit_type, r);
9931 return build_vector (type, elts);
9933 default:
9934 return NULL_TREE;
9938 /* Mask out the tz least significant bits of X of type TYPE where
9939 tz is the number of trailing zeroes in Y. */
9940 static wide_int
9941 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9943 int tz = wi::ctz (y);
9944 if (tz > 0)
9945 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9946 return x;
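/* For example, Y == 24 (binary 11000) has three trailing zeroes, so
   the three least significant bits of X are cleared:
   mask_with_tz (type, 13, 24) yields 8.  */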
9949 /* Return true when T is an address and is known to be nonzero.
9950 For floating point we further ensure that T is not denormal.
9951 Similar logic is present in nonzero_address in rtlanal.h.
9953 If the return value is based on the assumption that signed overflow
9954 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9955 change *STRICT_OVERFLOW_P. */
9957 static bool
9958 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9960 tree type = TREE_TYPE (t);
9961 enum tree_code code;
9963 /* Doing something useful for floating point would need more work. */
9964 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9965 return false;
9967 code = TREE_CODE (t);
9968 switch (TREE_CODE_CLASS (code))
9970 case tcc_unary:
9971 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9972 strict_overflow_p);
9973 case tcc_binary:
9974 case tcc_comparison:
9975 return tree_binary_nonzero_warnv_p (code, type,
9976 TREE_OPERAND (t, 0),
9977 TREE_OPERAND (t, 1),
9978 strict_overflow_p);
9979 case tcc_constant:
9980 case tcc_declaration:
9981 case tcc_reference:
9982 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9984 default:
9985 break;
9988 switch (code)
9990 case TRUTH_NOT_EXPR:
9991 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9992 strict_overflow_p);
9994 case TRUTH_AND_EXPR:
9995 case TRUTH_OR_EXPR:
9996 case TRUTH_XOR_EXPR:
9997 return tree_binary_nonzero_warnv_p (code, type,
9998 TREE_OPERAND (t, 0),
9999 TREE_OPERAND (t, 1),
10000 strict_overflow_p);
10002 case COND_EXPR:
10003 case CONSTRUCTOR:
10004 case OBJ_TYPE_REF:
10005 case ASSERT_EXPR:
10006 case ADDR_EXPR:
10007 case WITH_SIZE_EXPR:
10008 case SSA_NAME:
10009 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10011 case COMPOUND_EXPR:
10012 case MODIFY_EXPR:
10013 case BIND_EXPR:
10014 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10015 strict_overflow_p);
10017 case SAVE_EXPR:
10018 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10019 strict_overflow_p);
10021 case CALL_EXPR:
10023 tree fndecl = get_callee_fndecl (t);
10024 if (!fndecl) return false;
10025 if (flag_delete_null_pointer_checks && !flag_check_new
10026 && DECL_IS_OPERATOR_NEW (fndecl)
10027 && !TREE_NOTHROW (fndecl))
10028 return true;
10029 if (flag_delete_null_pointer_checks
10030 && lookup_attribute ("returns_nonnull",
10031 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10032 return true;
10033 return alloca_call_p (t);
10036 default:
10037 break;
10039 return false;
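/* For example, `&decl' of a declared object is known to be nonzero,
   as is a call to a throwing `operator new' or to a function with
   the `returns_nonnull' attribute, provided
   -fdelete-null-pointer-checks is in effect.  */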
10042 /* Return true when T is an address and is known to be nonzero.
10043 Handle warnings about undefined signed overflow. */
10045 static bool
10046 tree_expr_nonzero_p (tree t)
10048 bool ret, strict_overflow_p;
10050 strict_overflow_p = false;
10051 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10052 if (strict_overflow_p)
10053 fold_overflow_warning (("assuming signed overflow does not occur when "
10054 "determining that expression is always "
10055 "non-zero"),
10056 WARN_STRICT_OVERFLOW_MISC);
10057 return ret;
10060 /* Fold a binary expression of code CODE and type TYPE with operands
10061 OP0 and OP1. LOC is the location of the resulting expression.
10062 Return the folded expression if folding is successful. Otherwise,
10063 return NULL_TREE. */
10065 tree
10066 fold_binary_loc (location_t loc,
10067 enum tree_code code, tree type, tree op0, tree op1)
10069 enum tree_code_class kind = TREE_CODE_CLASS (code);
10070 tree arg0, arg1, tem;
10071 tree t1 = NULL_TREE;
10072 bool strict_overflow_p;
10073 unsigned int prec;
10075 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10076 && TREE_CODE_LENGTH (code) == 2
10077 && op0 != NULL_TREE
10078 && op1 != NULL_TREE);
10080 arg0 = op0;
10081 arg1 = op1;
10083 /* Strip any conversions that don't change the mode. This is
10084 safe for every expression, except for a comparison expression
10085 because its signedness is derived from its operands. So, in
10086 the latter case, only strip conversions that don't change the
10087 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10088 preserved.
10090 Note that this is done as an internal manipulation within the
10091 constant folder, in order to find the simplest representation
10092 of the arguments so that their form can be studied. In any
10093 case, the appropriate type conversions should be put back in
10094 the tree that will get out of the constant folder. */
10096 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10098 STRIP_SIGN_NOPS (arg0);
10099 STRIP_SIGN_NOPS (arg1);
10101 else
10103 STRIP_NOPS (arg0);
10104 STRIP_NOPS (arg1);
10107 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10108 constant but we can't do arithmetic on them. */
10109 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10110 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10111 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10112 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10113 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10114 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10115 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10117 if (kind == tcc_binary)
10119 /* Make sure type and arg0 have the same saturating flag. */
10120 gcc_assert (TYPE_SATURATING (type)
10121 == TYPE_SATURATING (TREE_TYPE (arg0)));
10122 tem = const_binop (code, arg0, arg1);
10124 else if (kind == tcc_comparison)
10125 tem = fold_relational_const (code, type, arg0, arg1);
10126 else
10127 tem = NULL_TREE;
10129 if (tem != NULL_TREE)
10131 if (TREE_TYPE (tem) != type)
10132 tem = fold_convert_loc (loc, type, tem);
10133 return tem;
10137 /* If this is a commutative operation, and ARG0 is a constant, move it
10138 to ARG1 to reduce the number of tests below. */
10139 if (commutative_tree_code (code)
10140 && tree_swap_operands_p (arg0, arg1, true))
10141 return fold_build2_loc (loc, code, type, op1, op0);
10143 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10145 First check for cases where an arithmetic operation is applied to a
10146 compound, conditional, or comparison operation. Push the arithmetic
10147 operation inside the compound or conditional to see if any folding
10148 can then be done. Convert comparison to conditional for this purpose.
10149 This also optimizes non-constant cases that used to be done in
10150 expand_expr.
10152 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10153 where one of the operands is a truth value and the other is a truth
10154 value or a BIT_AND_EXPR with the constant 1. In that case, the
10155 code below would make the expression more complex. Change it to a
10156 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10157 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10159 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10160 || code == EQ_EXPR || code == NE_EXPR)
10161 && TREE_CODE (type) != VECTOR_TYPE
10162 && ((truth_value_p (TREE_CODE (arg0))
10163 && (truth_value_p (TREE_CODE (arg1))
10164 || (TREE_CODE (arg1) == BIT_AND_EXPR
10165 && integer_onep (TREE_OPERAND (arg1, 1)))))
10166 || (truth_value_p (TREE_CODE (arg1))
10167 && (truth_value_p (TREE_CODE (arg0))
10168 || (TREE_CODE (arg0) == BIT_AND_EXPR
10169 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10171 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10172 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10173 : TRUTH_XOR_EXPR,
10174 boolean_type_node,
10175 fold_convert_loc (loc, boolean_type_node, arg0),
10176 fold_convert_loc (loc, boolean_type_node, arg1));
10178 if (code == EQ_EXPR)
10179 tem = invert_truthvalue_loc (loc, tem);
10181 return fold_convert_loc (loc, type, tem);
10184 if (TREE_CODE_CLASS (code) == tcc_binary
10185 || TREE_CODE_CLASS (code) == tcc_comparison)
10187 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10189 tem = fold_build2_loc (loc, code, type,
10190 fold_convert_loc (loc, TREE_TYPE (op0),
10191 TREE_OPERAND (arg0, 1)), op1);
10192 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10193 tem);
10195 if (TREE_CODE (arg1) == COMPOUND_EXPR
10196 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10198 tem = fold_build2_loc (loc, code, type, op0,
10199 fold_convert_loc (loc, TREE_TYPE (op1),
10200 TREE_OPERAND (arg1, 1)));
10201 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10202 tem);
10205 if (TREE_CODE (arg0) == COND_EXPR
10206 || TREE_CODE (arg0) == VEC_COND_EXPR
10207 || COMPARISON_CLASS_P (arg0))
10209 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10210 arg0, arg1,
10211 /*cond_first_p=*/1);
10212 if (tem != NULL_TREE)
10213 return tem;
10216 if (TREE_CODE (arg1) == COND_EXPR
10217 || TREE_CODE (arg1) == VEC_COND_EXPR
10218 || COMPARISON_CLASS_P (arg1))
10220 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10221 arg1, arg0,
10222 /*cond_first_p=*/0);
10223 if (tem != NULL_TREE)
10224 return tem;
10228 switch (code)
10230 case MEM_REF:
10231 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10232 if (TREE_CODE (arg0) == ADDR_EXPR
10233 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10235 tree iref = TREE_OPERAND (arg0, 0);
10236 return fold_build2 (MEM_REF, type,
10237 TREE_OPERAND (iref, 0),
10238 int_const_binop (PLUS_EXPR, arg1,
10239 TREE_OPERAND (iref, 1)));
10242 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10243 if (TREE_CODE (arg0) == ADDR_EXPR
10244 && handled_component_p (TREE_OPERAND (arg0, 0)))
10246 tree base;
10247 HOST_WIDE_INT coffset;
10248 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10249 &coffset);
10250 if (!base)
10251 return NULL_TREE;
10252 return fold_build2 (MEM_REF, type,
10253 build_fold_addr_expr (base),
10254 int_const_binop (PLUS_EXPR, arg1,
10255 size_int (coffset)));
10258 return NULL_TREE;
10260 case POINTER_PLUS_EXPR:
10261 /* 0 +p index -> (type)index */
10262 if (integer_zerop (arg0))
10263 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10265 /* PTR +p 0 -> PTR */
10266 if (integer_zerop (arg1))
10267 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10269 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10270 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10271 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10272 return fold_convert_loc (loc, type,
10273 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10274 fold_convert_loc (loc, sizetype,
10275 arg1),
10276 fold_convert_loc (loc, sizetype,
10277 arg0)));
10279 /* (PTR +p B) +p A -> PTR +p (B + A) */
10280 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10282 tree inner;
10283 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10284 tree arg00 = TREE_OPERAND (arg0, 0);
10285 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10286 arg01, fold_convert_loc (loc, sizetype, arg1));
10287 return fold_convert_loc (loc, type,
10288 fold_build_pointer_plus_loc (loc,
10289 arg00, inner));
10292 /* PTR_CST +p CST -> CST1 */
10293 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10294 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10295 fold_convert_loc (loc, type, arg1));
10297 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10298 of the array. The loop optimizer sometimes produces this type of
10299 expression. */
10300 if (TREE_CODE (arg0) == ADDR_EXPR)
10302 tem = try_move_mult_to_index (loc, arg0,
10303 fold_convert_loc (loc,
10304 ssizetype, arg1));
10305 if (tem)
10306 return fold_convert_loc (loc, type, tem);
10309 return NULL_TREE;
10311 case PLUS_EXPR:
10312 /* A + (-B) -> A - B */
10313 if (TREE_CODE (arg1) == NEGATE_EXPR
10314 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10315 return fold_build2_loc (loc, MINUS_EXPR, type,
10316 fold_convert_loc (loc, type, arg0),
10317 fold_convert_loc (loc, type,
10318 TREE_OPERAND (arg1, 0)));
10319 /* (-A) + B -> B - A */
10320 if (TREE_CODE (arg0) == NEGATE_EXPR
10321 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10322 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10323 return fold_build2_loc (loc, MINUS_EXPR, type,
10324 fold_convert_loc (loc, type, arg1),
10325 fold_convert_loc (loc, type,
10326 TREE_OPERAND (arg0, 0)));
10328 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10330 /* Convert ~A + 1 to -A. */
10331 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10332 && integer_onep (arg1))
10333 return fold_build1_loc (loc, NEGATE_EXPR, type,
10334 fold_convert_loc (loc, type,
10335 TREE_OPERAND (arg0, 0)));
10337 /* ~X + X is -1. */
10338 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10339 && !TYPE_OVERFLOW_TRAPS (type))
10341 tree tem = TREE_OPERAND (arg0, 0);
10343 STRIP_NOPS (tem);
10344 if (operand_equal_p (tem, arg1, 0))
10346 t1 = build_all_ones_cst (type);
10347 return omit_one_operand_loc (loc, type, t1, arg1);
10351 /* X + ~X is -1. */
10352 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10353 && !TYPE_OVERFLOW_TRAPS (type))
10355 tree tem = TREE_OPERAND (arg1, 0);
10357 STRIP_NOPS (tem);
10358 if (operand_equal_p (arg0, tem, 0))
10360 t1 = build_all_ones_cst (type);
10361 return omit_one_operand_loc (loc, type, t1, arg0);
10365 /* X + (X / CST) * -CST is X % CST. */
10366 if (TREE_CODE (arg1) == MULT_EXPR
10367 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10368 && operand_equal_p (arg0,
10369 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10371 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10372 tree cst1 = TREE_OPERAND (arg1, 1);
10373 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10374 cst1, cst0);
10375 if (sum && integer_zerop (sum))
10376 return fold_convert_loc (loc, type,
10377 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10378 TREE_TYPE (arg0), arg0,
10379 cst0));
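/* E.g. `x + (x / 16) * -16' is recognized as `x % 16', using the
   identity x % c == x - (x / c) * c of truncating division.  */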
10383 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10384 one. Make sure the type is not saturating and has the signedness of
10385 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10386 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10387 if ((TREE_CODE (arg0) == MULT_EXPR
10388 || TREE_CODE (arg1) == MULT_EXPR)
10389 && !TYPE_SATURATING (type)
10390 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10391 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10392 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10394 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10395 if (tem)
10396 return tem;
10399 if (! FLOAT_TYPE_P (type))
10401 if (integer_zerop (arg1))
10402 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10404 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10405 with a constant, and the two constants have no bits in common,
10406 we should treat this as a BIT_IOR_EXPR since this may produce more
10407 simplifications. */
10408 if (TREE_CODE (arg0) == BIT_AND_EXPR
10409 && TREE_CODE (arg1) == BIT_AND_EXPR
10410 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10411 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10412 && wi::bit_and (TREE_OPERAND (arg0, 1),
10413 TREE_OPERAND (arg1, 1)) == 0)
10415 code = BIT_IOR_EXPR;
10416 goto bit_ior;
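/* E.g. in `(x & 0xf0) + (y & 0x0f)' the masks share no bits, so no
   carries can occur and the addition behaves like a bitwise OR.  */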
10419 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10420 (plus (plus (mult) (mult)) (foo)) so that we can
10421 take advantage of the factoring cases below. */
10422 if (TYPE_OVERFLOW_WRAPS (type)
10423 && (((TREE_CODE (arg0) == PLUS_EXPR
10424 || TREE_CODE (arg0) == MINUS_EXPR)
10425 && TREE_CODE (arg1) == MULT_EXPR)
10426 || ((TREE_CODE (arg1) == PLUS_EXPR
10427 || TREE_CODE (arg1) == MINUS_EXPR)
10428 && TREE_CODE (arg0) == MULT_EXPR)))
10430 tree parg0, parg1, parg, marg;
10431 enum tree_code pcode;
10433 if (TREE_CODE (arg1) == MULT_EXPR)
10434 parg = arg0, marg = arg1;
10435 else
10436 parg = arg1, marg = arg0;
10437 pcode = TREE_CODE (parg);
10438 parg0 = TREE_OPERAND (parg, 0);
10439 parg1 = TREE_OPERAND (parg, 1);
10440 STRIP_NOPS (parg0);
10441 STRIP_NOPS (parg1);
10443 if (TREE_CODE (parg0) == MULT_EXPR
10444 && TREE_CODE (parg1) != MULT_EXPR)
10445 return fold_build2_loc (loc, pcode, type,
10446 fold_build2_loc (loc, PLUS_EXPR, type,
10447 fold_convert_loc (loc, type,
10448 parg0),
10449 fold_convert_loc (loc, type,
10450 marg)),
10451 fold_convert_loc (loc, type, parg1));
10452 if (TREE_CODE (parg0) != MULT_EXPR
10453 && TREE_CODE (parg1) == MULT_EXPR)
10454 return
10455 fold_build2_loc (loc, PLUS_EXPR, type,
10456 fold_convert_loc (loc, type, parg0),
10457 fold_build2_loc (loc, pcode, type,
10458 fold_convert_loc (loc, type, marg),
10459 fold_convert_loc (loc, type,
10460 parg1)));
10463 else
10465 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10466 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10467 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10469 /* Likewise if the operands are reversed. */
10470 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10471 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10473 /* Convert X + -C into X - C. */
10474 if (TREE_CODE (arg1) == REAL_CST
10475 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10477 tem = fold_negate_const (arg1, type);
10478 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10479 return fold_build2_loc (loc, MINUS_EXPR, type,
10480 fold_convert_loc (loc, type, arg0),
10481 fold_convert_loc (loc, type, tem));
10484 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10485 to __complex__ ( x, y ). This is not the same for SNaNs or
10486 if signed zeros are involved. */
10487 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10488 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10489 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10491 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10492 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10493 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10494 bool arg0rz = false, arg0iz = false;
10495 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10496 || (arg0i && (arg0iz = real_zerop (arg0i))))
10498 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10499 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10500 if (arg0rz && arg1i && real_zerop (arg1i))
10502 tree rp = arg1r ? arg1r
10503 : build1 (REALPART_EXPR, rtype, arg1);
10504 tree ip = arg0i ? arg0i
10505 : build1 (IMAGPART_EXPR, rtype, arg0);
10506 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10508 else if (arg0iz && arg1r && real_zerop (arg1r))
10510 tree rp = arg0r ? arg0r
10511 : build1 (REALPART_EXPR, rtype, arg0);
10512 tree ip = arg1i ? arg1i
10513 : build1 (IMAGPART_EXPR, rtype, arg1);
10514 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10519 if (flag_unsafe_math_optimizations
10520 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10521 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10522 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10523 return tem;
10525 /* Convert x+x into x*2.0. */
10526 if (operand_equal_p (arg0, arg1, 0)
10527 && SCALAR_FLOAT_TYPE_P (type))
10528 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10529 build_real (type, dconst2));
10531 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10532 We associate floats only if the user has specified
10533 -fassociative-math. */
10534 if (flag_associative_math
10535 && TREE_CODE (arg1) == PLUS_EXPR
10536 && TREE_CODE (arg0) != MULT_EXPR)
10538 tree tree10 = TREE_OPERAND (arg1, 0);
10539 tree tree11 = TREE_OPERAND (arg1, 1);
10540 if (TREE_CODE (tree11) == MULT_EXPR
10541 && TREE_CODE (tree10) == MULT_EXPR)
10543 tree tree0;
10544 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10545 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10548 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10549 We associate floats only if the user has specified
10550 -fassociative-math. */
10551 if (flag_associative_math
10552 && TREE_CODE (arg0) == PLUS_EXPR
10553 && TREE_CODE (arg1) != MULT_EXPR)
10555 tree tree00 = TREE_OPERAND (arg0, 0);
10556 tree tree01 = TREE_OPERAND (arg0, 1);
10557 if (TREE_CODE (tree01) == MULT_EXPR
10558 && TREE_CODE (tree00) == MULT_EXPR)
10560 tree tree0;
10561 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10562 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10567 bit_rotate:
10568 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10569 is a rotate of A by C1 bits. */
10570 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10571 is a rotate of A by B bits. */
10573 enum tree_code code0, code1;
10574 tree rtype;
10575 code0 = TREE_CODE (arg0);
10576 code1 = TREE_CODE (arg1);
10577 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10578 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10579 && operand_equal_p (TREE_OPERAND (arg0, 0),
10580 TREE_OPERAND (arg1, 0), 0)
10581 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10582 TYPE_UNSIGNED (rtype))
10583 /* Only create rotates in complete modes. Other cases are not
10584 expanded properly. */
10585 && (element_precision (rtype)
10586 == element_precision (TYPE_MODE (rtype))))
10588 tree tree01, tree11;
10589 enum tree_code code01, code11;
10591 tree01 = TREE_OPERAND (arg0, 1);
10592 tree11 = TREE_OPERAND (arg1, 1);
10593 STRIP_NOPS (tree01);
10594 STRIP_NOPS (tree11);
10595 code01 = TREE_CODE (tree01);
10596 code11 = TREE_CODE (tree11);
10597 if (code01 == INTEGER_CST
10598 && code11 == INTEGER_CST
10599 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10600 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10602 tem = build2_loc (loc, LROTATE_EXPR,
10603 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10604 TREE_OPERAND (arg0, 0),
10605 code0 == LSHIFT_EXPR ? tree01 : tree11);
10606 return fold_convert_loc (loc, type, tem);
10608 else if (code11 == MINUS_EXPR)
10610 tree tree110, tree111;
10611 tree110 = TREE_OPERAND (tree11, 0);
10612 tree111 = TREE_OPERAND (tree11, 1);
10613 STRIP_NOPS (tree110);
10614 STRIP_NOPS (tree111);
10615 if (TREE_CODE (tree110) == INTEGER_CST
10616 && 0 == compare_tree_int (tree110,
10617 element_precision
10618 (TREE_TYPE (TREE_OPERAND
10619 (arg0, 0))))
10620 && operand_equal_p (tree01, tree111, 0))
10621 return
10622 fold_convert_loc (loc, type,
10623 build2 ((code0 == LSHIFT_EXPR
10624 ? LROTATE_EXPR
10625 : RROTATE_EXPR),
10626 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10627 TREE_OPERAND (arg0, 0), tree01));
10629 else if (code01 == MINUS_EXPR)
10631 tree tree010, tree011;
10632 tree010 = TREE_OPERAND (tree01, 0);
10633 tree011 = TREE_OPERAND (tree01, 1);
10634 STRIP_NOPS (tree010);
10635 STRIP_NOPS (tree011);
10636 if (TREE_CODE (tree010) == INTEGER_CST
10637 && 0 == compare_tree_int (tree010,
10638 element_precision
10639 (TREE_TYPE (TREE_OPERAND
10640 (arg0, 0))))
10641 && operand_equal_p (tree11, tree011, 0))
10642 return fold_convert_loc
10643 (loc, type,
10644 build2 ((code0 != LSHIFT_EXPR
10645 ? LROTATE_EXPR
10646 : RROTATE_EXPR),
10647 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10648 TREE_OPERAND (arg0, 0), tree11));
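/* Concretely, for a 32-bit unsigned A, `(A << 3) + (A >> 29)'
   matches the constant pattern (3 + 29 == 32) and becomes a left
   rotate by 3, while `(A << B) + (A >> (32 - B))' matches the
   MINUS_EXPR pattern and becomes a rotate by B.  */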
10653 associate:
10654 /* In most languages, we can't associate operations on floats through
10655 parentheses. Rather than remember where the parentheses were, we
10656 don't associate floats at all, unless the user has specified
10657 -fassociative-math.
10658 And, we need to make sure the type is not saturating. */
10660 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10661 && !TYPE_SATURATING (type))
10663 tree var0, con0, lit0, minus_lit0;
10664 tree var1, con1, lit1, minus_lit1;
10665 tree atype = type;
10666 bool ok = true;
10668 /* Split both trees into variables, constants, and literals. Then
10669 associate each group together, the constants with literals,
10670 then the result with variables. This increases the chances of
10671 literals being recombined later and of generating relocatable
10672 expressions for the sum of a constant and literal. */
10673 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10674 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10675 code == MINUS_EXPR);
10677 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10678 if (code == MINUS_EXPR)
10679 code = PLUS_EXPR;
10681 /* With undefined overflow prefer doing association in a type
10682 which wraps on overflow, if that is one of the operand types. */
10683 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10684 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10686 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10687 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10688 atype = TREE_TYPE (arg0);
10689 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10690 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10691 atype = TREE_TYPE (arg1);
10692 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10695 /* With undefined overflow we can only associate constants with one
10696 variable, and constants whose association doesn't overflow. */
10697 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10698 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10700 if (var0 && var1)
10702 tree tmp0 = var0;
10703 tree tmp1 = var1;
10705 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10706 tmp0 = TREE_OPERAND (tmp0, 0);
10707 if (CONVERT_EXPR_P (tmp0)
10708 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10709 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10710 <= TYPE_PRECISION (atype)))
10711 tmp0 = TREE_OPERAND (tmp0, 0);
10712 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10713 tmp1 = TREE_OPERAND (tmp1, 0);
10714 if (CONVERT_EXPR_P (tmp1)
10715 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10716 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10717 <= TYPE_PRECISION (atype)))
10718 tmp1 = TREE_OPERAND (tmp1, 0);
10719 /* The only case we can still associate with two variables
10720 is if they are the same, modulo negation and bit-pattern
10721 preserving conversions. */
10722 if (!operand_equal_p (tmp0, tmp1, 0))
10723 ok = false;
10727 /* Only do something if we found more than two objects. Otherwise,
10728 nothing has changed and we risk infinite recursion. */
10729 if (ok
10730 && (2 < ((var0 != 0) + (var1 != 0)
10731 + (con0 != 0) + (con1 != 0)
10732 + (lit0 != 0) + (lit1 != 0)
10733 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10735 bool any_overflows = false;
10736 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10737 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10738 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10739 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10740 var0 = associate_trees (loc, var0, var1, code, atype);
10741 con0 = associate_trees (loc, con0, con1, code, atype);
10742 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10743 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10744 code, atype);
10746	          /* Preserve the MINUS_EXPR if the negative part of the literal is
10747	             greater than the positive part.  Otherwise, the multiplicative
10748	             folding code (i.e. extract_muldiv) may be fooled when
10749	             unsigned constants are subtracted, as in the following
10750	             example: ((X*2 + 4) - 8U)/2.  */
10751 if (minus_lit0 && lit0)
10753 if (TREE_CODE (lit0) == INTEGER_CST
10754 && TREE_CODE (minus_lit0) == INTEGER_CST
10755 && tree_int_cst_lt (lit0, minus_lit0))
10757 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10758 MINUS_EXPR, atype);
10759 lit0 = 0;
10761 else
10763 lit0 = associate_trees (loc, lit0, minus_lit0,
10764 MINUS_EXPR, atype);
10765 minus_lit0 = 0;
10769 /* Don't introduce overflows through reassociation. */
10770 if (!any_overflows
10771 && ((lit0 && TREE_OVERFLOW (lit0))
10772 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10773 return NULL_TREE;
10775 if (minus_lit0)
10777 if (con0 == 0)
10778 return
10779 fold_convert_loc (loc, type,
10780 associate_trees (loc, var0, minus_lit0,
10781 MINUS_EXPR, atype));
10782 else
10784 con0 = associate_trees (loc, con0, minus_lit0,
10785 MINUS_EXPR, atype);
10786 return
10787 fold_convert_loc (loc, type,
10788 associate_trees (loc, var0, con0,
10789 PLUS_EXPR, atype));
10793 con0 = associate_trees (loc, con0, lit0, code, atype);
10794 return
10795 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10796 code, atype));
10800 return NULL_TREE;
10802 case MINUS_EXPR:
10803 /* Pointer simplifications for subtraction, simple reassociations. */
10804 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10806 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10807 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10808 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10810 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10811 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10812 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10813 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10814 return fold_build2_loc (loc, PLUS_EXPR, type,
10815 fold_build2_loc (loc, MINUS_EXPR, type,
10816 arg00, arg10),
10817 fold_build2_loc (loc, MINUS_EXPR, type,
10818 arg01, arg11));
10820 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10821 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10823 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10824 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10825 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10826 fold_convert_loc (loc, type, arg1));
10827 if (tmp)
10828 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
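	  /* Worked example (illustrative): (p p+ i) - (p p+ j) becomes
	     (p - p) + (i - j); the pointer difference then folds to zero,
	     leaving just i - j.  */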
10831 /* A - (-B) -> A + B */
10832 if (TREE_CODE (arg1) == NEGATE_EXPR)
10833 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10834 fold_convert_loc (loc, type,
10835 TREE_OPERAND (arg1, 0)));
10836 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10837 if (TREE_CODE (arg0) == NEGATE_EXPR
10838 && negate_expr_p (arg1)
10839 && reorder_operands_p (arg0, arg1))
10840 return fold_build2_loc (loc, MINUS_EXPR, type,
10841 fold_convert_loc (loc, type,
10842 negate_expr (arg1)),
10843 fold_convert_loc (loc, type,
10844 TREE_OPERAND (arg0, 0)));
10845 /* Convert -A - 1 to ~A. */
10846 if (TREE_CODE (type) != COMPLEX_TYPE
10847 && TREE_CODE (arg0) == NEGATE_EXPR
10848 && integer_onep (arg1)
10849 && !TYPE_OVERFLOW_TRAPS (type))
10850 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10851 fold_convert_loc (loc, type,
10852 TREE_OPERAND (arg0, 0)));
10854 /* Convert -1 - A to ~A. */
10855 if (TREE_CODE (type) != COMPLEX_TYPE
10856 && integer_all_onesp (arg0))
10857 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10860 /* X - (X / Y) * Y is X % Y. */
10861 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10862 && TREE_CODE (arg1) == MULT_EXPR
10863 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10864 && operand_equal_p (arg0,
10865 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10866 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10867 TREE_OPERAND (arg1, 1), 0))
10868 return
10869 fold_convert_loc (loc, type,
10870 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10871 arg0, TREE_OPERAND (arg1, 1)));
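	  /* Worked example (illustrative): for integer x, the expression
	     x - (x / 7) * 7 is folded to x % 7, matching C's truncating
	     division and remainder semantics.  */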
10873 if (! FLOAT_TYPE_P (type))
10875 if (integer_zerop (arg0))
10876 return negate_expr (fold_convert_loc (loc, type, arg1));
10877 if (integer_zerop (arg1))
10878 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10880 /* Fold A - (A & B) into ~B & A. */
10881 if (!TREE_SIDE_EFFECTS (arg0)
10882 && TREE_CODE (arg1) == BIT_AND_EXPR)
10884 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10886 tree arg10 = fold_convert_loc (loc, type,
10887 TREE_OPERAND (arg1, 0));
10888 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10889 fold_build1_loc (loc, BIT_NOT_EXPR,
10890 type, arg10),
10891 fold_convert_loc (loc, type, arg0));
10893 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10895 tree arg11 = fold_convert_loc (loc,
10896 type, TREE_OPERAND (arg1, 1));
10897 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10898 fold_build1_loc (loc, BIT_NOT_EXPR,
10899 type, arg11),
10900 fold_convert_loc (loc, type, arg0));
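	      /* Worked example (illustrative): a - (a & b) folds to ~b & a,
	         since the bits of a inside b and outside b partition a:
	         a == (a & b) + (a & ~b).  */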
10904 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10905 any power of 2 minus 1. */
10906 if (TREE_CODE (arg0) == BIT_AND_EXPR
10907 && TREE_CODE (arg1) == BIT_AND_EXPR
10908 && operand_equal_p (TREE_OPERAND (arg0, 0),
10909 TREE_OPERAND (arg1, 0), 0))
10911 tree mask0 = TREE_OPERAND (arg0, 1);
10912 tree mask1 = TREE_OPERAND (arg1, 1);
10913 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10915 if (operand_equal_p (tem, mask1, 0))
10917 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10918 TREE_OPERAND (arg0, 0), mask1);
10919 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
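	      /* Worked example (illustrative): with B == 7 (a power of 2
	         minus 1), (a & ~7) - (a & 7) folds to (a ^ 7) - 7; both
	         equal (a & ~7) minus the low three bits of a.  */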
10924 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10925 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10926 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10928 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10929 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10930 (-ARG1 + ARG0) reduces to -ARG1. */
10931 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10932 return negate_expr (fold_convert_loc (loc, type, arg1));
10934 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10935 __complex__ ( x, -y ). This is not the same for SNaNs or if
10936 signed zeros are involved. */
10937 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10938 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10939 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10941 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10942 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10943 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10944 bool arg0rz = false, arg0iz = false;
10945 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10946 || (arg0i && (arg0iz = real_zerop (arg0i))))
10948 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10949 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10950 if (arg0rz && arg1i && real_zerop (arg1i))
10952 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10953 arg1r ? arg1r
10954 : build1 (REALPART_EXPR, rtype, arg1));
10955 tree ip = arg0i ? arg0i
10956 : build1 (IMAGPART_EXPR, rtype, arg0);
10957 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10959 else if (arg0iz && arg1r && real_zerop (arg1r))
10961 tree rp = arg0r ? arg0r
10962 : build1 (REALPART_EXPR, rtype, arg0);
10963 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10964 arg1i ? arg1i
10965 : build1 (IMAGPART_EXPR, rtype, arg1));
10966 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10971 /* Fold &x - &x. This can happen from &x.foo - &x.
10972 This is unsafe for certain floats even in non-IEEE formats.
10973	         In IEEE, it is unsafe because it gives the wrong result for NaNs.
10974 Also note that operand_equal_p is always false if an operand
10975 is volatile. */
10977 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10978 && operand_equal_p (arg0, arg1, 0))
10979 return build_zero_cst (type);
10981 /* A - B -> A + (-B) if B is easily negatable. */
10982 if (negate_expr_p (arg1)
10983 && ((FLOAT_TYPE_P (type)
10984 /* Avoid this transformation if B is a positive REAL_CST. */
10985 && (TREE_CODE (arg1) != REAL_CST
10986 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10987 || INTEGRAL_TYPE_P (type)))
10988 return fold_build2_loc (loc, PLUS_EXPR, type,
10989 fold_convert_loc (loc, type, arg0),
10990 fold_convert_loc (loc, type,
10991 negate_expr (arg1)));
10993 /* Try folding difference of addresses. */
10995 HOST_WIDE_INT diff;
10997 if ((TREE_CODE (arg0) == ADDR_EXPR
10998 || TREE_CODE (arg1) == ADDR_EXPR)
10999 && ptr_difference_const (arg0, arg1, &diff))
11000 return build_int_cst_type (type, diff);
11003 /* Fold &a[i] - &a[j] to i-j. */
11004 if (TREE_CODE (arg0) == ADDR_EXPR
11005 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11006 && TREE_CODE (arg1) == ADDR_EXPR
11007 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11009 tree tem = fold_addr_of_array_ref_difference (loc, type,
11010 TREE_OPERAND (arg0, 0),
11011 TREE_OPERAND (arg1, 0));
11012 if (tem)
11013 return tem;
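	  /* Worked example (illustrative): for int a[N], &a[i] - &a[j]
	     folds to the element difference i - j, provided both ARRAY_REFs
	     refer to the same array.  */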
11016 if (FLOAT_TYPE_P (type)
11017 && flag_unsafe_math_optimizations
11018 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11019 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11020 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11021 return tem;
11023 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11024 one. Make sure the type is not saturating and has the signedness of
11025 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11026 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11027 if ((TREE_CODE (arg0) == MULT_EXPR
11028 || TREE_CODE (arg1) == MULT_EXPR)
11029 && !TYPE_SATURATING (type)
11030 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11031 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11032 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11034 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11035 if (tem)
11036 return tem;
11039 goto associate;
11041 case MULT_EXPR:
11042 /* (-A) * (-B) -> A * B */
11043 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11044 return fold_build2_loc (loc, MULT_EXPR, type,
11045 fold_convert_loc (loc, type,
11046 TREE_OPERAND (arg0, 0)),
11047 fold_convert_loc (loc, type,
11048 negate_expr (arg1)));
11049 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11050 return fold_build2_loc (loc, MULT_EXPR, type,
11051 fold_convert_loc (loc, type,
11052 negate_expr (arg0)),
11053 fold_convert_loc (loc, type,
11054 TREE_OPERAND (arg1, 0)));
11056 if (! FLOAT_TYPE_P (type))
11058 if (integer_zerop (arg1))
11059 return omit_one_operand_loc (loc, type, arg1, arg0);
11060 if (integer_onep (arg1))
11061 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11062 /* Transform x * -1 into -x. Make sure to do the negation
11063 on the original operand with conversions not stripped
11064 because we can only strip non-sign-changing conversions. */
11065 if (integer_minus_onep (arg1))
11066 return fold_convert_loc (loc, type, negate_expr (op0));
11067 /* Transform x * -C into -x * C if x is easily negatable. */
11068 if (TREE_CODE (arg1) == INTEGER_CST
11069 && tree_int_cst_sgn (arg1) == -1
11070 && negate_expr_p (arg0)
11071 && (tem = negate_expr (arg1)) != arg1
11072 && !TREE_OVERFLOW (tem))
11073 return fold_build2_loc (loc, MULT_EXPR, type,
11074 fold_convert_loc (loc, type,
11075 negate_expr (arg0)),
11076 tem);
11078 /* (a * (1 << b)) is (a << b) */
11079 if (TREE_CODE (arg1) == LSHIFT_EXPR
11080 && integer_onep (TREE_OPERAND (arg1, 0)))
11081 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11082 TREE_OPERAND (arg1, 1));
11083 if (TREE_CODE (arg0) == LSHIFT_EXPR
11084 && integer_onep (TREE_OPERAND (arg0, 0)))
11085 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11086 TREE_OPERAND (arg0, 1));
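	      /* Worked example (illustrative): a * (1 << 4), i.e. a * 16
	         written via a shift, folds to a << 4; the commuted form
	         (1 << b) * a is handled by the second test.  */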
11088 /* (A + A) * C -> A * 2 * C */
11089 if (TREE_CODE (arg0) == PLUS_EXPR
11090 && TREE_CODE (arg1) == INTEGER_CST
11091 && operand_equal_p (TREE_OPERAND (arg0, 0),
11092 TREE_OPERAND (arg0, 1), 0))
11093 return fold_build2_loc (loc, MULT_EXPR, type,
11094 omit_one_operand_loc (loc, type,
11095 TREE_OPERAND (arg0, 0),
11096 TREE_OPERAND (arg0, 1)),
11097 fold_build2_loc (loc, MULT_EXPR, type,
11098	                                 build_int_cst (type, 2), arg1));
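	      /* Worked example (illustrative): (a + a) * 3 folds to
	         a * (2 * 3), which constant-folds to a * 6.  */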
11100 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11101 sign-changing only. */
11102 if (TREE_CODE (arg1) == INTEGER_CST
11103 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11104 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11105 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11107 strict_overflow_p = false;
11108 if (TREE_CODE (arg1) == INTEGER_CST
11109 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11110 &strict_overflow_p)))
11112 if (strict_overflow_p)
11113 fold_overflow_warning (("assuming signed overflow does not "
11114 "occur when simplifying "
11115 "multiplication"),
11116 WARN_STRICT_OVERFLOW_MISC);
11117 return fold_convert_loc (loc, type, tem);
11120 /* Optimize z * conj(z) for integer complex numbers. */
11121 if (TREE_CODE (arg0) == CONJ_EXPR
11122 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11123 return fold_mult_zconjz (loc, type, arg1);
11124 if (TREE_CODE (arg1) == CONJ_EXPR
11125 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11126 return fold_mult_zconjz (loc, type, arg0);
11128 else
11130 /* Maybe fold x * 0 to 0. The expressions aren't the same
11131 when x is NaN, since x * 0 is also NaN. Nor are they the
11132 same in modes with signed zeros, since multiplying a
11133 negative value by 0 gives -0, not +0. */
11134 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11135 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11136 && real_zerop (arg1))
11137 return omit_one_operand_loc (loc, type, arg1, arg0);
11138 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11139 Likewise for complex arithmetic with signed zeros. */
11140 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11141 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11142 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11143 && real_onep (arg1))
11144 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11146 /* Transform x * -1.0 into -x. */
11147 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11148 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11149 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11150 && real_minus_onep (arg1))
11151 return fold_convert_loc (loc, type, negate_expr (arg0));
11153	          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
11154	             the result for floating-point types due to rounding, so it is
11155	             applied only if -fassociative-math was specified.  */
11156 if (flag_associative_math
11157 && TREE_CODE (arg0) == RDIV_EXPR
11158 && TREE_CODE (arg1) == REAL_CST
11159 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11161 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11162 arg1);
11163 if (tem)
11164 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11165 TREE_OPERAND (arg0, 1));
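	      /* Worked example (illustrative): under -fassociative-math,
	         (6.0 / x) * 0.5 folds to 3.0 / x, trading one rounding
	         point for another.  */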
11168 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11169 if (operand_equal_p (arg0, arg1, 0))
11171 tree tem = fold_strip_sign_ops (arg0);
11172 if (tem != NULL_TREE)
11174 tem = fold_convert_loc (loc, type, tem);
11175 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11179 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11180 This is not the same for NaNs or if signed zeros are
11181 involved. */
11182 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11183 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11184 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11185 && TREE_CODE (arg1) == COMPLEX_CST
11186 && real_zerop (TREE_REALPART (arg1)))
11188 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11189 if (real_onep (TREE_IMAGPART (arg1)))
11190 return
11191 fold_build2_loc (loc, COMPLEX_EXPR, type,
11192 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11193 rtype, arg0)),
11194 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11195 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11196 return
11197 fold_build2_loc (loc, COMPLEX_EXPR, type,
11198 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11199 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11200 rtype, arg0)));
11203 /* Optimize z * conj(z) for floating point complex numbers.
11204 Guarded by flag_unsafe_math_optimizations as non-finite
11205 imaginary components don't produce scalar results. */
11206 if (flag_unsafe_math_optimizations
11207 && TREE_CODE (arg0) == CONJ_EXPR
11208 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11209 return fold_mult_zconjz (loc, type, arg1);
11210 if (flag_unsafe_math_optimizations
11211 && TREE_CODE (arg1) == CONJ_EXPR
11212 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11213 return fold_mult_zconjz (loc, type, arg0);
11215 if (flag_unsafe_math_optimizations)
11217 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11218 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11220 /* Optimizations of root(...)*root(...). */
11221 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11223 tree rootfn, arg;
11224 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11225 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11227 /* Optimize sqrt(x)*sqrt(x) as x. */
11228 if (BUILTIN_SQRT_P (fcode0)
11229 && operand_equal_p (arg00, arg10, 0)
11230 && ! HONOR_SNANS (TYPE_MODE (type)))
11231 return arg00;
11233 /* Optimize root(x)*root(y) as root(x*y). */
11234 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11235 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11236 return build_call_expr_loc (loc, rootfn, 1, arg);
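	          /* Worked examples (illustrative): sqrt(x) * sqrt(x) folds
	             directly to x; otherwise sqrt(x) * sqrt(y) becomes
	             sqrt(x * y), valid only under -funsafe-math-optimizations.  */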
11239 /* Optimize expN(x)*expN(y) as expN(x+y). */
11240 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11242 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11243 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11244 CALL_EXPR_ARG (arg0, 0),
11245 CALL_EXPR_ARG (arg1, 0));
11246 return build_call_expr_loc (loc, expfn, 1, arg);
11249 /* Optimizations of pow(...)*pow(...). */
11250 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11251 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11252 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11254 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11255 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11256 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11257 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11259 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11260 if (operand_equal_p (arg01, arg11, 0))
11262 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11263 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11264 arg00, arg10);
11265 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11268 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11269 if (operand_equal_p (arg00, arg10, 0))
11271 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11272 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11273 arg01, arg11);
11274 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
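	          /* Worked examples (illustrative): pow(x, c) * pow(z, c)
	             becomes pow(x * z, c), and pow(x, y) * pow(x, z)
	             becomes pow(x, y + z).  */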
11278 /* Optimize tan(x)*cos(x) as sin(x). */
11279 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11280 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11281 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11282 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11283 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11284 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11285 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11286 CALL_EXPR_ARG (arg1, 0), 0))
11288 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11290 if (sinfn != NULL_TREE)
11291 return build_call_expr_loc (loc, sinfn, 1,
11292 CALL_EXPR_ARG (arg0, 0));
11295 /* Optimize x*pow(x,c) as pow(x,c+1). */
11296 if (fcode1 == BUILT_IN_POW
11297 || fcode1 == BUILT_IN_POWF
11298 || fcode1 == BUILT_IN_POWL)
11300 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11301 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11302 if (TREE_CODE (arg11) == REAL_CST
11303 && !TREE_OVERFLOW (arg11)
11304 && operand_equal_p (arg0, arg10, 0))
11306 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11307 REAL_VALUE_TYPE c;
11308 tree arg;
11310 c = TREE_REAL_CST (arg11);
11311 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11312 arg = build_real (type, c);
11313 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11317 /* Optimize pow(x,c)*x as pow(x,c+1). */
11318 if (fcode0 == BUILT_IN_POW
11319 || fcode0 == BUILT_IN_POWF
11320 || fcode0 == BUILT_IN_POWL)
11322 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11323 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11324 if (TREE_CODE (arg01) == REAL_CST
11325 && !TREE_OVERFLOW (arg01)
11326 && operand_equal_p (arg1, arg00, 0))
11328 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11329 REAL_VALUE_TYPE c;
11330 tree arg;
11332 c = TREE_REAL_CST (arg01);
11333 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11334 arg = build_real (type, c);
11335 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11339 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11340 if (!in_gimple_form
11341 && optimize
11342 && operand_equal_p (arg0, arg1, 0))
11344 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11346 if (powfn)
11348 tree arg = build_real (type, dconst2);
11349 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11354 goto associate;
11356 case BIT_IOR_EXPR:
11357 bit_ior:
11358 if (integer_all_onesp (arg1))
11359 return omit_one_operand_loc (loc, type, arg1, arg0);
11360 if (integer_zerop (arg1))
11361 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11362 if (operand_equal_p (arg0, arg1, 0))
11363 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11365 /* ~X | X is -1. */
11366 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11367 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11369 t1 = build_zero_cst (type);
11370 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11371 return omit_one_operand_loc (loc, type, t1, arg1);
11374 /* X | ~X is -1. */
11375 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11376 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11378 t1 = build_zero_cst (type);
11379 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11380 return omit_one_operand_loc (loc, type, t1, arg0);
11383 /* Canonicalize (X & C1) | C2. */
11384 if (TREE_CODE (arg0) == BIT_AND_EXPR
11385 && TREE_CODE (arg1) == INTEGER_CST
11386 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11388 int width = TYPE_PRECISION (type), w;
11389 wide_int c1 = TREE_OPERAND (arg0, 1);
11390 wide_int c2 = arg1;
11392 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11393 if ((c1 & c2) == c1)
11394 return omit_one_operand_loc (loc, type, arg1,
11395 TREE_OPERAND (arg0, 0));
11397 wide_int msk = wi::mask (width, false,
11398 TYPE_PRECISION (TREE_TYPE (arg1)));
11400 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11401 if (msk.and_not (c1 | c2) == 0)
11402 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11403 TREE_OPERAND (arg0, 0), arg1);
11405 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11406 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11407 mode which allows further optimizations. */
11408 c1 &= msk;
11409 c2 &= msk;
11410 wide_int c3 = c1.and_not (c2);
11411 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11413 wide_int mask = wi::mask (w, false,
11414 TYPE_PRECISION (type));
11415 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11417 c3 = mask;
11418 break;
11422 if (c3 != c1)
11423 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11424 fold_build2_loc (loc, BIT_AND_EXPR, type,
11425 TREE_OPERAND (arg0, 0),
11426 wide_int_to_tree (type,
11427 c3)),
11428 arg1);
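	      /* Worked examples (illustrative): (x & 3) | 7 folds to 7,
	         since every bit of 3 is also set in 7; and in
	         (x & 0x0F) | 0x05 the mask shrinks to (x & 0x0A) | 0x05,
	         clearing the C1 bits already provided by C2.  */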
11431 /* (X & Y) | Y is (X, Y). */
11432 if (TREE_CODE (arg0) == BIT_AND_EXPR
11433 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11434 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11435 /* (X & Y) | X is (Y, X). */
11436 if (TREE_CODE (arg0) == BIT_AND_EXPR
11437 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11438 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11439 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11440 /* X | (X & Y) is (Y, X). */
11441 if (TREE_CODE (arg1) == BIT_AND_EXPR
11442 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11443 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11444 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11445 /* X | (Y & X) is (Y, X). */
11446 if (TREE_CODE (arg1) == BIT_AND_EXPR
11447 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11448 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11449 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11451 /* (X & ~Y) | (~X & Y) is X ^ Y */
11452 if (TREE_CODE (arg0) == BIT_AND_EXPR
11453 && TREE_CODE (arg1) == BIT_AND_EXPR)
11455 tree a0, a1, l0, l1, n0, n1;
11457 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11458 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11460 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11461 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11463 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11464 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11466 if ((operand_equal_p (n0, a0, 0)
11467 && operand_equal_p (n1, a1, 0))
11468 || (operand_equal_p (n0, a1, 0)
11469 && operand_equal_p (n1, a0, 0)))
11470 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11473 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11474 if (t1 != NULL_TREE)
11475 return t1;
11477 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11479 This results in more efficient code for machines without a NAND
11480 instruction. Combine will canonicalize to the first form
11481 which will allow use of NAND instructions provided by the
11482 backend if they exist. */
11483 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11484 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11486 return
11487 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11488 build2 (BIT_AND_EXPR, type,
11489 fold_convert_loc (loc, type,
11490 TREE_OPERAND (arg0, 0)),
11491 fold_convert_loc (loc, type,
11492 TREE_OPERAND (arg1, 0))));
11495	      /* See if this can be simplified into a rotate first.  If that
11496	         is unsuccessful, continue in the association code.  */
11497 goto bit_rotate;
11499 case BIT_XOR_EXPR:
11500 if (integer_zerop (arg1))
11501 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11502 if (integer_all_onesp (arg1))
11503 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11504 if (operand_equal_p (arg0, arg1, 0))
11505 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11507 /* ~X ^ X is -1. */
11508 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11509 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11511 t1 = build_zero_cst (type);
11512 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11513 return omit_one_operand_loc (loc, type, t1, arg1);
11516 /* X ^ ~X is -1. */
11517 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11518 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11520 t1 = build_zero_cst (type);
11521 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11522 return omit_one_operand_loc (loc, type, t1, arg0);
11525 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11526 with a constant, and the two constants have no bits in common,
11527 we should treat this as a BIT_IOR_EXPR since this may produce more
11528 simplifications. */
11529 if (TREE_CODE (arg0) == BIT_AND_EXPR
11530 && TREE_CODE (arg1) == BIT_AND_EXPR
11531 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11532 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11533 && wi::bit_and (TREE_OPERAND (arg0, 1),
11534 TREE_OPERAND (arg1, 1)) == 0)
11536 code = BIT_IOR_EXPR;
11537 goto bit_ior;
11540	      /* (X | Y) ^ X -> Y & ~X */
11541 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11542 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11544 tree t2 = TREE_OPERAND (arg0, 1);
11545 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11546 arg1);
11547 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11548 fold_convert_loc (loc, type, t2),
11549 fold_convert_loc (loc, type, t1));
11550 return t1;
11553	      /* (Y | X) ^ X -> Y & ~X */
11554 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11555 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11557 tree t2 = TREE_OPERAND (arg0, 0);
11558 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11559 arg1);
11560 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11561 fold_convert_loc (loc, type, t2),
11562 fold_convert_loc (loc, type, t1));
11563 return t1;
11566	      /* X ^ (X | Y) -> Y & ~X */
11567 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11568 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11570 tree t2 = TREE_OPERAND (arg1, 1);
11571 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11572 arg0);
11573 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11574 fold_convert_loc (loc, type, t2),
11575 fold_convert_loc (loc, type, t1));
11576 return t1;
11579	      /* X ^ (Y | X) -> Y & ~X */
11580 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11581 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11583 tree t2 = TREE_OPERAND (arg1, 0);
11584 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11585 arg0);
11586 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11587 fold_convert_loc (loc, type, t2),
11588 fold_convert_loc (loc, type, t1));
11589 return t1;
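	  /* Worked example (illustrative): (x | y) ^ x folds to y & ~x;
	     where x has a 1 bit the XOR cancels it, and where x has a 0
	     bit the result is just the corresponding bit of y.  */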
11592 /* Convert ~X ^ ~Y to X ^ Y. */
11593 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11594 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11595 return fold_build2_loc (loc, code, type,
11596 fold_convert_loc (loc, type,
11597 TREE_OPERAND (arg0, 0)),
11598 fold_convert_loc (loc, type,
11599 TREE_OPERAND (arg1, 0)));
11601 /* Convert ~X ^ C to X ^ ~C. */
11602 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11603 && TREE_CODE (arg1) == INTEGER_CST)
11604 return fold_build2_loc (loc, code, type,
11605 fold_convert_loc (loc, type,
11606 TREE_OPERAND (arg0, 0)),
11607 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11609 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11610 if (TREE_CODE (arg0) == BIT_AND_EXPR
11611 && integer_onep (TREE_OPERAND (arg0, 1))
11612 && integer_onep (arg1))
11613 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11614 build_zero_cst (TREE_TYPE (arg0)));
11616 /* Fold (X & Y) ^ Y as ~X & Y. */
11617 if (TREE_CODE (arg0) == BIT_AND_EXPR
11618 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11620 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11621 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11622 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11623 fold_convert_loc (loc, type, arg1));
11625 /* Fold (X & Y) ^ X as ~Y & X. */
11626 if (TREE_CODE (arg0) == BIT_AND_EXPR
11627 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11628 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11630 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11631 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11632 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11633 fold_convert_loc (loc, type, arg1));
11635 /* Fold X ^ (X & Y) as X & ~Y. */
11636 if (TREE_CODE (arg1) == BIT_AND_EXPR
11637 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11639 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11640 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11641 fold_convert_loc (loc, type, arg0),
11642 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11644 /* Fold X ^ (Y & X) as ~Y & X. */
11645 if (TREE_CODE (arg1) == BIT_AND_EXPR
11646 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11647 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11649 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11650 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11651 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11652 fold_convert_loc (loc, type, arg0));
11655	      /* See if this can be simplified into a rotate first.  If that
11656	         is unsuccessful, continue in the association code.  */
11657 goto bit_rotate;
11659 case BIT_AND_EXPR:
11660 if (integer_all_onesp (arg1))
11661 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11662 if (integer_zerop (arg1))
11663 return omit_one_operand_loc (loc, type, arg1, arg0);
11664 if (operand_equal_p (arg0, arg1, 0))
11665 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11667 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11668 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11669 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11670 || (TREE_CODE (arg0) == EQ_EXPR
11671 && integer_zerop (TREE_OPERAND (arg0, 1))))
11672 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11673 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11675	      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11676 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11677 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11678 || (TREE_CODE (arg1) == EQ_EXPR
11679 && integer_zerop (TREE_OPERAND (arg1, 1))))
11680 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11681 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11683 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11684 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11685 && TREE_CODE (arg1) == INTEGER_CST
11686 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11688 tree tmp1 = fold_convert_loc (loc, type, arg1);
11689 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11690 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11691 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11692 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11693 return
11694 fold_convert_loc (loc, type,
11695 fold_build2_loc (loc, BIT_IOR_EXPR,
11696 type, tmp2, tmp3));
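	      /* Worked example (illustrative): (x | 0x0F) & 0xF0 distributes
	         to (x & 0xF0) | (0x0F & 0xF0), and the second operand
	         constant-folds to 0, leaving x & 0xF0.  */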
11699 /* (X | Y) & Y is (X, Y). */
11700 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11701 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11702 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11703 /* (X | Y) & X is (Y, X). */
11704 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11705 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11706 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11707 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11708 /* X & (X | Y) is (Y, X). */
11709 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11710 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11711 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11712 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11713 /* X & (Y | X) is (Y, X). */
11714 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11715 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11716 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11717 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11719 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11720 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11721 && integer_onep (TREE_OPERAND (arg0, 1))
11722 && integer_onep (arg1))
11724 tree tem2;
11725 tem = TREE_OPERAND (arg0, 0);
11726 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11727 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11728 tem, tem2);
11729 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11730 build_zero_cst (TREE_TYPE (tem)));
11732 /* Fold ~X & 1 as (X & 1) == 0. */
11733 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11734 && integer_onep (arg1))
11736 tree tem2;
11737 tem = TREE_OPERAND (arg0, 0);
11738 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11739 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11740 tem, tem2);
11741 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11742 build_zero_cst (TREE_TYPE (tem)));
11744 /* Fold !X & 1 as X == 0. */
11745 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11746 && integer_onep (arg1))
11748 tem = TREE_OPERAND (arg0, 0);
11749 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11750 build_zero_cst (TREE_TYPE (tem)));
11753 /* Fold (X ^ Y) & Y as ~X & Y. */
11754 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11755 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11757 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11758 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11759 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11760 fold_convert_loc (loc, type, arg1));
11762 /* Fold (X ^ Y) & X as ~Y & X. */
11763 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11764 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11765 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11767 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11768 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11769 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11770 fold_convert_loc (loc, type, arg1));
11772 /* Fold X & (X ^ Y) as X & ~Y. */
11773 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11774 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11776 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11777 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11778 fold_convert_loc (loc, type, arg0),
11779 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11781 /* Fold X & (Y ^ X) as ~Y & X. */
11782 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11783 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11784 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11786 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11787 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11788 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11789 fold_convert_loc (loc, type, arg0));
11792 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11793 multiple of 1 << CST. */
11794 if (TREE_CODE (arg1) == INTEGER_CST)
11796 wide_int cst1 = arg1;
11797 wide_int ncst1 = -cst1;
11798 if ((cst1 & ncst1) == ncst1
11799 && multiple_of_p (type, arg0,
11800 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11801 return fold_convert_loc (loc, type, arg0);
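	      /* Worked example (illustrative): (x * 8) & -8 folds to x * 8,
	         because the product is always a multiple of 8 and -8 keeps
	         every bit above the low three.  */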
11804 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11805 bits from CST2. */
11806 if (TREE_CODE (arg1) == INTEGER_CST
11807 && TREE_CODE (arg0) == MULT_EXPR
11808 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11810 wide_int warg1 = arg1;
11811 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11813 if (masked == 0)
11814 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11815 arg0, arg1);
11816 else if (masked != warg1)
11818 /* Avoid the transform if arg1 is a mask of some
11819 mode which allows further optimizations. */
11820 int pop = wi::popcount (warg1);
11821 if (!(pop >= BITS_PER_UNIT
11822 && exact_log2 (pop) != -1
11823 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11824 return fold_build2_loc (loc, code, type, op0,
11825 wide_int_to_tree (type, masked));
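	          /* Worked examples (illustrative): (x * 4) & 3 folds to 0,
	             since the product has at least two trailing zero bits;
	             and (x * 4) & 7 drops the known-zero bits to become
	             (x * 4) & 4.  */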
11829 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11830 ((A & N) + B) & M -> (A + B) & M
11831 Similarly if (N & M) == 0,
11832 ((A | N) + B) & M -> (A + B) & M
11833 and for - instead of + (or unary - instead of +)
11834 and/or ^ instead of |.
11835 If B is constant and (B & M) == 0, fold into A & M. */
11836 if (TREE_CODE (arg1) == INTEGER_CST)
11838 wide_int cst1 = arg1;
11839 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11840 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11841 && (TREE_CODE (arg0) == PLUS_EXPR
11842 || TREE_CODE (arg0) == MINUS_EXPR
11843 || TREE_CODE (arg0) == NEGATE_EXPR)
11844 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11845 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11847 tree pmop[2];
11848 int which = 0;
11849 wide_int cst0;
11851	              /* Now we know that arg0 is (C + D) or (C - D) or
11852	                 -C and that arg1 (M) is (1LL << cst) - 1.
11853	                 Store C into PMOP[0] and D into PMOP[1].  */
11854 pmop[0] = TREE_OPERAND (arg0, 0);
11855 pmop[1] = NULL;
11856 if (TREE_CODE (arg0) != NEGATE_EXPR)
11858 pmop[1] = TREE_OPERAND (arg0, 1);
11859 which = 1;
11862 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11863 which = -1;
11865 for (; which >= 0; which--)
11866 switch (TREE_CODE (pmop[which]))
11868 case BIT_AND_EXPR:
11869 case BIT_IOR_EXPR:
11870 case BIT_XOR_EXPR:
11871 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11872 != INTEGER_CST)
11873 break;
11874 cst0 = TREE_OPERAND (pmop[which], 1);
11875 cst0 &= cst1;
11876 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11878 if (cst0 != cst1)
11879 break;
11881 else if (cst0 != 0)
11882 break;
11883 /* If C or D is of the form (A & N) where
11884 (N & M) == M, or of the form (A | N) or
11885 (A ^ N) where (N & M) == 0, replace it with A. */
11886 pmop[which] = TREE_OPERAND (pmop[which], 0);
11887 break;
11888 case INTEGER_CST:
11889	                  /* If C or D is a constant N where (N & M) == 0, it
11890	                     can be omitted (assumed 0).  */
11891 if ((TREE_CODE (arg0) == PLUS_EXPR
11892 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11893 && (cst1 & pmop[which]) == 0)
11894 pmop[which] = NULL;
11895 break;
11896 default:
11897 break;
11900 /* Only build anything new if we optimized one or both arguments
11901 above. */
11902 if (pmop[0] != TREE_OPERAND (arg0, 0)
11903 || (TREE_CODE (arg0) != NEGATE_EXPR
11904 && pmop[1] != TREE_OPERAND (arg0, 1)))
11906 tree utype = TREE_TYPE (arg0);
11907 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11909 /* Perform the operations in a type that has defined
11910 overflow behavior. */
11911 utype = unsigned_type_for (TREE_TYPE (arg0));
11912 if (pmop[0] != NULL)
11913 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11914 if (pmop[1] != NULL)
11915 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11918 if (TREE_CODE (arg0) == NEGATE_EXPR)
11919 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11920 else if (TREE_CODE (arg0) == PLUS_EXPR)
11922 if (pmop[0] != NULL && pmop[1] != NULL)
11923 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11924 pmop[0], pmop[1]);
11925 else if (pmop[0] != NULL)
11926 tem = pmop[0];
11927 else if (pmop[1] != NULL)
11928 tem = pmop[1];
11929 else
11930 return build_int_cst (type, 0);
11932 else if (pmop[0] == NULL)
11933 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11934 else
11935 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11936 pmop[0], pmop[1]);
11937 /* TEM is now the new binary +, - or unary - replacement. */
11938 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11939 fold_convert_loc (loc, utype, arg1));
11940 return fold_convert_loc (loc, type, tem);
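	          /* Worked examples (illustrative): with M == 7,
	             ((a & 7) + b) & 7 folds to (a + b) & 7 because
	             (7 & 7) == 7, and ((a | 8) + b) & 7 folds the same way
	             because (8 & 7) == 0.  */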
11945 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11946 if (t1 != NULL_TREE)
11947 return t1;
11948 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11949 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11950 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11952 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11954 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11955 if (mask == -1)
11956 return
11957 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11960 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11962 This results in more efficient code for machines without a NOR
11963 instruction. Combine will canonicalize to the first form
11964 which will allow use of NOR instructions provided by the
11965 backend if they exist. */
11966 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11967 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11969 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11970 build2 (BIT_IOR_EXPR, type,
11971 fold_convert_loc (loc, type,
11972 TREE_OPERAND (arg0, 0)),
11973 fold_convert_loc (loc, type,
11974 TREE_OPERAND (arg1, 0))));
11977 /* If arg0 is derived from the address of an object or function, we may
11978 be able to fold this expression using the object or function's
11979 alignment. */
11980 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11982 unsigned HOST_WIDE_INT modulus, residue;
11983 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11985 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11986 integer_onep (arg1));
11988 /* This works because modulus is a power of 2. If this weren't the
11989 case, we'd have to replace it by its greatest power-of-2
11990 divisor: modulus & -modulus. */
11991 if (low < modulus)
11992 return build_int_cst (type, residue & low);
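	      /* Worked example (illustrative): if arg0 is the address of an
	         object with 8-byte alignment, then modulus == 8 and
	         residue == 0, so masking the address with 7 folds to the
	         constant 0.  */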
11995 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11996 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11997 if the new mask might be further optimized. */
11998 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11999 || TREE_CODE (arg0) == RSHIFT_EXPR)
12000 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12001 && TREE_CODE (arg1) == INTEGER_CST
12002 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12003 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12004 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12005 < TYPE_PRECISION (TREE_TYPE (arg0))))
12007 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12008 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12009 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12010 tree shift_type = TREE_TYPE (arg0);
12012 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12013 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12014 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12015 && TYPE_PRECISION (TREE_TYPE (arg0))
12016 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12018 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12019 tree arg00 = TREE_OPERAND (arg0, 0);
12020 /* See if more bits can be proven as zero because of
12021 zero extension. */
12022 if (TREE_CODE (arg00) == NOP_EXPR
12023 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12025 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12026 if (TYPE_PRECISION (inner_type)
12027 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12028 && TYPE_PRECISION (inner_type) < prec)
12030 prec = TYPE_PRECISION (inner_type);
12031 /* See if we can shorten the right shift. */
12032 if (shiftc < prec)
12033 shift_type = inner_type;
12034 /* Otherwise X >> C1 is all zeros, so we'll optimize
12035 it into (X, 0) later on by making sure zerobits
12036 is all ones. */
12039 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12040 if (shiftc < prec)
12042 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12043 zerobits <<= prec - shiftc;
12045	              /* For an arithmetic shift, if the sign bit could be set,
12046	                 zerobits may actually contain sign bits, so no transformation
12047	                 is possible unless MASK masks them all away.  In that case
12048	                 the shift needs to be converted into a logical shift.  */
12049 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12050 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12052 if ((mask & zerobits) == 0)
12053 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12054 else
12055 zerobits = 0;
12059 /* ((X << 16) & 0xff00) is (X, 0). */
12060 if ((mask & zerobits) == mask)
12061 return omit_one_operand_loc (loc, type,
12062 build_int_cst (type, 0), arg0);
12064 newmask = mask | zerobits;
12065 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12067 /* Only do the transformation if NEWMASK is some integer
12068 mode's mask. */
12069 for (prec = BITS_PER_UNIT;
12070 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12071 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12072 break;
12073 if (prec < HOST_BITS_PER_WIDE_INT
12074 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12076 tree newmaskt;
12078 if (shift_type != TREE_TYPE (arg0))
12080 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12081 fold_convert_loc (loc, shift_type,
12082 TREE_OPERAND (arg0, 0)),
12083 TREE_OPERAND (arg0, 1));
12084 tem = fold_convert_loc (loc, type, tem);
12086 else
12087 tem = op0;
12088 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12089 if (!tree_int_cst_equal (newmaskt, arg1))
12090 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12095 goto associate;
12097 case RDIV_EXPR:
12098 /* Don't touch a floating-point divide by zero unless the mode
12099 of the constant can represent infinity. */
12100 if (TREE_CODE (arg1) == REAL_CST
12101 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12102 && real_zerop (arg1))
12103 return NULL_TREE;
12105 /* Optimize A / A to 1.0 if we don't care about
12106 NaNs or Infinities. Skip the transformation
12107 for non-real operands. */
12108 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12109 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12110 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12111 && operand_equal_p (arg0, arg1, 0))
12113 tree r = build_real (TREE_TYPE (arg0), dconst1);
12115 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12118 /* The complex version of the above A / A optimization. */
12119 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12120 && operand_equal_p (arg0, arg1, 0))
12122 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12123 if (! HONOR_NANS (TYPE_MODE (elem_type))
12124 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12126 tree r = build_real (elem_type, dconst1);
12127 /* omit_two_operands will call fold_convert for us. */
12128 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12132 /* (-A) / (-B) -> A / B */
12133 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12134 return fold_build2_loc (loc, RDIV_EXPR, type,
12135 TREE_OPERAND (arg0, 0),
12136 negate_expr (arg1));
12137 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12138 return fold_build2_loc (loc, RDIV_EXPR, type,
12139 negate_expr (arg0),
12140 TREE_OPERAND (arg1, 0));
12142 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12143 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12144 && real_onep (arg1))
12145 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12147 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12148 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12149 && real_minus_onep (arg1))
12150 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12151 negate_expr (arg0)));
12153 /* If ARG1 is a constant, we can convert this to a multiply by the
12154 reciprocal. This does not have the same rounding properties,
12155 so only do this if -freciprocal-math. We can actually
12156 always safely do it if ARG1 is a power of two, but it's hard to
12157 tell if it is or not in a portable manner. */
12158 if (optimize
12159 && (TREE_CODE (arg1) == REAL_CST
12160 || (TREE_CODE (arg1) == COMPLEX_CST
12161 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12162 || (TREE_CODE (arg1) == VECTOR_CST
12163 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12165 if (flag_reciprocal_math
12166 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12167 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12168 /* Find the reciprocal if optimizing and the result is exact.
12169 TODO: Complex reciprocal not implemented. */
12170 if (TREE_CODE (arg1) != COMPLEX_CST)
12172 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12174 if (inverse)
12175 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
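	      /* Worked example (illustrative): x / 4.0 folds to x * 0.25;
	         here the reciprocal is exact because 4.0 is a power of two,
	         and exact_inverse catches that case even without
	         -freciprocal-math.  */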
12178 /* Convert A/B/C to A/(B*C). */
12179 if (flag_reciprocal_math
12180 && TREE_CODE (arg0) == RDIV_EXPR)
12181 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12182 fold_build2_loc (loc, MULT_EXPR, type,
12183 TREE_OPERAND (arg0, 1), arg1));
12185 /* Convert A/(B/C) to (A/B)*C. */
12186 if (flag_reciprocal_math
12187 && TREE_CODE (arg1) == RDIV_EXPR)
12188 return fold_build2_loc (loc, MULT_EXPR, type,
12189 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12190 TREE_OPERAND (arg1, 0)),
12191 TREE_OPERAND (arg1, 1));
12193 /* Convert C1/(X*C2) into (C1/C2)/X. */
12194 if (flag_reciprocal_math
12195 && TREE_CODE (arg1) == MULT_EXPR
12196 && TREE_CODE (arg0) == REAL_CST
12197 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12199 tree tem = const_binop (RDIV_EXPR, arg0,
12200 TREE_OPERAND (arg1, 1));
12201 if (tem)
12202 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12203 TREE_OPERAND (arg1, 0));
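	  /* Worked examples (illustrative): under -freciprocal-math,
	     (a / b) / c folds to a / (b * c), a / (b / c) folds to
	     (a / b) * c, and 6.0 / (x * 2.0) folds to 3.0 / x.  */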
12206 if (flag_unsafe_math_optimizations)
12208 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12209 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12211 /* Optimize sin(x)/cos(x) as tan(x). */
12212 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12213 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12214 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12215 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12216 CALL_EXPR_ARG (arg1, 0), 0))
12218 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12220 if (tanfn != NULL_TREE)
12221 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12224 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12225 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12226 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12227 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12228 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12229 CALL_EXPR_ARG (arg1, 0), 0))
12231 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12233 if (tanfn != NULL_TREE)
12235 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12236 CALL_EXPR_ARG (arg0, 0));
12237 return fold_build2_loc (loc, RDIV_EXPR, type,
12238 build_real (type, dconst1), tmp);
12242 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12243 NaNs or Infinities. */
12244 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12245 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12246 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12248 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12249 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12251 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12252 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12253 && operand_equal_p (arg00, arg01, 0))
12255 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12257 if (cosfn != NULL_TREE)
12258 return build_call_expr_loc (loc, cosfn, 1, arg00);
12262 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12263 NaNs or Infinities. */
12264 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12265 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12266 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12268 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12269 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12271 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12272 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12273 && operand_equal_p (arg00, arg01, 0))
12275 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12277 if (cosfn != NULL_TREE)
12279 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12280 return fold_build2_loc (loc, RDIV_EXPR, type,
12281 build_real (type, dconst1),
12282 tmp);
12287 /* Optimize pow(x,c)/x as pow(x,c-1). */
12288 if (fcode0 == BUILT_IN_POW
12289 || fcode0 == BUILT_IN_POWF
12290 || fcode0 == BUILT_IN_POWL)
12292 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12293 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12294 if (TREE_CODE (arg01) == REAL_CST
12295 && !TREE_OVERFLOW (arg01)
12296 && operand_equal_p (arg1, arg00, 0))
12298 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12299 REAL_VALUE_TYPE c;
12300 tree arg;
12302 c = TREE_REAL_CST (arg01);
12303 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12304 arg = build_real (type, c);
12305 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
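	  /* E.g. pow (x, 3.5) / x becomes pow (x, 2.5); only the
	     constant exponent is adjusted.  */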
12309 /* Optimize a/root(b/c) into a*root(c/b). */
12310 if (BUILTIN_ROOT_P (fcode1))
12312 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12314 if (TREE_CODE (rootarg) == RDIV_EXPR)
12316 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12317 tree b = TREE_OPERAND (rootarg, 0);
12318 tree c = TREE_OPERAND (rootarg, 1);
12320 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12322 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12323 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12327 /* Optimize x/expN(y) into x*expN(-y). */
12328 if (BUILTIN_EXPONENT_P (fcode1))
12330 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12331 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12332 arg1 = build_call_expr_loc (loc,
12333 expfn, 1,
12334 fold_convert_loc (loc, type, arg));
12335 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
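	  /* E.g. x / exp (y) becomes x * exp (-y), replacing the
	     division with a multiplication.  */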
12338 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12339 if (fcode1 == BUILT_IN_POW
12340 || fcode1 == BUILT_IN_POWF
12341 || fcode1 == BUILT_IN_POWL)
12343 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12344 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12345 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12346 tree neg11 = fold_convert_loc (loc, type,
12347 negate_expr (arg11));
12348 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12349 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12352 return NULL_TREE;
12354 case TRUNC_DIV_EXPR:
12355 /* Optimize (X & (-A)) / A where A is a power of 2,
 12356 to X >> log2(A). */
12357 if (TREE_CODE (arg0) == BIT_AND_EXPR
12358 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12359 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12361 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12362 arg1, TREE_OPERAND (arg0, 1));
12363 if (sum && integer_zerop (sum)) {
12364 tree pow2 = build_int_cst (integer_type_node,
12365 wi::exact_log2 (arg1));
12366 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12367 TREE_OPERAND (arg0, 0), pow2);
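	  /* Sketch: for signed x, (x & -16) / 16 becomes x >> 4; the
	     check that A plus the mask folds to zero confirms the mask
	     is exactly -A.  */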
12371 /* Fall through */
12373 case FLOOR_DIV_EXPR:
12374 /* Simplify A / (B << N) where A and B are positive and B is
12375 a power of 2, to A >> (N + log2(B)). */
12376 strict_overflow_p = false;
12377 if (TREE_CODE (arg1) == LSHIFT_EXPR
12378 && (TYPE_UNSIGNED (type)
12379 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12381 tree sval = TREE_OPERAND (arg1, 0);
12382 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12384 tree sh_cnt = TREE_OPERAND (arg1, 1);
12385 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12386 wi::exact_log2 (sval));
12388 if (strict_overflow_p)
12389 fold_overflow_warning (("assuming signed overflow does not "
12390 "occur when simplifying A / (B << N)"),
12391 WARN_STRICT_OVERFLOW_MISC);
12393 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12394 sh_cnt, pow2);
12395 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12396 fold_convert_loc (loc, type, arg0), sh_cnt);
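	  /* E.g. for unsigned x, x / (2u << n) becomes x >> (n + 1),
	     since log2 (2) == 1.  */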
12400 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12401 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12402 if (INTEGRAL_TYPE_P (type)
12403 && TYPE_UNSIGNED (type)
12404 && code == FLOOR_DIV_EXPR)
12405 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12407 /* Fall through */
12409 case ROUND_DIV_EXPR:
12410 case CEIL_DIV_EXPR:
12411 case EXACT_DIV_EXPR:
12412 if (integer_onep (arg1))
12413 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12414 if (integer_zerop (arg1))
12415 return NULL_TREE;
12416 /* X / -1 is -X. */
12417 if (!TYPE_UNSIGNED (type)
12418 && TREE_CODE (arg1) == INTEGER_CST
12419 && wi::eq_p (arg1, -1))
12420 return fold_convert_loc (loc, type, negate_expr (arg0));
12422 /* Convert -A / -B to A / B when the type is signed and overflow is
12423 undefined. */
12424 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12425 && TREE_CODE (arg0) == NEGATE_EXPR
12426 && negate_expr_p (arg1))
12428 if (INTEGRAL_TYPE_P (type))
12429 fold_overflow_warning (("assuming signed overflow does not occur "
12430 "when distributing negation across "
12431 "division"),
12432 WARN_STRICT_OVERFLOW_MISC);
12433 return fold_build2_loc (loc, code, type,
12434 fold_convert_loc (loc, type,
12435 TREE_OPERAND (arg0, 0)),
12436 fold_convert_loc (loc, type,
12437 negate_expr (arg1)));
12439 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12440 && TREE_CODE (arg1) == NEGATE_EXPR
12441 && negate_expr_p (arg0))
12443 if (INTEGRAL_TYPE_P (type))
12444 fold_overflow_warning (("assuming signed overflow does not occur "
12445 "when distributing negation across "
12446 "division"),
12447 WARN_STRICT_OVERFLOW_MISC);
12448 return fold_build2_loc (loc, code, type,
12449 fold_convert_loc (loc, type,
12450 negate_expr (arg0)),
12451 fold_convert_loc (loc, type,
12452 TREE_OPERAND (arg1, 0)));
12455 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12456 operation, EXACT_DIV_EXPR.
12458 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
 12459 At one time others generated faster code, but it's not clear whether
 12460 they still do after the last round of changes to the DIV code in expmed.c. */
12461 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12462 && multiple_of_p (type, arg0, arg1))
12463 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12465 strict_overflow_p = false;
12466 if (TREE_CODE (arg1) == INTEGER_CST
12467 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12468 &strict_overflow_p)))
12470 if (strict_overflow_p)
12471 fold_overflow_warning (("assuming signed overflow does not occur "
12472 "when simplifying division"),
12473 WARN_STRICT_OVERFLOW_MISC);
12474 return fold_convert_loc (loc, type, tem);
12477 return NULL_TREE;
12479 case CEIL_MOD_EXPR:
12480 case FLOOR_MOD_EXPR:
12481 case ROUND_MOD_EXPR:
12482 case TRUNC_MOD_EXPR:
12483 /* X % 1 is always zero, but be sure to preserve any side
12484 effects in X. */
12485 if (integer_onep (arg1))
12486 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
 12488 /* For X % 0, return X % 0 unchanged so that we can emit the
 12489 proper warnings and errors. */
12490 if (integer_zerop (arg1))
12491 return NULL_TREE;
12493 /* 0 % X is always zero, but be sure to preserve any side
12494 effects in X. Place this after checking for X == 0. */
12495 if (integer_zerop (arg0))
12496 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12498 /* X % -1 is zero. */
12499 if (!TYPE_UNSIGNED (type)
12500 && TREE_CODE (arg1) == INTEGER_CST
12501 && wi::eq_p (arg1, -1))
12502 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12504 /* X % -C is the same as X % C. */
12505 if (code == TRUNC_MOD_EXPR
12506 && TYPE_SIGN (type) == SIGNED
12507 && TREE_CODE (arg1) == INTEGER_CST
12508 && !TREE_OVERFLOW (arg1)
12509 && wi::neg_p (arg1)
12510 && !TYPE_OVERFLOW_TRAPS (type)
12511 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12512 && !sign_bit_p (arg1, arg1))
12513 return fold_build2_loc (loc, code, type,
12514 fold_convert_loc (loc, type, arg0),
12515 fold_convert_loc (loc, type,
12516 negate_expr (arg1)));
12518 /* X % -Y is the same as X % Y. */
12519 if (code == TRUNC_MOD_EXPR
12520 && !TYPE_UNSIGNED (type)
12521 && TREE_CODE (arg1) == NEGATE_EXPR
12522 && !TYPE_OVERFLOW_TRAPS (type))
12523 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12524 fold_convert_loc (loc, type,
12525 TREE_OPERAND (arg1, 0)));
12527 strict_overflow_p = false;
12528 if (TREE_CODE (arg1) == INTEGER_CST
12529 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12530 &strict_overflow_p)))
12532 if (strict_overflow_p)
12533 fold_overflow_warning (("assuming signed overflow does not occur "
12534 "when simplifying modulus"),
12535 WARN_STRICT_OVERFLOW_MISC);
12536 return fold_convert_loc (loc, type, tem);
12539 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12540 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12541 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12542 && (TYPE_UNSIGNED (type)
12543 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12545 tree c = arg1;
12546 /* Also optimize A % (C << N) where C is a power of 2,
12547 to A & ((C << N) - 1). */
12548 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12549 c = TREE_OPERAND (arg1, 0);
12551 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12553 tree mask
12554 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12555 build_int_cst (TREE_TYPE (arg1), 1));
12556 if (strict_overflow_p)
12557 fold_overflow_warning (("assuming signed overflow does not "
12558 "occur when simplifying "
12559 "X % (power of two)"),
12560 WARN_STRICT_OVERFLOW_MISC);
12561 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12562 fold_convert_loc (loc, type, arg0),
12563 fold_convert_loc (loc, type, mask));
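	  /* E.g. for unsigned x, x % 16 becomes x & 15, and
	     x % (4u << n) becomes x & ((4u << n) - 1).  */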
12567 return NULL_TREE;
12569 case LROTATE_EXPR:
12570 case RROTATE_EXPR:
12571 if (integer_all_onesp (arg0))
12572 return omit_one_operand_loc (loc, type, arg0, arg1);
12573 goto shift;
12575 case RSHIFT_EXPR:
12576 /* Optimize -1 >> x for arithmetic right shifts. */
12577 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12578 && tree_expr_nonnegative_p (arg1))
12579 return omit_one_operand_loc (loc, type, arg0, arg1);
12580 /* ... fall through ... */
12582 case LSHIFT_EXPR:
12583 shift:
12584 if (integer_zerop (arg1))
12585 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12586 if (integer_zerop (arg0))
12587 return omit_one_operand_loc (loc, type, arg0, arg1);
12589 /* Prefer vector1 << scalar to vector1 << vector2
12590 if vector2 is uniform. */
12591 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12592 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12593 return fold_build2_loc (loc, code, type, op0, tem);
 12595 /* Since a negative shift count is not well-defined,
12596 don't try to compute it in the compiler. */
12597 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12598 return NULL_TREE;
12600 prec = element_precision (type);
12602 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12603 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12604 && tree_to_uhwi (arg1) < prec
12605 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12606 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12608 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12609 + tree_to_uhwi (arg1));
12611 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12612 being well defined. */
12613 if (low >= prec)
12615 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12616 low = low % prec;
12617 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12618 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12619 TREE_OPERAND (arg0, 0));
12620 else
12621 low = prec - 1;
12624 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12625 build_int_cst (TREE_TYPE (arg1), low));
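      /* E.g. (x << 3) << 5 becomes x << 8.  If the combined count
	 reaches the precision, say (x << 20) << 15 on a 32-bit type,
	 the left shift folds to zero; rotates instead wrap modulo the
	 precision.  */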
12628 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12629 into x & ((unsigned)-1 >> c) for unsigned types. */
12630 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12631 || (TYPE_UNSIGNED (type)
12632 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12633 && tree_fits_uhwi_p (arg1)
12634 && tree_to_uhwi (arg1) < prec
12635 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12636 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12638 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12639 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12640 tree lshift;
12641 tree arg00;
12643 if (low0 == low1)
12645 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12647 lshift = build_minus_one_cst (type);
12648 lshift = const_binop (code, lshift, arg1);
12650 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
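      /* E.g. for 32-bit unsigned x, (x >> 4) << 4 becomes
	 x & 0xfffffff0, i.e. x with its low four bits cleared.  */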
12654 /* Rewrite an LROTATE_EXPR by a constant into an
12655 RROTATE_EXPR by a new constant. */
12656 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12658 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12659 tem = const_binop (MINUS_EXPR, tem, arg1);
12660 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
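      /* E.g. on a 32-bit type, a rotate left by 8 is rewritten as a
	 rotate right by 24.  */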
12663 /* If we have a rotate of a bit operation with the rotate count and
12664 the second operand of the bit operation both constant,
12665 permute the two operations. */
12666 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12667 && (TREE_CODE (arg0) == BIT_AND_EXPR
12668 || TREE_CODE (arg0) == BIT_IOR_EXPR
12669 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12670 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12671 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12672 fold_build2_loc (loc, code, type,
12673 TREE_OPERAND (arg0, 0), arg1),
12674 fold_build2_loc (loc, code, type,
12675 TREE_OPERAND (arg0, 1), arg1));
 12677 /* Two consecutive rotates adding up to some integer
12678 multiple of the precision of the type can be ignored. */
12679 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12680 && TREE_CODE (arg0) == RROTATE_EXPR
12681 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12682 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12683 prec) == 0)
12684 return TREE_OPERAND (arg0, 0);
12686 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12687 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12688 if the latter can be further optimized. */
12689 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12690 && TREE_CODE (arg0) == BIT_AND_EXPR
12691 && TREE_CODE (arg1) == INTEGER_CST
12692 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12694 tree mask = fold_build2_loc (loc, code, type,
12695 fold_convert_loc (loc, type,
12696 TREE_OPERAND (arg0, 1)),
12697 arg1);
12698 tree shift = fold_build2_loc (loc, code, type,
12699 fold_convert_loc (loc, type,
12700 TREE_OPERAND (arg0, 0)),
12701 arg1);
12702 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12703 if (tem)
12704 return tem;
12707 return NULL_TREE;
12709 case MIN_EXPR:
12710 if (operand_equal_p (arg0, arg1, 0))
12711 return omit_one_operand_loc (loc, type, arg0, arg1);
12712 if (INTEGRAL_TYPE_P (type)
12713 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12714 return omit_one_operand_loc (loc, type, arg1, arg0);
12715 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12716 if (tem)
12717 return tem;
12718 goto associate;
12720 case MAX_EXPR:
12721 if (operand_equal_p (arg0, arg1, 0))
12722 return omit_one_operand_loc (loc, type, arg0, arg1);
12723 if (INTEGRAL_TYPE_P (type)
12724 && TYPE_MAX_VALUE (type)
12725 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12726 return omit_one_operand_loc (loc, type, arg1, arg0);
12727 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12728 if (tem)
12729 return tem;
12730 goto associate;
12732 case TRUTH_ANDIF_EXPR:
12733 /* Note that the operands of this must be ints
12734 and their values must be 0 or 1.
12735 ("true" is a fixed value perhaps depending on the language.) */
12736 /* If first arg is constant zero, return it. */
12737 if (integer_zerop (arg0))
12738 return fold_convert_loc (loc, type, arg0);
12739 case TRUTH_AND_EXPR:
12740 /* If either arg is constant true, drop it. */
12741 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12742 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12743 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12744 /* Preserve sequence points. */
12745 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12746 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12747 /* If second arg is constant zero, result is zero, but first arg
12748 must be evaluated. */
12749 if (integer_zerop (arg1))
12750 return omit_one_operand_loc (loc, type, arg1, arg0);
12751 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12752 case will be handled here. */
12753 if (integer_zerop (arg0))
12754 return omit_one_operand_loc (loc, type, arg0, arg1);
12756 /* !X && X is always false. */
12757 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12758 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12759 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12760 /* X && !X is always false. */
12761 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12762 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12763 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12765 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12766 means A >= Y && A != MAX, but in this case we know that
12767 A < X <= MAX. */
12769 if (!TREE_SIDE_EFFECTS (arg0)
12770 && !TREE_SIDE_EFFECTS (arg1))
12772 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12773 if (tem && !operand_equal_p (tem, arg0, 0))
12774 return fold_build2_loc (loc, code, type, tem, arg1);
12776 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12777 if (tem && !operand_equal_p (tem, arg1, 0))
12778 return fold_build2_loc (loc, code, type, arg0, tem);
12781 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12782 != NULL_TREE)
12783 return tem;
12785 return NULL_TREE;
12787 case TRUTH_ORIF_EXPR:
12788 /* Note that the operands of this must be ints
12789 and their values must be 0 or true.
12790 ("true" is a fixed value perhaps depending on the language.) */
12791 /* If first arg is constant true, return it. */
12792 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12793 return fold_convert_loc (loc, type, arg0);
12794 case TRUTH_OR_EXPR:
12795 /* If either arg is constant zero, drop it. */
12796 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12797 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12798 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12799 /* Preserve sequence points. */
12800 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12801 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12802 /* If second arg is constant true, result is true, but we must
12803 evaluate first arg. */
12804 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12805 return omit_one_operand_loc (loc, type, arg1, arg0);
12806 /* Likewise for first arg, but note this only occurs here for
12807 TRUTH_OR_EXPR. */
12808 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12809 return omit_one_operand_loc (loc, type, arg0, arg1);
12811 /* !X || X is always true. */
12812 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12813 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12814 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12815 /* X || !X is always true. */
12816 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12817 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12818 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12820 /* (X && !Y) || (!X && Y) is X ^ Y */
12821 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12822 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12824 tree a0, a1, l0, l1, n0, n1;
12826 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12827 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12829 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12830 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12832 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12833 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12835 if ((operand_equal_p (n0, a0, 0)
12836 && operand_equal_p (n1, a1, 0))
12837 || (operand_equal_p (n0, a1, 0)
12838 && operand_equal_p (n1, a0, 0)))
12839 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12842 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12843 != NULL_TREE)
12844 return tem;
12846 return NULL_TREE;
12848 case TRUTH_XOR_EXPR:
12849 /* If the second arg is constant zero, drop it. */
12850 if (integer_zerop (arg1))
12851 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12852 /* If the second arg is constant true, this is a logical inversion. */
12853 if (integer_onep (arg1))
12855 tem = invert_truthvalue_loc (loc, arg0);
12856 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12858 /* Identical arguments cancel to zero. */
12859 if (operand_equal_p (arg0, arg1, 0))
12860 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12862 /* !X ^ X is always true. */
12863 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12864 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12865 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12867 /* X ^ !X is always true. */
12868 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12869 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12870 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12872 return NULL_TREE;
12874 case EQ_EXPR:
12875 case NE_EXPR:
12876 STRIP_NOPS (arg0);
12877 STRIP_NOPS (arg1);
12879 tem = fold_comparison (loc, code, type, op0, op1);
12880 if (tem != NULL_TREE)
12881 return tem;
12883 /* bool_var != 0 becomes bool_var. */
12884 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12885 && code == NE_EXPR)
12886 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12888 /* bool_var == 1 becomes bool_var. */
12889 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12890 && code == EQ_EXPR)
12891 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12893 /* bool_var != 1 becomes !bool_var. */
12894 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12895 && code == NE_EXPR)
12896 return fold_convert_loc (loc, type,
12897 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12898 TREE_TYPE (arg0), arg0));
12900 /* bool_var == 0 becomes !bool_var. */
12901 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12902 && code == EQ_EXPR)
12903 return fold_convert_loc (loc, type,
12904 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12905 TREE_TYPE (arg0), arg0));
12907 /* !exp != 0 becomes !exp */
12908 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12909 && code == NE_EXPR)
12910 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12912 /* If this is an equality comparison of the address of two non-weak,
12913 unaliased symbols neither of which are extern (since we do not
12914 have access to attributes for externs), then we know the result. */
12915 if (TREE_CODE (arg0) == ADDR_EXPR
12916 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12917 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12918 && ! lookup_attribute ("alias",
12919 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12920 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12921 && TREE_CODE (arg1) == ADDR_EXPR
12922 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12923 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12924 && ! lookup_attribute ("alias",
12925 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12926 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12928 /* We know that we're looking at the address of two
12929 non-weak, unaliased, static _DECL nodes.
12931 It is both wasteful and incorrect to call operand_equal_p
12932 to compare the two ADDR_EXPR nodes. It is wasteful in that
12933 all we need to do is test pointer equality for the arguments
12934 to the two ADDR_EXPR nodes. It is incorrect to use
12935 operand_equal_p as that function is NOT equivalent to a
12936 C equality test. It can in fact return false for two
12937 objects which would test as equal using the C equality
12938 operator. */
12939 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12940 return constant_boolean_node (equal
12941 ? code == EQ_EXPR : code != EQ_EXPR,
12942 type);
12945 /* Similarly for a NEGATE_EXPR. */
12946 if (TREE_CODE (arg0) == NEGATE_EXPR
12947 && TREE_CODE (arg1) == INTEGER_CST
12948 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12949 arg1)))
12950 && TREE_CODE (tem) == INTEGER_CST
12951 && !TREE_OVERFLOW (tem))
12952 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12954 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12955 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12956 && TREE_CODE (arg1) == INTEGER_CST
12957 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12958 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12959 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12960 fold_convert_loc (loc,
12961 TREE_TYPE (arg0),
12962 arg1),
12963 TREE_OPERAND (arg0, 1)));
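      /* E.g. (x ^ 5) == 3 becomes x == 6, since 5 ^ 3 == 6.  */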
12965 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12966 if ((TREE_CODE (arg0) == PLUS_EXPR
12967 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12968 || TREE_CODE (arg0) == MINUS_EXPR)
12969 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12970 0)),
12971 arg1, 0)
12972 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12973 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12975 tree val = TREE_OPERAND (arg0, 1);
12976 return omit_two_operands_loc (loc, type,
12977 fold_build2_loc (loc, code, type,
12978 val,
12979 build_int_cst (TREE_TYPE (val),
12980 0)),
12981 TREE_OPERAND (arg0, 0), arg1);
12984 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12985 if (TREE_CODE (arg0) == MINUS_EXPR
12986 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12987 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12988 1)),
12989 arg1, 0)
12990 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12992 return omit_two_operands_loc (loc, type,
12993 code == NE_EXPR
12994 ? boolean_true_node : boolean_false_node,
12995 TREE_OPERAND (arg0, 1), arg1);
12998 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12999 if (TREE_CODE (arg0) == ABS_EXPR
13000 && (integer_zerop (arg1) || real_zerop (arg1)))
13001 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13003 /* If this is an EQ or NE comparison with zero and ARG0 is
13004 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13005 two operations, but the latter can be done in one less insn
13006 on machines that have only two-operand insns or on which a
13007 constant cannot be the first operand. */
13008 if (TREE_CODE (arg0) == BIT_AND_EXPR
13009 && integer_zerop (arg1))
13011 tree arg00 = TREE_OPERAND (arg0, 0);
13012 tree arg01 = TREE_OPERAND (arg0, 1);
13013 if (TREE_CODE (arg00) == LSHIFT_EXPR
13014 && integer_onep (TREE_OPERAND (arg00, 0)))
13016 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13017 arg01, TREE_OPERAND (arg00, 1));
13018 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13019 build_int_cst (TREE_TYPE (arg0), 1));
13020 return fold_build2_loc (loc, code, type,
13021 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13022 arg1);
13024 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13025 && integer_onep (TREE_OPERAND (arg01, 0)))
13027 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13028 arg00, TREE_OPERAND (arg01, 1));
13029 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13030 build_int_cst (TREE_TYPE (arg0), 1));
13031 return fold_build2_loc (loc, code, type,
13032 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13033 arg1);
13037 /* If this is an NE or EQ comparison of zero against the result of a
13038 signed MOD operation whose second operand is a power of 2, make
13039 the MOD operation unsigned since it is simpler and equivalent. */
13040 if (integer_zerop (arg1)
13041 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13042 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13043 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13044 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13045 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13046 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13048 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13049 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13050 fold_convert_loc (loc, newtype,
13051 TREE_OPERAND (arg0, 0)),
13052 fold_convert_loc (loc, newtype,
13053 TREE_OPERAND (arg0, 1)));
13055 return fold_build2_loc (loc, code, type, newmod,
13056 fold_convert_loc (loc, newtype, arg1));
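	  /* E.g. for signed x, (x % 16) == 0 becomes
	     ((unsigned) x % 16u) == 0; divisibility by a power of two
	     depends only on the low bits, so the sign does not matter.  */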
13059 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13060 C1 is a valid shift constant, and C2 is a power of two, i.e.
13061 a single bit. */
13062 if (TREE_CODE (arg0) == BIT_AND_EXPR
13063 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13064 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13065 == INTEGER_CST
13066 && integer_pow2p (TREE_OPERAND (arg0, 1))
13067 && integer_zerop (arg1))
13069 tree itype = TREE_TYPE (arg0);
13070 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13071 prec = TYPE_PRECISION (itype);
13073 /* Check for a valid shift count. */
13074 if (wi::ltu_p (arg001, prec))
13076 tree arg01 = TREE_OPERAND (arg0, 1);
13077 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13078 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13079 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13080 can be rewritten as (X & (C2 << C1)) != 0. */
13081 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13083 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13084 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13085 return fold_build2_loc (loc, code, type, tem,
13086 fold_convert_loc (loc, itype, arg1));
13088 /* Otherwise, for signed (arithmetic) shifts,
13089 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13090 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13091 else if (!TYPE_UNSIGNED (itype))
13092 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13093 arg000, build_int_cst (itype, 0));
 13094 /* Otherwise, for unsigned (logical) shifts,
13095 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13096 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13097 else
13098 return omit_one_operand_loc (loc, type,
13099 code == EQ_EXPR ? integer_one_node
13100 : integer_zero_node,
13101 arg000);
13105 /* If we have (A & C) == C where C is a power of 2, convert this into
13106 (A & C) != 0. Similarly for NE_EXPR. */
13107 if (TREE_CODE (arg0) == BIT_AND_EXPR
13108 && integer_pow2p (TREE_OPERAND (arg0, 1))
13109 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13110 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13111 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13112 integer_zero_node));
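      /* E.g. (flags & 8) == 8 becomes (flags & 8) != 0; for a single
	 bit the two tests are equivalent.  */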
13114 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13115 bit, then fold the expression into A < 0 or A >= 0. */
13116 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13117 if (tem)
13118 return tem;
13120 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13121 Similarly for NE_EXPR. */
13122 if (TREE_CODE (arg0) == BIT_AND_EXPR
13123 && TREE_CODE (arg1) == INTEGER_CST
13124 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13126 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13127 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13128 TREE_OPERAND (arg0, 1));
13129 tree dandnotc
13130 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13131 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13132 notc);
13133 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13134 if (integer_nonzerop (dandnotc))
13135 return omit_one_operand_loc (loc, type, rslt, arg0);
13138 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13139 Similarly for NE_EXPR. */
13140 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13141 && TREE_CODE (arg1) == INTEGER_CST
13142 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13144 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13145 tree candnotd
13146 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13147 TREE_OPERAND (arg0, 1),
13148 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13149 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13150 if (integer_nonzerop (candnotd))
13151 return omit_one_operand_loc (loc, type, rslt, arg0);
13154 /* If this is a comparison of a field, we may be able to simplify it. */
13155 if ((TREE_CODE (arg0) == COMPONENT_REF
13156 || TREE_CODE (arg0) == BIT_FIELD_REF)
13157 /* Handle the constant case even without -O
13158 to make sure the warnings are given. */
13159 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13161 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13162 if (t1)
13163 return t1;
13166 /* Optimize comparisons of strlen vs zero to a compare of the
13167 first character of the string vs zero. To wit,
13168 strlen(ptr) == 0 => *ptr == 0
13169 strlen(ptr) != 0 => *ptr != 0
13170 Other cases should reduce to one of these two (or a constant)
13171 due to the return value of strlen being unsigned. */
13172 if (TREE_CODE (arg0) == CALL_EXPR
13173 && integer_zerop (arg1))
13175 tree fndecl = get_callee_fndecl (arg0);
13177 if (fndecl
13178 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13179 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13180 && call_expr_nargs (arg0) == 1
13181 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13183 tree iref = build_fold_indirect_ref_loc (loc,
13184 CALL_EXPR_ARG (arg0, 0));
13185 return fold_build2_loc (loc, code, type, iref,
13186 build_int_cst (TREE_TYPE (iref), 0));
13190 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13191 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13192 if (TREE_CODE (arg0) == RSHIFT_EXPR
13193 && integer_zerop (arg1)
13194 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13196 tree arg00 = TREE_OPERAND (arg0, 0);
13197 tree arg01 = TREE_OPERAND (arg0, 1);
13198 tree itype = TREE_TYPE (arg00);
13199 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
13201 if (TYPE_UNSIGNED (itype))
13203 itype = signed_type_for (itype);
13204 arg00 = fold_convert_loc (loc, itype, arg00);
13206 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13207 type, arg00, build_zero_cst (itype));
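	  /* E.g. for 32-bit int x, (x >> 31) != 0 becomes x < 0, and
	     (x >> 31) == 0 becomes x >= 0.  */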
13211 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13212 if (integer_zerop (arg1)
13213 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13214 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13215 TREE_OPERAND (arg0, 1));
13217 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13218 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13219 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13220 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13221 build_zero_cst (TREE_TYPE (arg0)));
13222 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13223 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13224 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13225 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13226 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13227 build_zero_cst (TREE_TYPE (arg0)));
13229 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13230 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13231 && TREE_CODE (arg1) == INTEGER_CST
13232 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13233 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13234 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13235 TREE_OPERAND (arg0, 1), arg1));
13237 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13238 (X & C) == 0 when C is a single bit. */
13239 if (TREE_CODE (arg0) == BIT_AND_EXPR
13240 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13241 && integer_zerop (arg1)
13242 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13244 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13245 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13246 TREE_OPERAND (arg0, 1));
13247 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13248 type, tem,
13249 fold_convert_loc (loc, TREE_TYPE (arg0),
13250 arg1));
13253 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13254 constant C is a power of two, i.e. a single bit. */
13255 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13256 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13257 && integer_zerop (arg1)
13258 && integer_pow2p (TREE_OPERAND (arg0, 1))
13259 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13260 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13262 tree arg00 = TREE_OPERAND (arg0, 0);
13263 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13264 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13267 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 13268 when C is a power of two, i.e. a single bit. */
13269 if (TREE_CODE (arg0) == BIT_AND_EXPR
13270 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13271 && integer_zerop (arg1)
13272 && integer_pow2p (TREE_OPERAND (arg0, 1))
13273 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13274 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13276 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13277 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13278 arg000, TREE_OPERAND (arg0, 1));
13279 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13280 tem, build_int_cst (TREE_TYPE (tem), 0));
13283 if (integer_zerop (arg1)
13284 && tree_expr_nonzero_p (arg0))
 13286 tree res = constant_boolean_node (code == NE_EXPR, type);
13287 return omit_one_operand_loc (loc, type, res, arg0);
13290 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13291 if (TREE_CODE (arg0) == NEGATE_EXPR
13292 && TREE_CODE (arg1) == NEGATE_EXPR)
13293 return fold_build2_loc (loc, code, type,
13294 TREE_OPERAND (arg0, 0),
13295 fold_convert_loc (loc, TREE_TYPE (arg0),
13296 TREE_OPERAND (arg1, 0)));
 13298 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13299 if (TREE_CODE (arg0) == BIT_AND_EXPR
13300 && TREE_CODE (arg1) == BIT_AND_EXPR)
13302 tree arg00 = TREE_OPERAND (arg0, 0);
13303 tree arg01 = TREE_OPERAND (arg0, 1);
13304 tree arg10 = TREE_OPERAND (arg1, 0);
13305 tree arg11 = TREE_OPERAND (arg1, 1);
13306 tree itype = TREE_TYPE (arg0);
13308 if (operand_equal_p (arg01, arg11, 0))
13309 return fold_build2_loc (loc, code, type,
13310 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13311 fold_build2_loc (loc,
13312 BIT_XOR_EXPR, itype,
13313 arg00, arg10),
13314 arg01),
13315 build_zero_cst (itype));
13317 if (operand_equal_p (arg01, arg10, 0))
13318 return fold_build2_loc (loc, code, type,
13319 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13320 fold_build2_loc (loc,
13321 BIT_XOR_EXPR, itype,
13322 arg00, arg11),
13323 arg01),
13324 build_zero_cst (itype));
13326 if (operand_equal_p (arg00, arg11, 0))
13327 return fold_build2_loc (loc, code, type,
13328 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13329 fold_build2_loc (loc,
13330 BIT_XOR_EXPR, itype,
13331 arg01, arg10),
13332 arg00),
13333 build_zero_cst (itype));
13335 if (operand_equal_p (arg00, arg10, 0))
13336 return fold_build2_loc (loc, code, type,
13337 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13338 fold_build2_loc (loc,
13339 BIT_XOR_EXPR, itype,
13340 arg01, arg11),
13341 arg00),
13342 build_zero_cst (itype));
13345 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13346 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13348 tree arg00 = TREE_OPERAND (arg0, 0);
13349 tree arg01 = TREE_OPERAND (arg0, 1);
13350 tree arg10 = TREE_OPERAND (arg1, 0);
13351 tree arg11 = TREE_OPERAND (arg1, 1);
13352 tree itype = TREE_TYPE (arg0);
13354 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13355 operand_equal_p guarantees no side-effects so we don't need
13356 to use omit_one_operand on Z. */
13357 if (operand_equal_p (arg01, arg11, 0))
13358 return fold_build2_loc (loc, code, type, arg00,
13359 fold_convert_loc (loc, TREE_TYPE (arg00),
13360 arg10));
13361 if (operand_equal_p (arg01, arg10, 0))
13362 return fold_build2_loc (loc, code, type, arg00,
13363 fold_convert_loc (loc, TREE_TYPE (arg00),
13364 arg11));
13365 if (operand_equal_p (arg00, arg11, 0))
13366 return fold_build2_loc (loc, code, type, arg01,
13367 fold_convert_loc (loc, TREE_TYPE (arg01),
13368 arg10));
13369 if (operand_equal_p (arg00, arg10, 0))
13370 return fold_build2_loc (loc, code, type, arg01,
13371 fold_convert_loc (loc, TREE_TYPE (arg01),
13372 arg11));
13374 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13375 if (TREE_CODE (arg01) == INTEGER_CST
13376 && TREE_CODE (arg11) == INTEGER_CST)
13378 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13379 fold_convert_loc (loc, itype, arg11));
13380 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13381 return fold_build2_loc (loc, code, type, tem,
13382 fold_convert_loc (loc, itype, arg10));
13386 /* Attempt to simplify equality/inequality comparisons of complex
13387 values. Only lower the comparison if the result is known or
13388 can be simplified to a single scalar comparison. */
13389 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13390 || TREE_CODE (arg0) == COMPLEX_CST)
13391 && (TREE_CODE (arg1) == COMPLEX_EXPR
13392 || TREE_CODE (arg1) == COMPLEX_CST))
13394 tree real0, imag0, real1, imag1;
13395 tree rcond, icond;
13397 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13399 real0 = TREE_OPERAND (arg0, 0);
13400 imag0 = TREE_OPERAND (arg0, 1);
13402 else
13404 real0 = TREE_REALPART (arg0);
13405 imag0 = TREE_IMAGPART (arg0);
13408 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13410 real1 = TREE_OPERAND (arg1, 0);
13411 imag1 = TREE_OPERAND (arg1, 1);
13413 else
13415 real1 = TREE_REALPART (arg1);
13416 imag1 = TREE_IMAGPART (arg1);
13419 rcond = fold_binary_loc (loc, code, type, real0, real1);
13420 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13422 if (integer_zerop (rcond))
13424 if (code == EQ_EXPR)
13425 return omit_two_operands_loc (loc, type, boolean_false_node,
13426 imag0, imag1);
13427 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13429 else
13431 if (code == NE_EXPR)
13432 return omit_two_operands_loc (loc, type, boolean_true_node,
13433 imag0, imag1);
13434 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13438 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13439 if (icond && TREE_CODE (icond) == INTEGER_CST)
13441 if (integer_zerop (icond))
13443 if (code == EQ_EXPR)
13444 return omit_two_operands_loc (loc, type, boolean_false_node,
13445 real0, real1);
13446 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13448 else
13450 if (code == NE_EXPR)
13451 return omit_two_operands_loc (loc, type, boolean_true_node,
13452 real0, real1);
13453 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13458 return NULL_TREE;
13460 case LT_EXPR:
13461 case GT_EXPR:
13462 case LE_EXPR:
13463 case GE_EXPR:
13464 tem = fold_comparison (loc, code, type, op0, op1);
13465 if (tem != NULL_TREE)
13466 return tem;
13468 /* Transform comparisons of the form X +- C CMP X. */
13469 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13470 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13471 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13472 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13473 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13474 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13476 tree arg01 = TREE_OPERAND (arg0, 1);
13477 enum tree_code code0 = TREE_CODE (arg0);
13478 int is_positive;
13480 if (TREE_CODE (arg01) == REAL_CST)
13481 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13482 else
13483 is_positive = tree_int_cst_sgn (arg01);
13485 /* (X - c) > X becomes false. */
13486 if (code == GT_EXPR
13487 && ((code0 == MINUS_EXPR && is_positive >= 0)
13488 || (code0 == PLUS_EXPR && is_positive <= 0)))
13490 if (TREE_CODE (arg01) == INTEGER_CST
13491 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13492 fold_overflow_warning (("assuming signed overflow does not "
13493 "occur when assuming that (X - c) > X "
13494 "is always false"),
13495 WARN_STRICT_OVERFLOW_ALL);
13496 return constant_boolean_node (0, type);
13499 /* Likewise (X + c) < X becomes false. */
13500 if (code == LT_EXPR
13501 && ((code0 == PLUS_EXPR && is_positive >= 0)
13502 || (code0 == MINUS_EXPR && is_positive <= 0)))
13504 if (TREE_CODE (arg01) == INTEGER_CST
13505 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13506 fold_overflow_warning (("assuming signed overflow does not "
13507 "occur when assuming that "
13508 "(X + c) < X is always false"),
13509 WARN_STRICT_OVERFLOW_ALL);
13510 return constant_boolean_node (0, type);
13513 /* Convert (X - c) <= X to true. */
13514 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13515 && code == LE_EXPR
13516 && ((code0 == MINUS_EXPR && is_positive >= 0)
13517 || (code0 == PLUS_EXPR && is_positive <= 0)))
13519 if (TREE_CODE (arg01) == INTEGER_CST
13520 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13521 fold_overflow_warning (("assuming signed overflow does not "
13522 "occur when assuming that "
13523 "(X - c) <= X is always true"),
13524 WARN_STRICT_OVERFLOW_ALL);
13525 return constant_boolean_node (1, type);
13528 /* Convert (X + c) >= X to true. */
13529 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13530 && code == GE_EXPR
13531 && ((code0 == PLUS_EXPR && is_positive >= 0)
13532 || (code0 == MINUS_EXPR && is_positive <= 0)))
13534 if (TREE_CODE (arg01) == INTEGER_CST
13535 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13536 fold_overflow_warning (("assuming signed overflow does not "
13537 "occur when assuming that "
13538 "(X + c) >= X is always true"),
13539 WARN_STRICT_OVERFLOW_ALL);
13540 return constant_boolean_node (1, type);
13543 if (TREE_CODE (arg01) == INTEGER_CST)
13545 /* Convert X + c > X and X - c < X to true for integers. */
13546 if (code == GT_EXPR
13547 && ((code0 == PLUS_EXPR && is_positive > 0)
13548 || (code0 == MINUS_EXPR && is_positive < 0)))
13550 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13551 fold_overflow_warning (("assuming signed overflow does "
13552 "not occur when assuming that "
13553 "(X + c) > X is always true"),
13554 WARN_STRICT_OVERFLOW_ALL);
13555 return constant_boolean_node (1, type);
13558 if (code == LT_EXPR
13559 && ((code0 == MINUS_EXPR && is_positive > 0)
13560 || (code0 == PLUS_EXPR && is_positive < 0)))
13562 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13563 fold_overflow_warning (("assuming signed overflow does "
13564 "not occur when assuming that "
13565 "(X - c) < X is always true"),
13566 WARN_STRICT_OVERFLOW_ALL);
13567 return constant_boolean_node (1, type);
13570 /* Convert X + c <= X and X - c >= X to false for integers. */
13571 if (code == LE_EXPR
13572 && ((code0 == PLUS_EXPR && is_positive > 0)
13573 || (code0 == MINUS_EXPR && is_positive < 0)))
13575 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13576 fold_overflow_warning (("assuming signed overflow does "
13577 "not occur when assuming that "
13578 "(X + c) <= X is always false"),
13579 WARN_STRICT_OVERFLOW_ALL);
13580 return constant_boolean_node (0, type);
13583 if (code == GE_EXPR
13584 && ((code0 == MINUS_EXPR && is_positive > 0)
13585 || (code0 == PLUS_EXPR && is_positive < 0)))
13587 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13588 fold_overflow_warning (("assuming signed overflow does "
13589 "not occur when assuming that "
13590 "(X - c) >= X is always false"),
13591 WARN_STRICT_OVERFLOW_ALL);
13592 return constant_boolean_node (0, type);
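	  /* E.g. for signed int x with undefined overflow, x + 1 > x
	     folds to true and x + 1 <= x folds to false, and this may
	     emit a -Wstrict-overflow warning that signed overflow is
	     assumed not to occur.  */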
13597 /* Comparisons with the highest or lowest possible integer of
13598 the specified precision will have known values. */
13600 tree arg1_type = TREE_TYPE (arg1);
13601 unsigned int prec = TYPE_PRECISION (arg1_type);
13603 if (TREE_CODE (arg1) == INTEGER_CST
13604 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13606 wide_int max = wi::max_value (arg1_type);
13607 wide_int signed_max = wi::max_value (prec, SIGNED);
13608 wide_int min = wi::min_value (arg1_type);
13610 if (wi::eq_p (arg1, max))
13611 switch (code)
13613 case GT_EXPR:
13614 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13616 case GE_EXPR:
13617 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13619 case LE_EXPR:
13620 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13622 case LT_EXPR:
13623 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13625 /* The GE_EXPR and LT_EXPR cases above are not normally
13626 reached because of previous transformations. */
13628 default:
13629 break;
13631 else if (wi::eq_p (arg1, max - 1))
13632 switch (code)
13634 case GT_EXPR:
13635 arg1 = const_binop (PLUS_EXPR, arg1,
13636 build_int_cst (TREE_TYPE (arg1), 1));
13637 return fold_build2_loc (loc, EQ_EXPR, type,
13638 fold_convert_loc (loc,
13639 TREE_TYPE (arg1), arg0),
13640 arg1);
13641 case LE_EXPR:
13642 arg1 = const_binop (PLUS_EXPR, arg1,
13643 build_int_cst (TREE_TYPE (arg1), 1));
13644 return fold_build2_loc (loc, NE_EXPR, type,
13645 fold_convert_loc (loc, TREE_TYPE (arg1),
13646 arg0),
13647 arg1);
13648 default:
13649 break;
13651 else if (wi::eq_p (arg1, min))
13652 switch (code)
13654 case LT_EXPR:
13655 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13657 case LE_EXPR:
13658 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13660 case GE_EXPR:
13661 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13663 case GT_EXPR:
13664 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13666 default:
13667 break;
13669 else if (wi::eq_p (arg1, min + 1))
13670 switch (code)
13672 case GE_EXPR:
13673 arg1 = const_binop (MINUS_EXPR, arg1,
13674 build_int_cst (TREE_TYPE (arg1), 1));
13675 return fold_build2_loc (loc, NE_EXPR, type,
13676 fold_convert_loc (loc,
13677 TREE_TYPE (arg1), arg0),
13678 arg1);
13679 case LT_EXPR:
13680 arg1 = const_binop (MINUS_EXPR, arg1,
13681 build_int_cst (TREE_TYPE (arg1), 1));
13682 return fold_build2_loc (loc, EQ_EXPR, type,
13683 fold_convert_loc (loc, TREE_TYPE (arg1),
13684 arg0),
13685 arg1);
13686 default:
13687 break;
13690 else if (wi::eq_p (arg1, signed_max)
13691 && TYPE_UNSIGNED (arg1_type)
13692 /* We will flip the signedness of the comparison operator
13693 associated with the mode of arg1, so the sign bit is
13694 specified by this mode. Check that arg1 is the signed
13695 max associated with this sign bit. */
13696 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13697 /* signed_type does not work on pointer types. */
13698 && INTEGRAL_TYPE_P (arg1_type))
13700 /* The following case also applies to X < signed_max+1
 13701 and X >= signed_max+1 because of previous transformations. */
13702 if (code == LE_EXPR || code == GT_EXPR)
13704 tree st = signed_type_for (arg1_type);
13705 return fold_build2_loc (loc,
13706 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13707 type, fold_convert_loc (loc, st, arg0),
13708 build_int_cst (st, 0));
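	  /* E.g. for 32-bit unsigned x, x > 0x7fffffff becomes
	     (int) x < 0: comparing against the signed maximum is just
	     a test of the sign bit.  */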
13714 /* If we are comparing an ABS_EXPR with a constant, we can
13715 convert all the cases into explicit comparisons, but they may
13716 well not be faster than doing the ABS and one comparison.
13717 But ABS (X) <= C is a range comparison, which becomes a subtraction
13718 and a comparison, and is probably faster. */
13719 if (code == LE_EXPR
13720 && TREE_CODE (arg1) == INTEGER_CST
13721 && TREE_CODE (arg0) == ABS_EXPR
13722 && ! TREE_SIDE_EFFECTS (arg0)
13723 && (0 != (tem = negate_expr (arg1)))
13724 && TREE_CODE (tem) == INTEGER_CST
13725 && !TREE_OVERFLOW (tem))
13726 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13727 build2 (GE_EXPR, type,
13728 TREE_OPERAND (arg0, 0), tem),
13729 build2 (LE_EXPR, type,
13730 TREE_OPERAND (arg0, 0), arg1));
13732 /* Convert ABS_EXPR<x> >= 0 to true. */
13733 strict_overflow_p = false;
13734 if (code == GE_EXPR
13735 && (integer_zerop (arg1)
13736 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13737 && real_zerop (arg1)))
13738 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13740 if (strict_overflow_p)
13741 fold_overflow_warning (("assuming signed overflow does not occur "
13742 "when simplifying comparison of "
13743 "absolute value and zero"),
13744 WARN_STRICT_OVERFLOW_CONDITIONAL);
13745 return omit_one_operand_loc (loc, type,
13746 constant_boolean_node (true, type),
13747 arg0);
13750 /* Convert ABS_EXPR<x> < 0 to false. */
13751 strict_overflow_p = false;
13752 if (code == LT_EXPR
13753 && (integer_zerop (arg1) || real_zerop (arg1))
13754 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13756 if (strict_overflow_p)
13757 fold_overflow_warning (("assuming signed overflow does not occur "
13758 "when simplifying comparison of "
13759 "absolute value and zero"),
13760 WARN_STRICT_OVERFLOW_CONDITIONAL);
13761 return omit_one_operand_loc (loc, type,
13762 constant_boolean_node (false, type),
13763 arg0);
13766 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13767 and similarly for >= into !=. */
13768 if ((code == LT_EXPR || code == GE_EXPR)
13769 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13770 && TREE_CODE (arg1) == LSHIFT_EXPR
13771 && integer_onep (TREE_OPERAND (arg1, 0)))
13772 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13773 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13774 TREE_OPERAND (arg1, 1)),
13775 build_zero_cst (TREE_TYPE (arg0)));
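      /* E.g. for unsigned x, x < (1u << y) becomes (x >> y) == 0, and
	 x >= (1u << y) becomes (x >> y) != 0.  */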
 13777 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
 13778 otherwise Y might be >= # of bits in X's type and thus e.g.
 13779 (unsigned char) (1 << Y) for Y == 15 might be 0.
 13780 If the cast is widening, then 1 << Y should have unsigned type,
 13781 otherwise if Y is the number of bits in the signed shift type minus 1,
 13782 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
 13783 Y == 31 might be 0xffffffff80000000. */
13784 if ((code == LT_EXPR || code == GE_EXPR)
13785 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13786 && CONVERT_EXPR_P (arg1)
13787 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13788 && (TYPE_PRECISION (TREE_TYPE (arg1))
13789 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13790 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13791 || (TYPE_PRECISION (TREE_TYPE (arg1))
13792 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13793 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13795 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13796 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13797 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13798 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13799 build_zero_cst (TREE_TYPE (arg0)));
13802 return NULL_TREE;
13804 case UNORDERED_EXPR:
13805 case ORDERED_EXPR:
13806 case UNLT_EXPR:
13807 case UNLE_EXPR:
13808 case UNGT_EXPR:
13809 case UNGE_EXPR:
13810 case UNEQ_EXPR:
13811 case LTGT_EXPR:
13812 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13814 t1 = fold_relational_const (code, type, arg0, arg1);
13815 if (t1 != NULL_TREE)
13816 return t1;
13819 /* If the first operand is NaN, the result is constant. */
13820 if (TREE_CODE (arg0) == REAL_CST
13821 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13822 && (code != LTGT_EXPR || ! flag_trapping_math))
13824 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13825 ? integer_zero_node
13826 : integer_one_node;
13827 return omit_one_operand_loc (loc, type, t1, arg1);
13830 /* If the second operand is NaN, the result is constant. */
13831 if (TREE_CODE (arg1) == REAL_CST
13832 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13833 && (code != LTGT_EXPR || ! flag_trapping_math))
13835 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13836 ? integer_zero_node
13837 : integer_one_node;
13838 return omit_one_operand_loc (loc, type, t1, arg0);
13841 /* Simplify unordered comparison of something with itself. */
13842 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13843 && operand_equal_p (arg0, arg1, 0))
13844 return constant_boolean_node (1, type);
13846 if (code == LTGT_EXPR
13847 && !flag_trapping_math
13848 && operand_equal_p (arg0, arg1, 0))
13849 return constant_boolean_node (0, type);
13851 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13853 tree targ0 = strip_float_extensions (arg0);
13854 tree targ1 = strip_float_extensions (arg1);
13855 tree newtype = TREE_TYPE (targ0);
13857 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13858 newtype = TREE_TYPE (targ1);
13860 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13861 return fold_build2_loc (loc, code, type,
13862 fold_convert_loc (loc, newtype, targ0),
13863 fold_convert_loc (loc, newtype, targ1));
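/* E.g. for floats f1 and f2, (double) f1 < (double) f2 is folded to
   f1 < f2: widening a float to double is exact and order-preserving
   (NaNs stay NaNs), so the narrower and usually cheaper comparison
   gives the same result.  */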
13866 return NULL_TREE;
13868 case COMPOUND_EXPR:
13869 /* When pedantic, a compound expression can be neither an lvalue
13870 nor an integer constant expression. */
13871 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13872 return NULL_TREE;
13873 /* Don't let (0, 0) be a null pointer constant. */
13874 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13875 : fold_convert_loc (loc, type, arg1);
13876 return pedantic_non_lvalue_loc (loc, tem);
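/* Rationale for the NOP_EXPR above: in C, folding (x, 0) to a bare
   literal 0 would create a null pointer constant where the original
   comma expression was not one, so the zero is kept behind a
   conversion.  */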
13878 case COMPLEX_EXPR:
13879 if ((TREE_CODE (arg0) == REAL_CST
13880 && TREE_CODE (arg1) == REAL_CST)
13881 || (TREE_CODE (arg0) == INTEGER_CST
13882 && TREE_CODE (arg1) == INTEGER_CST))
13883 return build_complex (type, arg0, arg1);
13884 if (TREE_CODE (arg0) == REALPART_EXPR
13885 && TREE_CODE (arg1) == IMAGPART_EXPR
13886 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13887 && operand_equal_p (TREE_OPERAND (arg0, 0),
13888 TREE_OPERAND (arg1, 0), 0))
13889 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13890 TREE_OPERAND (arg1, 0));
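/* E.g. COMPLEX_EXPR <REALPART_EXPR <z>, IMAGPART_EXPR <z>> merely
   reassembles z from its own parts, so when the types agree it folds
   back to z itself.  */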
13891 return NULL_TREE;
13893 case ASSERT_EXPR:
13894 /* An ASSERT_EXPR should never be passed to fold_binary. */
13895 gcc_unreachable ();
13897 case VEC_PACK_TRUNC_EXPR:
13898 case VEC_PACK_FIX_TRUNC_EXPR:
13900 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13901 tree *elts;
13903 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13904 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13905 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13906 return NULL_TREE;
13908 elts = XALLOCAVEC (tree, nelts);
13909 if (!vec_cst_ctor_to_array (arg0, elts)
13910 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13911 return NULL_TREE;
13913 for (i = 0; i < nelts; i++)
13915 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13916 ? NOP_EXPR : FIX_TRUNC_EXPR,
13917 TREE_TYPE (type), elts[i]);
13918 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13919 return NULL_TREE;
13922 return build_vector (type, elts);
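/* A small worked example (illustrative element values): packing the
   two V2SI constants { 1, 2 } and { 3, 4 } with VEC_PACK_TRUNC_EXPR
   into a V4HI yields { 1, 2, 3, 4 }, each element truncated to the
   narrower element type of TYPE.  */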
13925 case VEC_WIDEN_MULT_LO_EXPR:
13926 case VEC_WIDEN_MULT_HI_EXPR:
13927 case VEC_WIDEN_MULT_EVEN_EXPR:
13928 case VEC_WIDEN_MULT_ODD_EXPR:
13930 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13931 unsigned int out, ofs, scale;
13932 tree *elts;
13934 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13935 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13936 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13937 return NULL_TREE;
13939 elts = XALLOCAVEC (tree, nelts * 4);
13940 if (!vec_cst_ctor_to_array (arg0, elts)
13941 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13942 return NULL_TREE;
13944 if (code == VEC_WIDEN_MULT_LO_EXPR)
13945 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13946 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13947 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13948 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13949 scale = 1, ofs = 0;
13950 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13951 scale = 1, ofs = 1;
13953 for (out = 0; out < nelts; out++)
13955 unsigned int in1 = (out << scale) + ofs;
13956 unsigned int in2 = in1 + nelts * 2;
13957 tree t1, t2;
13959 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13960 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13962 if (t1 == NULL_TREE || t2 == NULL_TREE)
13963 return NULL_TREE;
13964 elts[out] = const_binop (MULT_EXPR, t1, t2);
13965 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13966 return NULL_TREE;
13969 return build_vector (type, elts);
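/* Spelling out the index bookkeeping: for _LO/_HI, output element
   OUT reads input element OUT plus an endian-dependent offset of 0
   or NELTS (scale 0); for _EVEN/_ODD it reads element 2*OUT or
   2*OUT + 1 (scale 1).  E.g. VEC_WIDEN_MULT_EVEN_EXPR of two V8HI
   constants into a V4SI multiplies element pairs 0, 2, 4 and 6.  */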
13972 default:
13973 return NULL_TREE;
13974 } /* switch (code) */
13977 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13978 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13979 of GOTO_EXPR. */
13981 static tree
13982 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13984 switch (TREE_CODE (*tp))
13986 case LABEL_EXPR:
13987 return *tp;
13989 case GOTO_EXPR:
13990 *walk_subtrees = 0;
13992 /* ... fall through ... */
13994 default:
13995 return NULL_TREE;
13999 /* Return whether the sub-tree ST contains a label which is accessible from
14000 outside the sub-tree. */
14002 static bool
14003 contains_label_p (tree st)
14005 return
14006 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
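/* This predicate is used by the COND_EXPR folding below: a dead arm
   that still contains a LABEL_EXPR cannot simply be discarded, since
   a goto elsewhere may jump to that label.  GOTO_EXPR operands are
   skipped because a label used only as a jump target is not a label
   definition.  */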
14009 /* Fold a ternary expression of code CODE and type TYPE with operands
14010 OP0, OP1, and OP2. Return the folded expression if folding is
14011 successful. Otherwise, return NULL_TREE. */
14013 tree
14014 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14015 tree op0, tree op1, tree op2)
14017 tree tem;
14018 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14019 enum tree_code_class kind = TREE_CODE_CLASS (code);
14021 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14022 && TREE_CODE_LENGTH (code) == 3);
14024 /* Strip any conversions that don't change the mode. This is safe
14025 for every expression, except for a comparison expression because
14026 its signedness is derived from its operands. So, in the latter
14027 case, only strip conversions that don't change the signedness.
14029 Note that this is done as an internal manipulation within the
14030 constant folder, in order to find the simplest representation of
14031 the arguments so that their form can be studied. In any case,
14032 the appropriate type conversions should be put back in the tree
14033 that will get out of the constant folder. */
14034 if (op0)
14036 arg0 = op0;
14037 STRIP_NOPS (arg0);
14040 if (op1)
14042 arg1 = op1;
14043 STRIP_NOPS (arg1);
14046 if (op2)
14048 arg2 = op2;
14049 STRIP_NOPS (arg2);
14052 switch (code)
14054 case COMPONENT_REF:
14055 if (TREE_CODE (arg0) == CONSTRUCTOR
14056 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14058 unsigned HOST_WIDE_INT idx;
14059 tree field, value;
14060 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14061 if (field == arg1)
14062 return value;
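/* E.g. given struct S { int a; int b; }, a COMPONENT_REF selecting b
   from the CONSTRUCTOR { .a = 1, .b = 2 } folds directly to 2 by
   matching the FIELD_DECL in the element list.  */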
14064 return NULL_TREE;
14066 case COND_EXPR:
14067 case VEC_COND_EXPR:
14068 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14069 so all simple results must be passed through pedantic_non_lvalue. */
14070 if (TREE_CODE (arg0) == INTEGER_CST)
14072 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14073 tem = integer_zerop (arg0) ? op2 : op1;
14074 /* Only optimize constant conditions when the selected branch
14075 has the same type as the COND_EXPR. This avoids optimizing
14076 away "c ? x : throw", where the throw has a void type.
14077 Avoid discarding an operand that contains a label. */
14078 if ((!TREE_SIDE_EFFECTS (unused_op)
14079 || !contains_label_p (unused_op))
14080 && (! VOID_TYPE_P (TREE_TYPE (tem))
14081 || VOID_TYPE_P (type)))
14082 return pedantic_non_lvalue_loc (loc, tem);
14083 return NULL_TREE;
14085 else if (TREE_CODE (arg0) == VECTOR_CST)
14087 if (integer_all_onesp (arg0))
14088 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14089 if (integer_zerop (arg0))
14090 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14092 if ((TREE_CODE (arg1) == VECTOR_CST
14093 || TREE_CODE (arg1) == CONSTRUCTOR)
14094 && (TREE_CODE (arg2) == VECTOR_CST
14095 || TREE_CODE (arg2) == CONSTRUCTOR))
14097 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14098 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14099 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14100 for (i = 0; i < nelts; i++)
14102 tree val = VECTOR_CST_ELT (arg0, i);
14103 if (integer_all_onesp (val))
14104 sel[i] = i;
14105 else if (integer_zerop (val))
14106 sel[i] = nelts + i;
14107 else /* Currently unreachable. */
14108 return NULL_TREE;
14110 tree t = fold_vec_perm (type, arg1, arg2, sel);
14111 if (t != NULL_TREE)
14112 return t;
14116 if (operand_equal_p (arg1, op2, 0))
14117 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14119 /* If we have A op B ? A : C, we may be able to convert this to a
14120 simpler expression, depending on the operation and the values
14121 of B and C. Signed zeros prevent all of these transformations,
14122 for reasons given above each one.
14124 Also try swapping the arguments and inverting the conditional. */
14125 if (COMPARISON_CLASS_P (arg0)
14126 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14127 arg1, TREE_OPERAND (arg0, 1))
14128 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14130 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14131 if (tem)
14132 return tem;
14135 if (COMPARISON_CLASS_P (arg0)
14136 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14137 op2,
14138 TREE_OPERAND (arg0, 1))
14139 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14141 location_t loc0 = expr_location_or (arg0, loc);
14142 tem = fold_invert_truthvalue (loc0, arg0);
14143 if (tem && COMPARISON_CLASS_P (tem))
14145 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14146 if (tem)
14147 return tem;
14151 /* If the second operand is simpler than the third, swap them
14152 since that produces better jump optimization results. */
14153 if (truth_value_p (TREE_CODE (arg0))
14154 && tree_swap_operands_p (op1, op2, false))
14156 location_t loc0 = expr_location_or (arg0, loc);
14157 /* See if this can be inverted. If it can't, possibly because
14158 it was a floating-point inequality comparison, don't do
14159 anything. */
14160 tem = fold_invert_truthvalue (loc0, arg0);
14161 if (tem)
14162 return fold_build3_loc (loc, code, type, tem, op2, op1);
14165 /* Convert A ? 1 : 0 to simply A. */
14166 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14167 : (integer_onep (op1)
14168 && !VECTOR_TYPE_P (type)))
14169 && integer_zerop (op2)
14170 /* If we try to convert OP0 to our type, the
14171 call to fold will try to move the conversion inside
14172 a COND, which will recurse. In that case, the COND_EXPR
14173 is probably the best choice, so leave it alone. */
14174 && type == TREE_TYPE (arg0))
14175 return pedantic_non_lvalue_loc (loc, arg0);
14177 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14178 over COND_EXPR in cases such as floating point comparisons. */
14179 if (integer_zerop (op1)
14180 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14181 : (integer_onep (op2)
14182 && !VECTOR_TYPE_P (type)))
14183 && truth_value_p (TREE_CODE (arg0)))
14184 return pedantic_non_lvalue_loc (loc,
14185 fold_convert_loc (loc, type,
14186 invert_truthvalue_loc (loc,
14187 arg0)));
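/* Together with the previous transformation: for a truth-valued A,
   A ? 1 : 0 folds to A itself and A ? 0 : 1 folds to !A; e.g. with
   integer operands, (x == y) ? 0 : 1 becomes x != y.  */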
14189 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14190 if (TREE_CODE (arg0) == LT_EXPR
14191 && integer_zerop (TREE_OPERAND (arg0, 1))
14192 && integer_zerop (op2)
14193 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14195 /* sign_bit_p looks through both zero and sign extensions,
14196 but for this optimization only sign extensions are
14197 usable. */
14198 tree tem2 = TREE_OPERAND (arg0, 0);
14199 while (tem != tem2)
14201 if (TREE_CODE (tem2) != NOP_EXPR
14202 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14204 tem = NULL_TREE;
14205 break;
14207 tem2 = TREE_OPERAND (tem2, 0);
14209 /* sign_bit_p only checks ARG1 bits within A's precision.
14210 If <sign bit of A> has wider type than A, bits outside
14211 of A's precision in <sign bit of A> need to be checked.
14212 If they are all 0, this optimization needs to be done
14213 in unsigned A's type; if they are all 1, in signed A's type;
14214 otherwise this can't be done. */
14215 if (tem
14216 && TYPE_PRECISION (TREE_TYPE (tem))
14217 < TYPE_PRECISION (TREE_TYPE (arg1))
14218 && TYPE_PRECISION (TREE_TYPE (tem))
14219 < TYPE_PRECISION (type))
14221 int inner_width, outer_width;
14222 tree tem_type;
14224 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14225 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14226 if (outer_width > TYPE_PRECISION (type))
14227 outer_width = TYPE_PRECISION (type);
14229 wide_int mask = wi::shifted_mask
14230 (inner_width, outer_width - inner_width, false,
14231 TYPE_PRECISION (TREE_TYPE (arg1)));
14233 wide_int common = mask & arg1;
14234 if (common == mask)
14236 tem_type = signed_type_for (TREE_TYPE (tem));
14237 tem = fold_convert_loc (loc, tem_type, tem);
14239 else if (common == 0)
14241 tem_type = unsigned_type_for (TREE_TYPE (tem));
14242 tem = fold_convert_loc (loc, tem_type, tem);
14244 else
14245 tem = NULL;
14248 if (tem)
14249 return
14250 fold_convert_loc (loc, type,
14251 fold_build2_loc (loc, BIT_AND_EXPR,
14252 TREE_TYPE (tem), tem,
14253 fold_convert_loc (loc,
14254 TREE_TYPE (tem),
14255 arg1)));
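/* Worked example for the sign-bit fold: for 32-bit int a,
   a < 0 ? 0x80000000 : 0 is just a & 0x80000000, a single AND that
   extracts the sign bit.  The checks above guard the case where the
   sign-bit constant is wider than A: the extra high bits must be all
   zeros (then fold in unsigned A's type) or all ones (signed).  */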
14258 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14259 already handled above. */
14260 if (TREE_CODE (arg0) == BIT_AND_EXPR
14261 && integer_onep (TREE_OPERAND (arg0, 1))
14262 && integer_zerop (op2)
14263 && integer_pow2p (arg1))
14265 tree tem = TREE_OPERAND (arg0, 0);
14266 STRIP_NOPS (tem);
14267 if (TREE_CODE (tem) == RSHIFT_EXPR
14268 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14269 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14270 tree_to_uhwi (TREE_OPERAND (tem, 1)))
14271 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14272 TREE_OPERAND (tem, 0), arg1);
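/* E.g. ((a >> 4) & 1) ? 16 : 0 folds to a & 16: the condition tests
   bit 4 of A, and the selected value is exactly that bit shifted
   back to its original position, so the shift and the conditional
   cancel out.  */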
14275 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14276 is probably obsolete because the first operand should be a
14277 truth value (that's why we have the two cases above), but let's
14278 leave it in until we can confirm this for all front-ends. */
14279 if (integer_zerop (op2)
14280 && TREE_CODE (arg0) == NE_EXPR
14281 && integer_zerop (TREE_OPERAND (arg0, 1))
14282 && integer_pow2p (arg1)
14283 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14284 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14285 arg1, OEP_ONLY_CONST))
14286 return pedantic_non_lvalue_loc (loc,
14287 fold_convert_loc (loc, type,
14288 TREE_OPERAND (arg0, 0)));
14290 /* Disable the transformations below for vectors, since
14291 fold_binary_op_with_conditional_arg may undo them immediately,
14292 yielding an infinite loop. */
14293 if (code == VEC_COND_EXPR)
14294 return NULL_TREE;
14296 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14297 if (integer_zerop (op2)
14298 && truth_value_p (TREE_CODE (arg0))
14299 && truth_value_p (TREE_CODE (arg1))
14300 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14301 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14302 : TRUTH_ANDIF_EXPR,
14303 type, fold_convert_loc (loc, type, arg0), arg1);
14305 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14306 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14307 && truth_value_p (TREE_CODE (arg0))
14308 && truth_value_p (TREE_CODE (arg1))
14309 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14311 location_t loc0 = expr_location_or (arg0, loc);
14312 /* Only perform transformation if ARG0 is easily inverted. */
14313 tem = fold_invert_truthvalue (loc0, arg0);
14314 if (tem)
14315 return fold_build2_loc (loc, code == VEC_COND_EXPR
14316 ? BIT_IOR_EXPR
14317 : TRUTH_ORIF_EXPR,
14318 type, fold_convert_loc (loc, type, tem),
14319 arg1);
14322 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14323 if (integer_zerop (arg1)
14324 && truth_value_p (TREE_CODE (arg0))
14325 && truth_value_p (TREE_CODE (op2))
14326 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14328 location_t loc0 = expr_location_or (arg0, loc);
14329 /* Only perform transformation if ARG0 is easily inverted. */
14330 tem = fold_invert_truthvalue (loc0, arg0);
14331 if (tem)
14332 return fold_build2_loc (loc, code == VEC_COND_EXPR
14333 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14334 type, fold_convert_loc (loc, type, tem),
14335 op2);
14338 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14339 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14340 && truth_value_p (TREE_CODE (arg0))
14341 && truth_value_p (TREE_CODE (op2))
14342 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14343 return fold_build2_loc (loc, code == VEC_COND_EXPR
14344 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14345 type, fold_convert_loc (loc, type, arg0), op2);
14347 return NULL_TREE;
14349 case CALL_EXPR:
14350 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14351 of fold_ternary on them. */
14352 gcc_unreachable ();
14354 case BIT_FIELD_REF:
14355 if ((TREE_CODE (arg0) == VECTOR_CST
14356 || (TREE_CODE (arg0) == CONSTRUCTOR
14357 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14358 && (type == TREE_TYPE (TREE_TYPE (arg0))
14359 || (TREE_CODE (type) == VECTOR_TYPE
14360 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14362 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14363 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14364 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14365 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14367 if (n != 0
14368 && (idx % width) == 0
14369 && (n % width) == 0
14370 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14372 idx = idx / width;
14373 n = n / width;
14375 if (TREE_CODE (arg0) == VECTOR_CST)
14377 if (n == 1)
14378 return VECTOR_CST_ELT (arg0, idx);
14380 tree *vals = XALLOCAVEC (tree, n);
14381 for (unsigned i = 0; i < n; ++i)
14382 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14383 return build_vector (type, vals);
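/* E.g. BIT_FIELD_REF <v, 32, 64> on the V4SI constant { 0, 1, 2, 3 }
   has element width 32, so idx = 64/32 = 2 and n = 1, and the
   reference folds to element 2.  */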
14386 /* Constructor elements can be subvectors. */
14387 unsigned HOST_WIDE_INT k = 1;
14388 if (CONSTRUCTOR_NELTS (arg0) != 0)
14390 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14391 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14392 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14395 /* We keep an exact subset of the constructor elements. */
14396 if ((idx % k) == 0 && (n % k) == 0)
14398 if (CONSTRUCTOR_NELTS (arg0) == 0)
14399 return build_constructor (type, NULL);
14400 idx /= k;
14401 n /= k;
14402 if (n == 1)
14404 if (idx < CONSTRUCTOR_NELTS (arg0))
14405 return CONSTRUCTOR_ELT (arg0, idx)->value;
14406 return build_zero_cst (type);
14409 vec<constructor_elt, va_gc> *vals;
14410 vec_alloc (vals, n);
14411 for (unsigned i = 0;
14412 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14413 ++i)
14414 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14415 CONSTRUCTOR_ELT
14416 (arg0, idx + i)->value);
14417 return build_constructor (type, vals);
14419 /* The bitfield references a single constructor element. */
14420 else if (idx + n <= (idx / k + 1) * k)
14422 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14423 return build_zero_cst (type);
14424 else if (n == k)
14425 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14426 else
14427 return fold_build3_loc (loc, code, type,
14428 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14429 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14434 /* A bit-field-ref that references the full argument can be stripped. */
14435 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14436 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14437 && integer_zerop (op2))
14438 return fold_convert_loc (loc, type, arg0);
14440 /* On constants we can use native encode/interpret to constant
14441 fold (nearly) all BIT_FIELD_REFs. */
14442 if (CONSTANT_CLASS_P (arg0)
14443 && can_native_interpret_type_p (type)
14444 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14445 /* This limitation should not be necessary; we just need to
14446 round this up to mode size. */
14447 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14448 /* Need bit-shifting of the buffer to relax the following. */
14449 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14451 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14452 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14453 unsigned HOST_WIDE_INT clen;
14454 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14455 /* ??? We cannot tell native_encode_expr to start at
14456 some random byte only. So limit ourselves to a reasonable amount
14457 of work. */
14458 if (clen <= 4096)
14460 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14461 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14462 if (len > 0
14463 && len * BITS_PER_UNIT >= bitpos + bitsize)
14465 tree v = native_interpret_expr (type,
14466 b + bitpos / BITS_PER_UNIT,
14467 bitsize / BITS_PER_UNIT);
14468 if (v)
14469 return v;
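/* In other words, the constant is serialized into a byte buffer in
   its target representation (native_encode_expr) and the addressed
   byte range is reinterpreted as TYPE (native_interpret_expr); e.g.
   an 8-bit BIT_FIELD_REF at bit offset 8 reads the second byte of
   that target-endian buffer.  */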
14474 return NULL_TREE;
14476 case FMA_EXPR:
14477 /* For integers we can decompose the FMA if possible. */
14478 if (TREE_CODE (arg0) == INTEGER_CST
14479 && TREE_CODE (arg1) == INTEGER_CST)
14480 return fold_build2_loc (loc, PLUS_EXPR, type,
14481 const_binop (MULT_EXPR, arg0, arg1), arg2);
14482 if (integer_zerop (arg2))
14483 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
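/* E.g. FMA_EXPR <3, 4, c> folds to c + 12, and FMA_EXPR <a, b, 0>
   degenerates to a * b; fold_fma handles the remaining constant
   combinations.  */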
14485 return fold_fma (loc, type, arg0, arg1, arg2);
14487 case VEC_PERM_EXPR:
14488 if (TREE_CODE (arg2) == VECTOR_CST)
14490 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14491 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14492 bool need_mask_canon = false;
14493 bool all_in_vec0 = true;
14494 bool all_in_vec1 = true;
14495 bool maybe_identity = true;
14496 bool single_arg = (op0 == op1);
14497 bool changed = false;
14499 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14500 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14501 for (i = 0; i < nelts; i++)
14503 tree val = VECTOR_CST_ELT (arg2, i);
14504 if (TREE_CODE (val) != INTEGER_CST)
14505 return NULL_TREE;
14507 /* Make sure that the perm value is in an acceptable
14508 range. */
14509 wide_int t = val;
14510 if (wi::gtu_p (t, mask))
14512 need_mask_canon = true;
14513 sel[i] = t.to_uhwi () & mask;
14515 else
14516 sel[i] = t.to_uhwi ();
14518 if (sel[i] < nelts)
14519 all_in_vec1 = false;
14520 else
14521 all_in_vec0 = false;
14523 if ((sel[i] & (nelts-1)) != i)
14524 maybe_identity = false;
14527 if (maybe_identity)
14529 if (all_in_vec0)
14530 return op0;
14531 if (all_in_vec1)
14532 return op1;
14535 if (all_in_vec0)
14536 op1 = op0;
14537 else if (all_in_vec1)
14539 op0 = op1;
14540 for (i = 0; i < nelts; i++)
14541 sel[i] -= nelts;
14542 need_mask_canon = true;
14545 if ((TREE_CODE (op0) == VECTOR_CST
14546 || TREE_CODE (op0) == CONSTRUCTOR)
14547 && (TREE_CODE (op1) == VECTOR_CST
14548 || TREE_CODE (op1) == CONSTRUCTOR))
14550 tree t = fold_vec_perm (type, op0, op1, sel);
14551 if (t != NULL_TREE)
14552 return t;
14555 if (op0 == op1 && !single_arg)
14556 changed = true;
14558 if (need_mask_canon && arg2 == op2)
14560 tree *tsel = XALLOCAVEC (tree, nelts);
14561 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14562 for (i = 0; i < nelts; i++)
14563 tsel[i] = build_int_cst (eltype, sel[i]);
14564 op2 = build_vector (TREE_TYPE (arg2), tsel);
14565 changed = true;
14568 if (changed)
14569 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
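/* Summary of the canonicalizations above: out-of-range selector
   entries are reduced by masking; a selector that picks every
   element of one input in order is the identity, so that input is
   returned; a selector that touches only one input is rewritten as a
   single-input permute; and if anything changed, the VEC_PERM_EXPR
   is rebuilt with the canonical mask.  */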
14571 return NULL_TREE;
14573 default:
14574 return NULL_TREE;
14575 } /* switch (code) */
14578 /* Perform constant folding and related simplification of EXPR.
14579 The related simplifications include x*1 => x, x*0 => 0, etc.,
14580 and application of the associative law.
14581 NOP_EXPR conversions may be removed freely (as long as we
14582 are careful not to change the type of the overall expression).
14583 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14584 but we can constant-fold them if they have constant operands. */
14586 #ifdef ENABLE_FOLD_CHECKING
14587 # define fold(x) fold_1 (x)
14588 static tree fold_1 (tree);
14589 static
14590 #endif
14591 tree
14592 fold (tree expr)
14594 const tree t = expr;
14595 enum tree_code code = TREE_CODE (t);
14596 enum tree_code_class kind = TREE_CODE_CLASS (code);
14597 tree tem;
14598 location_t loc = EXPR_LOCATION (expr);
14600 /* Return right away if a constant. */
14601 if (kind == tcc_constant)
14602 return t;
14604 /* CALL_EXPR-like objects with variable numbers of operands are
14605 treated specially. */
14606 if (kind == tcc_vl_exp)
14608 if (code == CALL_EXPR)
14610 tem = fold_call_expr (loc, expr, false);
14611 return tem ? tem : expr;
14613 return expr;
14616 if (IS_EXPR_CODE_CLASS (kind))
14618 tree type = TREE_TYPE (t);
14619 tree op0, op1, op2;
14621 switch (TREE_CODE_LENGTH (code))
14623 case 1:
14624 op0 = TREE_OPERAND (t, 0);
14625 tem = fold_unary_loc (loc, code, type, op0);
14626 return tem ? tem : expr;
14627 case 2:
14628 op0 = TREE_OPERAND (t, 0);
14629 op1 = TREE_OPERAND (t, 1);
14630 tem = fold_binary_loc (loc, code, type, op0, op1);
14631 return tem ? tem : expr;
14632 case 3:
14633 op0 = TREE_OPERAND (t, 0);
14634 op1 = TREE_OPERAND (t, 1);
14635 op2 = TREE_OPERAND (t, 2);
14636 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14637 return tem ? tem : expr;
14638 default:
14639 break;
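/* So fold is essentially a dispatcher: unary, binary and ternary
   expressions go to fold_unary_loc, fold_binary_loc and
   fold_ternary_loc, and the original tree is returned whenever the
   worker gives back NULL_TREE.  */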
14643 switch (code)
14645 case ARRAY_REF:
14647 tree op0 = TREE_OPERAND (t, 0);
14648 tree op1 = TREE_OPERAND (t, 1);
14650 if (TREE_CODE (op1) == INTEGER_CST
14651 && TREE_CODE (op0) == CONSTRUCTOR
14652 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14654 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14655 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14656 unsigned HOST_WIDE_INT begin = 0;
14658 /* Find a matching index by means of a binary search. */
14659 while (begin != end)
14661 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14662 tree index = (*elts)[middle].index;
14664 if (TREE_CODE (index) == INTEGER_CST
14665 && tree_int_cst_lt (index, op1))
14666 begin = middle + 1;
14667 else if (TREE_CODE (index) == INTEGER_CST
14668 && tree_int_cst_lt (op1, index))
14669 end = middle;
14670 else if (TREE_CODE (index) == RANGE_EXPR
14671 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14672 begin = middle + 1;
14673 else if (TREE_CODE (index) == RANGE_EXPR
14674 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14675 end = middle;
14676 else
14677 return (*elts)[middle].value;
14681 return t;
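/* The binary search relies on the CONSTRUCTOR elements being sorted
   by index.  RANGE_EXPR entries cover [low, high], so OP1 matches
   one when it is neither above the high bound nor below the low
   bound; e.g. indexing { [0 .. 9] = 1, [10] = 2 } with 7 yields 1.  */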
14684 /* Return a VECTOR_CST if possible. */
14685 case CONSTRUCTOR:
14687 tree type = TREE_TYPE (t);
14688 if (TREE_CODE (type) != VECTOR_TYPE)
14689 return t;
14691 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14692 unsigned HOST_WIDE_INT idx, pos = 0;
14693 tree value;
14695 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14697 if (!CONSTANT_CLASS_P (value))
14698 return t;
14699 if (TREE_CODE (value) == VECTOR_CST)
14701 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14702 vec[pos++] = VECTOR_CST_ELT (value, i);
14704 else
14705 vec[pos++] = value;
14707 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14708 vec[pos] = build_zero_cst (TREE_TYPE (type));
14710 return build_vector (type, vec);
14713 case CONST_DECL:
14714 return fold (DECL_INITIAL (t));
14716 default:
14717 return t;
14718 } /* switch (code) */
14721 #ifdef ENABLE_FOLD_CHECKING
14722 #undef fold
14724 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14725 hash_table<pointer_hash<const tree_node> > *);
14726 static void fold_check_failed (const_tree, const_tree);
14727 void print_fold_checksum (const_tree);
14729 /* When --enable-checking=fold, compute a digest of expr before
14730 and after the actual fold call to verify that fold did not
14731 accidentally change the original expr. */
14733 tree
14734 fold (tree expr)
14736 tree ret;
14737 struct md5_ctx ctx;
14738 unsigned char checksum_before[16], checksum_after[16];
14739 hash_table<pointer_hash<const tree_node> > ht (32);
14741 md5_init_ctx (&ctx);
14742 fold_checksum_tree (expr, &ctx, &ht);
14743 md5_finish_ctx (&ctx, checksum_before);
14744 ht.empty ();
14746 ret = fold_1 (expr);
14748 md5_init_ctx (&ctx);
14749 fold_checksum_tree (expr, &ctx, &ht);
14750 md5_finish_ctx (&ctx, checksum_after);
14752 if (memcmp (checksum_before, checksum_after, 16))
14753 fold_check_failed (expr, ret);
14755 return ret;
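/* The digest walk in fold_checksum_tree below deliberately masks the
   few fields fold is allowed to touch (e.g. DECL_ASSEMBLER_NAME and
   a type's cached values), so only an unexpected mutation triggers
   the "original tree changed by fold" error.  */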
14758 void
14759 print_fold_checksum (const_tree expr)
14761 struct md5_ctx ctx;
14762 unsigned char checksum[16], cnt;
14763 hash_table<pointer_hash<const tree_node> > ht (32);
14765 md5_init_ctx (&ctx);
14766 fold_checksum_tree (expr, &ctx, &ht);
14767 md5_finish_ctx (&ctx, checksum);
14768 for (cnt = 0; cnt < 16; ++cnt)
14769 fprintf (stderr, "%02x", checksum[cnt]);
14770 putc ('\n', stderr);
14773 static void
14774 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14776 internal_error ("fold check: original tree changed by fold");
14779 static void
14780 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14781 hash_table<pointer_hash <const tree_node> > *ht)
14783 const tree_node **slot;
14784 enum tree_code code;
14785 union tree_node buf;
14786 int i, len;
14788 recursive_label:
14789 if (expr == NULL)
14790 return;
14791 slot = ht->find_slot (expr, INSERT);
14792 if (*slot != NULL)
14793 return;
14794 *slot = expr;
14795 code = TREE_CODE (expr);
14796 if (TREE_CODE_CLASS (code) == tcc_declaration
14797 && DECL_ASSEMBLER_NAME_SET_P (expr))
14799 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14800 memcpy ((char *) &buf, expr, tree_size (expr));
14801 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14802 expr = (tree) &buf;
14804 else if (TREE_CODE_CLASS (code) == tcc_type
14805 && (TYPE_POINTER_TO (expr)
14806 || TYPE_REFERENCE_TO (expr)
14807 || TYPE_CACHED_VALUES_P (expr)
14808 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14809 || TYPE_NEXT_VARIANT (expr)))
14811 /* Allow these fields to be modified. */
14812 tree tmp;
14813 memcpy ((char *) &buf, expr, tree_size (expr));
14814 expr = tmp = (tree) &buf;
14815 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14816 TYPE_POINTER_TO (tmp) = NULL;
14817 TYPE_REFERENCE_TO (tmp) = NULL;
14818 TYPE_NEXT_VARIANT (tmp) = NULL;
14819 if (TYPE_CACHED_VALUES_P (tmp))
14821 TYPE_CACHED_VALUES_P (tmp) = 0;
14822 TYPE_CACHED_VALUES (tmp) = NULL;
14825 md5_process_bytes (expr, tree_size (expr), ctx);
14826 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14827 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14828 if (TREE_CODE_CLASS (code) != tcc_type
14829 && TREE_CODE_CLASS (code) != tcc_declaration
14830 && code != TREE_LIST
14831 && code != SSA_NAME
14832 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14833 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14834 switch (TREE_CODE_CLASS (code))
14836 case tcc_constant:
14837 switch (code)
14839 case STRING_CST:
14840 md5_process_bytes (TREE_STRING_POINTER (expr),
14841 TREE_STRING_LENGTH (expr), ctx);
14842 break;
14843 case COMPLEX_CST:
14844 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14845 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14846 break;
14847 case VECTOR_CST:
14848 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14849 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14850 break;
14851 default:
14852 break;
14854 break;
14855 case tcc_exceptional:
14856 switch (code)
14858 case TREE_LIST:
14859 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14860 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14861 expr = TREE_CHAIN (expr);
14862 goto recursive_label;
14863 break;
14864 case TREE_VEC:
14865 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14866 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14867 break;
14868 default:
14869 break;
14871 break;
14872 case tcc_expression:
14873 case tcc_reference:
14874 case tcc_comparison:
14875 case tcc_unary:
14876 case tcc_binary:
14877 case tcc_statement:
14878 case tcc_vl_exp:
14879 len = TREE_OPERAND_LENGTH (expr);
14880 for (i = 0; i < len; ++i)
14881 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14882 break;
14883 case tcc_declaration:
14884 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14885 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14886 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14888 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14889 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14890 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14891 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14892 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14895 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14897 if (TREE_CODE (expr) == FUNCTION_DECL)
14899 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14900 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14902 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14904 break;
14905 case tcc_type:
14906 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14907 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14908 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14909 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14910 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14911 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14912 if (INTEGRAL_TYPE_P (expr)
14913 || SCALAR_FLOAT_TYPE_P (expr))
14915 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14916 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14918 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14919 if (TREE_CODE (expr) == RECORD_TYPE
14920 || TREE_CODE (expr) == UNION_TYPE
14921 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14922 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14923 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14924 break;
14925 default:
14926 break;
14930 /* Helper function for outputting the checksum of a tree T. When
14931 debugging with gdb, you can "define mynext" to be "next" followed
14932 by "call debug_fold_checksum (op0)", then just trace down till the
14933 outputs differ. */
14935 DEBUG_FUNCTION void
14936 debug_fold_checksum (const_tree t)
14938 int i;
14939 unsigned char checksum[16];
14940 struct md5_ctx ctx;
14941 hash_table<pointer_hash<const tree_node> > ht (32);
14943 md5_init_ctx (&ctx);
14944 fold_checksum_tree (t, &ctx, &ht);
14945 md5_finish_ctx (&ctx, checksum);
14946 ht.empty ();
14948 for (i = 0; i < 16; i++)
14949 fprintf (stderr, "%d ", checksum[i]);
14951 fprintf (stderr, "\n");
14954 #endif
14956 /* Fold a unary tree expression with code CODE of type TYPE with an
14957 operand OP0. LOC is the location of the resulting expression.
14958 Return a folded expression if successful. Otherwise, return a tree
14959 expression with code CODE of type TYPE with an operand OP0. */
14961 tree
14962 fold_build1_stat_loc (location_t loc,
14963 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14965 tree tem;
14966 #ifdef ENABLE_FOLD_CHECKING
14967 unsigned char checksum_before[16], checksum_after[16];
14968 struct md5_ctx ctx;
14969 hash_table<pointer_hash<const tree_node> > ht (32);
14971 md5_init_ctx (&ctx);
14972 fold_checksum_tree (op0, &ctx, &ht);
14973 md5_finish_ctx (&ctx, checksum_before);
14974 ht.empty ();
14975 #endif
14977 tem = fold_unary_loc (loc, code, type, op0);
14978 if (!tem)
14979 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14981 #ifdef ENABLE_FOLD_CHECKING
14982 md5_init_ctx (&ctx);
14983 fold_checksum_tree (op0, &ctx, &ht);
14984 md5_finish_ctx (&ctx, checksum_after);
14986 if (memcmp (checksum_before, checksum_after, 16))
14987 fold_check_failed (op0, tem);
14988 #endif
14989 return tem;
14992 /* Fold a binary tree expression with code CODE of type TYPE with
14993 operands OP0 and OP1. LOC is the location of the resulting
14994 expression. Return a folded expression if successful. Otherwise,
14995 return a tree expression with code CODE of type TYPE with operands
14996 OP0 and OP1. */
14998 tree
14999 fold_build2_stat_loc (location_t loc,
15000 enum tree_code code, tree type, tree op0, tree op1
15001 MEM_STAT_DECL)
15003 tree tem;
15004 #ifdef ENABLE_FOLD_CHECKING
15005 unsigned char checksum_before_op0[16],
15006 checksum_before_op1[16],
15007 checksum_after_op0[16],
15008 checksum_after_op1[16];
15009 struct md5_ctx ctx;
15010 hash_table<pointer_hash<const tree_node> > ht (32);
15012 md5_init_ctx (&ctx);
15013 fold_checksum_tree (op0, &ctx, &ht);
15014 md5_finish_ctx (&ctx, checksum_before_op0);
15015 ht.empty ();
15017 md5_init_ctx (&ctx);
15018 fold_checksum_tree (op1, &ctx, &ht);
15019 md5_finish_ctx (&ctx, checksum_before_op1);
15020 ht.empty ();
15021 #endif
15023 tem = fold_binary_loc (loc, code, type, op0, op1);
15024 if (!tem)
15025 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15027 #ifdef ENABLE_FOLD_CHECKING
15028 md5_init_ctx (&ctx);
15029 fold_checksum_tree (op0, &ctx, &ht);
15030 md5_finish_ctx (&ctx, checksum_after_op0);
15031 ht.empty ();
15033 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15034 fold_check_failed (op0, tem);
15036 md5_init_ctx (&ctx);
15037 fold_checksum_tree (op1, &ctx, &ht);
15038 md5_finish_ctx (&ctx, checksum_after_op1);
15040 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15041 fold_check_failed (op1, tem);
15042 #endif
15043 return tem;
15046 /* Fold a ternary tree expression with code CODE of type TYPE with
15047 operands OP0, OP1, and OP2. Return a folded expression if
15048 successful. Otherwise, return a tree expression with code CODE of
15049 type TYPE with operands OP0, OP1, and OP2. */
15051 tree
15052 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15053 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15055 tree tem;
15056 #ifdef ENABLE_FOLD_CHECKING
15057 unsigned char checksum_before_op0[16],
15058 checksum_before_op1[16],
15059 checksum_before_op2[16],
15060 checksum_after_op0[16],
15061 checksum_after_op1[16],
15062 checksum_after_op2[16];
15063 struct md5_ctx ctx;
15064 hash_table<pointer_hash<const tree_node> > ht (32);
15066 md5_init_ctx (&ctx);
15067 fold_checksum_tree (op0, &ctx, &ht);
15068 md5_finish_ctx (&ctx, checksum_before_op0);
15069 ht.empty ();
15071 md5_init_ctx (&ctx);
15072 fold_checksum_tree (op1, &ctx, &ht);
15073 md5_finish_ctx (&ctx, checksum_before_op1);
15074 ht.empty ();
15076 md5_init_ctx (&ctx);
15077 fold_checksum_tree (op2, &ctx, &ht);
15078 md5_finish_ctx (&ctx, checksum_before_op2);
15079 ht.empty ();
15080 #endif
15082 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15083 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15084 if (!tem)
15085 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15087 #ifdef ENABLE_FOLD_CHECKING
15088 md5_init_ctx (&ctx);
15089 fold_checksum_tree (op0, &ctx, &ht);
15090 md5_finish_ctx (&ctx, checksum_after_op0);
15091 ht.empty ();
15093 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15094 fold_check_failed (op0, tem);
15096 md5_init_ctx (&ctx);
15097 fold_checksum_tree (op1, &ctx, &ht);
15098 md5_finish_ctx (&ctx, checksum_after_op1);
15099 ht.empty ();
15101 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15102 fold_check_failed (op1, tem);
15104 md5_init_ctx (&ctx);
15105 fold_checksum_tree (op2, &ctx, &ht);
15106 md5_finish_ctx (&ctx, checksum_after_op2);
15108 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15109 fold_check_failed (op2, tem);
15110 #endif
15111 return tem;
15114 /* Fold a CALL_EXPR of type TYPE whose callee is FN and whose NARGS
15115 arguments are in ARGARRAY, with a null static chain.
15116 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15117 of type TYPE from the given operands as constructed by build_call_array. */
15119 tree
15120 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15121 int nargs, tree *argarray)
15123 tree tem;
15124 #ifdef ENABLE_FOLD_CHECKING
15125 unsigned char checksum_before_fn[16],
15126 checksum_before_arglist[16],
15127 checksum_after_fn[16],
15128 checksum_after_arglist[16];
15129 struct md5_ctx ctx;
15130 hash_table<pointer_hash<const tree_node> > ht (32);
15131 int i;
15133 md5_init_ctx (&ctx);
15134 fold_checksum_tree (fn, &ctx, &ht);
15135 md5_finish_ctx (&ctx, checksum_before_fn);
15136 ht.empty ();
15138 md5_init_ctx (&ctx);
15139 for (i = 0; i < nargs; i++)
15140 fold_checksum_tree (argarray[i], &ctx, &ht);
15141 md5_finish_ctx (&ctx, checksum_before_arglist);
15142 ht.empty ();
15143 #endif
15145 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15147 #ifdef ENABLE_FOLD_CHECKING
15148 md5_init_ctx (&ctx);
15149 fold_checksum_tree (fn, &ctx, &ht);
15150 md5_finish_ctx (&ctx, checksum_after_fn);
15151 ht.empty ();
15153 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15154 fold_check_failed (fn, tem);
15156 md5_init_ctx (&ctx);
15157 for (i = 0; i < nargs; i++)
15158 fold_checksum_tree (argarray[i], &ctx, &ht);
15159 md5_finish_ctx (&ctx, checksum_after_arglist);
15161 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15162 fold_check_failed (NULL_TREE, tem);
15163 #endif
15164 return tem;
15167 /* Perform constant folding and related simplification of initializer
15168 expression EXPR. These behave identically to "fold_buildN" but ignore
15169 potential run-time traps and exceptions that fold must preserve. */
15171 #define START_FOLD_INIT \
15172 int saved_signaling_nans = flag_signaling_nans;\
15173 int saved_trapping_math = flag_trapping_math;\
15174 int saved_rounding_math = flag_rounding_math;\
15175 int saved_trapv = flag_trapv;\
15176 int saved_folding_initializer = folding_initializer;\
15177 flag_signaling_nans = 0;\
15178 flag_trapping_math = 0;\
15179 flag_rounding_math = 0;\
15180 flag_trapv = 0;\
15181 folding_initializer = 1;
15183 #define END_FOLD_INIT \
15184 flag_signaling_nans = saved_signaling_nans;\
15185 flag_trapping_math = saved_trapping_math;\
15186 flag_rounding_math = saved_rounding_math;\
15187 flag_trapv = saved_trapv;\
15188 folding_initializer = saved_folding_initializer;
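/* The wrappers below all follow the same pattern, roughly:

     tree result;
     START_FOLD_INIT;
     result = fold_build2_loc (loc, code, type, op0, op1);
     END_FOLD_INIT;
     return result;

   i.e. the trap-related flags are cleared around the fold, so that
   e.g. constant FP arithmetic in a static initializer folds freely,
   and then restored.  */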
15190 tree
15191 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15192 tree type, tree op)
15194 tree result;
15195 START_FOLD_INIT;
15197 result = fold_build1_loc (loc, code, type, op);
15199 END_FOLD_INIT;
15200 return result;
15203 tree
15204 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15205 tree type, tree op0, tree op1)
15207 tree result;
15208 START_FOLD_INIT;
15210 result = fold_build2_loc (loc, code, type, op0, op1);
15212 END_FOLD_INIT;
15213 return result;
15216 tree
15217 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15218 int nargs, tree *argarray)
15220 tree result;
15221 START_FOLD_INIT;
15223 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15225 END_FOLD_INIT;
15226 return result;
15229 #undef START_FOLD_INIT
15230 #undef END_FOLD_INIT
15232 /* Determine if the first argument is a multiple of the second. Return 0 if
15233 it is not, or if we cannot easily determine it to be.
15235 An example of the sort of thing we care about (at this point; this routine
15236 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15237 fold cases do now) is discovering that
15239 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15241 is a multiple of
15243 SAVE_EXPR (J * 8)
15245 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15247 This code also handles discovering that
15249 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15251 is a multiple of 8 so we don't have to worry about dealing with a
15252 possible remainder.
15254 Note that we *look* inside a SAVE_EXPR only to determine how it was
15255 calculated; it is not safe for fold to do much of anything else with the
15256 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15257 at run time. For example, the latter example above *cannot* be implemented
15258 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15259 evaluation time of the original SAVE_EXPR is not necessarily the same at
15260 the time the new expression is evaluated. The only optimization of this
15261 sort that would be valid is changing
15263 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15265 divided by 8 to
15267 SAVE_EXPR (I) * SAVE_EXPR (J)
15269 (where the same SAVE_EXPR (J) is used in the original and the
15270 transformed version). */
15272 int
15273 multiple_of_p (tree type, const_tree top, const_tree bottom)
15275 if (operand_equal_p (top, bottom, 0))
15276 return 1;
15278 if (TREE_CODE (type) != INTEGER_TYPE)
15279 return 0;
15281 switch (TREE_CODE (top))
15283 case BIT_AND_EXPR:
15284 /* Bitwise and provides a power of two multiple. If the mask is
15285 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15286 if (!integer_pow2p (bottom))
15287 return 0;
15288 /* FALLTHRU */
15290 case MULT_EXPR:
15291 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15292 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15294 case PLUS_EXPR:
15295 case MINUS_EXPR:
15296 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15297 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15299 case LSHIFT_EXPR:
15300 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15302 tree op1, t1;
15304 op1 = TREE_OPERAND (top, 1);
15305 /* const_binop may not detect overflow correctly,
15306 so check for it explicitly here. */
15307 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15308 && 0 != (t1 = fold_convert (type,
15309 const_binop (LSHIFT_EXPR,
15310 size_one_node,
15311 op1)))
15312 && !TREE_OVERFLOW (t1))
15313 return multiple_of_p (type, t1, bottom);
15315 return 0;
15317 case NOP_EXPR:
15318 /* Can't handle conversions from non-integral or wider integral type. */
15319 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15320 || (TYPE_PRECISION (type)
15321 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15322 return 0;
15324 /* ... fall through ... */
15326 case SAVE_EXPR:
15327 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15329 case COND_EXPR:
15330 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15331 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15333 case INTEGER_CST:
15334 if (TREE_CODE (bottom) != INTEGER_CST
15335 || integer_zerop (bottom)
15336 || (TYPE_UNSIGNED (type)
15337 && (tree_int_cst_sgn (top) < 0
15338 || tree_int_cst_sgn (bottom) < 0)))
15339 return 0;
15340 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15341 SIGNED);
15343 default:
15344 return 0;
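/* For instance, multiple_of_p (type, (x * 4) + 8, 4) recurses into
   the PLUS_EXPR and proves both addends are multiples of 4 (the
   MULT_EXPR via its constant factor, 8 via the INTEGER_CST case),
   so it returns nonzero; (x * 4) + 6 fails on the 6.  */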
15348 /* Return true if CODE or TYPE is known to be non-negative. */
15350 static bool
15351 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15353 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15354 && truth_value_p (code))
15355 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15356 have a signed:1 type (where the values are -1 and 0). */
15357 return true;
15358 return false;
15361 /* Return true if (CODE OP0) is known to be non-negative. If the return
15362 value is based on the assumption that signed overflow is undefined,
15363 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15364 *STRICT_OVERFLOW_P. */
15366 bool
15367 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15368 bool *strict_overflow_p)
15370 if (TYPE_UNSIGNED (type))
15371 return true;
15373 switch (code)
15375 case ABS_EXPR:
15376 /* We can't return 1 if flag_wrapv is set because
15377 ABS_EXPR<INT_MIN> = INT_MIN. */
15378 if (!INTEGRAL_TYPE_P (type))
15379 return true;
15380 if (TYPE_OVERFLOW_UNDEFINED (type))
15382 *strict_overflow_p = true;
15383 return true;
15385 break;
15387 case NON_LVALUE_EXPR:
15388 case FLOAT_EXPR:
15389 case FIX_TRUNC_EXPR:
15390 return tree_expr_nonnegative_warnv_p (op0,
15391 strict_overflow_p);
15393 case NOP_EXPR:
15395 tree inner_type = TREE_TYPE (op0);
15396 tree outer_type = type;
15398 if (TREE_CODE (outer_type) == REAL_TYPE)
15400 if (TREE_CODE (inner_type) == REAL_TYPE)
15401 return tree_expr_nonnegative_warnv_p (op0,
15402 strict_overflow_p);
15403 if (INTEGRAL_TYPE_P (inner_type))
15405 if (TYPE_UNSIGNED (inner_type))
15406 return true;
15407 return tree_expr_nonnegative_warnv_p (op0,
15408 strict_overflow_p);
15411 else if (INTEGRAL_TYPE_P (outer_type))
15413 if (TREE_CODE (inner_type) == REAL_TYPE)
15414 return tree_expr_nonnegative_warnv_p (op0,
15415 strict_overflow_p);
15416 if (INTEGRAL_TYPE_P (inner_type))
15417 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15418 && TYPE_UNSIGNED (inner_type);
15421 break;
15423 default:
15424 return tree_simple_nonnegative_warnv_p (code, type);
15427 /* We don't know the sign of `t', so be conservative and return false. */
15428 return false;
15431 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15432 value is based on the assumption that signed overflow is undefined,
15433 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15434 *STRICT_OVERFLOW_P. */
15436 bool
15437 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15438 tree op1, bool *strict_overflow_p)
15440 if (TYPE_UNSIGNED (type))
15441 return true;
15443 switch (code)
15445 case POINTER_PLUS_EXPR:
15446 case PLUS_EXPR:
15447 if (FLOAT_TYPE_P (type))
15448 return (tree_expr_nonnegative_warnv_p (op0,
15449 strict_overflow_p)
15450 && tree_expr_nonnegative_warnv_p (op1,
15451 strict_overflow_p));
15453 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15454 both unsigned and at least 2 bits shorter than the result. */
15455 if (TREE_CODE (type) == INTEGER_TYPE
15456 && TREE_CODE (op0) == NOP_EXPR
15457 && TREE_CODE (op1) == NOP_EXPR)
15459 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15460 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15461 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15462 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15464 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15465 TYPE_PRECISION (inner2)) + 1;
15466 return prec < TYPE_PRECISION (type);
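/* E.g. two unsigned chars a and b widened to 32-bit int: each value
   is at most 255, so (int) a + (int) b <= 510 and cannot reach the
   sign bit; numerically, MAX (8, 8) + 1 == 9 < 32.  */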
15469 break;
15471 case MULT_EXPR:
15472 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15474 /* x * x is always non-negative for floating point x
15475 or when signed overflow is undefined. */
15476 if (operand_equal_p (op0, op1, 0)
15477 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15478 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15480 if (TYPE_OVERFLOW_UNDEFINED (type))
15481 *strict_overflow_p = true;
15482 return true;
15486 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15487 both unsigned and the sum of their precisions is less than that of the result. */
15488 if (TREE_CODE (type) == INTEGER_TYPE
15489 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15490 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15492 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15493 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15494 : TREE_TYPE (op0);
15495 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15496 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15497 : TREE_TYPE (op1);
15499 bool unsigned0 = TYPE_UNSIGNED (inner0);
15500 bool unsigned1 = TYPE_UNSIGNED (inner1);
15502 if (TREE_CODE (op0) == INTEGER_CST)
15503 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15505 if (TREE_CODE (op1) == INTEGER_CST)
15506 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15508 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15509 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15511 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15512 ? tree_int_cst_min_precision (op0, UNSIGNED)
15513 : TYPE_PRECISION (inner0);
15515 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15516 ? tree_int_cst_min_precision (op1, UNSIGNED)
15517 : TYPE_PRECISION (inner1);
15519 return precision0 + precision1 < TYPE_PRECISION (type);
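/* Similarly for the multiply: with unsigned chars a and b,
   (int) a * (int) b <= 255 * 255 < 2^16, and 8 + 8 == 16 < 32, so
   the product never reaches the sign bit of the 32-bit result.  */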
15522 return false;
15524 case BIT_AND_EXPR:
15525 case MAX_EXPR:
15526 return (tree_expr_nonnegative_warnv_p (op0,
15527 strict_overflow_p)
15528 || tree_expr_nonnegative_warnv_p (op1,
15529 strict_overflow_p));
15531 case BIT_IOR_EXPR:
15532 case BIT_XOR_EXPR:
15533 case MIN_EXPR:
15534 case RDIV_EXPR:
15535 case TRUNC_DIV_EXPR:
15536 case CEIL_DIV_EXPR:
15537 case FLOOR_DIV_EXPR:
15538 case ROUND_DIV_EXPR:
15539 return (tree_expr_nonnegative_warnv_p (op0,
15540 strict_overflow_p)
15541 && tree_expr_nonnegative_warnv_p (op1,
15542 strict_overflow_p));
15544 case TRUNC_MOD_EXPR:
15545 case CEIL_MOD_EXPR:
15546 case FLOOR_MOD_EXPR:
15547 case ROUND_MOD_EXPR:
15548 return tree_expr_nonnegative_warnv_p (op0,
15549 strict_overflow_p);
15550 default:
15551 return tree_simple_nonnegative_warnv_p (code, type);
15554 /* We don't know the sign of `t', so be conservative and return false. */
15555 return false;
15558 /* Return true if T is known to be non-negative. If the return
15559 value is based on the assumption that signed overflow is undefined,
15560 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15561 *STRICT_OVERFLOW_P. */
15563 bool
15564 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15566 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15567 return true;
15569 switch (TREE_CODE (t))
15571 case INTEGER_CST:
15572 return tree_int_cst_sgn (t) >= 0;
15574 case REAL_CST:
15575 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15577 case FIXED_CST:
15578 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15580 case COND_EXPR:
15581 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15582 strict_overflow_p)
15583 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15584 strict_overflow_p));
15585 default:
15586 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15587 TREE_TYPE (t));
15589 /* We don't know the sign of `t', so be conservative and return false. */
15590 return false;
15593 /* Return true if T is known to be non-negative. If the return
15594 value is based on the assumption that signed overflow is undefined,
15595 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15596 *STRICT_OVERFLOW_P. */
15598 bool
15599 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15600 tree arg0, tree arg1, bool *strict_overflow_p)
15602 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15603 switch (DECL_FUNCTION_CODE (fndecl))
15605 CASE_FLT_FN (BUILT_IN_ACOS):
15606 CASE_FLT_FN (BUILT_IN_ACOSH):
15607 CASE_FLT_FN (BUILT_IN_CABS):
15608 CASE_FLT_FN (BUILT_IN_COSH):
15609 CASE_FLT_FN (BUILT_IN_ERFC):
15610 CASE_FLT_FN (BUILT_IN_EXP):
15611 CASE_FLT_FN (BUILT_IN_EXP10):
15612 CASE_FLT_FN (BUILT_IN_EXP2):
15613 CASE_FLT_FN (BUILT_IN_FABS):
15614 CASE_FLT_FN (BUILT_IN_FDIM):
15615 CASE_FLT_FN (BUILT_IN_HYPOT):
15616 CASE_FLT_FN (BUILT_IN_POW10):
15617 CASE_INT_FN (BUILT_IN_FFS):
15618 CASE_INT_FN (BUILT_IN_PARITY):
15619 CASE_INT_FN (BUILT_IN_POPCOUNT):
15620 CASE_INT_FN (BUILT_IN_CLZ):
15621 CASE_INT_FN (BUILT_IN_CLRSB):
15622 case BUILT_IN_BSWAP32:
15623 case BUILT_IN_BSWAP64:
15624 /* Always true. */
15625 return true;
15627 CASE_FLT_FN (BUILT_IN_SQRT):
15628 /* sqrt(-0.0) is -0.0. */
15629 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15630 return true;
15631 return tree_expr_nonnegative_warnv_p (arg0,
15632 strict_overflow_p);
15634 CASE_FLT_FN (BUILT_IN_ASINH):
15635 CASE_FLT_FN (BUILT_IN_ATAN):
15636 CASE_FLT_FN (BUILT_IN_ATANH):
15637 CASE_FLT_FN (BUILT_IN_CBRT):
15638 CASE_FLT_FN (BUILT_IN_CEIL):
15639 CASE_FLT_FN (BUILT_IN_ERF):
15640 CASE_FLT_FN (BUILT_IN_EXPM1):
15641 CASE_FLT_FN (BUILT_IN_FLOOR):
15642 CASE_FLT_FN (BUILT_IN_FMOD):
15643 CASE_FLT_FN (BUILT_IN_FREXP):
15644 CASE_FLT_FN (BUILT_IN_ICEIL):
15645 CASE_FLT_FN (BUILT_IN_IFLOOR):
15646 CASE_FLT_FN (BUILT_IN_IRINT):
15647 CASE_FLT_FN (BUILT_IN_IROUND):
15648 CASE_FLT_FN (BUILT_IN_LCEIL):
15649 CASE_FLT_FN (BUILT_IN_LDEXP):
15650 CASE_FLT_FN (BUILT_IN_LFLOOR):
15651 CASE_FLT_FN (BUILT_IN_LLCEIL):
15652 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15653 CASE_FLT_FN (BUILT_IN_LLRINT):
15654 CASE_FLT_FN (BUILT_IN_LLROUND):
15655 CASE_FLT_FN (BUILT_IN_LRINT):
15656 CASE_FLT_FN (BUILT_IN_LROUND):
15657 CASE_FLT_FN (BUILT_IN_MODF):
15658 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15659 CASE_FLT_FN (BUILT_IN_RINT):
15660 CASE_FLT_FN (BUILT_IN_ROUND):
15661 CASE_FLT_FN (BUILT_IN_SCALB):
15662 CASE_FLT_FN (BUILT_IN_SCALBLN):
15663 CASE_FLT_FN (BUILT_IN_SCALBN):
15664 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15665 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15666 CASE_FLT_FN (BUILT_IN_SINH):
15667 CASE_FLT_FN (BUILT_IN_TANH):
15668 CASE_FLT_FN (BUILT_IN_TRUNC):
15669 /* True if the 1st argument is nonnegative. */
15670 return tree_expr_nonnegative_warnv_p (arg0,
15671 strict_overflow_p);
15673 CASE_FLT_FN (BUILT_IN_FMAX):
15674 /* True if the 1st OR 2nd arguments are nonnegative. */
15675 return (tree_expr_nonnegative_warnv_p (arg0,
15676 strict_overflow_p)
15677 || (tree_expr_nonnegative_warnv_p (arg1,
15678 strict_overflow_p)));
15680 CASE_FLT_FN (BUILT_IN_FMIN):
15681 /* True if the 1st AND 2nd arguments are nonnegative. */
15682 return (tree_expr_nonnegative_warnv_p (arg0,
15683 strict_overflow_p)
15684 && (tree_expr_nonnegative_warnv_p (arg1,
15685 strict_overflow_p)));
15687 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15688 /* True if the 2nd argument is nonnegative. */
15689 return tree_expr_nonnegative_warnv_p (arg1,
15690 strict_overflow_p);
15692 CASE_FLT_FN (BUILT_IN_POWI):
15693 /* True if the 1st argument is nonnegative or the second
15694 argument is an even integer. */
15695 if (TREE_CODE (arg1) == INTEGER_CST
15696 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15697 return true;
15698 return tree_expr_nonnegative_warnv_p (arg0,
15699 strict_overflow_p);
15701 CASE_FLT_FN (BUILT_IN_POW):
15702 /* True if the 1st argument is nonnegative or the second
15703 argument is an even-integer-valued real. */
15704 if (TREE_CODE (arg1) == REAL_CST)
15706 REAL_VALUE_TYPE c;
15707 HOST_WIDE_INT n;
15709 c = TREE_REAL_CST (arg1);
15710 n = real_to_integer (&c);
15711 if ((n & 1) == 0)
15713 REAL_VALUE_TYPE cint;
15714 real_from_integer (&cint, VOIDmode, n, SIGNED);
15715 if (real_identical (&c, &cint))
15716 return true;
15719 return tree_expr_nonnegative_warnv_p (arg0,
15720 strict_overflow_p);
15722 default:
15723 break;
15725 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15726 type);
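/* Illustrative sketch (not from the original source): the POW case
   above accepts pow (x, 4.0) as nonnegative for any x, because 4.0 is
   an even-integer-valued real, whereas pow (x, 3.0) is nonnegative
   only when x itself is, so we recurse on the first argument.  Here
   x stands for an arbitrary floating point expression.  */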
15729 /* Return true if T is known to be non-negative. If the return
15730 value is based on the assumption that signed overflow is undefined,
15731 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15732 *STRICT_OVERFLOW_P. */
15734 static bool
15735 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15737 enum tree_code code = TREE_CODE (t);
15738 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15739 return true;
15741 switch (code)
15743 case TARGET_EXPR:
15745 tree temp = TARGET_EXPR_SLOT (t);
15746 t = TARGET_EXPR_INITIAL (t);
15748 /* If the initializer is non-void, then it's a normal expression
15749 that will be assigned to the slot. */
15750 if (!VOID_TYPE_P (t))
15751 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15753 /* Otherwise, the initializer sets the slot in some way. One common
15754 way is an assignment statement at the end of the initializer. */
15755 while (1)
15757 if (TREE_CODE (t) == BIND_EXPR)
15758 t = expr_last (BIND_EXPR_BODY (t));
15759 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15760 || TREE_CODE (t) == TRY_CATCH_EXPR)
15761 t = expr_last (TREE_OPERAND (t, 0));
15762 else if (TREE_CODE (t) == STATEMENT_LIST)
15763 t = expr_last (t);
15764 else
15765 break;
15767 if (TREE_CODE (t) == MODIFY_EXPR
15768 && TREE_OPERAND (t, 0) == temp)
15769 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15770 strict_overflow_p);
15772 return false;
15775 case CALL_EXPR:
15777 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15778 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15780 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15781 get_callee_fndecl (t),
15782 arg0,
15783 arg1,
15784 strict_overflow_p);
15786 case COMPOUND_EXPR:
15787 case MODIFY_EXPR:
15788 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15789 strict_overflow_p);
15790 case BIND_EXPR:
15791 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15792 strict_overflow_p);
15793 case SAVE_EXPR:
15794 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15795 strict_overflow_p);
15797 default:
15798 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15799 TREE_TYPE (t));
15802 /* We don't know the sign of `t', so be conservative and return false. */
15803 return false;
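/* Illustrative sketch (not from the original source, with hypothetical
   names D.1 and x): the TARGET_EXPR walk above handles a void
   initializer whose last statement stores into the slot, e.g.

     TARGET_EXPR <D.1, { ...; D.1 = ABS_EXPR <x>; }>

   is known nonnegative because the trailing MODIFY_EXPR assigns
   ABS_EXPR <x>, which is itself nonnegative.  */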
15806 /* Return true if T is known to be non-negative. If the return
15807 value is based on the assumption that signed overflow is undefined,
15808 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15809 *STRICT_OVERFLOW_P. */
15811 bool
15812 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15814 enum tree_code code;
15815 if (t == error_mark_node)
15816 return false;
15818 code = TREE_CODE (t);
15819 switch (TREE_CODE_CLASS (code))
15821 case tcc_binary:
15822 case tcc_comparison:
15823 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15824 TREE_TYPE (t),
15825 TREE_OPERAND (t, 0),
15826 TREE_OPERAND (t, 1),
15827 strict_overflow_p);
15829 case tcc_unary:
15830 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15831 TREE_TYPE (t),
15832 TREE_OPERAND (t, 0),
15833 strict_overflow_p);
15835 case tcc_constant:
15836 case tcc_declaration:
15837 case tcc_reference:
15838 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15840 default:
15841 break;
15844 switch (code)
15846 case TRUTH_AND_EXPR:
15847 case TRUTH_OR_EXPR:
15848 case TRUTH_XOR_EXPR:
15849 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15850 TREE_TYPE (t),
15851 TREE_OPERAND (t, 0),
15852 TREE_OPERAND (t, 1),
15853 strict_overflow_p);
15854 case TRUTH_NOT_EXPR:
15855 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15856 TREE_TYPE (t),
15857 TREE_OPERAND (t, 0),
15858 strict_overflow_p);
15860 case COND_EXPR:
15861 case CONSTRUCTOR:
15862 case OBJ_TYPE_REF:
15863 case ASSERT_EXPR:
15864 case ADDR_EXPR:
15865 case WITH_SIZE_EXPR:
15866 case SSA_NAME:
15867 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15869 default:
15870 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15874 /* Return true if `t' is known to be non-negative. Handle warnings
15875 about undefined signed overflow. */
15877 bool
15878 tree_expr_nonnegative_p (tree t)
15880 bool ret, strict_overflow_p;
15882 strict_overflow_p = false;
15883 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15884 if (strict_overflow_p)
15885 fold_overflow_warning (("assuming signed overflow does not occur when "
15886 "determining that expression is always "
15887 "non-negative"),
15888 WARN_STRICT_OVERFLOW_MISC);
15889 return ret;
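/* Usage sketch (illustrative, not from the original source): callers
   that want the strict-overflow warning behaviour shown above simply
   use this wrapper, e.g.

     if (tree_expr_nonnegative_p (arg))
       ... fold fabs (arg) to arg ...

   where arg is a hypothetical tree operand.  */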
15893 /* Return true when (CODE OP0) is known to be nonzero.
15894 For floating point we further ensure that the value is not denormal.
15895 Similar logic is present in nonzero_address_p in rtlanal.c.
15897 If the return value is based on the assumption that signed overflow
15898 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15899 change *STRICT_OVERFLOW_P. */
15901 bool
15902 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15903 bool *strict_overflow_p)
15905 switch (code)
15907 case ABS_EXPR:
15908 return tree_expr_nonzero_warnv_p (op0,
15909 strict_overflow_p);
15911 case NOP_EXPR:
15913 tree inner_type = TREE_TYPE (op0);
15914 tree outer_type = type;
15916 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15917 && tree_expr_nonzero_warnv_p (op0,
15918 strict_overflow_p));
15920 break;
15922 case NON_LVALUE_EXPR:
15923 return tree_expr_nonzero_warnv_p (op0,
15924 strict_overflow_p);
15926 default:
15927 break;
15930 return false;
15933 /* Return true when (CODE OP0 OP1) is known to be nonzero.
15934 For floating point we further ensure that the value is not denormal.
15935 Similar logic is present in nonzero_address_p in rtlanal.c.
15937 If the return value is based on the assumption that signed overflow
15938 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15939 change *STRICT_OVERFLOW_P. */
15941 bool
15942 tree_binary_nonzero_warnv_p (enum tree_code code,
15943 tree type,
15944 tree op0,
15945 tree op1, bool *strict_overflow_p)
15947 bool sub_strict_overflow_p;
15948 switch (code)
15950 case POINTER_PLUS_EXPR:
15951 case PLUS_EXPR:
15952 if (TYPE_OVERFLOW_UNDEFINED (type))
15954 /* In the presence of negative values it is hard
15955 to say anything definite. */
15956 sub_strict_overflow_p = false;
15957 if (!tree_expr_nonnegative_warnv_p (op0,
15958 &sub_strict_overflow_p)
15959 || !tree_expr_nonnegative_warnv_p (op1,
15960 &sub_strict_overflow_p))
15961 return false;
15962 /* One of the operands must be positive and the other non-negative. */
15963 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15964 overflows, on a two's-complement machine the sum of two
15965 nonnegative numbers can never be zero. */
15966 return (tree_expr_nonzero_warnv_p (op0,
15967 strict_overflow_p)
15968 || tree_expr_nonzero_warnv_p (op1,
15969 strict_overflow_p));
15971 break;
15973 case MULT_EXPR:
15974 if (TYPE_OVERFLOW_UNDEFINED (type))
15976 if (tree_expr_nonzero_warnv_p (op0,
15977 strict_overflow_p)
15978 && tree_expr_nonzero_warnv_p (op1,
15979 strict_overflow_p))
15981 *strict_overflow_p = true;
15982 return true;
15985 break;
15987 case MIN_EXPR:
15988 sub_strict_overflow_p = false;
15989 if (tree_expr_nonzero_warnv_p (op0,
15990 &sub_strict_overflow_p)
15991 && tree_expr_nonzero_warnv_p (op1,
15992 &sub_strict_overflow_p))
15994 if (sub_strict_overflow_p)
15995 *strict_overflow_p = true;
15997 break;
15999 case MAX_EXPR:
16000 sub_strict_overflow_p = false;
16001 if (tree_expr_nonzero_warnv_p (op0,
16002 &sub_strict_overflow_p))
16004 if (sub_strict_overflow_p)
16005 *strict_overflow_p = true;
16007 /* When both operands are nonzero, MAX must be too. */
16008 if (tree_expr_nonzero_warnv_p (op1,
16009 strict_overflow_p))
16010 return true;
16012 /* MAX where operand 0 is positive is positive. */
16013 return tree_expr_nonnegative_warnv_p (op0,
16014 strict_overflow_p);
16016 /* MAX where operand 1 is positive is positive. */
16017 else if (tree_expr_nonzero_warnv_p (op1,
16018 &sub_strict_overflow_p)
16019 && tree_expr_nonnegative_warnv_p (op1,
16020 &sub_strict_overflow_p))
16022 if (sub_strict_overflow_p)
16023 *strict_overflow_p = true;
16024 return true;
16026 break;
16028 case BIT_IOR_EXPR:
16029 return (tree_expr_nonzero_warnv_p (op1,
16030 strict_overflow_p)
16031 || tree_expr_nonzero_warnv_p (op0,
16032 strict_overflow_p));
16034 default:
16035 break;
16038 return false;
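/* Worked example (illustrative, not from the original source) for the
   PLUS_EXPR case above: with 8-bit two's-complement values, two
   nonnegative operands are each at most 127, so their sum is at most
   254 and cannot wrap around to 0 modulo 256; if one operand is also
   nonzero, the sum is therefore provably nonzero.  */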
16041 /* Return true when T is an address and is known to be nonzero.
16042 For floating point we further ensure that T is not denormal.
16043 Similar logic is present in nonzero_address_p in rtlanal.c.
16045 If the return value is based on the assumption that signed overflow
16046 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16047 change *STRICT_OVERFLOW_P. */
16049 bool
16050 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16052 bool sub_strict_overflow_p;
16053 switch (TREE_CODE (t))
16055 case INTEGER_CST:
16056 return !integer_zerop (t);
16058 case ADDR_EXPR:
16060 tree base = TREE_OPERAND (t, 0);
16062 if (!DECL_P (base))
16063 base = get_base_address (base);
16065 if (!base)
16066 return false;
16068 /* For objects in the symbol table, check whether we know they are nonzero.
16069 Don't do anything for variables and functions before the symtab is built;
16070 it is quite possible that they will be declared weak later. */
16071 if (DECL_P (base) && decl_in_symtab_p (base))
16073 struct symtab_node *symbol;
16075 symbol = symtab_node::get (base);
16076 if (symbol)
16077 return symbol->nonzero_address ();
16078 else
16079 return false;
16083 /* Function-local objects are never NULL. */
16083 if (DECL_P (base)
16084 && (DECL_CONTEXT (base)
16085 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16086 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
16087 return true;
16089 /* Constants are never weak. */
16090 if (CONSTANT_CLASS_P (base))
16091 return true;
16093 return false;
16096 case COND_EXPR:
16097 sub_strict_overflow_p = false;
16098 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16099 &sub_strict_overflow_p)
16100 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16101 &sub_strict_overflow_p))
16103 if (sub_strict_overflow_p)
16104 *strict_overflow_p = true;
16105 return true;
16107 break;
16109 default:
16110 break;
16112 return false;
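/* Illustrative sketch (not from the original source, with hypothetical
   declarations local_var and weak_sym):

     &local_var  =>  nonzero, function-local objects are never NULL
                     (the auto_var_in_fn_p check above);
     &weak_sym   =>  unknown, a weak symbol may resolve to address 0,
                     so we defer to symtab_node::nonzero_address.  */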
16115 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16116 attempt to fold the expression to a constant without modifying TYPE,
16117 OP0 or OP1.
16119 If the expression could be simplified to a constant, then return
16120 the constant. If the expression would not be simplified to a
16121 constant, then return NULL_TREE. */
16123 tree
16124 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16126 tree tem = fold_binary (code, type, op0, op1);
16127 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16130 /* Given the components of a unary expression CODE, TYPE and OP0,
16131 attempt to fold the expression to a constant without modifying
16132 TYPE or OP0.
16134 If the expression could be simplified to a constant, then return
16135 the constant. If the expression would not be simplified to a
16136 constant, then return NULL_TREE. */
16138 tree
16139 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16141 tree tem = fold_unary (code, type, op0);
16142 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
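/* Usage sketch (illustrative, not from the original source):

     fold_binary_to_constant (PLUS_EXPR, integer_type_node,
			      build_int_cst (integer_type_node, 2),
			      build_int_cst (integer_type_node, 3))

   yields the INTEGER_CST 5, while passing a non-constant operand
   yields NULL_TREE.  */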
16145 /* If EXP represents referencing an element in a constant string
16146 (either via pointer arithmetic or array indexing), return the
16147 tree representing the value accessed, otherwise return NULL. */
16149 tree
16150 fold_read_from_constant_string (tree exp)
16152 if ((TREE_CODE (exp) == INDIRECT_REF
16153 || TREE_CODE (exp) == ARRAY_REF)
16154 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16156 tree exp1 = TREE_OPERAND (exp, 0);
16157 tree index;
16158 tree string;
16159 location_t loc = EXPR_LOCATION (exp);
16161 if (TREE_CODE (exp) == INDIRECT_REF)
16162 string = string_constant (exp1, &index);
16163 else
16165 tree low_bound = array_ref_low_bound (exp);
16166 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16168 /* Optimize the special case of a zero lower bound.
16170 We convert the low_bound to sizetype to avoid some problems
16171 with constant folding. (E.g. suppose the lower bound is 1,
16172 and its mode is QI. Without the conversion, (ARRAY
16173 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16174 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16175 if (! integer_zerop (low_bound))
16176 index = size_diffop_loc (loc, index,
16177 fold_convert_loc (loc, sizetype, low_bound));
16179 string = exp1;
16182 if (string
16183 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16184 && TREE_CODE (string) == STRING_CST
16185 && TREE_CODE (index) == INTEGER_CST
16186 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16187 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16188 == MODE_INT)
16189 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16190 return build_int_cst_type (TREE_TYPE (exp),
16191 (TREE_STRING_POINTER (string)
16192 [TREE_INT_CST_LOW (index)]));
16194 return NULL;
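/* Illustrative sketch (not from the original source): for the C
   expression "abc"[1], the ARRAY_REF of the STRING_CST folds to the
   INTEGER_CST 'b', since the index 1 is a compile-time constant below
   TREE_STRING_LENGTH and the element mode is a one-byte integer.  */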
16197 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16198 an integer constant, real, or fixed-point constant.
16200 TYPE is the type of the result. */
16202 static tree
16203 fold_negate_const (tree arg0, tree type)
16205 tree t = NULL_TREE;
16207 switch (TREE_CODE (arg0))
16209 case INTEGER_CST:
16211 bool overflow;
16212 wide_int val = wi::neg (arg0, &overflow);
16213 t = force_fit_type (type, val, 1,
16214 (overflow | TREE_OVERFLOW (arg0))
16215 && !TYPE_UNSIGNED (type));
16216 break;
16219 case REAL_CST:
16220 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16221 break;
16223 case FIXED_CST:
16225 FIXED_VALUE_TYPE f;
16226 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16227 &(TREE_FIXED_CST (arg0)), NULL,
16228 TYPE_SATURATING (type));
16229 t = build_fixed (type, f);
16230 /* Propagate overflow flags. */
16231 if (overflow_p | TREE_OVERFLOW (arg0))
16232 TREE_OVERFLOW (t) = 1;
16233 break;
16236 default:
16237 gcc_unreachable ();
16240 return t;
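/* Worked example (illustrative, not from the original source): for a
   signed 32-bit type, negating the INTEGER_CST -2147483648 wraps back
   to -2147483648, so wi::neg reports overflow and force_fit_type marks
   the resulting constant with TREE_OVERFLOW.  */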
16243 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16244 an integer constant or real constant.
16246 TYPE is the type of the result. */
16248 tree
16249 fold_abs_const (tree arg0, tree type)
16251 tree t = NULL_TREE;
16253 switch (TREE_CODE (arg0))
16255 case INTEGER_CST:
16257 /* If the value is unsigned or non-negative, then the absolute value
16258 is the same as the ordinary value. */
16259 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
16260 t = arg0;
16262 /* If the value is negative, then the absolute value is
16263 its negation. */
16264 else
16266 bool overflow;
16267 wide_int val = wi::neg (arg0, &overflow);
16268 t = force_fit_type (type, val, -1,
16269 overflow | TREE_OVERFLOW (arg0));
16272 break;
16274 case REAL_CST:
16275 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16276 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16277 else
16278 t = arg0;
16279 break;
16281 default:
16282 gcc_unreachable ();
16285 return t;
16288 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16289 constant. TYPE is the type of the result. */
16291 static tree
16292 fold_not_const (const_tree arg0, tree type)
16294 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16296 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
16299 /* Given CODE, a relational operator, the target type, TYPE and two
16300 constant operands OP0 and OP1, return the result of the
16301 relational operation. If the result is not a compile time
16302 constant, then return NULL_TREE. */
16304 static tree
16305 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16307 int result, invert;
16309 /* From here on, the only cases we handle are when the result is
16310 known to be a constant. */
16312 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16314 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16315 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16317 /* Handle the cases where either operand is a NaN. */
16318 if (real_isnan (c0) || real_isnan (c1))
16320 switch (code)
16322 case EQ_EXPR:
16323 case ORDERED_EXPR:
16324 result = 0;
16325 break;
16327 case NE_EXPR:
16328 case UNORDERED_EXPR:
16329 case UNLT_EXPR:
16330 case UNLE_EXPR:
16331 case UNGT_EXPR:
16332 case UNGE_EXPR:
16333 case UNEQ_EXPR:
16334 result = 1;
16335 break;
16337 case LT_EXPR:
16338 case LE_EXPR:
16339 case GT_EXPR:
16340 case GE_EXPR:
16341 case LTGT_EXPR:
16342 if (flag_trapping_math)
16343 return NULL_TREE;
16344 result = 0;
16345 break;
16347 default:
16348 gcc_unreachable ();
16351 return constant_boolean_node (result, type);
16354 return constant_boolean_node (real_compare (code, c0, c1), type);
16357 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16359 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16360 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16361 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16364 /* Handle equality/inequality of complex constants. */
16365 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16367 tree rcond = fold_relational_const (code, type,
16368 TREE_REALPART (op0),
16369 TREE_REALPART (op1));
16370 tree icond = fold_relational_const (code, type,
16371 TREE_IMAGPART (op0),
16372 TREE_IMAGPART (op1));
16373 if (code == EQ_EXPR)
16374 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16375 else if (code == NE_EXPR)
16376 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16377 else
16378 return NULL_TREE;
16381 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16383 unsigned count = VECTOR_CST_NELTS (op0);
16384 tree *elts = XALLOCAVEC (tree, count);
16385 gcc_assert (VECTOR_CST_NELTS (op1) == count
16386 && TYPE_VECTOR_SUBPARTS (type) == count);
16388 for (unsigned i = 0; i < count; i++)
16390 tree elem_type = TREE_TYPE (type);
16391 tree elem0 = VECTOR_CST_ELT (op0, i);
16392 tree elem1 = VECTOR_CST_ELT (op1, i);
16394 tree tem = fold_relational_const (code, elem_type,
16395 elem0, elem1);
16397 if (tem == NULL_TREE)
16398 return NULL_TREE;
16400 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16403 return build_vector (type, elts);
16406 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16408 To compute GT, swap the arguments and do LT.
16409 To compute GE, do LT and invert the result.
16410 To compute LE, swap the arguments, do LT and invert the result.
16411 To compute NE, do EQ and invert the result.
16413 Therefore, the code below must handle only EQ and LT. */
16415 if (code == LE_EXPR || code == GT_EXPR)
16417 tree tem = op0;
16418 op0 = op1;
16419 op1 = tem;
16420 code = swap_tree_comparison (code);
16423 /* Note that it is safe to invert for real values here because we
16424 have already handled the one case where it matters. */
16426 invert = 0;
16427 if (code == NE_EXPR || code == GE_EXPR)
16429 invert = 1;
16430 code = invert_tree_comparison (code, false);
16433 /* Compute a result for LT or EQ if the arguments permit;
16434 otherwise return NULL_TREE. */
16435 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16437 if (code == EQ_EXPR)
16438 result = tree_int_cst_equal (op0, op1);
16439 else
16440 result = tree_int_cst_lt (op0, op1);
16442 else
16443 return NULL_TREE;
16445 if (invert)
16446 result ^= 1;
16447 return constant_boolean_node (result, type);
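/* Illustrative sketch (not from the original source) of the
   canonicalization above, which reduces every integer comparison to
   LT or EQ:

     7 > 3   =>  swap:    3 < 7        =>  true
     7 >= 3  =>  invert:  !(7 < 3)     =>  true
     7 != 3  =>  invert:  !(7 == 3)    =>  true  */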
16450 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16451 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16452 itself. */
16454 tree
16455 fold_build_cleanup_point_expr (tree type, tree expr)
16457 /* If the expression does not have side effects then we don't have to wrap
16458 it with a cleanup point expression. */
16459 if (!TREE_SIDE_EFFECTS (expr))
16460 return expr;
16462 /* If the expression is a return, check whether the expression inside the
16463 return, or the right-hand side of the modify expression inside the
16464 return, has no side effects. If either has none, we don't need to
16465 wrap the expression in a cleanup point expression. Note we don't check the
16466 left-hand side of the modify because it should always be a return decl. */
16467 if (TREE_CODE (expr) == RETURN_EXPR)
16469 tree op = TREE_OPERAND (expr, 0);
16470 if (!op || !TREE_SIDE_EFFECTS (op))
16471 return expr;
16472 op = TREE_OPERAND (op, 1);
16473 if (!TREE_SIDE_EFFECTS (op))
16474 return expr;
16477 return build1 (CLEANUP_POINT_EXPR, type, expr);
16480 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16481 of an indirection through OP0, or NULL_TREE if no simplification is
16482 possible. */
16484 tree
16485 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16487 tree sub = op0;
16488 tree subtype;
16490 STRIP_NOPS (sub);
16491 subtype = TREE_TYPE (sub);
16492 if (!POINTER_TYPE_P (subtype))
16493 return NULL_TREE;
16495 if (TREE_CODE (sub) == ADDR_EXPR)
16497 tree op = TREE_OPERAND (sub, 0);
16498 tree optype = TREE_TYPE (op);
16499 /* *&CONST_DECL -> to the value of the const decl. */
16500 if (TREE_CODE (op) == CONST_DECL)
16501 return DECL_INITIAL (op);
16502 /* *&p => p; make sure to handle *&"str"[cst] here. */
16503 if (type == optype)
16505 tree fop = fold_read_from_constant_string (op);
16506 if (fop)
16507 return fop;
16508 else
16509 return op;
16511 /* *(foo *)&fooarray => fooarray[0] */
16512 else if (TREE_CODE (optype) == ARRAY_TYPE
16513 && type == TREE_TYPE (optype)
16514 && (!in_gimple_form
16515 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16517 tree type_domain = TYPE_DOMAIN (optype);
16518 tree min_val = size_zero_node;
16519 if (type_domain && TYPE_MIN_VALUE (type_domain))
16520 min_val = TYPE_MIN_VALUE (type_domain);
16521 if (in_gimple_form
16522 && TREE_CODE (min_val) != INTEGER_CST)
16523 return NULL_TREE;
16524 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16525 NULL_TREE, NULL_TREE);
16527 /* *(foo *)&complexfoo => __real__ complexfoo */
16528 else if (TREE_CODE (optype) == COMPLEX_TYPE
16529 && type == TREE_TYPE (optype))
16530 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16531 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16532 else if (TREE_CODE (optype) == VECTOR_TYPE
16533 && type == TREE_TYPE (optype))
16535 tree part_width = TYPE_SIZE (type);
16536 tree index = bitsize_int (0);
16537 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16541 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16542 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16544 tree op00 = TREE_OPERAND (sub, 0);
16545 tree op01 = TREE_OPERAND (sub, 1);
16547 STRIP_NOPS (op00);
16548 if (TREE_CODE (op00) == ADDR_EXPR)
16550 tree op00type;
16551 op00 = TREE_OPERAND (op00, 0);
16552 op00type = TREE_TYPE (op00);
16554 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16555 if (TREE_CODE (op00type) == VECTOR_TYPE
16556 && type == TREE_TYPE (op00type))
16558 HOST_WIDE_INT offset = tree_to_shwi (op01);
16559 tree part_width = TYPE_SIZE (type);
16560 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
16561 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16562 tree index = bitsize_int (indexi);
16564 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16565 return fold_build3_loc (loc,
16566 BIT_FIELD_REF, type, op00,
16567 part_width, index);
16570 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16571 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16572 && type == TREE_TYPE (op00type))
16574 tree size = TYPE_SIZE_UNIT (type);
16575 if (tree_int_cst_equal (size, op01))
16576 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16578 /* ((foo *)&fooarray)[1] => fooarray[1] */
16579 else if (TREE_CODE (op00type) == ARRAY_TYPE
16580 && type == TREE_TYPE (op00type))
16582 tree type_domain = TYPE_DOMAIN (op00type);
16583 tree min_val = size_zero_node;
16584 if (type_domain && TYPE_MIN_VALUE (type_domain))
16585 min_val = TYPE_MIN_VALUE (type_domain);
16586 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16587 TYPE_SIZE_UNIT (type));
16588 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16589 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16590 NULL_TREE, NULL_TREE);
16595 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16596 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16597 && type == TREE_TYPE (TREE_TYPE (subtype))
16598 && (!in_gimple_form
16599 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16601 tree type_domain;
16602 tree min_val = size_zero_node;
16603 sub = build_fold_indirect_ref_loc (loc, sub);
16604 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16605 if (type_domain && TYPE_MIN_VALUE (type_domain))
16606 min_val = TYPE_MIN_VALUE (type_domain);
16607 if (in_gimple_form
16608 && TREE_CODE (min_val) != INTEGER_CST)
16609 return NULL_TREE;
16610 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16611 NULL_TREE);
16614 return NULL_TREE;
16617 /* Builds an expression for an indirection through T, simplifying some
16618 cases. */
16620 tree
16621 build_fold_indirect_ref_loc (location_t loc, tree t)
16623 tree type = TREE_TYPE (TREE_TYPE (t));
16624 tree sub = fold_indirect_ref_1 (loc, type, t);
16626 if (sub)
16627 return sub;
16629 return build1_loc (loc, INDIRECT_REF, type, t);
16632 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16634 tree
16635 fold_indirect_ref_loc (location_t loc, tree t)
16637 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16639 if (sub)
16640 return sub;
16641 else
16642 return t;
16645 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16646 whose result is ignored. The type of the returned tree need not be
16647 the same as the original expression. */
16649 tree
16650 fold_ignored_result (tree t)
16652 if (!TREE_SIDE_EFFECTS (t))
16653 return integer_zero_node;
16655 for (;;)
16656 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16658 case tcc_unary:
16659 t = TREE_OPERAND (t, 0);
16660 break;
16662 case tcc_binary:
16663 case tcc_comparison:
16664 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16665 t = TREE_OPERAND (t, 0);
16666 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16667 t = TREE_OPERAND (t, 1);
16668 else
16669 return t;
16670 break;
16672 case tcc_expression:
16673 switch (TREE_CODE (t))
16675 case COMPOUND_EXPR:
16676 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16677 return t;
16678 t = TREE_OPERAND (t, 0);
16679 break;
16681 case COND_EXPR:
16682 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16683 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16684 return t;
16685 t = TREE_OPERAND (t, 0);
16686 break;
16688 default:
16689 return t;
16691 break;
16693 default:
16694 return t;
16698 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16700 tree
16701 round_up_loc (location_t loc, tree value, unsigned int divisor)
16703 tree div = NULL_TREE;
16705 if (divisor == 1)
16706 return value;
16708 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16709 have to do anything. Only do this check when VALUE is not a constant,
16710 because for a constant the check is more expensive than simply
16711 doing the rounding. */
16712 if (TREE_CODE (value) != INTEGER_CST)
16714 div = build_int_cst (TREE_TYPE (value), divisor);
16716 if (multiple_of_p (TREE_TYPE (value), value, div))
16717 return value;
16720 /* If divisor is a power of two, simplify this to bit manipulation. */
16721 if (divisor == (divisor & -divisor))
16723 if (TREE_CODE (value) == INTEGER_CST)
16725 wide_int val = value;
16726 bool overflow_p;
16728 if ((val & (divisor - 1)) == 0)
16729 return value;
16731 overflow_p = TREE_OVERFLOW (value);
16732 val &= ~(divisor - 1);
16733 val += divisor;
16734 if (val == 0)
16735 overflow_p = true;
16737 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16739 else
16741 tree t;
16743 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16744 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16745 t = build_int_cst (TREE_TYPE (value), -divisor);
16746 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16749 else
16751 if (!div)
16752 div = build_int_cst (TREE_TYPE (value), divisor);
16753 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16754 value = size_binop_loc (loc, MULT_EXPR, value, div);
16757 return value;
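/* Worked example (illustrative, not from the original source) of the
   power-of-two branch above, with VALUE 13 and DIVISOR 8:

     (13 + 7) & -8  ==  20 & ~7  ==  16

   i.e. add DIVISOR - 1, then clear the low-order bits.  */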
16760 /* Likewise, but round down. */
16762 tree
16763 round_down_loc (location_t loc, tree value, int divisor)
16765 tree div = NULL_TREE;
16767 gcc_assert (divisor > 0);
16768 if (divisor == 1)
16769 return value;
16771 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16772 have to do anything. Only do this check when VALUE is not a constant,
16773 because for a constant the check is more expensive than simply
16774 doing the rounding. */
16775 if (TREE_CODE (value) != INTEGER_CST)
16777 div = build_int_cst (TREE_TYPE (value), divisor);
16779 if (multiple_of_p (TREE_TYPE (value), value, div))
16780 return value;
16783 /* If divisor is a power of two, simplify this to bit manipulation. */
16784 if (divisor == (divisor & -divisor))
16786 tree t;
16788 t = build_int_cst (TREE_TYPE (value), -divisor);
16789 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16791 else
16793 if (!div)
16794 div = build_int_cst (TREE_TYPE (value), divisor);
16795 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16796 value = size_binop_loc (loc, MULT_EXPR, value, div);
16799 return value;
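/* Worked example (illustrative, not from the original source): with
   VALUE 13 and DIVISOR 8 the bit manipulation branch computes

     13 & -8  ==  13 & ~7  ==  8

   which is 13 rounded down to a multiple of 8.  */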
16802 /* Returns a pointer to the base of the object addressed by EXP and
16803 extracts the information about the offset of the access, storing it
16804 in PBITPOS and POFFSET. */
16806 static tree
16807 split_address_to_core_and_offset (tree exp,
16808 HOST_WIDE_INT *pbitpos, tree *poffset)
16810 tree core;
16811 enum machine_mode mode;
16812 int unsignedp, volatilep;
16813 HOST_WIDE_INT bitsize;
16814 location_t loc = EXPR_LOCATION (exp);
16816 if (TREE_CODE (exp) == ADDR_EXPR)
16818 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16819 poffset, &mode, &unsignedp, &volatilep,
16820 false);
16821 core = build_fold_addr_expr_loc (loc, core);
16823 else
16825 core = exp;
16826 *pbitpos = 0;
16827 *poffset = NULL_TREE;
16830 return core;
16833 /* Returns true if the addresses of E1 and E2 differ by a constant, false
16834 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16836 bool
16837 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16839 tree core1, core2;
16840 HOST_WIDE_INT bitpos1, bitpos2;
16841 tree toffset1, toffset2, tdiff, type;
16843 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16844 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16846 if (bitpos1 % BITS_PER_UNIT != 0
16847 || bitpos2 % BITS_PER_UNIT != 0
16848 || !operand_equal_p (core1, core2, 0))
16849 return false;
16851 if (toffset1 && toffset2)
16853 type = TREE_TYPE (toffset1);
16854 if (type != TREE_TYPE (toffset2))
16855 toffset2 = fold_convert (type, toffset2);
16857 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16858 if (!cst_and_fits_in_hwi (tdiff))
16859 return false;
16861 *diff = int_cst_value (tdiff);
16863 else if (toffset1 || toffset2)
16865 /* If only one of the offsets is non-constant, the difference cannot
16866 be a constant. */
16867 return false;
16869 else
16870 *diff = 0;
16872 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16873 return true;
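/* Illustrative sketch (not from the original source, with a
   hypothetical array a of 4-byte int and variable diff): for
   ptr_difference_const (&a[3], &a[1], &diff), both addresses share the
   core &a, the bit positions differ by 64 bits, and *DIFF is set to
   the byte difference 8.  */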
16876 /* Simplify the floating point expression EXP when the sign of the
16877 result is not significant. Return NULL_TREE if no simplification
16878 is possible. */
16880 tree
16881 fold_strip_sign_ops (tree exp)
16883 tree arg0, arg1;
16884 location_t loc = EXPR_LOCATION (exp);
16886 switch (TREE_CODE (exp))
16888 case ABS_EXPR:
16889 case NEGATE_EXPR:
16890 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16891 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16893 case MULT_EXPR:
16894 case RDIV_EXPR:
16895 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16896 return NULL_TREE;
16897 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16898 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16899 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16900 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16901 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16902 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16903 break;
16905 case COMPOUND_EXPR:
16906 arg0 = TREE_OPERAND (exp, 0);
16907 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16908 if (arg1)
16909 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16910 break;
16912 case COND_EXPR:
16913 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16914 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16915 if (arg0 || arg1)
16916 return fold_build3_loc (loc,
16917 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16918 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16919 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16920 break;
16922 case CALL_EXPR:
16924 const enum built_in_function fcode = builtin_mathfn_code (exp);
16925 switch (fcode)
16927 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16928 /* Strip the copysign function call; return the 1st argument. */
16929 arg0 = CALL_EXPR_ARG (exp, 0);
16930 arg1 = CALL_EXPR_ARG (exp, 1);
16931 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16933 default:
16934 /* Strip sign ops from the argument of "odd" math functions. */
16935 if (negate_mathfn_p (fcode))
16937 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16938 if (arg0)
16939 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16941 break;
16944 break;
16946 default:
16947 break;
16949 return NULL_TREE;
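/* Illustrative sketch (not from the original source, with hypothetical
   operands x and y) of simplifications enabled when the result's sign
   is ignored:

     -x * y        =>  x * y
     fabs (x) / y  =>  x / y
     sin (-x)      =>  sin (x)   (sin is an "odd" math function)  */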