/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
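
/* Illustrative note (not in the original source): the low three bits
   encode LT, EQ and GT, and bit 3 encodes "unordered", so compound
   comparison codes are bitwise combinations of the primitive ones:

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)          -- 1 | 2 == 3
     COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT)
     (COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ          -- 3 & 6 == 2

   The last identity is what lets a fold such as "a <= b && a >= b"
   become "a == b" by simply ANDing the two encodings.  */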

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
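
/* Worked example (illustrative, not in the original source): with 8-bit
   two's complement values a = 0x70 and b = 0x70, the truncated sum is
   0xE0, whose sign differs from both addends.  Here ~(a ^ b) == 0xFF and
   a ^ sum == 0x90, so their AND has the sign bit set, is negative, and
   the macro reports overflow.  With operands of opposite signs the
   ~(a ^ b) factor clears the sign bit and the macro yields 0.  */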

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two; that does the
     correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
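
/* Usage sketch (illustrative, not in the original source): given two
   INTEGER_CST nodes of the same type holding 12 and 4,

     tree q = div_if_zero_remainder (EXACT_DIV_EXPR, c12, c4);

   returns the constant 3, while a divisor of 5 would make the remainder
   nonzero and the call would return NULL_TREE.  (c12 and c4 are
   hypothetical variable names.)  */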

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
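
/* Example (illustrative, not in the original source): for a 32-bit
   signed type the only rejected value is INT_MIN, whose bit pattern is
   exactly 1 << 31; its negation is not representable, whereas every
   other value negates without overflow.  */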

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
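
/* Decomposition example (illustrative, not in the original source): with
   CODE == PLUS_EXPR and IN == x + 5, the literal 5 is stored in *LITP,
   *CONP stays null and x is returned as the variable part.  For
   IN == x - 5 the 5 lands in *MINUS_LITP instead.  An operand that is
   TREE_CONSTANT without being a literal, e.g. the address of a global,
   would be stored in *CONP.  */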

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
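
/* Example (illustrative, not in the original source): folding 7 + 8 in a
   32-bit signed type yields an INTEGER_CST of 15 with no flags set;
   folding INT_MAX + 1 yields the wrapped constant with TREE_OVERFLOW
   set, letting callers distinguish a clean fold from one that relied on
   undefined signed overflow.  */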

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, 0);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                    t = br*br + bi*bi
              */
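              /* Worked example (illustrative, not in the original source):
                 for (3 + 4i) / (1 + 2i), t = 1*1 + 2*2 = 5, so
                 tr = (3*1 + 4*2)/5 = 11/5 and ti = (4*1 - 3*2)/5 = -2/5,
                 giving the quotient 2.2 - 0.4i.  */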
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }

      return build_vector (type, nreverse (list));
    }

  return NULL_TREE;
}
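
/* Usage sketch (illustrative, not in the original source): for two
   REAL_CST operands holding 1.5 and 2.25, const_binop (PLUS_EXPR, a, b)
   folds to a REAL_CST of 3.75; the same call returns NULL_TREE instead
   of folding when, e.g., the operation is an RDIV_EXPR by zero that must
   raise an exception at run time.  (a and b are hypothetical variable
   names.)  */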

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
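
/* Example (illustrative, not in the original source): for sizetype
   constants 4 and 7, the result is computed as -(7 - 4): the subtraction
   that cannot underflow is done in the unsigned type, converted to
   ssizetype and negated, yielding -3.  */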

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
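
/* Example (illustrative, not in the original source): truncating a
   REAL_CST of 1e30 to a 32-bit integer type saturates to INT_MAX and
   sets TREE_OVERFLOW on the result; a NaN converts to 0, likewise
   flagged, matching the Java-style rules described above.  */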

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     If the fractional bits are not zero, add 1 to temp to do so.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1851 /* Convert expression ARG to type TYPE. Used by the middle-end for
1852 simple conversions in preference to calling the front-end's convert. */
1854 tree
1855 fold_convert_loc (location_t loc, tree type, tree arg)
1857 tree orig = TREE_TYPE (arg);
1858 tree tem;
1860 if (type == orig)
1861 return arg;
1863 if (TREE_CODE (arg) == ERROR_MARK
1864 || TREE_CODE (type) == ERROR_MARK
1865 || TREE_CODE (orig) == ERROR_MARK)
1866 return error_mark_node;
1868 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1869 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1871 switch (TREE_CODE (type))
1873 case POINTER_TYPE:
1874 case REFERENCE_TYPE:
1875 /* Handle conversions between pointers to different address spaces. */
1876 if (POINTER_TYPE_P (orig)
1877 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1878 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1879 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1880 /* fall through */
1882 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1883 case OFFSET_TYPE:
1884 if (TREE_CODE (arg) == INTEGER_CST)
1886 tem = fold_convert_const (NOP_EXPR, type, arg);
1887 if (tem != NULL_TREE)
1888 return tem;
1890 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1891 || TREE_CODE (orig) == OFFSET_TYPE)
1892 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1893 if (TREE_CODE (orig) == COMPLEX_TYPE)
1894 return fold_convert_loc (loc, type,
1895 fold_build1_loc (loc, REALPART_EXPR,
1896 TREE_TYPE (orig), arg));
1897 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1898 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1899 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1901 case REAL_TYPE:
1902 if (TREE_CODE (arg) == INTEGER_CST)
1904 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1905 if (tem != NULL_TREE)
1906 return tem;
1908 else if (TREE_CODE (arg) == REAL_CST)
1910 tem = fold_convert_const (NOP_EXPR, type, arg);
1911 if (tem != NULL_TREE)
1912 return tem;
1914 else if (TREE_CODE (arg) == FIXED_CST)
1916 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1917 if (tem != NULL_TREE)
1918 return tem;
1921 switch (TREE_CODE (orig))
1923 case INTEGER_TYPE:
1924 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1925 case POINTER_TYPE: case REFERENCE_TYPE:
1926 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1928 case REAL_TYPE:
1929 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1931 case FIXED_POINT_TYPE:
1932 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1934 case COMPLEX_TYPE:
1935 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1936 return fold_convert_loc (loc, type, tem);
1938 default:
1939 gcc_unreachable ();
1942 case FIXED_POINT_TYPE:
1943 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1944 || TREE_CODE (arg) == REAL_CST)
1946 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1947 if (tem != NULL_TREE)
1948 goto fold_convert_exit;
1951 switch (TREE_CODE (orig))
1953 case FIXED_POINT_TYPE:
1954 case INTEGER_TYPE:
1955 case ENUMERAL_TYPE:
1956 case BOOLEAN_TYPE:
1957 case REAL_TYPE:
1958 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1960 case COMPLEX_TYPE:
1961 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1962 return fold_convert_loc (loc, type, tem);
1964 default:
1965 gcc_unreachable ();
1968 case COMPLEX_TYPE:
1969 switch (TREE_CODE (orig))
1971 case INTEGER_TYPE:
1972 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1973 case POINTER_TYPE: case REFERENCE_TYPE:
1974 case REAL_TYPE:
1975 case FIXED_POINT_TYPE:
1976 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1977 fold_convert_loc (loc, TREE_TYPE (type), arg),
1978 fold_convert_loc (loc, TREE_TYPE (type),
1979 integer_zero_node));
1980 case COMPLEX_TYPE:
1982 tree rpart, ipart;
1984 if (TREE_CODE (arg) == COMPLEX_EXPR)
1986 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1987 TREE_OPERAND (arg, 0));
1988 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1989 TREE_OPERAND (arg, 1));
1990 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1993 arg = save_expr (arg);
1994 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1995 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1996 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1997 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1998 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2001 default:
2002 gcc_unreachable ();
2005 case VECTOR_TYPE:
2006 if (integer_zerop (arg))
2007 return build_zero_vector (type);
2008 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2009 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2010 || TREE_CODE (orig) == VECTOR_TYPE);
2011 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2013 case VOID_TYPE:
2014 tem = fold_ignored_result (arg);
2015 if (TREE_CODE (tem) == MODIFY_EXPR)
2016 goto fold_convert_exit;
2017 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2019 default:
2020 gcc_unreachable ();
2022 fold_convert_exit:
2023 protected_set_expr_location (tem, loc);
2024 return tem;
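/* Editor's note: an illustrative case of the dispatch above, added for
   exposition.  Converting an INTEGER_CST to a complex type takes the
   COMPLEX_TYPE arm and pairs the converted value with a zero imaginary
   part:

     tree c = fold_convert_loc (loc, complex_double_type_node,
				build_int_cst (integer_type_node, 2));

   C folds to COMPLEX_EXPR <2.0, 0.0>, i.e. the complex constant
   2.0 + 0.0i.  Only the concrete values are invented for the example;
   complex_double_type_node and build_int_cst are the standard global
   type node and constructor.  */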
2027 /* Return false if expr can be assumed not to be an lvalue, true
2028 otherwise. */
2030 static bool
2031 maybe_lvalue_p (const_tree x)
2033 /* We only need to wrap lvalue tree codes. */
2034 switch (TREE_CODE (x))
2036 case VAR_DECL:
2037 case PARM_DECL:
2038 case RESULT_DECL:
2039 case LABEL_DECL:
2040 case FUNCTION_DECL:
2041 case SSA_NAME:
2043 case COMPONENT_REF:
2044 case MEM_REF:
2045 case INDIRECT_REF:
2046 case MISALIGNED_INDIRECT_REF:
2047 case ARRAY_REF:
2048 case ARRAY_RANGE_REF:
2049 case BIT_FIELD_REF:
2050 case OBJ_TYPE_REF:
2052 case REALPART_EXPR:
2053 case IMAGPART_EXPR:
2054 case PREINCREMENT_EXPR:
2055 case PREDECREMENT_EXPR:
2056 case SAVE_EXPR:
2057 case TRY_CATCH_EXPR:
2058 case WITH_CLEANUP_EXPR:
2059 case COMPOUND_EXPR:
2060 case MODIFY_EXPR:
2061 case TARGET_EXPR:
2062 case COND_EXPR:
2063 case BIND_EXPR:
2064 break;
2066 default:
2067 /* Assume the worst for front-end tree codes. */
2068 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2069 break;
2070 return false;
2073 return true;
2076 /* Return an expr equal to X but certainly not valid as an lvalue. */
2078 tree
2079 non_lvalue_loc (location_t loc, tree x)
2081 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2082 us. */
2083 if (in_gimple_form)
2084 return x;
2086 if (! maybe_lvalue_p (x))
2087 return x;
2088 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2089 SET_EXPR_LOCATION (x, loc);
2090 return x;
2093 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2094 Zero means allow extended lvalues. */
2096 int pedantic_lvalues;
2098 /* When pedantic, return an expr equal to X but certainly not valid as a
2099 pedantic lvalue. Otherwise, return X. */
2101 static tree
2102 pedantic_non_lvalue_loc (location_t loc, tree x)
2104 if (pedantic_lvalues)
2105 return non_lvalue_loc (loc, x);
2106 protected_set_expr_location (x, loc);
2107 return x;
2110 /* Given a tree comparison code, return the code that is the logical inverse
2111 of the given code. It is not safe to do this for floating-point
2112 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2113 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2115 enum tree_code
2116 invert_tree_comparison (enum tree_code code, bool honor_nans)
2118 if (honor_nans && flag_trapping_math)
2119 return ERROR_MARK;
2121 switch (code)
2123 case EQ_EXPR:
2124 return NE_EXPR;
2125 case NE_EXPR:
2126 return EQ_EXPR;
2127 case GT_EXPR:
2128 return honor_nans ? UNLE_EXPR : LE_EXPR;
2129 case GE_EXPR:
2130 return honor_nans ? UNLT_EXPR : LT_EXPR;
2131 case LT_EXPR:
2132 return honor_nans ? UNGE_EXPR : GE_EXPR;
2133 case LE_EXPR:
2134 return honor_nans ? UNGT_EXPR : GT_EXPR;
2135 case LTGT_EXPR:
2136 return UNEQ_EXPR;
2137 case UNEQ_EXPR:
2138 return LTGT_EXPR;
2139 case UNGT_EXPR:
2140 return LE_EXPR;
2141 case UNGE_EXPR:
2142 return LT_EXPR;
2143 case UNLT_EXPR:
2144 return GE_EXPR;
2145 case UNLE_EXPR:
2146 return GT_EXPR;
2147 case ORDERED_EXPR:
2148 return UNORDERED_EXPR;
2149 case UNORDERED_EXPR:
2150 return ORDERED_EXPR;
2151 default:
2152 gcc_unreachable ();
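/* Editor's note: a worked example, added for exposition.  With NaNs
   honored, the inverse of "a < b" is "a unge b" rather than "a >= b",
   because both "a < b" and "a >= b" are false on unordered operands:

     invert_tree_comparison (LT_EXPR, false)  => GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   => UNGE_EXPR
     invert_tree_comparison (LT_EXPR, true)   => ERROR_MARK
					if flag_trapping_math is set

   The ERROR_MARK answer tells callers the inversion is unsafe: UNGE
   does not trap on unordered operands while LT does.  */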
2156 /* Similar, but return the comparison that results if the operands are
2157 swapped. This is safe for floating-point. */
2159 enum tree_code
2160 swap_tree_comparison (enum tree_code code)
2162 switch (code)
2164 case EQ_EXPR:
2165 case NE_EXPR:
2166 case ORDERED_EXPR:
2167 case UNORDERED_EXPR:
2168 case LTGT_EXPR:
2169 case UNEQ_EXPR:
2170 return code;
2171 case GT_EXPR:
2172 return LT_EXPR;
2173 case GE_EXPR:
2174 return LE_EXPR;
2175 case LT_EXPR:
2176 return GT_EXPR;
2177 case LE_EXPR:
2178 return GE_EXPR;
2179 case UNGT_EXPR:
2180 return UNLT_EXPR;
2181 case UNGE_EXPR:
2182 return UNLE_EXPR;
2183 case UNLT_EXPR:
2184 return UNGT_EXPR;
2185 case UNLE_EXPR:
2186 return UNGE_EXPR;
2187 default:
2188 gcc_unreachable ();
2193 /* Convert a comparison tree code from an enum tree_code representation
2194 into a compcode bit-based encoding. This function is the inverse of
2195 compcode_to_comparison. */
2197 static enum comparison_code
2198 comparison_to_compcode (enum tree_code code)
2200 switch (code)
2202 case LT_EXPR:
2203 return COMPCODE_LT;
2204 case EQ_EXPR:
2205 return COMPCODE_EQ;
2206 case LE_EXPR:
2207 return COMPCODE_LE;
2208 case GT_EXPR:
2209 return COMPCODE_GT;
2210 case NE_EXPR:
2211 return COMPCODE_NE;
2212 case GE_EXPR:
2213 return COMPCODE_GE;
2214 case ORDERED_EXPR:
2215 return COMPCODE_ORD;
2216 case UNORDERED_EXPR:
2217 return COMPCODE_UNORD;
2218 case UNLT_EXPR:
2219 return COMPCODE_UNLT;
2220 case UNEQ_EXPR:
2221 return COMPCODE_UNEQ;
2222 case UNLE_EXPR:
2223 return COMPCODE_UNLE;
2224 case UNGT_EXPR:
2225 return COMPCODE_UNGT;
2226 case LTGT_EXPR:
2227 return COMPCODE_LTGT;
2228 case UNGE_EXPR:
2229 return COMPCODE_UNGE;
2230 default:
2231 gcc_unreachable ();
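/* Editor's note: the bit encoding at work, added for exposition.
   COMPCODE_LT, COMPCODE_EQ and COMPCODE_GT occupy separate bits
   (1, 2 and 4, with 8 for unordered), so compound codes are unions:

     COMPCODE_LE = COMPCODE_LT | COMPCODE_EQ		   (1|2 == 3)
     COMPCODE_NE = COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD
							   (1|4|8 == 13)

   This is what lets combine_comparisons below implement the AND and OR
   of two comparisons as the bitwise AND and OR of their compcodes.  */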
2235 /* Convert a compcode bit-based encoding of a comparison operator back
2236 to GCC's enum tree_code representation. This function is the
2237 inverse of comparison_to_compcode. */
2239 static enum tree_code
2240 compcode_to_comparison (enum comparison_code code)
2242 switch (code)
2244 case COMPCODE_LT:
2245 return LT_EXPR;
2246 case COMPCODE_EQ:
2247 return EQ_EXPR;
2248 case COMPCODE_LE:
2249 return LE_EXPR;
2250 case COMPCODE_GT:
2251 return GT_EXPR;
2252 case COMPCODE_NE:
2253 return NE_EXPR;
2254 case COMPCODE_GE:
2255 return GE_EXPR;
2256 case COMPCODE_ORD:
2257 return ORDERED_EXPR;
2258 case COMPCODE_UNORD:
2259 return UNORDERED_EXPR;
2260 case COMPCODE_UNLT:
2261 return UNLT_EXPR;
2262 case COMPCODE_UNEQ:
2263 return UNEQ_EXPR;
2264 case COMPCODE_UNLE:
2265 return UNLE_EXPR;
2266 case COMPCODE_UNGT:
2267 return UNGT_EXPR;
2268 case COMPCODE_LTGT:
2269 return LTGT_EXPR;
2270 case COMPCODE_UNGE:
2271 return UNGE_EXPR;
2272 default:
2273 gcc_unreachable ();
2277 /* Return a tree for the comparison which is the combination of
2278 doing the AND or OR (depending on CODE) of the two operations LCODE
2279 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2280 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2281 if this makes the transformation invalid. */
2283 tree
2284 combine_comparisons (location_t loc,
2285 enum tree_code code, enum tree_code lcode,
2286 enum tree_code rcode, tree truth_type,
2287 tree ll_arg, tree lr_arg)
2289 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2290 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2291 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2292 int compcode;
2294 switch (code)
2296 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2297 compcode = lcompcode & rcompcode;
2298 break;
2300 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2301 compcode = lcompcode | rcompcode;
2302 break;
2304 default:
2305 return NULL_TREE;
2308 if (!honor_nans)
2310 /* Eliminate unordered comparisons, as well as LTGT and ORD
2311 which are not used unless the mode has NaNs. */
2312 compcode &= ~COMPCODE_UNORD;
2313 if (compcode == COMPCODE_LTGT)
2314 compcode = COMPCODE_NE;
2315 else if (compcode == COMPCODE_ORD)
2316 compcode = COMPCODE_TRUE;
2318 else if (flag_trapping_math)
2320 /* Check that the original operation and the optimized ones will trap
2321 under the same condition. */
2322 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2323 && (lcompcode != COMPCODE_EQ)
2324 && (lcompcode != COMPCODE_ORD);
2325 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2326 && (rcompcode != COMPCODE_EQ)
2327 && (rcompcode != COMPCODE_ORD);
2328 bool trap = (compcode & COMPCODE_UNORD) == 0
2329 && (compcode != COMPCODE_EQ)
2330 && (compcode != COMPCODE_ORD);
2332 /* In a short-circuited boolean expression the LHS might be
2333 such that the RHS, if evaluated, will never trap. For
2334 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2335 if neither x nor y is NaN. (This is a mixed blessing: for
2336 example, the expression above will never trap, hence
2337 optimizing it to x < y would be invalid). */
2338 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2339 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2340 rtrap = false;
2342 /* If the comparison was short-circuited, and only the RHS
2343 trapped, we may now generate a spurious trap. */
2344 if (rtrap && !ltrap
2345 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2346 return NULL_TREE;
2348 /* If we changed the conditions that cause a trap, we lose. */
2349 if ((ltrap || rtrap) != trap)
2350 return NULL_TREE;
2353 if (compcode == COMPCODE_TRUE)
2354 return constant_boolean_node (true, truth_type);
2355 else if (compcode == COMPCODE_FALSE)
2356 return constant_boolean_node (false, truth_type);
2357 else
2359 enum tree_code tcode;
2361 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2362 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
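/* Editor's note: a worked example, added for exposition.  For integer
   operands A and B, combining "a < b || a == b":

     combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			  boolean_type_node, a, b)

   computes compcode = COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and
   returns the tree "a <= b".  Likewise "a < b && a > b" gives
   COMPCODE_FALSE and folds to constant false.  The NaN and trapping
   logic above only matters for floating-point operands.  */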
2366 /* Return nonzero if two operands (typically of the same tree node)
2367 are necessarily equal. If either argument has side-effects this
2368 function returns zero. FLAGS modifies behavior as follows:
2370 If OEP_ONLY_CONST is set, only return nonzero for constants.
2371 This function tests whether the operands are indistinguishable;
2372 it does not test whether they are equal using C's == operation.
2373 The distinction is important for IEEE floating point, because
2374 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2375 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2377 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2378 even though it may hold multiple values during a function.
2379 This is because a GCC tree node guarantees that nothing else is
2380 executed between the evaluation of its "operands" (which may often
2381 be evaluated in arbitrary order). Hence if the operands themselves
2382 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2383 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2384 unset means assuming isochronic (or instantaneous) tree equivalence.
2385 Unless comparing arbitrary expression trees, such as from different
2386 statements, this flag can usually be left unset.
2388 If OEP_PURE_SAME is set, then pure functions with identical arguments
2389 are considered the same. It is used when the caller has other ways
2390 to ensure that global memory is unchanged in between. */
2392 int
2393 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2395 /* If either is ERROR_MARK, they aren't equal. */
2396 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2397 || TREE_TYPE (arg0) == error_mark_node
2398 || TREE_TYPE (arg1) == error_mark_node)
2399 return 0;
2401 /* Similar, if either does not have a type (like a released SSA name),
2402 they aren't equal. */
2403 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2404 return 0;
2406 /* Check equality of integer constants before bailing out due to
2407 precision differences. */
2408 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2409 return tree_int_cst_equal (arg0, arg1);
2411 /* If both types don't have the same signedness, then we can't consider
2412 them equal. We must check this before the STRIP_NOPS calls
2413 because they may change the signedness of the arguments. As pointers
2414 strictly don't have a signedness, require either two pointers or
2415 two non-pointers as well. */
2416 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2417 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2418 return 0;
2420 /* We cannot consider pointers to different address spaces equal. */
2421 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2422 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2423 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2424 return 0;
2426 /* If both types don't have the same precision, then it is not safe
2427 to strip NOPs. */
2428 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2429 return 0;
2431 STRIP_NOPS (arg0);
2432 STRIP_NOPS (arg1);
2434 /* In case both args are comparisons but with different comparison
2435 code, try to swap the comparison operands of one arg to produce
2436 a match and compare that variant. */
2437 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2438 && COMPARISON_CLASS_P (arg0)
2439 && COMPARISON_CLASS_P (arg1))
2441 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2443 if (TREE_CODE (arg0) == swap_code)
2444 return operand_equal_p (TREE_OPERAND (arg0, 0),
2445 TREE_OPERAND (arg1, 1), flags)
2446 && operand_equal_p (TREE_OPERAND (arg0, 1),
2447 TREE_OPERAND (arg1, 0), flags);
2450 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2451 /* This is needed for conversions and for COMPONENT_REF.
2452 Might as well play it safe and always test this. */
2453 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2454 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2455 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2456 return 0;
2458 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2459 We don't care about side effects in that case because the SAVE_EXPR
2460 takes care of that for us. In all other cases, two expressions are
2461 equal if they have no side effects. If we have two identical
2462 expressions with side effects that should be treated the same due
2463 to the only side effects being identical SAVE_EXPR's, that will
2464 be detected in the recursive calls below. */
2465 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2466 && (TREE_CODE (arg0) == SAVE_EXPR
2467 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2468 return 1;
2470 /* Next handle constant cases, those for which we can return 1 even
2471 if ONLY_CONST is set. */
2472 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2473 switch (TREE_CODE (arg0))
2475 case INTEGER_CST:
2476 return tree_int_cst_equal (arg0, arg1);
2478 case FIXED_CST:
2479 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2480 TREE_FIXED_CST (arg1));
2482 case REAL_CST:
2483 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2484 TREE_REAL_CST (arg1)))
2485 return 1;
2488 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2490 /* If we do not distinguish between signed and unsigned zero,
2491 consider them equal. */
2492 if (real_zerop (arg0) && real_zerop (arg1))
2493 return 1;
2495 return 0;
2497 case VECTOR_CST:
2499 tree v1, v2;
2501 v1 = TREE_VECTOR_CST_ELTS (arg0);
2502 v2 = TREE_VECTOR_CST_ELTS (arg1);
2503 while (v1 && v2)
2505 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2506 flags))
2507 return 0;
2508 v1 = TREE_CHAIN (v1);
2509 v2 = TREE_CHAIN (v2);
2512 return v1 == v2;
2515 case COMPLEX_CST:
2516 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2517 flags)
2518 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2519 flags));
2521 case STRING_CST:
2522 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2523 && ! memcmp (TREE_STRING_POINTER (arg0),
2524 TREE_STRING_POINTER (arg1),
2525 TREE_STRING_LENGTH (arg0)));
2527 case ADDR_EXPR:
2528 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2530 default:
2531 break;
2534 if (flags & OEP_ONLY_CONST)
2535 return 0;
2537 /* Define macros to test an operand from arg0 and arg1 for equality and a
2538 variant that allows null and views null as being different from any
2539 non-null value. In the latter case, if either is null, then both
2540 must be; otherwise, do the normal comparison. */
2541 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2542 TREE_OPERAND (arg1, N), flags)
2544 #define OP_SAME_WITH_NULL(N) \
2545 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2546 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2548 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2550 case tcc_unary:
2551 /* Two conversions are equal only if signedness and modes match. */
2552 switch (TREE_CODE (arg0))
2554 CASE_CONVERT:
2555 case FIX_TRUNC_EXPR:
2556 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2557 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2558 return 0;
2559 break;
2560 default:
2561 break;
2564 return OP_SAME (0);
2567 case tcc_comparison:
2568 case tcc_binary:
2569 if (OP_SAME (0) && OP_SAME (1))
2570 return 1;
2572 /* For commutative ops, allow the other order. */
2573 return (commutative_tree_code (TREE_CODE (arg0))
2574 && operand_equal_p (TREE_OPERAND (arg0, 0),
2575 TREE_OPERAND (arg1, 1), flags)
2576 && operand_equal_p (TREE_OPERAND (arg0, 1),
2577 TREE_OPERAND (arg1, 0), flags));
2579 case tcc_reference:
2580 /* If either of the pointer (or reference) expressions we are
2581 dereferencing contains a side effect, these cannot be equal. */
2582 if (TREE_SIDE_EFFECTS (arg0)
2583 || TREE_SIDE_EFFECTS (arg1))
2584 return 0;
2586 switch (TREE_CODE (arg0))
2588 case INDIRECT_REF:
2589 case MISALIGNED_INDIRECT_REF:
2590 case REALPART_EXPR:
2591 case IMAGPART_EXPR:
2592 return OP_SAME (0);
2594 case MEM_REF:
2595 /* Require equal access sizes. We can have incomplete types
2596 for array references of variable-sized arrays from the
2597 Fortran front end though. */
2598 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2599 || (TYPE_SIZE (TREE_TYPE (arg0))
2600 && TYPE_SIZE (TREE_TYPE (arg1))
2601 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2602 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2603 && OP_SAME (0) && OP_SAME (1));
2605 case ARRAY_REF:
2606 case ARRAY_RANGE_REF:
2607 /* Operands 2 and 3 may be null.
2608 Compare the array index by value first if it is constant, as the
2609 indexes may have different types but the same value here. */
2610 return (OP_SAME (0)
2611 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2612 TREE_OPERAND (arg1, 1))
2613 || OP_SAME (1))
2614 && OP_SAME_WITH_NULL (2)
2615 && OP_SAME_WITH_NULL (3));
2617 case COMPONENT_REF:
2618 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2619 may be NULL when we're called to compare MEM_EXPRs. */
2620 return OP_SAME_WITH_NULL (0)
2621 && OP_SAME (1)
2622 && OP_SAME_WITH_NULL (2);
2624 case BIT_FIELD_REF:
2625 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2627 default:
2628 return 0;
2631 case tcc_expression:
2632 switch (TREE_CODE (arg0))
2634 case ADDR_EXPR:
2635 case TRUTH_NOT_EXPR:
2636 return OP_SAME (0);
2638 case TRUTH_ANDIF_EXPR:
2639 case TRUTH_ORIF_EXPR:
2640 return OP_SAME (0) && OP_SAME (1);
2642 case TRUTH_AND_EXPR:
2643 case TRUTH_OR_EXPR:
2644 case TRUTH_XOR_EXPR:
2645 if (OP_SAME (0) && OP_SAME (1))
2646 return 1;
2648 /* Otherwise take into account this is a commutative operation. */
2649 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2650 TREE_OPERAND (arg1, 1), flags)
2651 && operand_equal_p (TREE_OPERAND (arg0, 1),
2652 TREE_OPERAND (arg1, 0), flags));
2654 case COND_EXPR:
2655 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2657 default:
2658 return 0;
2661 case tcc_vl_exp:
2662 switch (TREE_CODE (arg0))
2664 case CALL_EXPR:
2665 /* If the CALL_EXPRs call different functions, then they
2666 clearly cannot be equal. */
2667 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2668 flags))
2669 return 0;
2672 unsigned int cef = call_expr_flags (arg0);
2673 if (flags & OEP_PURE_SAME)
2674 cef &= ECF_CONST | ECF_PURE;
2675 else
2676 cef &= ECF_CONST;
2677 if (!cef)
2678 return 0;
2681 /* Now see if all the arguments are the same. */
2683 const_call_expr_arg_iterator iter0, iter1;
2684 const_tree a0, a1;
2685 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2686 a1 = first_const_call_expr_arg (arg1, &iter1);
2687 a0 && a1;
2688 a0 = next_const_call_expr_arg (&iter0),
2689 a1 = next_const_call_expr_arg (&iter1))
2690 if (! operand_equal_p (a0, a1, flags))
2691 return 0;
2693 /* If we get here and both argument lists are exhausted
2694 then the CALL_EXPRs are equal. */
2695 return ! (a0 || a1);
2697 default:
2698 return 0;
2701 case tcc_declaration:
2702 /* Consider __builtin_sqrt equal to sqrt. */
2703 return (TREE_CODE (arg0) == FUNCTION_DECL
2704 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2705 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2706 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2708 default:
2709 return 0;
2712 #undef OP_SAME
2713 #undef OP_SAME_WITH_NULL
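/* Editor's note: a small example, added for exposition.  The
   commutative branch in the tcc_binary case makes

     operand_equal_p (a_plus_b, b_plus_a, 0)

   return 1 for two PLUS_EXPR trees "a + b" and "b + a" whose operands
   are side-effect free, since PLUS_EXPR is a commutative_tree_code and
   the operands match in swapped order.  The tree names are invented
   for the example.  */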
2716 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2717 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2719 When in doubt, return 0. */
2721 static int
2722 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2724 int unsignedp1, unsignedpo;
2725 tree primarg0, primarg1, primother;
2726 unsigned int correct_width;
2728 if (operand_equal_p (arg0, arg1, 0))
2729 return 1;
2731 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2732 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2733 return 0;
2735 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2736 and see if the inner values are the same. This removes any
2737 signedness comparison, which doesn't matter here. */
2738 primarg0 = arg0, primarg1 = arg1;
2739 STRIP_NOPS (primarg0);
2740 STRIP_NOPS (primarg1);
2741 if (operand_equal_p (primarg0, primarg1, 0))
2742 return 1;
2744 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2745 actual comparison operand, ARG0.
2747 First throw away any conversions to wider types
2748 already present in the operands. */
2750 primarg1 = get_narrower (arg1, &unsignedp1);
2751 primother = get_narrower (other, &unsignedpo);
2753 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2754 if (unsignedp1 == unsignedpo
2755 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2756 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2758 tree type = TREE_TYPE (arg0);
2760 /* Make sure shorter operand is extended the right way
2761 to match the longer operand. */
2762 primarg1 = fold_convert (signed_or_unsigned_type_for
2763 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2765 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2766 return 1;
2769 return 0;
2772 /* See if ARG is an expression that is either a comparison or is performing
2773 arithmetic on comparisons. The comparisons must only be comparing
2774 two different values, which will be stored in *CVAL1 and *CVAL2; if
2775 they are nonzero it means that some operands have already been found.
2776 No variables may be used anywhere else in the expression except in the
2777 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2778 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2780 If this is true, return 1. Otherwise, return zero. */
2782 static int
2783 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2785 enum tree_code code = TREE_CODE (arg);
2786 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2788 /* We can handle some of the tcc_expression cases here. */
2789 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2790 tclass = tcc_unary;
2791 else if (tclass == tcc_expression
2792 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2793 || code == COMPOUND_EXPR))
2794 tclass = tcc_binary;
2796 else if (tclass == tcc_expression && code == SAVE_EXPR
2797 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2799 /* If we've already found a CVAL1 or CVAL2, this expression is
2800 too complex to handle. */
2801 if (*cval1 || *cval2)
2802 return 0;
2804 tclass = tcc_unary;
2805 *save_p = 1;
2808 switch (tclass)
2810 case tcc_unary:
2811 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2813 case tcc_binary:
2814 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2815 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2816 cval1, cval2, save_p));
2818 case tcc_constant:
2819 return 1;
2821 case tcc_expression:
2822 if (code == COND_EXPR)
2823 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2824 cval1, cval2, save_p)
2825 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2826 cval1, cval2, save_p)
2827 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2828 cval1, cval2, save_p));
2829 return 0;
2831 case tcc_comparison:
2832 /* First see if we can handle the first operand, then the second. For
2833 the second operand, we know *CVAL1 can't be zero. It must be that
2834 one side of the comparison is each of the values; test for the
2835 case where this isn't true by failing if the two operands
2836 are the same. */
2838 if (operand_equal_p (TREE_OPERAND (arg, 0),
2839 TREE_OPERAND (arg, 1), 0))
2840 return 0;
2842 if (*cval1 == 0)
2843 *cval1 = TREE_OPERAND (arg, 0);
2844 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2846 else if (*cval2 == 0)
2847 *cval2 = TREE_OPERAND (arg, 0);
2848 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2850 else
2851 return 0;
2853 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2855 else if (*cval2 == 0)
2856 *cval2 = TREE_OPERAND (arg, 1);
2857 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2859 else
2860 return 0;
2862 return 1;
2864 default:
2865 return 0;
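/* Editor's note: a worked example, added for exposition.  For the
   expression "a < b && b == a" with *CVAL1 and *CVAL2 initially null,
   the left comparison records *CVAL1 = a and *CVAL2 = b; the right
   comparison then matches both of its operands against the recorded
   values, so the function returns 1.  By contrast "a < b && c == a"
   fails with 0, because C matches neither recorded value.  */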
2869 /* ARG is a tree that is known to contain just arithmetic operations and
2870 comparisons. Evaluate the operations in the tree substituting NEW0 for
2871 any occurrence of OLD0 as an operand of a comparison and likewise for
2872 NEW1 and OLD1. */
2874 static tree
2875 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2876 tree old1, tree new1)
2878 tree type = TREE_TYPE (arg);
2879 enum tree_code code = TREE_CODE (arg);
2880 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2882 /* We can handle some of the tcc_expression cases here. */
2883 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2884 tclass = tcc_unary;
2885 else if (tclass == tcc_expression
2886 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2887 tclass = tcc_binary;
2889 switch (tclass)
2891 case tcc_unary:
2892 return fold_build1_loc (loc, code, type,
2893 eval_subst (loc, TREE_OPERAND (arg, 0),
2894 old0, new0, old1, new1));
2896 case tcc_binary:
2897 return fold_build2_loc (loc, code, type,
2898 eval_subst (loc, TREE_OPERAND (arg, 0),
2899 old0, new0, old1, new1),
2900 eval_subst (loc, TREE_OPERAND (arg, 1),
2901 old0, new0, old1, new1));
2903 case tcc_expression:
2904 switch (code)
2906 case SAVE_EXPR:
2907 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2908 old1, new1);
2910 case COMPOUND_EXPR:
2911 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2912 old1, new1);
2914 case COND_EXPR:
2915 return fold_build3_loc (loc, code, type,
2916 eval_subst (loc, TREE_OPERAND (arg, 0),
2917 old0, new0, old1, new1),
2918 eval_subst (loc, TREE_OPERAND (arg, 1),
2919 old0, new0, old1, new1),
2920 eval_subst (loc, TREE_OPERAND (arg, 2),
2921 old0, new0, old1, new1));
2922 default:
2923 break;
2925 /* Fall through - ??? */
2927 case tcc_comparison:
2929 tree arg0 = TREE_OPERAND (arg, 0);
2930 tree arg1 = TREE_OPERAND (arg, 1);
2932 /* We need to check both for exact equality and tree equality. The
2933 former will be true if the operand has a side-effect. In that
2934 case, we know the operand occurred exactly once. */
2936 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2937 arg0 = new0;
2938 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2939 arg0 = new1;
2941 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2942 arg1 = new0;
2943 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2944 arg1 = new1;
2946 return fold_build2_loc (loc, code, type, arg0, arg1);
2949 default:
2950 return arg;
2954 /* Return a tree for the case when the result of an expression is RESULT
2955 converted to TYPE and OMITTED was previously an operand of the expression
2956 but is now not needed (e.g., we folded OMITTED * 0).
2958 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2959 the conversion of RESULT to TYPE. */
2961 tree
2962 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2964 tree t = fold_convert_loc (loc, type, result);
2966 /* If the resulting operand is an empty statement, just return the omitted
2967 statement cast to void. */
2968 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2970 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2971 goto omit_one_operand_exit;
2974 if (TREE_SIDE_EFFECTS (omitted))
2976 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2977 goto omit_one_operand_exit;
2980 return non_lvalue_loc (loc, t);
2982 omit_one_operand_exit:
2983 protected_set_expr_location (t, loc);
2984 return t;
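/* Editor's note: a usage sketch, added for exposition.  When folding
   "f () * 0" the multiplication disappears but the call must still be
   evaluated, so a caller can write

     omit_one_operand_loc (loc, type, integer_zero_node, call)

   where CALL stands for the CALL_EXPR tree; the result is the
   COMPOUND_EXPR "f (), 0".  Without side effects in OMITTED the
   result is simply 0 wrapped as a non-lvalue.  */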
2987 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2989 static tree
2990 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2991 tree omitted)
2993 tree t = fold_convert_loc (loc, type, result);
2995 /* If the resulting operand is an empty statement, just return the omitted
2996 statement cast to void. */
2997 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2999 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3000 goto pedantic_omit_one_operand_exit;
3003 if (TREE_SIDE_EFFECTS (omitted))
3005 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3006 goto pedantic_omit_one_operand_exit;
3009 return pedantic_non_lvalue_loc (loc, t);
3011 pedantic_omit_one_operand_exit:
3012 protected_set_expr_location (t, loc);
3013 return t;
3016 /* Return a tree for the case when the result of an expression is RESULT
3017 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3018 of the expression but are now not needed.
3020 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3021 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3022 evaluated before OMITTED2. Otherwise, if neither has side effects,
3023 just do the conversion of RESULT to TYPE. */
3025 tree
3026 omit_two_operands_loc (location_t loc, tree type, tree result,
3027 tree omitted1, tree omitted2)
3029 tree t = fold_convert_loc (loc, type, result);
3031 if (TREE_SIDE_EFFECTS (omitted2))
3033 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3034 SET_EXPR_LOCATION (t, loc);
3036 if (TREE_SIDE_EFFECTS (omitted1))
3038 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3039 SET_EXPR_LOCATION (t, loc);
3042 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3046 /* Return a simplified tree node for the truth-negation of ARG. This
3047 never alters ARG itself. We assume that ARG is an operation that
3048 returns a truth value (0 or 1).
3050 FIXME: one would think we would fold the result, but it causes
3051 problems with the dominator optimizer. */
3053 tree
3054 fold_truth_not_expr (location_t loc, tree arg)
3056 tree t, type = TREE_TYPE (arg);
3057 enum tree_code code = TREE_CODE (arg);
3058 location_t loc1, loc2;
3060 /* If this is a comparison, we can simply invert it, except for
3061 floating-point non-equality comparisons, in which case we just
3062 enclose a TRUTH_NOT_EXPR around what we have. */
3064 if (TREE_CODE_CLASS (code) == tcc_comparison)
3066 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3067 if (FLOAT_TYPE_P (op_type)
3068 && flag_trapping_math
3069 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3070 && code != NE_EXPR && code != EQ_EXPR)
3071 return NULL_TREE;
3073 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3074 if (code == ERROR_MARK)
3075 return NULL_TREE;
3077 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3078 SET_EXPR_LOCATION (t, loc);
3079 return t;
3082 switch (code)
3084 case INTEGER_CST:
3085 return constant_boolean_node (integer_zerop (arg), type);
3087 case TRUTH_AND_EXPR:
3088 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3089 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3090 if (loc1 == UNKNOWN_LOCATION)
3091 loc1 = loc;
3092 if (loc2 == UNKNOWN_LOCATION)
3093 loc2 = loc;
3094 t = build2 (TRUTH_OR_EXPR, type,
3095 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3096 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3097 break;
3099 case TRUTH_OR_EXPR:
3100 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3101 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3102 if (loc1 == UNKNOWN_LOCATION)
3103 loc1 = loc;
3104 if (loc2 == UNKNOWN_LOCATION)
3105 loc2 = loc;
3106 t = build2 (TRUTH_AND_EXPR, type,
3107 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3108 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3109 break;
3111 case TRUTH_XOR_EXPR:
3112 /* Here we can invert either operand. We invert the first operand
3113 unless the second operand is a TRUTH_NOT_EXPR in which case our
3114 result is the XOR of the first operand with the inside of the
3115 negation of the second operand. */
3117 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3118 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3119 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3120 else
3121 t = build2 (TRUTH_XOR_EXPR, type,
3122 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3123 TREE_OPERAND (arg, 1));
3124 break;
3126 case TRUTH_ANDIF_EXPR:
3127 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3128 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3129 if (loc1 == UNKNOWN_LOCATION)
3130 loc1 = loc;
3131 if (loc2 == UNKNOWN_LOCATION)
3132 loc2 = loc;
3133 t = build2 (TRUTH_ORIF_EXPR, type,
3134 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3135 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3136 break;
3138 case TRUTH_ORIF_EXPR:
3139 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3140 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3141 if (loc1 == UNKNOWN_LOCATION)
3142 loc1 = loc;
3143 if (loc2 == UNKNOWN_LOCATION)
3144 loc2 = loc;
3145 t = build2 (TRUTH_ANDIF_EXPR, type,
3146 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3147 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3148 break;
3150 case TRUTH_NOT_EXPR:
3151 return TREE_OPERAND (arg, 0);
3153 case COND_EXPR:
3155 tree arg1 = TREE_OPERAND (arg, 1);
3156 tree arg2 = TREE_OPERAND (arg, 2);
3158 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3159 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3160 if (loc1 == UNKNOWN_LOCATION)
3161 loc1 = loc;
3162 if (loc2 == UNKNOWN_LOCATION)
3163 loc2 = loc;
3165 /* A COND_EXPR may have a throw as one operand, which
3166 then has void type. Just leave void operands
3167 as they are. */
3168 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3169 VOID_TYPE_P (TREE_TYPE (arg1))
3170 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3171 VOID_TYPE_P (TREE_TYPE (arg2))
3172 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3173 break;
3176 case COMPOUND_EXPR:
3177 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3178 if (loc1 == UNKNOWN_LOCATION)
3179 loc1 = loc;
3180 t = build2 (COMPOUND_EXPR, type,
3181 TREE_OPERAND (arg, 0),
3182 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3183 break;
3185 case NON_LVALUE_EXPR:
3186 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3187 if (loc1 == UNKNOWN_LOCATION)
3188 loc1 = loc;
3189 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3191 CASE_CONVERT:
3192 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3194 t = build1 (TRUTH_NOT_EXPR, type, arg);
3195 break;
3198 /* ... fall through ... */
3200 case FLOAT_EXPR:
3201 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3202 if (loc1 == UNKNOWN_LOCATION)
3203 loc1 = loc;
3204 t = build1 (TREE_CODE (arg), type,
3205 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3206 break;
3208 case BIT_AND_EXPR:
3209 if (!integer_onep (TREE_OPERAND (arg, 1)))
3210 return NULL_TREE;
3211 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3212 break;
3214 case SAVE_EXPR:
3215 t = build1 (TRUTH_NOT_EXPR, type, arg);
3216 break;
3218 case CLEANUP_POINT_EXPR:
3219 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3220 if (loc1 == UNKNOWN_LOCATION)
3221 loc1 = loc;
3222 t = build1 (CLEANUP_POINT_EXPR, type,
3223 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3224 break;
3226 default:
3227 t = NULL_TREE;
3228 break;
3231 if (t)
3232 SET_EXPR_LOCATION (t, loc);
3234 return t;
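/* Editor's note: two worked cases, added for exposition.

     !(a && b)  becomes  !a || !b	(the TRUTH_AND/TRUTH_ANDIF arms)
     !(x < y)   becomes  x >= y		for integers, or "x unge y"
					when NaNs are honored

   For floating point under -ftrapping-math, inverting an ordering
   other than ==/!= returns NULL_TREE above, and invert_truthvalue_loc
   below then falls back to wrapping the argument in TRUTH_NOT_EXPR.  */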
3237 /* Return a simplified tree node for the truth-negation of ARG. This
3238 never alters ARG itself. We assume that ARG is an operation that
3239 returns a truth value (0 or 1).
3241 FIXME: one would think we would fold the result, but it causes
3242 problems with the dominator optimizer. */
3244 tree
3245 invert_truthvalue_loc (location_t loc, tree arg)
3247 tree tem;
3249 if (TREE_CODE (arg) == ERROR_MARK)
3250 return arg;
3252 tem = fold_truth_not_expr (loc, arg);
3253 if (!tem)
3255 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3256 SET_EXPR_LOCATION (tem, loc);
3259 return tem;
3262 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3263 operands are another bit-wise operation with a common input. If so,
3264 distribute the bit operations to save an operation and possibly two if
3265 constants are involved. For example, convert
3266 (A | B) & (A | C) into A | (B & C)
3267 Further simplification will occur if B and C are constants.
3269 If this optimization cannot be done, 0 will be returned. */
3271 static tree
3272 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3273 tree arg0, tree arg1)
3275 tree common;
3276 tree left, right;
3278 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3279 || TREE_CODE (arg0) == code
3280 || (TREE_CODE (arg0) != BIT_AND_EXPR
3281 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3282 return 0;
3284 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3286 common = TREE_OPERAND (arg0, 0);
3287 left = TREE_OPERAND (arg0, 1);
3288 right = TREE_OPERAND (arg1, 1);
3290 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3292 common = TREE_OPERAND (arg0, 0);
3293 left = TREE_OPERAND (arg0, 1);
3294 right = TREE_OPERAND (arg1, 0);
3296 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3298 common = TREE_OPERAND (arg0, 1);
3299 left = TREE_OPERAND (arg0, 0);
3300 right = TREE_OPERAND (arg1, 1);
3302 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3304 common = TREE_OPERAND (arg0, 1);
3305 left = TREE_OPERAND (arg0, 0);
3306 right = TREE_OPERAND (arg1, 0);
3308 else
3309 return 0;
3311 common = fold_convert_loc (loc, type, common);
3312 left = fold_convert_loc (loc, type, left);
3313 right = fold_convert_loc (loc, type, right);
3314 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3315 fold_build2_loc (loc, code, type, left, right));
3318 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3319 with code CODE. This optimization is unsafe. */
3320 static tree
3321 distribute_real_division (location_t loc, enum tree_code code, tree type,
3322 tree arg0, tree arg1)
3324 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3325 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3327 /* (A / C) +- (B / C) -> (A +- B) / C. */
3328 if (mul0 == mul1
3329 && operand_equal_p (TREE_OPERAND (arg0, 1),
3330 TREE_OPERAND (arg1, 1), 0))
3331 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3332 fold_build2_loc (loc, code, type,
3333 TREE_OPERAND (arg0, 0),
3334 TREE_OPERAND (arg1, 0)),
3335 TREE_OPERAND (arg0, 1));
3337 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3338 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3339 TREE_OPERAND (arg1, 0), 0)
3340 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3341 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3343 REAL_VALUE_TYPE r0, r1;
3344 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3345 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3346 if (!mul0)
3347 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3348 if (!mul1)
3349 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3350 real_arithmetic (&r0, code, &r0, &r1);
3351 return fold_build2_loc (loc, MULT_EXPR, type,
3352 TREE_OPERAND (arg0, 0),
3353 build_real (type, r0));
3356 return NULL_TREE;
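/* Editor's note: a worked instance of the second pattern above, added
   for exposition.  With code == PLUS_EXPR:

     x / 4.0 + x / 8.0  ->  x * (1/4.0 + 1/8.0)  ->  x * 0.375

   The first pattern likewise rewrites a/c + b/c as (a + b)/c.  Both
   are unsafe because pre-evaluating the constant arithmetic can change
   rounding, which is why callers guard this with
   flag_unsafe_math_optimizations.  */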
3359 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3360 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3362 static tree
3363 make_bit_field_ref (location_t loc, tree inner, tree type,
3364 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3366 tree result, bftype;
3368 if (bitpos == 0)
3370 tree size = TYPE_SIZE (TREE_TYPE (inner));
3371 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3372 || POINTER_TYPE_P (TREE_TYPE (inner)))
3373 && host_integerp (size, 0)
3374 && tree_low_cst (size, 0) == bitsize)
3375 return fold_convert_loc (loc, type, inner);
3378 bftype = type;
3379 if (TYPE_PRECISION (bftype) != bitsize
3380 || TYPE_UNSIGNED (bftype) == !unsignedp)
3381 bftype = build_nonstandard_integer_type (bitsize, 0);
3383 result = build3 (BIT_FIELD_REF, bftype, inner,
3384 size_int (bitsize), bitsize_int (bitpos));
3385 SET_EXPR_LOCATION (result, loc);
3387 if (bftype != type)
3388 result = fold_convert_loc (loc, type, result);
3390 return result;
3393 /* Optimize a bit-field compare.
3395 There are two cases: First is a compare against a constant and the
3396 second is a comparison of two items where the fields are at the same
3397 bit position relative to the start of a chunk (byte, halfword, word)
3398 large enough to contain it. In these cases we can avoid the shift
3399 implicit in bitfield extractions.
3401 For constants, we emit a compare of the shifted constant with the
3402 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3403 compared. For two fields at the same position, we do the ANDs with the
3404 similar mask and compare the result of the ANDs.
3406 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3407 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3408 are the left and right operands of the comparison, respectively.
3410 If the optimization described above can be done, we return the resulting
3411 tree. Otherwise we return zero. */
3413 static tree
3414 optimize_bit_field_compare (location_t loc, enum tree_code code,
3415 tree compare_type, tree lhs, tree rhs)
3417 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3418 tree type = TREE_TYPE (lhs);
3419 tree signed_type, unsigned_type;
3420 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3421 enum machine_mode lmode, rmode, nmode;
3422 int lunsignedp, runsignedp;
3423 int lvolatilep = 0, rvolatilep = 0;
3424 tree linner, rinner = NULL_TREE;
3425 tree mask;
3426 tree offset;
3428 /* Get all the information about the extractions being done. If the bit size
3429 is the same as the size of the underlying object, we aren't doing an
3430 extraction at all and so can do nothing. We also don't want to
3431 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3432 then will no longer be able to replace it. */
3433 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3434 &lunsignedp, &lvolatilep, false);
3435 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3436 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3437 return 0;
3439 if (!const_p)
3441 /* If this is not a constant, we can only do something if bit positions,
3442 sizes, and signedness are the same. */
3443 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3444 &runsignedp, &rvolatilep, false);
3446 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3447 || lunsignedp != runsignedp || offset != 0
3448 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3449 return 0;
3452 /* See if we can find a mode to refer to this field. We should be able to,
3453 but fail if we can't. */
3454 if (lvolatilep
3455 && GET_MODE_BITSIZE (lmode) > 0
3456 && flag_strict_volatile_bitfields > 0)
3457 nmode = lmode;
3458 else
3459 nmode = get_best_mode (lbitsize, lbitpos,
3460 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3461 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3462 TYPE_ALIGN (TREE_TYPE (rinner))),
3463 word_mode, lvolatilep || rvolatilep);
3464 if (nmode == VOIDmode)
3465 return 0;
3467 /* Set signed and unsigned types of the precision of this mode for the
3468 shifts below. */
3469 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3470 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3472 /* Compute the bit position and size for the new reference and our offset
3473 within it. If the new reference is the same size as the original, we
3474 won't optimize anything, so return zero. */
3475 nbitsize = GET_MODE_BITSIZE (nmode);
3476 nbitpos = lbitpos & ~ (nbitsize - 1);
3477 lbitpos -= nbitpos;
3478 if (nbitsize == lbitsize)
3479 return 0;
3481 if (BYTES_BIG_ENDIAN)
3482 lbitpos = nbitsize - lbitsize - lbitpos;
3484 /* Make the mask to be used against the extracted field. */
3485 mask = build_int_cst_type (unsigned_type, -1);
3486 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3487 mask = const_binop (RSHIFT_EXPR, mask,
3488 size_int (nbitsize - lbitsize - lbitpos));
3490 if (! const_p)
3491 /* If not comparing with constant, just rework the comparison
3492 and return. */
3493 return fold_build2_loc (loc, code, compare_type,
3494 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3495 make_bit_field_ref (loc, linner,
3496 unsigned_type,
3497 nbitsize, nbitpos,
3499 mask),
3500 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3501 make_bit_field_ref (loc, rinner,
3502 unsigned_type,
3503 nbitsize, nbitpos,
3505 mask));
3507 /* Otherwise, we are handling the constant case. See if the constant is too
3508 big for the field. Warn and return a tree for 0 (false) if so. We do
3509 this not only for its own sake, but to avoid having to test for this
3510 error case below. If we didn't, we might generate wrong code.
3512 For unsigned fields, the constant shifted right by the field length should
3513 be all zero. For signed fields, the high-order bits should agree with
3514 the sign bit. */
3516 if (lunsignedp)
3518 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3519 fold_convert_loc (loc,
3520 unsigned_type, rhs),
3521 size_int (lbitsize))))
3523 warning (0, "comparison is always %d due to width of bit-field",
3524 code == NE_EXPR);
3525 return constant_boolean_node (code == NE_EXPR, compare_type);
3528 else
3530 tree tem = const_binop (RSHIFT_EXPR,
3531 fold_convert_loc (loc, signed_type, rhs),
3532 size_int (lbitsize - 1));
3533 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3535 warning (0, "comparison is always %d due to width of bit-field",
3536 code == NE_EXPR);
3537 return constant_boolean_node (code == NE_EXPR, compare_type);
3541 /* Single-bit compares should always be against zero. */
3542 if (lbitsize == 1 && ! integer_zerop (rhs))
3544 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3545 rhs = build_int_cst (type, 0);
3548 /* Make a new bitfield reference, shift the constant over the
3549 appropriate number of bits and mask it with the computed mask
3550 (in case this was a signed field). If we changed it, make a new one. */
3551 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3552 if (lvolatilep)
3554 TREE_SIDE_EFFECTS (lhs) = 1;
3555 TREE_THIS_VOLATILE (lhs) = 1;
3558 rhs = const_binop (BIT_AND_EXPR,
3559 const_binop (LSHIFT_EXPR,
3560 fold_convert_loc (loc, unsigned_type, rhs),
3561 size_int (lbitpos)),
3562 mask);
3564 lhs = build2 (code, compare_type,
3565 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3566 rhs);
3567 SET_EXPR_LOCATION (lhs, loc);
3568 return lhs;
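/* Editor's note: a sketch of the effect, added for exposition.  For

     struct s { unsigned a : 3; unsigned b : 9; } x;
     ... x.b == 5 ...

   extracting X.B would normally need a shift to right-justify the
   field.  The code above instead loads a mode-sized chunk, masks B's
   bits in place, and shifts the constant to B's position:

     (chunk & mask) == ((5 << 3) & mask)

   with MASK covering bits 3..11 of the chunk (bit positions flipped on
   big-endian targets, as handled above).  The exact chunk size comes
   from get_best_mode.  */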
3571 /* Subroutine for fold_truthop: decode a field reference.
3573 If EXP is a comparison reference, we return the innermost reference.
3575 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3576 set to the starting bit number.
3578 If the innermost field can be completely contained in a mode-sized
3579 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3581 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3582 otherwise it is not changed.
3584 *PUNSIGNEDP is set to the signedness of the field.
3586 *PMASK is set to the mask used. This is either contained in a
3587 BIT_AND_EXPR or derived from the width of the field.
3589 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3591 Return 0 if this is not a component reference or is one that we can't
3592 do anything with. */
3594 static tree
3595 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3596 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3597 int *punsignedp, int *pvolatilep,
3598 tree *pmask, tree *pand_mask)
3600 tree outer_type = 0;
3601 tree and_mask = 0;
3602 tree mask, inner, offset;
3603 tree unsigned_type;
3604 unsigned int precision;
3606 /* All the optimizations using this function assume integer fields.
3607 There are problems with FP fields since the type_for_size call
3608 below can fail for, e.g., XFmode. */
3609 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3610 return 0;
3612 /* We are interested in the bare arrangement of bits, so strip everything
3613 that doesn't affect the machine mode. However, record the type of the
3614 outermost expression if it may matter below. */
3615 if (CONVERT_EXPR_P (exp)
3616 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3617 outer_type = TREE_TYPE (exp);
3618 STRIP_NOPS (exp);
3620 if (TREE_CODE (exp) == BIT_AND_EXPR)
3622 and_mask = TREE_OPERAND (exp, 1);
3623 exp = TREE_OPERAND (exp, 0);
3624 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3625 if (TREE_CODE (and_mask) != INTEGER_CST)
3626 return 0;
3629 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3630 punsignedp, pvolatilep, false);
3631 if ((inner == exp && and_mask == 0)
3632 || *pbitsize < 0 || offset != 0
3633 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3634 return 0;
3636 /* If the number of bits in the reference is the same as the bitsize of
3637 the outer type, then the outer type gives the signedness. Otherwise
3638 (in case of a small bitfield) the signedness is unchanged. */
3639 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3640 *punsignedp = TYPE_UNSIGNED (outer_type);
3642 /* Compute the mask to access the bitfield. */
3643 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3644 precision = TYPE_PRECISION (unsigned_type);
3646 mask = build_int_cst_type (unsigned_type, -1);
3648 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3649 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3651 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3652 if (and_mask != 0)
3653 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3654 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3656 *pmask = mask;
3657 *pand_mask = and_mask;
3658 return inner;
3661 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3662 bit positions. */
3664 static int
3665 all_ones_mask_p (const_tree mask, int size)
3667 tree type = TREE_TYPE (mask);
3668 unsigned int precision = TYPE_PRECISION (type);
3669 tree tmask;
3671 tmask = build_int_cst_type (signed_type_for (type), -1);
3673 return
3674 tree_int_cst_equal (mask,
3675 const_binop (RSHIFT_EXPR,
3676 const_binop (LSHIFT_EXPR, tmask,
3677 size_int (precision - size)),
3678 size_int (precision - size)));
3681 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3682 represents the sign bit of EXP's type. If EXP represents a sign
3683 or zero extension, also test VAL against the unextended type.
3684 The return value is the (sub)expression whose sign bit is VAL,
3685 or NULL_TREE otherwise. */
3687 static tree
3688 sign_bit_p (tree exp, const_tree val)
3690 unsigned HOST_WIDE_INT mask_lo, lo;
3691 HOST_WIDE_INT mask_hi, hi;
3692 int width;
3693 tree t;
3695 /* Tree EXP must have an integral type. */
3696 t = TREE_TYPE (exp);
3697 if (! INTEGRAL_TYPE_P (t))
3698 return NULL_TREE;
3700 /* Tree VAL must be an integer constant. */
3701 if (TREE_CODE (val) != INTEGER_CST
3702 || TREE_OVERFLOW (val))
3703 return NULL_TREE;
3705 width = TYPE_PRECISION (t);
3706 if (width > HOST_BITS_PER_WIDE_INT)
3708 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3709 lo = 0;
3711 mask_hi = ((unsigned HOST_WIDE_INT) -1
3712 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3713 mask_lo = -1;
3715 else
3717 hi = 0;
3718 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3720 mask_hi = 0;
3721 mask_lo = ((unsigned HOST_WIDE_INT) -1
3722 >> (HOST_BITS_PER_WIDE_INT - width));
3725 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3726 treat VAL as if it were unsigned. */
3727 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3728 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3729 return exp;
3731 /* Handle extension from a narrower type. */
3732 if (TREE_CODE (exp) == NOP_EXPR
3733 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3734 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3736 return NULL_TREE;
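/* Editor's note: a concrete case, added for exposition.  For EXP of
   32-bit int type, sign_bit_p returns EXP when VAL is 0x80000000, a
   single one in bit 31, and NULL_TREE for other constants.  If EXP is
   "(int) c" for an 8-bit char C, the recursive call at the end also
   accepts 0x80, the sign bit of the narrower type.  */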
3739 /* Subroutine for fold_truthop: determine if an operand is simple enough
3740 to be evaluated unconditionally. */
3742 static int
3743 simple_operand_p (const_tree exp)
3745 /* Strip any conversions that don't change the machine mode. */
3746 STRIP_NOPS (exp);
3748 return (CONSTANT_CLASS_P (exp)
3749 || TREE_CODE (exp) == SSA_NAME
3750 || (DECL_P (exp)
3751 && ! TREE_ADDRESSABLE (exp)
3752 && ! TREE_THIS_VOLATILE (exp)
3753 && ! DECL_NONLOCAL (exp)
3754 /* Don't regard global variables as simple. They may be
3755 allocated in ways unknown to the compiler (shared memory,
3756 #pragma weak, etc). */
3757 && ! TREE_PUBLIC (exp)
3758 && ! DECL_EXTERNAL (exp)
3759 /* Loading a static variable is unduly expensive, but global
3760 registers aren't expensive. */
3761 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3764 /* The following functions are subroutines to fold_range_test and allow it to
3765 try to change a logical combination of comparisons into a range test.
3767 For example, both
3768 X == 2 || X == 3 || X == 4 || X == 5
3770 X >= 2 && X <= 5
3771 are converted to
3772 (unsigned) (X - 2) <= 3
3774 We describe each set of comparisons as being either inside or outside
3775 a range, using a variable named like IN_P, and then describe the
3776 range with a lower and upper bound. If one of the bounds is omitted,
3777 it represents either the highest or lowest value of the type.
3779 In the comments below, we represent a range by two numbers in brackets
3780 preceded by a "+" to designate being inside that range, or a "-" to
3781 designate being outside that range, so the condition can be inverted by
3782 flipping the prefix. An omitted bound is represented by a "-". For
3783 example, "- [-, 10]" means being outside the range starting at the lowest
3784 possible value and ending at 10, in other words, being greater than 10.
3785 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3786 always false.
3788 We set up things so that the missing bounds are handled in a consistent
3789 manner so neither a missing bound nor "true" and "false" need to be
3790 handled using a special case. */
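/* As a quick sanity check of the example above: for X == 1, the
   unsigned subtraction (unsigned) (1 - 2) wraps around to the maximum
   value of the type, which is certainly greater than 3, so the test
   correctly rejects values below the low bound as well as values
   above the high bound. */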
3792 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3793 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3794 and UPPER1_P are nonzero if the respective argument is an upper bound
3795 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3796 must be specified for a comparison. ARG1 will be converted to ARG0's
3797 type if both are specified. */
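/* For instance, comparing an omitted lower bound against any constant
   with LT_EXPR yields true: per the SGN encoding below, the missing
   bound behaves like a value smaller than every representable
   number. */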
3799 static tree
3800 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3801 tree arg1, int upper1_p)
3803 tree tem;
3804 int result;
3805 int sgn0, sgn1;
3807 /* If neither arg represents infinity, do the normal operation.
3808 Else, if not a comparison, return infinity. Else handle the special
3809 comparison rules. Note that most of the cases below won't occur, but
3810 are handled for consistency. */
3812 if (arg0 != 0 && arg1 != 0)
3814 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3815 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3816 STRIP_NOPS (tem);
3817 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3820 if (TREE_CODE_CLASS (code) != tcc_comparison)
3821 return 0;
3823 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3824 for neither. In real mathematics, we cannot assume open-ended ranges
3825 are the same. But this is computer arithmetic, where numbers are
3826 finite, so we can represent any unbounded range by a value Z greater
3827 than any representable number. This permits
3828 us to treat unbounded ranges as equal. */
3829 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3830 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3831 switch (code)
3833 case EQ_EXPR:
3834 result = sgn0 == sgn1;
3835 break;
3836 case NE_EXPR:
3837 result = sgn0 != sgn1;
3838 break;
3839 case LT_EXPR:
3840 result = sgn0 < sgn1;
3841 break;
3842 case LE_EXPR:
3843 result = sgn0 <= sgn1;
3844 break;
3845 case GT_EXPR:
3846 result = sgn0 > sgn1;
3847 break;
3848 case GE_EXPR:
3849 result = sgn0 >= sgn1;
3850 break;
3851 default:
3852 gcc_unreachable ();
3855 return constant_boolean_node (result, type);
3858 /* Given EXP, a logical expression, set the range it is testing into
3859 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3860 actually being tested. *PLOW and *PHIGH will be made of the same
3861 type as the returned expression. If EXP is not a comparison, we
3862 will most likely not be returning a useful value and range. Set
3863 *STRICT_OVERFLOW_P to true if the return value is only valid
3864 because signed overflow is undefined; otherwise, do not change
3865 *STRICT_OVERFLOW_P. */
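/* For example, for EXP = (x == 5) this returns x with the range
   + [5, 5]; for a bare variable used as a boolean it returns the
   variable itself with - [0, 0], i.e. the initial "EXP != 0" test
   described below. */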
3867 tree
3868 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3869 bool *strict_overflow_p)
3871 enum tree_code code;
3872 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3873 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3874 int in_p, n_in_p;
3875 tree low, high, n_low, n_high;
3876 location_t loc = EXPR_LOCATION (exp);
3878 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3879 and see if we can refine the range. Some of the cases below may not
3880 happen, but it doesn't seem worth worrying about this. We "continue"
3881 the outer loop when we've changed something; otherwise we "break"
3882 the switch, which will "break" the while. */
3884 in_p = 0;
3885 low = high = build_int_cst (TREE_TYPE (exp), 0);
3887 while (1)
3889 code = TREE_CODE (exp);
3890 exp_type = TREE_TYPE (exp);
3892 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3894 if (TREE_OPERAND_LENGTH (exp) > 0)
3895 arg0 = TREE_OPERAND (exp, 0);
3896 if (TREE_CODE_CLASS (code) == tcc_comparison
3897 || TREE_CODE_CLASS (code) == tcc_unary
3898 || TREE_CODE_CLASS (code) == tcc_binary)
3899 arg0_type = TREE_TYPE (arg0);
3900 if (TREE_CODE_CLASS (code) == tcc_binary
3901 || TREE_CODE_CLASS (code) == tcc_comparison
3902 || (TREE_CODE_CLASS (code) == tcc_expression
3903 && TREE_OPERAND_LENGTH (exp) > 1))
3904 arg1 = TREE_OPERAND (exp, 1);
3907 switch (code)
3909 case TRUTH_NOT_EXPR:
3910 in_p = ! in_p, exp = arg0;
3911 continue;
3913 case EQ_EXPR: case NE_EXPR:
3914 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3915 /* We can only do something if the range is testing for zero
3916 and if the second operand is an integer constant. Note that
3917 saying something is "in" the range we make is done by
3918 complementing IN_P, since IN_P is set for the initial case of
3919 being not equal to zero; "out" leaves it alone. */
3920 if (low == 0 || high == 0
3921 || ! integer_zerop (low) || ! integer_zerop (high)
3922 || TREE_CODE (arg1) != INTEGER_CST)
3923 break;
3925 switch (code)
3927 case NE_EXPR: /* - [c, c] */
3928 low = high = arg1;
3929 break;
3930 case EQ_EXPR: /* + [c, c] */
3931 in_p = ! in_p, low = high = arg1;
3932 break;
3933 case GT_EXPR: /* - [-, c] */
3934 low = 0, high = arg1;
3935 break;
3936 case GE_EXPR: /* + [c, -] */
3937 in_p = ! in_p, low = arg1, high = 0;
3938 break;
3939 case LT_EXPR: /* - [c, -] */
3940 low = arg1, high = 0;
3941 break;
3942 case LE_EXPR: /* + [-, c] */
3943 in_p = ! in_p, low = 0, high = arg1;
3944 break;
3945 default:
3946 gcc_unreachable ();
3949 /* If this is an unsigned comparison, we also know that EXP is
3950 greater than or equal to zero. We base the range tests we make
3951 on that fact, so we record it here so we can parse existing
3952 range tests. We test arg0_type since often the return type
3953 of, e.g. EQ_EXPR, is boolean. */
3954 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3956 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3957 in_p, low, high, 1,
3958 build_int_cst (arg0_type, 0),
3959 NULL_TREE))
3960 break;
3962 in_p = n_in_p, low = n_low, high = n_high;
3964 /* If the high bound is missing, but we have a nonzero low
3965 bound, reverse the range so it goes from zero to the low bound
3966 minus 1. */
3967 if (high == 0 && low && ! integer_zerop (low))
3969 in_p = ! in_p;
3970 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3971 integer_one_node, 0);
3972 low = build_int_cst (arg0_type, 0);
3976 exp = arg0;
3977 continue;
3979 case NEGATE_EXPR:
3980 /* (-x) IN [a,b] -> x in [-b, -a] */
3981 n_low = range_binop (MINUS_EXPR, exp_type,
3982 build_int_cst (exp_type, 0),
3983 0, high, 1);
3984 n_high = range_binop (MINUS_EXPR, exp_type,
3985 build_int_cst (exp_type, 0),
3986 0, low, 0);
3987 low = n_low, high = n_high;
3988 exp = arg0;
3989 continue;
3991 case BIT_NOT_EXPR:
3992 /* ~ X -> -X - 1 */
3993 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3994 build_int_cst (exp_type, 1));
3995 SET_EXPR_LOCATION (exp, loc);
3996 continue;
3998 case PLUS_EXPR: case MINUS_EXPR:
3999 if (TREE_CODE (arg1) != INTEGER_CST)
4000 break;
4002 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4003 move a constant to the other side. */
4004 if (!TYPE_UNSIGNED (arg0_type)
4005 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4006 break;
4008 /* If EXP is signed, any overflow in the computation is undefined,
4009 so we don't worry about it so long as our computations on
4010 the bounds don't overflow. For unsigned, overflow is defined
4011 and this is exactly the right thing. */
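/* E.g. for unsigned char x, the test "x + 10 in + [0, 5]" becomes
   "x in + [246, 251]": 10 is subtracted from both bounds, wrapping
   modulo 256. */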
4012 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4013 arg0_type, low, 0, arg1, 0);
4014 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4015 arg0_type, high, 1, arg1, 0);
4016 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4017 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4018 break;
4020 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4021 *strict_overflow_p = true;
4023 /* Check for an unsigned range which has wrapped around the maximum
4024 value thus making n_high < n_low, and normalize it. */
4025 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4027 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4028 integer_one_node, 0);
4029 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4030 integer_one_node, 0);
4032 /* If the range is of the form +/- [ x+1, x ], we won't
4033 be able to normalize it. But then, it represents the
4034 whole range or the empty set, so make it
4035 +/- [ -, - ]. */
4036 if (tree_int_cst_equal (n_low, low)
4037 && tree_int_cst_equal (n_high, high))
4038 low = high = 0;
4039 else
4040 in_p = ! in_p;
4042 else
4043 low = n_low, high = n_high;
4045 exp = arg0;
4046 continue;
4048 CASE_CONVERT: case NON_LVALUE_EXPR:
4049 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4050 break;
4052 if (! INTEGRAL_TYPE_P (arg0_type)
4053 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4054 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4055 break;
4057 n_low = low, n_high = high;
4059 if (n_low != 0)
4060 n_low = fold_convert_loc (loc, arg0_type, n_low);
4062 if (n_high != 0)
4063 n_high = fold_convert_loc (loc, arg0_type, n_high);
4066 /* If we're converting arg0 from an unsigned type to exp's
4067 signed type, we will be doing the comparison as unsigned.
4068 The tests above have already verified that LOW and HIGH
4069 are both positive.
4071 So we have to ensure that we will handle large unsigned
4072 values the same way that the current signed bounds treat
4073 negative values. */
4075 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4077 tree high_positive;
4078 tree equiv_type;
4079 /* For fixed-point modes, we need to pass the saturating flag
4080 as the 2nd parameter. */
4081 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4082 equiv_type = lang_hooks.types.type_for_mode
4083 (TYPE_MODE (arg0_type),
4084 TYPE_SATURATING (arg0_type));
4085 else
4086 equiv_type = lang_hooks.types.type_for_mode
4087 (TYPE_MODE (arg0_type), 1);
4089 /* A range without an upper bound is, naturally, unbounded.
4090 Since convert would have cropped a very large value, use
4091 the max value for the destination type. */
4092 high_positive
4093 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4094 : TYPE_MAX_VALUE (arg0_type);
4096 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4097 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4098 fold_convert_loc (loc, arg0_type,
4099 high_positive),
4100 build_int_cst (arg0_type, 1));
4102 /* If the low bound is specified, "and" the range with the
4103 range for which the original unsigned value will be
4104 positive. */
4105 if (low != 0)
4107 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4108 1, n_low, n_high, 1,
4109 fold_convert_loc (loc, arg0_type,
4110 integer_zero_node),
4111 high_positive))
4112 break;
4114 in_p = (n_in_p == in_p);
4116 else
4118 /* Otherwise, "or" the range with the range of the input
4119 that will be interpreted as negative. */
4120 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4121 0, n_low, n_high, 1,
4122 fold_convert_loc (loc, arg0_type,
4123 integer_zero_node),
4124 high_positive))
4125 break;
4127 in_p = (in_p != n_in_p);
4131 exp = arg0;
4132 low = n_low, high = n_high;
4133 continue;
4135 default:
4136 break;
4139 break;
4142 /* If EXP is a constant, we can evaluate whether this is true or false. */
4143 if (TREE_CODE (exp) == INTEGER_CST)
4145 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4146 exp, 0, low, 0))
4147 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4148 exp, 1, high, 1)));
4149 low = high = 0;
4150 exp = 0;
4153 *pin_p = in_p, *plow = low, *phigh = high;
4154 return exp;
4157 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4158 type, TYPE, return an expression to test if EXP is in (or out of, depending
4159 on IN_P) the range. Return 0 if the test couldn't be created. */
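/* For instance, a test that EXP is in [2, 5], for a type with
   wrap-around arithmetic, works its way to the subtraction path at
   the bottom and comes back as the canonical form
   (unsigned) (EXP - 2) <= 3. */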
4161 tree
4162 build_range_check (location_t loc, tree type, tree exp, int in_p,
4163 tree low, tree high)
4165 tree etype = TREE_TYPE (exp), value;
4167 #ifdef HAVE_canonicalize_funcptr_for_compare
4168 /* Disable this optimization for function pointer expressions
4169 on targets that require function pointer canonicalization. */
4170 if (HAVE_canonicalize_funcptr_for_compare
4171 && TREE_CODE (etype) == POINTER_TYPE
4172 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4173 return NULL_TREE;
4174 #endif
4176 if (! in_p)
4178 value = build_range_check (loc, type, exp, 1, low, high);
4179 if (value != 0)
4180 return invert_truthvalue_loc (loc, value);
4182 return 0;
4185 if (low == 0 && high == 0)
4186 return build_int_cst (type, 1);
4188 if (low == 0)
4189 return fold_build2_loc (loc, LE_EXPR, type, exp,
4190 fold_convert_loc (loc, etype, high));
4192 if (high == 0)
4193 return fold_build2_loc (loc, GE_EXPR, type, exp,
4194 fold_convert_loc (loc, etype, low));
4196 if (operand_equal_p (low, high, 0))
4197 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4198 fold_convert_loc (loc, etype, low));
4200 if (integer_zerop (low))
4202 if (! TYPE_UNSIGNED (etype))
4204 etype = unsigned_type_for (etype);
4205 high = fold_convert_loc (loc, etype, high);
4206 exp = fold_convert_loc (loc, etype, exp);
4208 return build_range_check (loc, type, exp, 1, 0, high);
4211 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4212 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4214 unsigned HOST_WIDE_INT lo;
4215 HOST_WIDE_INT hi;
4216 int prec;
4218 prec = TYPE_PRECISION (etype);
4219 if (prec <= HOST_BITS_PER_WIDE_INT)
4221 hi = 0;
4222 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4224 else
4226 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4227 lo = (unsigned HOST_WIDE_INT) -1;
4230 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4232 if (TYPE_UNSIGNED (etype))
4234 tree signed_etype = signed_type_for (etype);
4235 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4236 etype
4237 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4238 else
4239 etype = signed_etype;
4240 exp = fold_convert_loc (loc, etype, exp);
4242 return fold_build2_loc (loc, GT_EXPR, type, exp,
4243 build_int_cst (etype, 0));
4247 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4248 This requires wrap-around arithmetic for the type of the expression.
4249 First make sure that arithmetic in this type is valid, then make sure
4250 that it wraps around. */
4251 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4252 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4253 TYPE_UNSIGNED (etype));
4255 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4257 tree utype, minv, maxv;
4259 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4260 for the type in question, as we rely on this here. */
4261 utype = unsigned_type_for (etype);
4262 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4263 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4264 integer_one_node, 1);
4265 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4267 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4268 minv, 1, maxv, 1)))
4269 etype = utype;
4270 else
4271 return 0;
4274 high = fold_convert_loc (loc, etype, high);
4275 low = fold_convert_loc (loc, etype, low);
4276 exp = fold_convert_loc (loc, etype, exp);
4278 value = const_binop (MINUS_EXPR, high, low);
4281 if (POINTER_TYPE_P (etype))
4283 if (value != 0 && !TREE_OVERFLOW (value))
4285 low = fold_convert_loc (loc, sizetype, low);
4286 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4287 return build_range_check (loc, type,
4288 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4289 etype, exp, low),
4290 1, build_int_cst (etype, 0), value);
4292 return 0;
4295 if (value != 0 && !TREE_OVERFLOW (value))
4296 return build_range_check (loc, type,
4297 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4298 1, build_int_cst (etype, 0), value);
4300 return 0;
4303 /* Return the predecessor of VAL in its type, handling the infinite case. */
4305 static tree
4306 range_predecessor (tree val)
4308 tree type = TREE_TYPE (val);
4310 if (INTEGRAL_TYPE_P (type)
4311 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4312 return 0;
4313 else
4314 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4317 /* Return the successor of VAL in its type, handling the infinite case. */
4319 static tree
4320 range_successor (tree val)
4322 tree type = TREE_TYPE (val);
4324 if (INTEGRAL_TYPE_P (type)
4325 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4326 return 0;
4327 else
4328 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4331 /* Given two ranges, see if we can merge them into one. Return 1 if we
4332 can, 0 if we can't. Set the output range into the specified parameters. */
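/* For example, merging + [2, 10] with + [6, 14] (both "in", as for a
   logical AND) finds overlap without subsumption, so the result is
   the intersection + [6, 10]. */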
4334 bool
4335 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4336 tree high0, int in1_p, tree low1, tree high1)
4338 int no_overlap;
4339 int subset;
4340 int temp;
4341 tree tem;
4342 int in_p;
4343 tree low, high;
4344 int lowequal = ((low0 == 0 && low1 == 0)
4345 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4346 low0, 0, low1, 0)));
4347 int highequal = ((high0 == 0 && high1 == 0)
4348 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4349 high0, 1, high1, 1)));
4351 /* Make range 0 be the range that starts first, or ends last if they
4352 start at the same value. Swap them if that is not already the case. */
4353 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4354 low0, 0, low1, 0))
4355 || (lowequal
4356 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4357 high1, 1, high0, 1))))
4359 temp = in0_p, in0_p = in1_p, in1_p = temp;
4360 tem = low0, low0 = low1, low1 = tem;
4361 tem = high0, high0 = high1, high1 = tem;
4364 /* Now flag two cases, whether the ranges are disjoint or whether the
4365 second range is totally subsumed in the first. Note that the tests
4366 below are simplified by the ones above. */
4367 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4368 high0, 1, low1, 0));
4369 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4370 high1, 1, high0, 1));
4372 /* We now have four cases, depending on whether we are including or
4373 excluding the two ranges. */
4374 if (in0_p && in1_p)
4376 /* If they don't overlap, the result is false. If the second range
4377 is a subset it is the result. Otherwise, the range is from the start
4378 of the second to the end of the first. */
4379 if (no_overlap)
4380 in_p = 0, low = high = 0;
4381 else if (subset)
4382 in_p = 1, low = low1, high = high1;
4383 else
4384 in_p = 1, low = low1, high = high0;
4387 else if (in0_p && ! in1_p)
4389 /* If they don't overlap, the result is the first range. If they are
4390 equal, the result is false. If the second range is a subset of the
4391 first, and the ranges begin at the same place, we go from just after
4392 the end of the second range to the end of the first. If the second
4393 range is not a subset of the first, or if it is a subset and both
4394 ranges end at the same place, the range starts at the start of the
4395 first range and ends just before the second range.
4396 Otherwise, we can't describe this as a single range. */
4397 if (no_overlap)
4398 in_p = 1, low = low0, high = high0;
4399 else if (lowequal && highequal)
4400 in_p = 0, low = high = 0;
4401 else if (subset && lowequal)
4403 low = range_successor (high1);
4404 high = high0;
4405 in_p = 1;
4406 if (low == 0)
4408 /* We are in the weird situation where high0 > high1 but
4409 high1 has no successor. Punt. */
4410 return 0;
4413 else if (! subset || highequal)
4415 low = low0;
4416 high = range_predecessor (low1);
4417 in_p = 1;
4418 if (high == 0)
4420 /* low0 < low1 but low1 has no predecessor. Punt. */
4421 return 0;
4424 else
4425 return 0;
4428 else if (! in0_p && in1_p)
4430 /* If they don't overlap, the result is the second range. If the second
4431 is a subset of the first, the result is false. Otherwise,
4432 the range starts just after the first range and ends at the
4433 end of the second. */
4434 if (no_overlap)
4435 in_p = 1, low = low1, high = high1;
4436 else if (subset || highequal)
4437 in_p = 0, low = high = 0;
4438 else
4440 low = range_successor (high0);
4441 high = high1;
4442 in_p = 1;
4443 if (low == 0)
4445 /* high1 > high0 but high0 has no successor. Punt. */
4446 return 0;
4451 else
4453 /* The case where we are excluding both ranges. Here the complex case
4454 is if they don't overlap. In that case, the only time we have a
4455 range is if they are adjacent. If the second is a subset of the
4456 first, the result is the first. Otherwise, the range to exclude
4457 starts at the beginning of the first range and ends at the end of the
4458 second. */
4459 if (no_overlap)
4461 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4462 range_successor (high0),
4463 1, low1, 0)))
4464 in_p = 0, low = low0, high = high1;
4465 else
4467 /* Canonicalize - [min, x] into - [-, x]. */
4468 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4469 switch (TREE_CODE (TREE_TYPE (low0)))
4471 case ENUMERAL_TYPE:
4472 if (TYPE_PRECISION (TREE_TYPE (low0))
4473 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4474 break;
4475 /* FALLTHROUGH */
4476 case INTEGER_TYPE:
4477 if (tree_int_cst_equal (low0,
4478 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4479 low0 = 0;
4480 break;
4481 case POINTER_TYPE:
4482 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4483 && integer_zerop (low0))
4484 low0 = 0;
4485 break;
4486 default:
4487 break;
4490 /* Canonicalize - [x, max] into - [x, -]. */
4491 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4492 switch (TREE_CODE (TREE_TYPE (high1)))
4494 case ENUMERAL_TYPE:
4495 if (TYPE_PRECISION (TREE_TYPE (high1))
4496 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4497 break;
4498 /* FALLTHROUGH */
4499 case INTEGER_TYPE:
4500 if (tree_int_cst_equal (high1,
4501 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4502 high1 = 0;
4503 break;
4504 case POINTER_TYPE:
4505 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4506 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4507 high1, 1,
4508 integer_one_node, 1)))
4509 high1 = 0;
4510 break;
4511 default:
4512 break;
4515 /* The ranges might also be adjacent between the maximum and
4516 minimum values of the given type. For
4517 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4518 return + [x + 1, y - 1]. */
4519 if (low0 == 0 && high1 == 0)
4521 low = range_successor (high0);
4522 high = range_predecessor (low1);
4523 if (low == 0 || high == 0)
4524 return 0;
4526 in_p = 1;
4528 else
4529 return 0;
4532 else if (subset)
4533 in_p = 0, low = low0, high = high0;
4534 else
4535 in_p = 0, low = low0, high = high1;
4538 *pin_p = in_p, *plow = low, *phigh = high;
4539 return 1;
4543 /* Subroutine of fold, looking inside expressions of the form
4544 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4545 of the COND_EXPR. This function is also used to optimize
4546 A op B ? C : A, by reversing the comparison first.
4548 Return a folded expression whose code is not a COND_EXPR
4549 anymore, or NULL_TREE if no folding opportunity is found. */
4551 static tree
4552 fold_cond_expr_with_comparison (location_t loc, tree type,
4553 tree arg0, tree arg1, tree arg2)
4555 enum tree_code comp_code = TREE_CODE (arg0);
4556 tree arg00 = TREE_OPERAND (arg0, 0);
4557 tree arg01 = TREE_OPERAND (arg0, 1);
4558 tree arg1_type = TREE_TYPE (arg1);
4559 tree tem;
4561 STRIP_NOPS (arg1);
4562 STRIP_NOPS (arg2);
4564 /* If we have A op 0 ? A : -A, consider applying the following
4565 transformations:
4567 A == 0? A : -A same as -A
4568 A != 0? A : -A same as A
4569 A >= 0? A : -A same as abs (A)
4570 A > 0? A : -A same as abs (A)
4571 A <= 0? A : -A same as -abs (A)
4572 A < 0? A : -A same as -abs (A)
4574 None of these transformations work for modes with signed
4575 zeros. If A is +/-0, the first two transformations will
4576 change the sign of the result (from +0 to -0, or vice
4577 versa). The last four will fix the sign of the result,
4578 even though the original expressions could be positive or
4579 negative, depending on the sign of A.
4581 Note that all these transformations are correct if A is
4582 NaN, since the two alternatives (A and -A) are also NaNs. */
4583 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4584 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4585 ? real_zerop (arg01)
4586 : integer_zerop (arg01))
4587 && ((TREE_CODE (arg2) == NEGATE_EXPR
4588 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4589 /* In the case that A is of the form X-Y, '-A' (arg2) may
4590 have already been folded to Y-X, check for that. */
4591 || (TREE_CODE (arg1) == MINUS_EXPR
4592 && TREE_CODE (arg2) == MINUS_EXPR
4593 && operand_equal_p (TREE_OPERAND (arg1, 0),
4594 TREE_OPERAND (arg2, 1), 0)
4595 && operand_equal_p (TREE_OPERAND (arg1, 1),
4596 TREE_OPERAND (arg2, 0), 0))))
4597 switch (comp_code)
4599 case EQ_EXPR:
4600 case UNEQ_EXPR:
4601 tem = fold_convert_loc (loc, arg1_type, arg1);
4602 return pedantic_non_lvalue_loc (loc,
4603 fold_convert_loc (loc, type,
4604 negate_expr (tem)));
4605 case NE_EXPR:
4606 case LTGT_EXPR:
4607 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4608 case UNGE_EXPR:
4609 case UNGT_EXPR:
4610 if (flag_trapping_math)
4611 break;
4612 /* Fall through. */
4613 case GE_EXPR:
4614 case GT_EXPR:
4615 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4616 arg1 = fold_convert_loc (loc, signed_type_for
4617 (TREE_TYPE (arg1)), arg1);
4618 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4619 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4620 case UNLE_EXPR:
4621 case UNLT_EXPR:
4622 if (flag_trapping_math)
4623 break;
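/* Fall through. */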
4624 case LE_EXPR:
4625 case LT_EXPR:
4626 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4627 arg1 = fold_convert_loc (loc, signed_type_for
4628 (TREE_TYPE (arg1)), arg1);
4629 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4630 return negate_expr (fold_convert_loc (loc, type, tem));
4631 default:
4632 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4633 break;
4636 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4637 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4638 both transformations are correct when A is NaN: A != 0
4639 is then true, and A == 0 is false. */
4641 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4642 && integer_zerop (arg01) && integer_zerop (arg2))
4644 if (comp_code == NE_EXPR)
4645 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4646 else if (comp_code == EQ_EXPR)
4647 return build_int_cst (type, 0);
4650 /* Try some transformations of A op B ? A : B.
4652 A == B? A : B same as B
4653 A != B? A : B same as A
4654 A >= B? A : B same as max (A, B)
4655 A > B? A : B same as max (B, A)
4656 A <= B? A : B same as min (A, B)
4657 A < B? A : B same as min (B, A)
4659 As above, these transformations don't work in the presence
4660 of signed zeros. For example, if A and B are zeros of
4661 opposite sign, the first two transformations will change
4662 the sign of the result. In the last four, the original
4663 expressions give different results for (A=+0, B=-0) and
4664 (A=-0, B=+0), but the transformed expressions do not.
4666 The first two transformations are correct if either A or B
4667 is a NaN. In the first transformation, the condition will
4668 be false, and B will indeed be chosen. In the case of the
4669 second transformation, the condition A != B will be true,
4670 and A will be chosen.
4672 The conversions to max() and min() are not correct if B is
4673 a number and A is not. The conditions in the original
4674 expressions will be false, so all four give B. The min()
4675 and max() versions would give a NaN instead. */
4676 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4677 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4678 /* Avoid these transformations if the COND_EXPR may be used
4679 as an lvalue in the C++ front-end. PR c++/19199. */
4680 && (in_gimple_form
4681 || (strcmp (lang_hooks.name, "GNU C++") != 0
4682 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4683 || ! maybe_lvalue_p (arg1)
4684 || ! maybe_lvalue_p (arg2)))
4686 tree comp_op0 = arg00;
4687 tree comp_op1 = arg01;
4688 tree comp_type = TREE_TYPE (comp_op0);
4690 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4691 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4693 comp_type = type;
4694 comp_op0 = arg1;
4695 comp_op1 = arg2;
4698 switch (comp_code)
4700 case EQ_EXPR:
4701 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4702 case NE_EXPR:
4703 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4704 case LE_EXPR:
4705 case LT_EXPR:
4706 case UNLE_EXPR:
4707 case UNLT_EXPR:
4708 /* In C++ a ?: expression can be an lvalue, so put the
4709 operand which will be used if they are equal first
4710 so that we can convert this back to the
4711 corresponding COND_EXPR. */
4712 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4714 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4715 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4716 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4717 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4718 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4719 comp_op1, comp_op0);
4720 return pedantic_non_lvalue_loc (loc,
4721 fold_convert_loc (loc, type, tem));
4723 break;
4724 case GE_EXPR:
4725 case GT_EXPR:
4726 case UNGE_EXPR:
4727 case UNGT_EXPR:
4728 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4730 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4731 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4732 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4733 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4734 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4735 comp_op1, comp_op0);
4736 return pedantic_non_lvalue_loc (loc,
4737 fold_convert_loc (loc, type, tem));
4739 break;
4740 case UNEQ_EXPR:
4741 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4742 return pedantic_non_lvalue_loc (loc,
4743 fold_convert_loc (loc, type, arg2));
4744 break;
4745 case LTGT_EXPR:
4746 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4747 return pedantic_non_lvalue_loc (loc,
4748 fold_convert_loc (loc, type, arg1));
4749 break;
4750 default:
4751 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4752 break;
4756 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4757 we might still be able to simplify this. For example,
4758 if C1 is one less or one more than C2, this might have started
4759 out as a MIN or MAX and been transformed by this function.
4760 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4762 if (INTEGRAL_TYPE_P (type)
4763 && TREE_CODE (arg01) == INTEGER_CST
4764 && TREE_CODE (arg2) == INTEGER_CST)
4765 switch (comp_code)
4767 case EQ_EXPR:
4768 if (TREE_CODE (arg1) == INTEGER_CST)
4769 break;
4770 /* We can replace A with C1 in this case. */
4771 arg1 = fold_convert_loc (loc, type, arg01);
4772 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4774 case LT_EXPR:
4775 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4776 MIN_EXPR, to preserve the signedness of the comparison. */
4777 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4778 OEP_ONLY_CONST)
4779 && operand_equal_p (arg01,
4780 const_binop (PLUS_EXPR, arg2,
4781 build_int_cst (type, 1)),
4782 OEP_ONLY_CONST))
4784 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4785 fold_convert_loc (loc, TREE_TYPE (arg00),
4786 arg2));
4787 return pedantic_non_lvalue_loc (loc,
4788 fold_convert_loc (loc, type, tem));
4790 break;
4792 case LE_EXPR:
4793 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4794 as above. */
4795 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4796 OEP_ONLY_CONST)
4797 && operand_equal_p (arg01,
4798 const_binop (MINUS_EXPR, arg2,
4799 build_int_cst (type, 1)),
4800 OEP_ONLY_CONST))
4802 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4803 fold_convert_loc (loc, TREE_TYPE (arg00),
4804 arg2));
4805 return pedantic_non_lvalue_loc (loc,
4806 fold_convert_loc (loc, type, tem));
4808 break;
4810 case GT_EXPR:
4811 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4812 MAX_EXPR, to preserve the signedness of the comparison. */
4813 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4814 OEP_ONLY_CONST)
4815 && operand_equal_p (arg01,
4816 const_binop (MINUS_EXPR, arg2,
4817 build_int_cst (type, 1)),
4818 OEP_ONLY_CONST))
4820 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4821 fold_convert_loc (loc, TREE_TYPE (arg00),
4822 arg2));
4823 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4825 break;
4827 case GE_EXPR:
4828 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4829 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4830 OEP_ONLY_CONST)
4831 && operand_equal_p (arg01,
4832 const_binop (PLUS_EXPR, arg2,
4833 build_int_cst (type, 1)),
4834 OEP_ONLY_CONST))
4836 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4837 fold_convert_loc (loc, TREE_TYPE (arg00),
4838 arg2));
4839 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4841 break;
4842 case NE_EXPR:
4843 break;
4844 default:
4845 gcc_unreachable ();
4848 return NULL_TREE;
4853 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4854 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4855 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4856 false) >= 2)
4857 #endif
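/* When LOGICAL_OP_NON_SHORT_CIRCUIT is nonzero, fold_range_test and
   fold_truthop below may rewrite a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR
   over simple operands into the non-short-circuit TRUTH_AND_EXPR or
   TRUTH_OR_EXPR. The default enables this when BRANCH_COST is at
   least 2, i.e. when the target considers branches expensive. */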
4859 /* EXP is some logical combination of boolean tests. See if we can
4860 merge it into some range test. Return the new tree if so. */
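/* E.g. ch >= '0' && ch <= '9' is recognized as ch in + ['0', '9'],
   which build_range_check then turns into
   (unsigned) (ch - '0') <= 9. */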
4862 static tree
4863 fold_range_test (location_t loc, enum tree_code code, tree type,
4864 tree op0, tree op1)
4866 int or_op = (code == TRUTH_ORIF_EXPR
4867 || code == TRUTH_OR_EXPR);
4868 int in0_p, in1_p, in_p;
4869 tree low0, low1, low, high0, high1, high;
4870 bool strict_overflow_p = false;
4871 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4872 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4873 tree tem;
4874 const char * const warnmsg = G_("assuming signed overflow does not occur "
4875 "when simplifying range test");
4877 /* If this is an OR operation, invert both sides; we will invert
4878 again at the end. */
4879 if (or_op)
4880 in0_p = ! in0_p, in1_p = ! in1_p;
4882 /* If both expressions are the same, if we can merge the ranges, and we
4883 can build the range test, return it or it inverted. If one of the
4884 ranges is always true or always false, consider it to be the same
4885 expression as the other. */
4886 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4887 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4888 in1_p, low1, high1)
4889 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4890 lhs != 0 ? lhs
4891 : rhs != 0 ? rhs : integer_zero_node,
4892 in_p, low, high))))
4894 if (strict_overflow_p)
4895 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4896 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4899 /* On machines where branches are expensive, if this is a
4900 short-circuited branch and the underlying object on both sides
4901 is the same, make a non-short-circuit operation. */
4902 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4903 && lhs != 0 && rhs != 0
4904 && (code == TRUTH_ANDIF_EXPR
4905 || code == TRUTH_ORIF_EXPR)
4906 && operand_equal_p (lhs, rhs, 0))
4908 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4909 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4910 which cases we can't do this. */
4911 if (simple_operand_p (lhs))
4913 tem = build2 (code == TRUTH_ANDIF_EXPR
4914 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4915 type, op0, op1);
4916 SET_EXPR_LOCATION (tem, loc);
4917 return tem;
4920 else if (lang_hooks.decls.global_bindings_p () == 0
4921 && ! CONTAINS_PLACEHOLDER_P (lhs))
4923 tree common = save_expr (lhs);
4925 if (0 != (lhs = build_range_check (loc, type, common,
4926 or_op ? ! in0_p : in0_p,
4927 low0, high0))
4928 && (0 != (rhs = build_range_check (loc, type, common,
4929 or_op ? ! in1_p : in1_p,
4930 low1, high1))))
4932 if (strict_overflow_p)
4933 fold_overflow_warning (warnmsg,
4934 WARN_STRICT_OVERFLOW_COMPARISON);
4935 tem = build2 (code == TRUTH_ANDIF_EXPR
4936 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4937 type, lhs, rhs);
4938 SET_EXPR_LOCATION (tem, loc);
4939 return tem;
4944 return 0;
4947 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4948 bit value. Arrange things so the extra bits will be set to zero if and
4949 only if C is sign-extended to its full width. If MASK is nonzero,
4950 it is an INTEGER_CST that should be AND'ed with the extra bits. */
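/* For example, with P == 8 in a 32-bit mode, the sign-extended
   constant 0xffffff80 comes back as 0x80, while 0x80 itself (extra
   bits clear, sign bit set) comes back as 0xffffff80; constants whose
   P-bit sign bit is clear pass through unchanged. */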
4952 static tree
4953 unextend (tree c, int p, int unsignedp, tree mask)
4955 tree type = TREE_TYPE (c);
4956 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4957 tree temp;
4959 if (p == modesize || unsignedp)
4960 return c;
4962 /* We work by getting just the sign bit into the low-order bit, then
4963 into the high-order bit, then sign-extend. We then XOR that value
4964 with C. */
4965 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4966 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4968 /* We must use a signed type in order to get an arithmetic right shift.
4969 However, we must also avoid introducing accidental overflows, so that
4970 a subsequent call to integer_zerop will work. Hence we must
4971 do the type conversion here. At this point, the constant is either
4972 zero or one, and the conversion to a signed type can never overflow.
4973 We could get an overflow if this conversion is done anywhere else. */
4974 if (TYPE_UNSIGNED (type))
4975 temp = fold_convert (signed_type_for (type), temp);
4977 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4978 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4979 if (mask != 0)
4980 temp = const_binop (BIT_AND_EXPR, temp,
4981 fold_convert (TREE_TYPE (c), mask));
4982 /* If necessary, convert the type back to match the type of C. */
4983 if (TYPE_UNSIGNED (type))
4984 temp = fold_convert (type, temp);
4986 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4989 /* For an expression that has the form
4990 (A && B) || ~B
4991 or
4992 (A || B) && ~B,
4993 we can drop one of the inner expressions and simplify to
4994 A || ~B
4995 or
4996 A && ~B
4997 LOC is the location of the resulting expression. OP is the inner
4998 logical operation; the left-hand side in the examples above, while CMPOP
4999 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5000 removing a condition that guards another, as in
5001 (A != NULL && A->...) || A == NULL
5002 which we must not transform. If RHS_ONLY is true, only eliminate the
5003 right-most operand of the inner logical operation. */
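/* Concretely, (x > 0 && y > 0) || x <= 0 can drop the inner test that
   the opposite arm re-checks, leaving (y > 0) || x <= 0. */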
5005 static tree
5006 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5007 bool rhs_only)
5009 tree type = TREE_TYPE (cmpop);
5010 enum tree_code code = TREE_CODE (cmpop);
5011 enum tree_code truthop_code = TREE_CODE (op);
5012 tree lhs = TREE_OPERAND (op, 0);
5013 tree rhs = TREE_OPERAND (op, 1);
5014 tree orig_lhs = lhs, orig_rhs = rhs;
5015 enum tree_code rhs_code = TREE_CODE (rhs);
5016 enum tree_code lhs_code = TREE_CODE (lhs);
5017 enum tree_code inv_code;
5019 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5020 return NULL_TREE;
5022 if (TREE_CODE_CLASS (code) != tcc_comparison)
5023 return NULL_TREE;
5025 if (rhs_code == truthop_code)
5027 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5028 if (newrhs != NULL_TREE)
5030 rhs = newrhs;
5031 rhs_code = TREE_CODE (rhs);
5034 if (lhs_code == truthop_code && !rhs_only)
5036 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5037 if (newlhs != NULL_TREE)
5039 lhs = newlhs;
5040 lhs_code = TREE_CODE (lhs);
5044 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5045 if (inv_code == rhs_code
5046 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5047 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5048 return lhs;
5049 if (!rhs_only && inv_code == lhs_code
5050 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5051 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5052 return rhs;
5053 if (rhs != orig_rhs || lhs != orig_lhs)
5054 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5055 lhs, rhs);
5056 return NULL_TREE;
5059 /* Find ways of folding logical expressions of LHS and RHS:
5060 Try to merge two comparisons to the same innermost item.
5061 Look for range tests like "ch >= '0' && ch <= '9'".
5062 Look for combinations of simple terms on machines with expensive branches
5063 and evaluate the RHS unconditionally.
5065 For example, if we have p->a == 2 && p->b == 4 and we can make an
5066 object large enough to span both A and B, we can do this with a comparison
5067 against the object ANDed with the a mask.
5069 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5070 operations to do this with one comparison.
5072 We check for both normal comparisons and the BIT_AND_EXPRs made by
5073 this function and the one above.
5075 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5076 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5078 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5079 two operands.
5081 We return the simplified tree or 0 if no optimization is possible. */
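/* As an illustration, assuming a little-endian layout with two
   adjacent byte-sized fields a and b and no padding: p->a == 2 &&
   p->b == 4 can become a single 16-bit load of both fields compared
   against the merged constant 2 | (4 << 8). */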
5083 static tree
5084 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5085 tree lhs, tree rhs)
5087 /* If this is the "or" of two comparisons, we can do something if
5088 the comparisons are NE_EXPR. If this is the "and", we can do something
5089 if the comparisons are EQ_EXPR. I.e.,
5090 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5092 WANTED_CODE is this operation code. For single bit fields, we can
5093 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5094 comparison for one-bit fields. */
5096 enum tree_code wanted_code;
5097 enum tree_code lcode, rcode;
5098 tree ll_arg, lr_arg, rl_arg, rr_arg;
5099 tree ll_inner, lr_inner, rl_inner, rr_inner;
5100 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5101 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5102 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5103 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5104 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5105 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5106 enum machine_mode lnmode, rnmode;
5107 tree ll_mask, lr_mask, rl_mask, rr_mask;
5108 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5109 tree l_const, r_const;
5110 tree lntype, rntype, result;
5111 HOST_WIDE_INT first_bit, end_bit;
5112 int volatilep;
5113 tree orig_lhs = lhs, orig_rhs = rhs;
5114 enum tree_code orig_code = code;
5116 /* Start by getting the comparison codes. Fail if anything is volatile.
5117 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5118 it were surrounded with a NE_EXPR. */
5120 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5121 return 0;
5123 lcode = TREE_CODE (lhs);
5124 rcode = TREE_CODE (rhs);
5126 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5128 lhs = build2 (NE_EXPR, truth_type, lhs,
5129 build_int_cst (TREE_TYPE (lhs), 0));
5130 lcode = NE_EXPR;
5133 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5135 rhs = build2 (NE_EXPR, truth_type, rhs,
5136 build_int_cst (TREE_TYPE (rhs), 0));
5137 rcode = NE_EXPR;
5140 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5141 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5142 return 0;
5144 ll_arg = TREE_OPERAND (lhs, 0);
5145 lr_arg = TREE_OPERAND (lhs, 1);
5146 rl_arg = TREE_OPERAND (rhs, 0);
5147 rr_arg = TREE_OPERAND (rhs, 1);
5149 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
5150 if (simple_operand_p (ll_arg)
5151 && simple_operand_p (lr_arg))
5153 tree result;
5154 if (operand_equal_p (ll_arg, rl_arg, 0)
5155 && operand_equal_p (lr_arg, rr_arg, 0))
5157 result = combine_comparisons (loc, code, lcode, rcode,
5158 truth_type, ll_arg, lr_arg);
5159 if (result)
5160 return result;
5162 else if (operand_equal_p (ll_arg, rr_arg, 0)
5163 && operand_equal_p (lr_arg, rl_arg, 0))
5165 result = combine_comparisons (loc, code, lcode,
5166 swap_tree_comparison (rcode),
5167 truth_type, ll_arg, lr_arg);
5168 if (result)
5169 return result;
5173 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5174 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5176 /* If the RHS can be evaluated unconditionally and its operands are
5177 simple, it wins to evaluate the RHS unconditionally on machines
5178 with expensive branches. In this case, this isn't a comparison
5179 that can be merged. Avoid doing this if the RHS is a floating-point
5180 comparison since those can trap. */
5182 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5183 false) >= 2
5184 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5185 && simple_operand_p (rl_arg)
5186 && simple_operand_p (rr_arg))
5188 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5189 if (code == TRUTH_OR_EXPR
5190 && lcode == NE_EXPR && integer_zerop (lr_arg)
5191 && rcode == NE_EXPR && integer_zerop (rr_arg)
5192 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5193 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5195 result = build2 (NE_EXPR, truth_type,
5196 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5197 ll_arg, rl_arg),
5198 build_int_cst (TREE_TYPE (ll_arg), 0));
5199 goto fold_truthop_exit;
5202 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5203 if (code == TRUTH_AND_EXPR
5204 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5205 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5206 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5207 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5209 result = build2 (EQ_EXPR, truth_type,
5210 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5211 ll_arg, rl_arg),
5212 build_int_cst (TREE_TYPE (ll_arg), 0));
5213 goto fold_truthop_exit;
5216 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5218 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5220 result = build2 (code, truth_type, lhs, rhs);
5221 goto fold_truthop_exit;
5223 return NULL_TREE;
5227 /* See if the comparisons can be merged. Then get all the parameters for
5228 each side. */
5230 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5231 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5232 return 0;
5234 volatilep = 0;
5235 ll_inner = decode_field_reference (loc, ll_arg,
5236 &ll_bitsize, &ll_bitpos, &ll_mode,
5237 &ll_unsignedp, &volatilep, &ll_mask,
5238 &ll_and_mask);
5239 lr_inner = decode_field_reference (loc, lr_arg,
5240 &lr_bitsize, &lr_bitpos, &lr_mode,
5241 &lr_unsignedp, &volatilep, &lr_mask,
5242 &lr_and_mask);
5243 rl_inner = decode_field_reference (loc, rl_arg,
5244 &rl_bitsize, &rl_bitpos, &rl_mode,
5245 &rl_unsignedp, &volatilep, &rl_mask,
5246 &rl_and_mask);
5247 rr_inner = decode_field_reference (loc, rr_arg,
5248 &rr_bitsize, &rr_bitpos, &rr_mode,
5249 &rr_unsignedp, &volatilep, &rr_mask,
5250 &rr_and_mask);
5252 /* The inner operation on the lhs of each comparison must be the
5253 same if we are to be able to do anything.
5254 Then see if we have constants. If not, the same must be true for
5255 the rhs's. */
5256 if (volatilep || ll_inner == 0 || rl_inner == 0
5257 || ! operand_equal_p (ll_inner, rl_inner, 0))
5258 return 0;
5260 if (TREE_CODE (lr_arg) == INTEGER_CST
5261 && TREE_CODE (rr_arg) == INTEGER_CST)
5262 l_const = lr_arg, r_const = rr_arg;
5263 else if (lr_inner == 0 || rr_inner == 0
5264 || ! operand_equal_p (lr_inner, rr_inner, 0))
5265 return 0;
5266 else
5267 l_const = r_const = 0;
5269 /* If either comparison code is not correct for our logical operation,
5270 fail. However, we can convert a one-bit comparison against zero into
5271 the opposite comparison against that bit being set in the field. */
5273 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5274 if (lcode != wanted_code)
5276 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5278 /* Make the left operand unsigned, since we are only interested
5279 in the value of one bit. Otherwise we are doing the wrong
5280 thing below. */
5281 ll_unsignedp = 1;
5282 l_const = ll_mask;
5284 else
5285 return 0;
5288 /* This is analogous to the code for l_const above. */
5289 if (rcode != wanted_code)
5291 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5293 rl_unsignedp = 1;
5294 r_const = rl_mask;
5296 else
5297 return 0;
5300 /* See if we can find a mode that contains both fields being compared on
5301 the left. If we can't, fail. Otherwise, update all constants and masks
5302 to be relative to a field of that size. */
5303 first_bit = MIN (ll_bitpos, rl_bitpos);
5304 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5305 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5306 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5307 volatilep);
5308 if (lnmode == VOIDmode)
5309 return 0;
5311 lnbitsize = GET_MODE_BITSIZE (lnmode);
5312 lnbitpos = first_bit & ~ (lnbitsize - 1);
5313 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5314 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5316 if (BYTES_BIG_ENDIAN)
5318 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5319 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5322 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5323 size_int (xll_bitpos));
5324 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5325 size_int (xrl_bitpos));
5327 if (l_const)
5329 l_const = fold_convert_loc (loc, lntype, l_const);
5330 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5331 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5332 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5333 fold_build1_loc (loc, BIT_NOT_EXPR,
5334 lntype, ll_mask))))
5336 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5338 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5341 if (r_const)
5343 r_const = fold_convert_loc (loc, lntype, r_const);
5344 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5345 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5346 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5347 fold_build1_loc (loc, BIT_NOT_EXPR,
5348 lntype, rl_mask))))
5350 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5352 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5356 /* If the right sides are not constant, do the same for them. Also,
5357 disallow this optimization if a size or signedness mismatch occurs
5358 between the left and right sides. */
5359 if (l_const == 0)
5361 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5362 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5363 /* Make sure the two fields on the right
5364 correspond to the left without being swapped. */
5365 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5366 return 0;
5368 first_bit = MIN (lr_bitpos, rr_bitpos);
5369 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5370 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5371 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5372 volatilep);
5373 if (rnmode == VOIDmode)
5374 return 0;
5376 rnbitsize = GET_MODE_BITSIZE (rnmode);
5377 rnbitpos = first_bit & ~ (rnbitsize - 1);
5378 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5379 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5381 if (BYTES_BIG_ENDIAN)
5383 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5384 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5387 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5388 rntype, lr_mask),
5389 size_int (xlr_bitpos));
5390 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5391 rntype, rr_mask),
5392 size_int (xrr_bitpos));
5394 /* Make a mask that corresponds to both fields being compared.
5395 Do this for both items being compared. If the operands are the
5396 same size and the bits being compared are in the same position
5397 then we can do this by masking both and comparing the masked
5398 results. */
5399 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5400 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5401 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5403 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5404 ll_unsignedp || rl_unsignedp);
5405 if (! all_ones_mask_p (ll_mask, lnbitsize))
5406 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5408 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5409 lr_unsignedp || rr_unsignedp);
5410 if (! all_ones_mask_p (lr_mask, rnbitsize))
5411 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5413 result = build2 (wanted_code, truth_type, lhs, rhs);
5414 goto fold_truthop_exit;
5417 /* There is still another way we can do something: If both pairs of
5418 fields being compared are adjacent, we may be able to make a wider
5419 field containing them both.
5421 Note that we still must mask the lhs/rhs expressions. Furthermore,
5422 the mask must be shifted to account for the shift done by
5423 make_bit_field_ref. */
5424 if ((ll_bitsize + ll_bitpos == rl_bitpos
5425 && lr_bitsize + lr_bitpos == rr_bitpos)
5426 || (ll_bitpos == rl_bitpos + rl_bitsize
5427 && lr_bitpos == rr_bitpos + rr_bitsize))
5429 tree type;
5431 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5432 ll_bitsize + rl_bitsize,
5433 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5434 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5435 lr_bitsize + rr_bitsize,
5436 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5438 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5439 size_int (MIN (xll_bitpos, xrl_bitpos)));
5440 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5441 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5443 /* Convert to the smaller type before masking out unwanted bits. */
5444 type = lntype;
5445 if (lntype != rntype)
5447 if (lnbitsize > rnbitsize)
5449 lhs = fold_convert_loc (loc, rntype, lhs);
5450 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5451 type = rntype;
5453 else if (lnbitsize < rnbitsize)
5455 rhs = fold_convert_loc (loc, lntype, rhs);
5456 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5457 type = lntype;
5461 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5462 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5464 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5465 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5467 result = build2 (wanted_code, truth_type, lhs, rhs);
5468 goto fold_truthop_exit;
5471 return 0;
5474 /* Handle the case of comparisons with constants. If there is something in
5475 common between the masks, those bits of the constants must be the same.
5476 If not, the condition is always false. Test for this to avoid generating
5477 incorrect code below. */
5478 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5479 if (! integer_zerop (result)
5480 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5481 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5483 if (wanted_code == NE_EXPR)
5485 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5486 return constant_boolean_node (true, truth_type);
5488 else
5490 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5491 return constant_boolean_node (false, truth_type);
5495 /* Construct the expression we will return. First get the component
5496 reference we will make. Unless the mask is all ones for the width of
5497 that field, perform the mask operation. Then compare with the
5498 merged constant. */
5499 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5500 ll_unsignedp || rl_unsignedp);
5502 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5503 if (! all_ones_mask_p (ll_mask, lnbitsize))
5505 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5506 SET_EXPR_LOCATION (result, loc);
5509 result = build2 (wanted_code, truth_type, result,
5510 const_binop (BIT_IOR_EXPR, l_const, r_const));
5512 fold_truthop_exit:
5513 SET_EXPR_LOCATION (result, loc);
5514 return result;
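/* A standalone sketch of the merge performed above, phrased with
   explicit shifts and masks instead of trees: two equality tests of
   adjacent byte-sized fields of one word collapse into a single
   masked comparison.  The helper below is illustrative only and not
   part of this file.  */
#include <assert.h>

static void
check_merged_field_compare (unsigned w)
{
  int separate = (w & 0xff) == 0x12 && ((w >> 8) & 0xff) == 0x34;
  int merged = (w & 0xffff) == 0x3412;
  assert (separate == merged);
}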
5517 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5518 constant. */
5520 static tree
5521 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5522 tree op0, tree op1)
5524 tree arg0 = op0;
5525 enum tree_code op_code;
5526 tree comp_const;
5527 tree minmax_const;
5528 int consts_equal, consts_lt;
5529 tree inner;
5531 STRIP_SIGN_NOPS (arg0);
5533 op_code = TREE_CODE (arg0);
5534 minmax_const = TREE_OPERAND (arg0, 1);
5535 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5536 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5537 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5538 inner = TREE_OPERAND (arg0, 0);
5540 /* If something does not permit us to optimize, return NULL_TREE. */
5541 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5542 || TREE_CODE (comp_const) != INTEGER_CST
5543 || TREE_OVERFLOW (comp_const)
5544 || TREE_CODE (minmax_const) != INTEGER_CST
5545 || TREE_OVERFLOW (minmax_const))
5546 return NULL_TREE;
5548 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5549 and GT_EXPR, doing the rest with recursive calls using logical
5550 simplifications. */
5551 switch (code)
5553 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5555 tree tem
5556 = optimize_minmax_comparison (loc,
5557 invert_tree_comparison (code, false),
5558 type, op0, op1);
5559 if (tem)
5560 return invert_truthvalue_loc (loc, tem);
5561 return NULL_TREE;
5564 case GE_EXPR:
5565 return
5566 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5567 optimize_minmax_comparison
5568 (loc, EQ_EXPR, type, arg0, comp_const),
5569 optimize_minmax_comparison
5570 (loc, GT_EXPR, type, arg0, comp_const));
5572 case EQ_EXPR:
5573 if (op_code == MAX_EXPR && consts_equal)
5574 /* MAX (X, 0) == 0 -> X <= 0 */
5575 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5577 else if (op_code == MAX_EXPR && consts_lt)
5578 /* MAX (X, 0) == 5 -> X == 5 */
5579 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5581 else if (op_code == MAX_EXPR)
5582 /* MAX (X, 0) == -1 -> false */
5583 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5585 else if (consts_equal)
5586 /* MIN (X, 0) == 0 -> X >= 0 */
5587 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5589 else if (consts_lt)
5590 /* MIN (X, 0) == 5 -> false */
5591 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5593 else
5594 /* MIN (X, 0) == -1 -> X == -1 */
5595 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5597 case GT_EXPR:
5598 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5599 /* MAX (X, 0) > 0 -> X > 0
5600 MAX (X, 0) > 5 -> X > 5 */
5601 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5603 else if (op_code == MAX_EXPR)
5604 /* MAX (X, 0) > -1 -> true */
5605 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5607 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5608 /* MIN (X, 0) > 0 -> false
5609 MIN (X, 0) > 5 -> false */
5610 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5612 else
5613 /* MIN (X, 0) > -1 -> X > -1 */
5614 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5616 default:
5617 return NULL_TREE;
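/* A standalone sketch of the MIN/MAX comparison identities folded
   above, using plain ints in place of trees; the helper names are
   illustrative only.  */
#include <assert.h>

static int imax (int a, int b) { return a > b ? a : b; }
static int imin (int a, int b) { return a < b ? a : b; }

static void
check_minmax_folds (int x)
{
  assert ((imax (x, 0) == 0) == (x <= 0));   /* MAX (X, 0) == 0 -> X <= 0 */
  assert (!(imax (x, 0) == -1));             /* MAX (X, 0) == -1 -> false */
  assert ((imin (x, 0) == 0) == (x >= 0));   /* MIN (X, 0) == 0 -> X >= 0 */
  assert ((imax (x, 0) > 5) == (x > 5));     /* MAX (X, 0) > 5 -> X > 5 */
  assert ((imin (x, 0) > -1) == (x > -1));   /* MIN (X, 0) > -1 -> X > -1 */
}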
5621 /* T is an integer expression that is being multiplied, divided, or reduced
5622 modulo a constant C (CODE says which operation, and what kind of divide
5623 or modulus). See if we can eliminate that operation by folding it with
5624 other operations already in T. WIDE_TYPE, if non-null, is a type that
5625 should be used for the computation if wider than our type.
5627 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5628 (X * 2) + (Y * 4). We must, however, be assured that either the original
5629 expression would not overflow or that overflow is undefined for the type
5630 in the language in question.
5632 If we return a non-null expression, it is an equivalent form of the
5633 original computation, but need not be in the original type.
5635 We set *STRICT_OVERFLOW_P to true if the return value depends on
5636 signed overflow being undefined. Otherwise we do not change
5637 *STRICT_OVERFLOW_P. */
5639 static tree
5640 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5641 bool *strict_overflow_p)
5643 /* To avoid exponential search depth, refuse to allow recursion past
5644 three levels. Beyond that (1) it's highly unlikely that we'll find
5645 something interesting and (2) we've probably processed it before
5646 when we built the inner expression. */
5648 static int depth;
5649 tree ret;
5651 if (depth > 3)
5652 return NULL;
5654 depth++;
5655 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5656 depth--;
5658 return ret;
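/* A standalone sketch of the transformation described above: dividing
   (X * 8) + (Y * 16) by 4 yields (X * 2) + (Y * 4), valid as long as
   the original expression does not overflow.  The small bound keeps
   the arithmetic in range; the helper is illustrative only.  */
#include <assert.h>

static void
check_extract_muldiv (int x, int y)
{
  if (x > -1000 && x < 1000 && y > -1000 && y < 1000)
    assert (((x * 8) + (y * 16)) / 4 == (x * 2) + (y * 4));
}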
5661 static tree
5662 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5663 bool *strict_overflow_p)
5665 tree type = TREE_TYPE (t);
5666 enum tree_code tcode = TREE_CODE (t);
5667 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5668 > GET_MODE_SIZE (TYPE_MODE (type)))
5669 ? wide_type : type);
5670 tree t1, t2;
5671 int same_p = tcode == code;
5672 tree op0 = NULL_TREE, op1 = NULL_TREE;
5673 bool sub_strict_overflow_p;
5675 /* Don't deal with constants of zero here; they confuse the code below. */
5676 if (integer_zerop (c))
5677 return NULL_TREE;
5679 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5680 op0 = TREE_OPERAND (t, 0);
5682 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5683 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5685 /* Note that we need not handle conditional operations here since fold
5686 already handles those cases. So just do arithmetic here. */
5687 switch (tcode)
5689 case INTEGER_CST:
5690 /* For a constant, we can always simplify if we are a multiply
5691 or (for divide and modulus) if it is a multiple of our constant. */
5692 if (code == MULT_EXPR
5693 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5694 return const_binop (code, fold_convert (ctype, t),
5695 fold_convert (ctype, c));
5696 break;
5698 CASE_CONVERT: case NON_LVALUE_EXPR:
5699 /* If op0 is an expression ... */
5700 if ((COMPARISON_CLASS_P (op0)
5701 || UNARY_CLASS_P (op0)
5702 || BINARY_CLASS_P (op0)
5703 || VL_EXP_CLASS_P (op0)
5704 || EXPRESSION_CLASS_P (op0))
5705 /* ... and has wrapping overflow, and its type is smaller
5706 than ctype, then we cannot pass through as widening. */
5707 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5708 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5709 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5710 && (TYPE_PRECISION (ctype)
5711 > TYPE_PRECISION (TREE_TYPE (op0))))
5712 /* ... or this is a truncation (t is narrower than op0),
5713 then we cannot pass through this narrowing. */
5714 || (TYPE_PRECISION (type)
5715 < TYPE_PRECISION (TREE_TYPE (op0)))
5716 /* ... or signedness changes for division or modulus,
5717 then we cannot pass through this conversion. */
5718 || (code != MULT_EXPR
5719 && (TYPE_UNSIGNED (ctype)
5720 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5721 /* ... or has undefined overflow while the type it is
5722 converted to has not, we cannot do the operation in the inner type
5723 as that would introduce undefined overflow. */
5724 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5725 && !TYPE_OVERFLOW_UNDEFINED (type))))
5726 break;
5728 /* Pass the constant down and see if we can make a simplification. If
5729 we can, replace this expression with the inner simplification for
5730 possible later conversion to our or some other type. */
5731 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5732 && TREE_CODE (t2) == INTEGER_CST
5733 && !TREE_OVERFLOW (t2)
5734 && (0 != (t1 = extract_muldiv (op0, t2, code,
5735 code == MULT_EXPR
5736 ? ctype : NULL_TREE,
5737 strict_overflow_p))))
5738 return t1;
5739 break;
5741 case ABS_EXPR:
5742 /* If widening the type changes it from signed to unsigned, then we
5743 must avoid building ABS_EXPR itself as unsigned. */
5744 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5746 tree cstype = (*signed_type_for) (ctype);
5747 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5748 != 0)
5750 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5751 return fold_convert (ctype, t1);
5753 break;
5755 /* If the constant is negative, we cannot simplify this. */
5756 if (tree_int_cst_sgn (c) == -1)
5757 break;
5758 /* FALLTHROUGH */
5759 case NEGATE_EXPR:
5760 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5761 != 0)
5762 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5763 break;
5765 case MIN_EXPR: case MAX_EXPR:
5766 /* If widening the type changes the signedness, then we can't perform
5767 this optimization as that changes the result. */
5768 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5769 break;
5771 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5772 sub_strict_overflow_p = false;
5773 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5774 &sub_strict_overflow_p)) != 0
5775 && (t2 = extract_muldiv (op1, c, code, wide_type,
5776 &sub_strict_overflow_p)) != 0)
5778 if (tree_int_cst_sgn (c) < 0)
5779 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5780 if (sub_strict_overflow_p)
5781 *strict_overflow_p = true;
5782 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5783 fold_convert (ctype, t2));
5785 break;
5787 case LSHIFT_EXPR: case RSHIFT_EXPR:
5788 /* If the second operand is constant, this is a multiplication
5789 or floor division by a power of two, so we can treat it that
5790 way unless the multiplier or divisor overflows. Signed
5791 left-shift overflow is implementation-defined rather than
5792 undefined in C90, so do not convert signed left shift into
5793 multiplication. */
5794 if (TREE_CODE (op1) == INTEGER_CST
5795 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5796 /* const_binop may not detect overflow correctly,
5797 so check for it explicitly here. */
5798 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5799 && TREE_INT_CST_HIGH (op1) == 0
5800 && 0 != (t1 = fold_convert (ctype,
5801 const_binop (LSHIFT_EXPR,
5802 size_one_node,
5803 op1)))
5804 && !TREE_OVERFLOW (t1))
5805 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5806 ? MULT_EXPR : FLOOR_DIV_EXPR,
5807 ctype,
5808 fold_convert (ctype, op0),
5809 t1),
5810 c, code, wide_type, strict_overflow_p);
5811 break;
5813 case PLUS_EXPR: case MINUS_EXPR:
5814 /* See if we can eliminate the operation on both sides. If we can, we
5815 can return a new PLUS or MINUS. If we can't, the only remaining
5816 cases where we can do anything are if the second operand is a
5817 constant. */
5818 sub_strict_overflow_p = false;
5819 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5820 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5821 if (t1 != 0 && t2 != 0
5822 && (code == MULT_EXPR
5823 /* If not multiplication, we can only do this if both operands
5824 are divisible by c. */
5825 || (multiple_of_p (ctype, op0, c)
5826 && multiple_of_p (ctype, op1, c))))
5828 if (sub_strict_overflow_p)
5829 *strict_overflow_p = true;
5830 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5831 fold_convert (ctype, t2));
5834 /* If this was a subtraction, negate OP1 and set it to be an addition.
5835 This simplifies the logic below. */
5836 if (tcode == MINUS_EXPR)
5838 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5839 /* If OP1 was not easily negatable, the constant may be OP0. */
5840 if (TREE_CODE (op0) == INTEGER_CST)
5842 tree tem = op0;
5843 op0 = op1;
5844 op1 = tem;
5845 tem = t1;
5846 t1 = t2;
5847 t2 = tem;
5851 if (TREE_CODE (op1) != INTEGER_CST)
5852 break;
5854 /* If either OP1 or C is negative, this optimization is not safe for
5855 some of the division and remainder types, while for others we need
5856 to change the code. */
5857 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5859 if (code == CEIL_DIV_EXPR)
5860 code = FLOOR_DIV_EXPR;
5861 else if (code == FLOOR_DIV_EXPR)
5862 code = CEIL_DIV_EXPR;
5863 else if (code != MULT_EXPR
5864 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5865 break;
5868 /* If it's a multiply or a division/modulus operation of a multiple
5869 of our constant, do the operation and verify it doesn't overflow. */
5870 if (code == MULT_EXPR
5871 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5873 op1 = const_binop (code, fold_convert (ctype, op1),
5874 fold_convert (ctype, c));
5875 /* We allow the constant to overflow with wrapping semantics. */
5876 if (op1 == 0
5877 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5878 break;
5880 else
5881 break;
5883 /* If we have an unsigned type that is not a sizetype, we cannot widen
5884 the operation since it will change the result if the original
5885 computation overflowed. */
5886 if (TYPE_UNSIGNED (ctype)
5887 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5888 && ctype != type)
5889 break;
5891 /* If we were able to eliminate our operation from the first side,
5892 apply our operation to the second side and reform the PLUS. */
5893 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5894 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5896 /* The last case is if we are a multiply. In that case, we can
5897 apply the distributive law to commute the multiply and addition
5898 if the multiplication of the constants doesn't overflow. */
5899 if (code == MULT_EXPR)
5900 return fold_build2 (tcode, ctype,
5901 fold_build2 (code, ctype,
5902 fold_convert (ctype, op0),
5903 fold_convert (ctype, c)),
5904 op1);
5906 break;
5908 case MULT_EXPR:
5909 /* We have a special case here if we are doing something like
5910 (C * 8) % 4 since we know that's zero. */
5911 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5912 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5913 /* If the multiplication can overflow we cannot optimize this.
5914 ??? Until we can properly mark individual operations as
5915 not overflowing we need to treat sizetype specially here, as
5916 stor-layout relies on this optimization to make
5917 DECL_FIELD_BIT_OFFSET always a constant. */
5918 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5919 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5920 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5921 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5922 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5924 *strict_overflow_p = true;
5925 return omit_one_operand (type, integer_zero_node, op0);
5928 /* ... fall through ... */
5930 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5931 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5932 /* If we can extract our operation from the LHS, do so and return a
5933 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5934 do something only if the second operand is a constant. */
5935 if (same_p
5936 && (t1 = extract_muldiv (op0, c, code, wide_type,
5937 strict_overflow_p)) != 0)
5938 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5939 fold_convert (ctype, op1));
5940 else if (tcode == MULT_EXPR && code == MULT_EXPR
5941 && (t1 = extract_muldiv (op1, c, code, wide_type,
5942 strict_overflow_p)) != 0)
5943 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5944 fold_convert (ctype, t1));
5945 else if (TREE_CODE (op1) != INTEGER_CST)
5946 return 0;
5948 /* If these are the same operation types, we can associate them
5949 assuming no overflow. */
5950 if (tcode == code
5951 && 0 != (t1 = int_const_binop (MULT_EXPR,
5952 fold_convert (ctype, op1),
5953 fold_convert (ctype, c), 1))
5954 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5955 (TYPE_UNSIGNED (ctype)
5956 && tcode != MULT_EXPR) ? -1 : 1,
5957 TREE_OVERFLOW (t1)))
5958 && !TREE_OVERFLOW (t1))
5959 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5961 /* If these operations "cancel" each other, we have the main
5962 optimizations of this pass, which occur when either constant is a
5963 multiple of the other, in which case we replace this with either an
5964 operation of CODE or TCODE.
5966 If we have an unsigned type that is not a sizetype, we cannot do
5967 this since it will change the result if the original computation
5968 overflowed. */
5969 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5970 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5971 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5972 || (tcode == MULT_EXPR
5973 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5974 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5975 && code != MULT_EXPR)))
5977 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5979 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5980 *strict_overflow_p = true;
5981 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5982 fold_convert (ctype,
5983 const_binop (TRUNC_DIV_EXPR,
5984 op1, c)));
5986 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5988 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5989 *strict_overflow_p = true;
5990 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5991 fold_convert (ctype,
5992 const_binop (TRUNC_DIV_EXPR,
5993 c, op1)));
5996 break;
5998 default:
5999 break;
6002 return 0;
6005 /* Return a node which has the indicated constant VALUE (either 0 or
6006 1), and is of the indicated TYPE. */
6008 tree
6009 constant_boolean_node (int value, tree type)
6011 if (type == integer_type_node)
6012 return value ? integer_one_node : integer_zero_node;
6013 else if (type == boolean_type_node)
6014 return value ? boolean_true_node : boolean_false_node;
6015 else
6016 return build_int_cst (type, value);
6020 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6021 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6022 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6023 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6024 COND is the first argument to CODE; otherwise (as in the example
6025 given here), it is the second argument. TYPE is the type of the
6026 original expression. Return NULL_TREE if no simplification is
6027 possible. */
6029 static tree
6030 fold_binary_op_with_conditional_arg (location_t loc,
6031 enum tree_code code,
6032 tree type, tree op0, tree op1,
6033 tree cond, tree arg, int cond_first_p)
6035 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6036 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6037 tree test, true_value, false_value;
6038 tree lhs = NULL_TREE;
6039 tree rhs = NULL_TREE;
6041 if (TREE_CODE (cond) == COND_EXPR)
6043 test = TREE_OPERAND (cond, 0);
6044 true_value = TREE_OPERAND (cond, 1);
6045 false_value = TREE_OPERAND (cond, 2);
6046 /* If this operand throws an exception, then it does not make
6047 sense to try to perform a logical or arithmetic operation
6048 involving it. */
6049 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6050 lhs = true_value;
6051 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6052 rhs = false_value;
6054 else
6056 tree testtype = TREE_TYPE (cond);
6057 test = cond;
6058 true_value = constant_boolean_node (true, testtype);
6059 false_value = constant_boolean_node (false, testtype);
6062 /* This transformation is only worthwhile if we don't have to wrap ARG
6063 in a SAVE_EXPR and the operation can be simplified on at least one
6064 of the branches once it's pushed inside the COND_EXPR. */
6065 if (!TREE_CONSTANT (arg)
6066 && (TREE_SIDE_EFFECTS (arg)
6067 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6068 return NULL_TREE;
6070 arg = fold_convert_loc (loc, arg_type, arg);
6071 if (lhs == 0)
6073 true_value = fold_convert_loc (loc, cond_type, true_value);
6074 if (cond_first_p)
6075 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6076 else
6077 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6079 if (rhs == 0)
6081 false_value = fold_convert_loc (loc, cond_type, false_value);
6082 if (cond_first_p)
6083 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6084 else
6085 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6088 /* Check that we have simplified at least one of the branches. */
6089 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6090 return NULL_TREE;
6092 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
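/* A standalone sketch of the distribution performed above, checked
   with plain ints under the assumption that no addition overflows;
   illustrative only.  */
#include <assert.h>

static void
check_cond_distribute (int a, int b, int x, int y)
{
  /* a + (b ? x : y) -> b ? (a + x) : (a + y) */
  assert (a + (b ? x : y) == (b ? a + x : a + y));
}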
6096 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6098 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6099 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6100 ADDEND is the same as X.
6102 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6103 and finite. The problematic cases are when X is zero, and its mode
6104 has signed zeros. In the case of rounding towards -infinity,
6105 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6106 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6108 bool
6109 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6111 if (!real_zerop (addend))
6112 return false;
6114 /* Don't allow the fold with -fsignaling-nans. */
6115 if (HONOR_SNANS (TYPE_MODE (type)))
6116 return false;
6118 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6119 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6120 return true;
6122 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6123 if (TREE_CODE (addend) == REAL_CST
6124 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6125 negate = !negate;
6127 /* The mode has signed zeros, and we have to honor their sign.
6128 In this situation, there is only one case we can return true for.
6129 X - 0 is the same as X unless rounding towards -infinity is
6130 supported. */
6131 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
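/* A standalone sketch of why the signed-zero check above matters,
   assuming the default round-to-nearest mode: -0.0 + 0.0 yields +0.0,
   so X + 0.0 is not X when X is -0.0, whereas X - 0.0 preserves the
   sign of zero.  Illustrative only.  */
#include <assert.h>
#include <math.h>

static void
check_zero_addition (void)
{
  double x = -0.0;
  assert (signbit (x));           /* X is -0.0 */
  assert (!signbit (x + 0.0));    /* X + 0.0 is +0.0, not X */
  assert (signbit (x - 0.0));     /* X - 0.0 is still -0.0 */
}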
6134 /* Subroutine of fold() that checks comparisons of built-in math
6135 functions against real constants.
6137 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6138 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6139 is the type of the result and ARG0 and ARG1 are the operands of the
6140 comparison. ARG1 must be a TREE_REAL_CST.
6142 The function returns the constant folded tree if a simplification
6143 can be made, and NULL_TREE otherwise. */
6145 static tree
6146 fold_mathfn_compare (location_t loc,
6147 enum built_in_function fcode, enum tree_code code,
6148 tree type, tree arg0, tree arg1)
6150 REAL_VALUE_TYPE c;
6152 if (BUILTIN_SQRT_P (fcode))
6154 tree arg = CALL_EXPR_ARG (arg0, 0);
6155 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6157 c = TREE_REAL_CST (arg1);
6158 if (REAL_VALUE_NEGATIVE (c))
6160 /* sqrt(x) < y is always false, if y is negative. */
6161 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6162 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6164 /* sqrt(x) > y is always true, if y is negative and we
6165 don't care about NaNs, i.e. negative values of x. */
6166 if (code == NE_EXPR || !HONOR_NANS (mode))
6167 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6169 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6170 return fold_build2_loc (loc, GE_EXPR, type, arg,
6171 build_real (TREE_TYPE (arg), dconst0));
6173 else if (code == GT_EXPR || code == GE_EXPR)
6175 REAL_VALUE_TYPE c2;
6177 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6178 real_convert (&c2, mode, &c2);
6180 if (REAL_VALUE_ISINF (c2))
6182 /* sqrt(x) > y is x == +Inf, when y is very large. */
6183 if (HONOR_INFINITIES (mode))
6184 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6185 build_real (TREE_TYPE (arg), c2));
6187 /* sqrt(x) > y is always false, when y is very large
6188 and we don't care about infinities. */
6189 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6192 /* sqrt(x) > c is the same as x > c*c. */
6193 return fold_build2_loc (loc, code, type, arg,
6194 build_real (TREE_TYPE (arg), c2));
6196 else if (code == LT_EXPR || code == LE_EXPR)
6198 REAL_VALUE_TYPE c2;
6200 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6201 real_convert (&c2, mode, &c2);
6203 if (REAL_VALUE_ISINF (c2))
6205 /* sqrt(x) < y is always true, when y is a very large
6206 value and we don't care about NaNs or Infinities. */
6207 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6208 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6210 /* sqrt(x) < y is x != +Inf when y is very large and we
6211 don't care about NaNs. */
6212 if (! HONOR_NANS (mode))
6213 return fold_build2_loc (loc, NE_EXPR, type, arg,
6214 build_real (TREE_TYPE (arg), c2));
6216 /* sqrt(x) < y is x >= 0 when y is very large and we
6217 don't care about Infinities. */
6218 if (! HONOR_INFINITIES (mode))
6219 return fold_build2_loc (loc, GE_EXPR, type, arg,
6220 build_real (TREE_TYPE (arg), dconst0));
6222 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6223 if (lang_hooks.decls.global_bindings_p () != 0
6224 || CONTAINS_PLACEHOLDER_P (arg))
6225 return NULL_TREE;
6227 arg = save_expr (arg);
6228 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6229 fold_build2_loc (loc, GE_EXPR, type, arg,
6230 build_real (TREE_TYPE (arg),
6231 dconst0)),
6232 fold_build2_loc (loc, NE_EXPR, type, arg,
6233 build_real (TREE_TYPE (arg),
6234 c2)));
6237 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6238 if (! HONOR_NANS (mode))
6239 return fold_build2_loc (loc, code, type, arg,
6240 build_real (TREE_TYPE (arg), c2));
6242 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6243 if (lang_hooks.decls.global_bindings_p () == 0
6244 && ! CONTAINS_PLACEHOLDER_P (arg))
6246 arg = save_expr (arg);
6247 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6248 fold_build2_loc (loc, GE_EXPR, type, arg,
6249 build_real (TREE_TYPE (arg),
6250 dconst0)),
6251 fold_build2_loc (loc, code, type, arg,
6252 build_real (TREE_TYPE (arg),
6253 c2)));
6258 return NULL_TREE;
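/* A standalone sketch of the negative-bound cases above, which are
   exact even in the presence of NaNs: sqrt returns a NaN for negative
   arguments and NaN comparisons are false, so sqrt(X) < -1 is always
   false and sqrt(X) > -1 is exactly X >= 0.  The c*c rewrites, by
   contrast, are not exact for every rounding case.  Illustrative
   only.  */
#include <assert.h>
#include <math.h>

static void
check_sqrt_negative_bound (double x)
{
  assert (!(sqrt (x) < -1.0));
  assert ((sqrt (x) > -1.0) == (x >= 0.0));
}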
6261 /* Subroutine of fold() that optimizes comparisons against Infinities,
6262 either +Inf or -Inf.
6264 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6265 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6266 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6268 The function returns the constant folded tree if a simplification
6269 can be made, and NULL_TREE otherwise. */
6271 static tree
6272 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6273 tree arg0, tree arg1)
6275 enum machine_mode mode;
6276 REAL_VALUE_TYPE max;
6277 tree temp;
6278 bool neg;
6280 mode = TYPE_MODE (TREE_TYPE (arg0));
6282 /* For negative infinity swap the sense of the comparison. */
6283 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6284 if (neg)
6285 code = swap_tree_comparison (code);
6287 switch (code)
6289 case GT_EXPR:
6290 /* x > +Inf is always false, if we ignore sNaNs. */
6291 if (HONOR_SNANS (mode))
6292 return NULL_TREE;
6293 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6295 case LE_EXPR:
6296 /* x <= +Inf is always true, if we don't care about NaNs. */
6297 if (! HONOR_NANS (mode))
6298 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6300 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6301 if (lang_hooks.decls.global_bindings_p () == 0
6302 && ! CONTAINS_PLACEHOLDER_P (arg0))
6304 arg0 = save_expr (arg0);
6305 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6307 break;
6309 case EQ_EXPR:
6310 case GE_EXPR:
6311 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6312 real_maxval (&max, neg, mode);
6313 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6314 arg0, build_real (TREE_TYPE (arg0), max));
6316 case LT_EXPR:
6317 /* x < +Inf is always equal to x <= DBL_MAX. */
6318 real_maxval (&max, neg, mode);
6319 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6320 arg0, build_real (TREE_TYPE (arg0), max));
6322 case NE_EXPR:
6323 /* x != +Inf is always equal to !(x > DBL_MAX). */
6324 real_maxval (&max, neg, mode);
6325 if (! HONOR_NANS (mode))
6326 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6327 arg0, build_real (TREE_TYPE (arg0), max));
6329 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6331 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6333 default:
6334 break;
6337 return NULL_TREE;
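/* A standalone sketch of the DBL_MAX rewrites above: for doubles,
   X < +Inf holds exactly when X <= DBL_MAX, and X == +Inf exactly
   when X > DBL_MAX; NaNs fail both sides of each equivalence.
   Illustrative only.  */
#include <assert.h>
#include <float.h>
#include <math.h>

static void
check_inf_compare (double x)
{
  assert ((x < INFINITY) == (x <= DBL_MAX));
  assert ((x == INFINITY) == (x > DBL_MAX));
}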
6340 /* Subroutine of fold() that optimizes comparisons of a division by
6341 a nonzero integer constant against an integer constant, i.e.
6342 X/C1 op C2.
6344 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6345 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6346 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6348 The function returns the constant folded tree if a simplification
6349 can be made, and NULL_TREE otherwise. */
6351 static tree
6352 fold_div_compare (location_t loc,
6353 enum tree_code code, tree type, tree arg0, tree arg1)
6355 tree prod, tmp, hi, lo;
6356 tree arg00 = TREE_OPERAND (arg0, 0);
6357 tree arg01 = TREE_OPERAND (arg0, 1);
6358 double_int val;
6359 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6360 bool neg_overflow;
6361 int overflow;
6363 /* We have to do this the hard way to detect unsigned overflow.
6364 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6365 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6366 TREE_INT_CST_HIGH (arg01),
6367 TREE_INT_CST_LOW (arg1),
6368 TREE_INT_CST_HIGH (arg1),
6369 &val.low, &val.high, unsigned_p);
6370 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6371 neg_overflow = false;
6373 if (unsigned_p)
6375 tmp = int_const_binop (MINUS_EXPR, arg01,
6376 build_int_cst (TREE_TYPE (arg01), 1), 0);
6377 lo = prod;
6379 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6380 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6381 TREE_INT_CST_HIGH (prod),
6382 TREE_INT_CST_LOW (tmp),
6383 TREE_INT_CST_HIGH (tmp),
6384 &val.low, &val.high, unsigned_p);
6385 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6386 -1, overflow | TREE_OVERFLOW (prod));
6388 else if (tree_int_cst_sgn (arg01) >= 0)
6390 tmp = int_const_binop (MINUS_EXPR, arg01,
6391 build_int_cst (TREE_TYPE (arg01), 1), 0);
6392 switch (tree_int_cst_sgn (arg1))
6394 case -1:
6395 neg_overflow = true;
6396 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6397 hi = prod;
6398 break;
6400 case 0:
6401 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6402 hi = tmp;
6403 break;
6405 case 1:
6406 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6407 lo = prod;
6408 break;
6410 default:
6411 gcc_unreachable ();
6414 else
6416 /* A negative divisor reverses the relational operators. */
6417 code = swap_tree_comparison (code);
6419 tmp = int_const_binop (PLUS_EXPR, arg01,
6420 build_int_cst (TREE_TYPE (arg01), 1), 0);
6421 switch (tree_int_cst_sgn (arg1))
6423 case -1:
6424 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6425 lo = prod;
6426 break;
6428 case 0:
6429 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6430 lo = tmp;
6431 break;
6433 case 1:
6434 neg_overflow = true;
6435 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6436 hi = prod;
6437 break;
6439 default:
6440 gcc_unreachable ();
6444 switch (code)
6446 case EQ_EXPR:
6447 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6448 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6449 if (TREE_OVERFLOW (hi))
6450 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6451 if (TREE_OVERFLOW (lo))
6452 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6453 return build_range_check (loc, type, arg00, 1, lo, hi);
6455 case NE_EXPR:
6456 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6457 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6458 if (TREE_OVERFLOW (hi))
6459 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6460 if (TREE_OVERFLOW (lo))
6461 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6462 return build_range_check (loc, type, arg00, 0, lo, hi);
6464 case LT_EXPR:
6465 if (TREE_OVERFLOW (lo))
6467 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6468 return omit_one_operand_loc (loc, type, tmp, arg00);
6470 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6472 case LE_EXPR:
6473 if (TREE_OVERFLOW (hi))
6475 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6476 return omit_one_operand_loc (loc, type, tmp, arg00);
6478 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6480 case GT_EXPR:
6481 if (TREE_OVERFLOW (hi))
6483 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6484 return omit_one_operand_loc (loc, type, tmp, arg00);
6486 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6488 case GE_EXPR:
6489 if (TREE_OVERFLOW (lo))
6491 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6492 return omit_one_operand_loc (loc, type, tmp, arg00);
6494 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6496 default:
6497 break;
6500 return NULL_TREE;
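/* A standalone sketch of the range check produced above: for unsigned
   X, X / 4 == 3 holds exactly for X in [12, 15], i.e. in
   [C1*C2, C1*C2 + C1 - 1].  Illustrative only.  */
#include <assert.h>

static void
check_div_compare (unsigned x)
{
  assert ((x / 4 == 3) == (x >= 12 && x <= 15));
}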
6504 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6505 equality/inequality test, then return a simplified form of the test
6506 using a sign test. Otherwise return NULL. TYPE is the desired
6507 result type. */
6509 static tree
6510 fold_single_bit_test_into_sign_test (location_t loc,
6511 enum tree_code code, tree arg0, tree arg1,
6512 tree result_type)
6514 /* If this is testing a single bit, we can optimize the test. */
6515 if ((code == NE_EXPR || code == EQ_EXPR)
6516 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6517 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6519 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6520 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6521 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6523 if (arg00 != NULL_TREE
6524 /* This is only a win if casting to a signed type is cheap,
6525 i.e. when arg00's type is not a partial mode. */
6526 && TYPE_PRECISION (TREE_TYPE (arg00))
6527 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6529 tree stype = signed_type_for (TREE_TYPE (arg00));
6530 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6531 result_type,
6532 fold_convert_loc (loc, stype, arg00),
6533 build_int_cst (stype, 0));
6537 return NULL_TREE;
6540 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6541 equality/inequality test, then return a simplified form of
6542 the test using shifts and logical operations. Otherwise return
6543 NULL. TYPE is the desired result type. */
6545 tree
6546 fold_single_bit_test (location_t loc, enum tree_code code,
6547 tree arg0, tree arg1, tree result_type)
6549 /* If this is testing a single bit, we can optimize the test. */
6550 if ((code == NE_EXPR || code == EQ_EXPR)
6551 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6552 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6554 tree inner = TREE_OPERAND (arg0, 0);
6555 tree type = TREE_TYPE (arg0);
6556 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6557 enum machine_mode operand_mode = TYPE_MODE (type);
6558 int ops_unsigned;
6559 tree signed_type, unsigned_type, intermediate_type;
6560 tree tem, one;
6562 /* First, see if we can fold the single bit test into a sign-bit
6563 test. */
6564 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6565 result_type);
6566 if (tem)
6567 return tem;
6569 /* Otherwise we have (A & C) != 0 where C is a single bit; convert
6570 that into ((A >> C2) & 1), where C2 = log2(C).
6571 Similarly for (A & C) == 0. */
6573 /* If INNER is a right shift of a constant and it plus BITNUM does
6574 not overflow, adjust BITNUM and INNER. */
6575 if (TREE_CODE (inner) == RSHIFT_EXPR
6576 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6577 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6578 && bitnum < TYPE_PRECISION (type)
6579 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6580 bitnum - TYPE_PRECISION (type)))
6582 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6583 inner = TREE_OPERAND (inner, 0);
6586 /* If we are going to be able to omit the AND below, we must do our
6587 operations as unsigned. If we must use the AND, we have a choice.
6588 Normally unsigned is faster, but for some machines signed is. */
6589 #ifdef LOAD_EXTEND_OP
6590 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6591 && !flag_syntax_only) ? 0 : 1;
6592 #else
6593 ops_unsigned = 1;
6594 #endif
6596 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6597 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6598 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6599 inner = fold_convert_loc (loc, intermediate_type, inner);
6601 if (bitnum != 0)
6602 inner = build2 (RSHIFT_EXPR, intermediate_type,
6603 inner, size_int (bitnum));
6605 one = build_int_cst (intermediate_type, 1);
6607 if (code == EQ_EXPR)
6608 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6610 /* Put the AND last so it can combine with more things. */
6611 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6613 /* Make sure to return the proper type. */
6614 inner = fold_convert_loc (loc, result_type, inner);
6616 return inner;
6618 return NULL_TREE;
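/* A standalone sketch of the shift-and-mask form built above: testing
   bit C2 of A, (A & (1 << C2)) != 0 becomes (A >> C2) & 1, and the
   == 0 form XORs in a 1 first.  Illustrative only.  */
#include <assert.h>

static void
check_single_bit_test (unsigned a)
{
  assert (((a & 8) != 0) == ((a >> 3) & 1));
  assert (((a & 8) == 0) == (((a >> 3) & 1) ^ 1));
}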
6621 /* Check whether we are allowed to reorder operands arg0 and arg1,
6622 such that the evaluation of arg1 occurs before arg0. */
6624 static bool
6625 reorder_operands_p (const_tree arg0, const_tree arg1)
6627 if (! flag_evaluation_order)
6628 return true;
6629 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6630 return true;
6631 return ! TREE_SIDE_EFFECTS (arg0)
6632 && ! TREE_SIDE_EFFECTS (arg1);
6635 /* Test whether it is preferable to swap two operands, ARG0 and
6636 ARG1, for example because ARG0 is an integer constant and ARG1
6637 isn't. If REORDER is true, only recommend swapping if we can
6638 evaluate the operands in reverse order. */
6640 bool
6641 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6643 STRIP_SIGN_NOPS (arg0);
6644 STRIP_SIGN_NOPS (arg1);
6646 if (TREE_CODE (arg1) == INTEGER_CST)
6647 return 0;
6648 if (TREE_CODE (arg0) == INTEGER_CST)
6649 return 1;
6651 if (TREE_CODE (arg1) == REAL_CST)
6652 return 0;
6653 if (TREE_CODE (arg0) == REAL_CST)
6654 return 1;
6656 if (TREE_CODE (arg1) == FIXED_CST)
6657 return 0;
6658 if (TREE_CODE (arg0) == FIXED_CST)
6659 return 1;
6661 if (TREE_CODE (arg1) == COMPLEX_CST)
6662 return 0;
6663 if (TREE_CODE (arg0) == COMPLEX_CST)
6664 return 1;
6666 if (TREE_CONSTANT (arg1))
6667 return 0;
6668 if (TREE_CONSTANT (arg0))
6669 return 1;
6671 if (optimize_function_for_size_p (cfun))
6672 return 0;
6674 if (reorder && flag_evaluation_order
6675 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6676 return 0;
6678 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6679 for commutative and comparison operators. Ensuring a canonical
6680 form allows the optimizers to find additional redundancies without
6681 having to explicitly check for both orderings. */
6682 if (TREE_CODE (arg0) == SSA_NAME
6683 && TREE_CODE (arg1) == SSA_NAME
6684 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6685 return 1;
6687 /* Put SSA_NAMEs last. */
6688 if (TREE_CODE (arg1) == SSA_NAME)
6689 return 0;
6690 if (TREE_CODE (arg0) == SSA_NAME)
6691 return 1;
6693 /* Put variables last. */
6694 if (DECL_P (arg1))
6695 return 0;
6696 if (DECL_P (arg0))
6697 return 1;
6699 return 0;
6702 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6703 ARG0 is extended to a wider type. */
6705 static tree
6706 fold_widened_comparison (location_t loc, enum tree_code code,
6707 tree type, tree arg0, tree arg1)
6709 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6710 tree arg1_unw;
6711 tree shorter_type, outer_type;
6712 tree min, max;
6713 bool above, below;
6715 if (arg0_unw == arg0)
6716 return NULL_TREE;
6717 shorter_type = TREE_TYPE (arg0_unw);
6719 #ifdef HAVE_canonicalize_funcptr_for_compare
6720 /* Disable this optimization if we're casting a function pointer
6721 type on targets that require function pointer canonicalization. */
6722 if (HAVE_canonicalize_funcptr_for_compare
6723 && TREE_CODE (shorter_type) == POINTER_TYPE
6724 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6725 return NULL_TREE;
6726 #endif
6728 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6729 return NULL_TREE;
6731 arg1_unw = get_unwidened (arg1, NULL_TREE);
6733 /* If possible, express the comparison in the shorter mode. */
6734 if ((code == EQ_EXPR || code == NE_EXPR
6735 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6736 && (TREE_TYPE (arg1_unw) == shorter_type
6737 || ((TYPE_PRECISION (shorter_type)
6738 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6739 && (TYPE_UNSIGNED (shorter_type)
6740 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6741 || (TREE_CODE (arg1_unw) == INTEGER_CST
6742 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6743 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6744 && int_fits_type_p (arg1_unw, shorter_type))))
6745 return fold_build2_loc (loc, code, type, arg0_unw,
6746 fold_convert_loc (loc, shorter_type, arg1_unw));
6748 if (TREE_CODE (arg1_unw) != INTEGER_CST
6749 || TREE_CODE (shorter_type) != INTEGER_TYPE
6750 || !int_fits_type_p (arg1_unw, shorter_type))
6751 return NULL_TREE;
6753 /* If we are comparing with an integer that does not fit into the range
6754 of the shorter type, the result is known. */
6755 outer_type = TREE_TYPE (arg1_unw);
6756 min = lower_bound_in_type (outer_type, shorter_type);
6757 max = upper_bound_in_type (outer_type, shorter_type);
6759 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6760 max, arg1_unw));
6761 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6762 arg1_unw, min));
6764 switch (code)
6766 case EQ_EXPR:
6767 if (above || below)
6768 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6769 break;
6771 case NE_EXPR:
6772 if (above || below)
6773 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6774 break;
6776 case LT_EXPR:
6777 case LE_EXPR:
6778 if (above)
6779 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6780 else if (below)
6781 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6783 case GT_EXPR:
6784 case GE_EXPR:
6785 if (above)
6786 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6787 else if (below)
6788 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6790 default:
6791 break;
6794 return NULL_TREE;
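/* A standalone sketch of the out-of-range cases above, assuming the
   usual 8-bit signed char: the widened value is confined to
   [-128, 127], so comparisons against 300 have a known result.
   Illustrative only.  */
#include <assert.h>

static void
check_widened_compare (signed char c)
{
  assert (!((int) c == 300));   /* always false */
  assert ((int) c < 300);       /* always true */
}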
6797 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6798 ARG0 just the signedness is changed. */
6800 static tree
6801 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6802 tree arg0, tree arg1)
6804 tree arg0_inner;
6805 tree inner_type, outer_type;
6807 if (!CONVERT_EXPR_P (arg0))
6808 return NULL_TREE;
6810 outer_type = TREE_TYPE (arg0);
6811 arg0_inner = TREE_OPERAND (arg0, 0);
6812 inner_type = TREE_TYPE (arg0_inner);
6814 #ifdef HAVE_canonicalize_funcptr_for_compare
6815 /* Disable this optimization if we're casting a function pointer
6816 type on targets that require function pointer canonicalization. */
6817 if (HAVE_canonicalize_funcptr_for_compare
6818 && TREE_CODE (inner_type) == POINTER_TYPE
6819 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6820 return NULL_TREE;
6821 #endif
6823 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6824 return NULL_TREE;
6826 if (TREE_CODE (arg1) != INTEGER_CST
6827 && !(CONVERT_EXPR_P (arg1)
6828 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6829 return NULL_TREE;
6831 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6832 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6833 && code != NE_EXPR
6834 && code != EQ_EXPR)
6835 return NULL_TREE;
6837 if (TREE_CODE (arg1) == INTEGER_CST)
6838 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6839 0, TREE_OVERFLOW (arg1));
6840 else
6841 arg1 = fold_convert_loc (loc, inner_type, arg1);
6843 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6846 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6847 the step of the array. Reconstructs s and delta in the case of s *
6848 delta being an integer constant (and thus already folded). ADDR is
6849 the address. OP1 is the multiplicative expression. If the
6850 function succeeds, the new address expression is returned.
6851 Otherwise NULL_TREE is returned. LOC is the location of the
6852 resulting expression. */
6854 static tree
6855 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6857 tree s, delta, step;
6858 tree ref = TREE_OPERAND (addr, 0), pref;
6859 tree ret, pos;
6860 tree itype;
6861 bool mdim = false;
6863 /* Strip the nops that might be added when converting op1 to sizetype. */
6864 STRIP_NOPS (op1);
6866 /* Canonicalize op1 into a possibly non-constant delta
6867 and an INTEGER_CST s. */
6868 if (TREE_CODE (op1) == MULT_EXPR)
6870 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6872 STRIP_NOPS (arg0);
6873 STRIP_NOPS (arg1);
6875 if (TREE_CODE (arg0) == INTEGER_CST)
6877 s = arg0;
6878 delta = arg1;
6880 else if (TREE_CODE (arg1) == INTEGER_CST)
6882 s = arg1;
6883 delta = arg0;
6885 else
6886 return NULL_TREE;
6888 else if (TREE_CODE (op1) == INTEGER_CST)
6890 delta = op1;
6891 s = NULL_TREE;
6893 else
6895 /* Pretend op1 is delta * 1. */
6896 delta = op1;
6897 s = integer_one_node;
6900 for (;; ref = TREE_OPERAND (ref, 0))
6902 if (TREE_CODE (ref) == ARRAY_REF)
6904 tree domain;
6906 /* Remember if this was a multi-dimensional array. */
6907 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6908 mdim = true;
6910 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6911 if (! domain)
6912 continue;
6913 itype = TREE_TYPE (domain);
6915 step = array_ref_element_size (ref);
6916 if (TREE_CODE (step) != INTEGER_CST)
6917 continue;
6919 if (s)
6921 if (! tree_int_cst_equal (step, s))
6922 continue;
6924 else
6926 /* Check whether delta is a multiple of step. */
6927 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6928 if (! tmp)
6929 continue;
6930 delta = tmp;
6933 /* Only fold here if we can verify we do not overflow one
6934 dimension of a multi-dimensional array. */
6935 if (mdim)
6937 tree tmp;
6939 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6940 || !TYPE_MAX_VALUE (domain)
6941 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6942 continue;
6944 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6945 fold_convert_loc (loc, itype,
6946 TREE_OPERAND (ref, 1)),
6947 fold_convert_loc (loc, itype, delta));
6948 if (!tmp
6949 || TREE_CODE (tmp) != INTEGER_CST
6950 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6951 continue;
6954 break;
6956 else
6957 mdim = false;
6959 if (!handled_component_p (ref))
6960 return NULL_TREE;
6963 /* We found a suitable array reference, so copy everything up to it
6964 and replace the index. */
6966 pref = TREE_OPERAND (addr, 0);
6967 ret = copy_node (pref);
6968 SET_EXPR_LOCATION (ret, loc);
6969 pos = ret;
6971 while (pref != ref)
6973 pref = TREE_OPERAND (pref, 0);
6974 TREE_OPERAND (pos, 0) = copy_node (pref);
6975 pos = TREE_OPERAND (pos, 0);
6978 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6979 fold_convert_loc (loc, itype,
6980 TREE_OPERAND (pos, 1)),
6981 fold_convert_loc (loc, itype, delta));
6983 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
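/* A standalone sketch of the address identity used above: stepping a
   pointer to a[idx] by delta elements lands on &a[idx + delta],
   provided both indices stay within the array.  Illustrative only.  */
#include <assert.h>

static void
check_move_mult_to_index (void)
{
  int a[10];
  int idx = 2, delta = 3;
  assert (&a[idx] + delta == &a[idx + delta]);
}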
6987 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6988 means A >= Y && A != MAX, but in this case we know that
6989 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6991 static tree
6992 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6994 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6996 if (TREE_CODE (bound) == LT_EXPR)
6997 a = TREE_OPERAND (bound, 0);
6998 else if (TREE_CODE (bound) == GT_EXPR)
6999 a = TREE_OPERAND (bound, 1);
7000 else
7001 return NULL_TREE;
7003 typea = TREE_TYPE (a);
7004 if (!INTEGRAL_TYPE_P (typea)
7005 && !POINTER_TYPE_P (typea))
7006 return NULL_TREE;
7008 if (TREE_CODE (ineq) == LT_EXPR)
7010 a1 = TREE_OPERAND (ineq, 1);
7011 y = TREE_OPERAND (ineq, 0);
7013 else if (TREE_CODE (ineq) == GT_EXPR)
7015 a1 = TREE_OPERAND (ineq, 0);
7016 y = TREE_OPERAND (ineq, 1);
7018 else
7019 return NULL_TREE;
7021 if (TREE_TYPE (a1) != typea)
7022 return NULL_TREE;
7024 if (POINTER_TYPE_P (typea))
7026 /* Convert the pointers to integers before taking the difference. */
7027 tree ta = fold_convert_loc (loc, ssizetype, a);
7028 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7029 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7031 else
7032 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7034 if (!diff || !integer_onep (diff))
7035 return NULL_TREE;
7037 return fold_build2_loc (loc, GE_EXPR, type, a, y);
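/* A standalone sketch of the rewrite above: when A < X holds, A + 1
   cannot wrap, so A + 1 > Y is exactly A >= Y.  The short-circuit
   below evaluates A + 1 only when A < X.  Illustrative only.  */
#include <assert.h>

static void
check_nonsharp_ineq (int a, int x, int y)
{
  assert (((a < x) && (a + 1 > y)) == ((a < x) && (a >= y)));
}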
7040 /* Fold a sum or difference of at least one multiplication.
7041 Returns the folded tree or NULL if no simplification could be made. */
7043 static tree
7044 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7045 tree arg0, tree arg1)
7047 tree arg00, arg01, arg10, arg11;
7048 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7050 /* (A * C) +- (B * C) -> (A+-B) * C.
7051 (A * C) +- A -> A * (C+-1).
7052 We are most concerned about the case where C is a constant,
7053 but other combinations show up during loop reduction. Since
7054 it is not difficult, try all four possibilities. */
7056 if (TREE_CODE (arg0) == MULT_EXPR)
7058 arg00 = TREE_OPERAND (arg0, 0);
7059 arg01 = TREE_OPERAND (arg0, 1);
7061 else if (TREE_CODE (arg0) == INTEGER_CST)
7063 arg00 = build_one_cst (type);
7064 arg01 = arg0;
7066 else
7068 /* We cannot generate constant 1 for fract. */
7069 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7070 return NULL_TREE;
7071 arg00 = arg0;
7072 arg01 = build_one_cst (type);
7074 if (TREE_CODE (arg1) == MULT_EXPR)
7076 arg10 = TREE_OPERAND (arg1, 0);
7077 arg11 = TREE_OPERAND (arg1, 1);
7079 else if (TREE_CODE (arg1) == INTEGER_CST)
7081 arg10 = build_one_cst (type);
7082 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7083 the purpose of this canonicalization. */
7084 if (TREE_INT_CST_HIGH (arg1) == -1
7085 && negate_expr_p (arg1)
7086 && code == PLUS_EXPR)
7088 arg11 = negate_expr (arg1);
7089 code = MINUS_EXPR;
7091 else
7092 arg11 = arg1;
7094 else
7096 /* We cannot generate constant 1 for fract. */
7097 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7098 return NULL_TREE;
7099 arg10 = arg1;
7100 arg11 = build_one_cst (type);
7102 same = NULL_TREE;
7104 if (operand_equal_p (arg01, arg11, 0))
7105 same = arg01, alt0 = arg00, alt1 = arg10;
7106 else if (operand_equal_p (arg00, arg10, 0))
7107 same = arg00, alt0 = arg01, alt1 = arg11;
7108 else if (operand_equal_p (arg00, arg11, 0))
7109 same = arg00, alt0 = arg01, alt1 = arg10;
7110 else if (operand_equal_p (arg01, arg10, 0))
7111 same = arg01, alt0 = arg00, alt1 = arg11;
7113 /* No identical multiplicands; see if we can find a common
7114 power-of-two factor in non-power-of-two multiplies. This
7115 can help in multi-dimensional array access. */
7116 else if (host_integerp (arg01, 0)
7117 && host_integerp (arg11, 0))
7119 HOST_WIDE_INT int01, int11, tmp;
7120 bool swap = false;
7121 tree maybe_same;
7122 int01 = TREE_INT_CST_LOW (arg01);
7123 int11 = TREE_INT_CST_LOW (arg11);
7125 /* Move min of absolute values to int11. */
7126 if ((int01 >= 0 ? int01 : -int01)
7127 < (int11 >= 0 ? int11 : -int11))
7129 tmp = int01, int01 = int11, int11 = tmp;
7130 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7131 maybe_same = arg01;
7132 swap = true;
7134 else
7135 maybe_same = arg11;
7137 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7138 /* The remainder should not be a constant, otherwise we
7139 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7140 increases the number of multiplications necessary. */
7141 && TREE_CODE (arg10) != INTEGER_CST)
7143 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7144 build_int_cst (TREE_TYPE (arg00),
7145 int01 / int11));
7146 alt1 = arg10;
7147 same = maybe_same;
7148 if (swap)
7149 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7153 if (same)
7154 return fold_build2_loc (loc, MULT_EXPR, type,
7155 fold_build2_loc (loc, code, type,
7156 fold_convert_loc (loc, type, alt0),
7157 fold_convert_loc (loc, type, alt1)),
7158 fold_convert_loc (loc, type, same));
7160 return NULL_TREE;
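/* A standalone sketch of the factorings tried above, assuming the
   products stay in range: identical multiplicands distribute as
   (A*C) + (B*C) -> (A+B)*C, and failing that a common power-of-two
   factor is extracted, e.g. I*12 + J*4 -> (I*3 + J)*4.  Illustrative
   only.  */
#include <assert.h>

static void
check_plusminus_mult (int a, int b, int c, int i, int j)
{
  assert (a * c + b * c == (a + b) * c);
  assert (i * 12 + j * 4 == (i * 3 + j) * 4);
}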
7163 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7164 specified by EXPR into the buffer PTR of length LEN bytes.
7165 Return the number of bytes placed in the buffer, or zero
7166 upon failure. */
7168 static int
7169 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7171 tree type = TREE_TYPE (expr);
7172 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7173 int byte, offset, word, words;
7174 unsigned char value;
7176 if (total_bytes > len)
7177 return 0;
7178 words = total_bytes / UNITS_PER_WORD;
7180 for (byte = 0; byte < total_bytes; byte++)
7182 int bitpos = byte * BITS_PER_UNIT;
7183 if (bitpos < HOST_BITS_PER_WIDE_INT)
7184 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7185 else
7186 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7187 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7189 if (total_bytes > UNITS_PER_WORD)
7191 word = byte / UNITS_PER_WORD;
7192 if (WORDS_BIG_ENDIAN)
7193 word = (words - 1) - word;
7194 offset = word * UNITS_PER_WORD;
7195 if (BYTES_BIG_ENDIAN)
7196 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7197 else
7198 offset += byte % UNITS_PER_WORD;
7200 else
7201 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7202 ptr[offset] = value;
7204 return total_bytes;
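/* A standalone sketch of the byte-order logic above, reduced to a
   fixed-width encoder that emits VALUE's four bytes in a chosen
   target byte order; the names are illustrative only.  */
#include <assert.h>

static void
encode_u32 (unsigned long value, unsigned char *ptr, int big_endian)
{
  int byte;
  for (byte = 0; byte < 4; byte++)
    {
      int offset = big_endian ? 3 - byte : byte;
      ptr[offset] = (unsigned char) (value >> (byte * 8));
    }
}

static void
check_encode_u32 (void)
{
  unsigned char buf[4];
  encode_u32 (0x01020304UL, buf, 1);   /* big endian: MSB first */
  assert (buf[0] == 0x01 && buf[3] == 0x04);
  encode_u32 (0x01020304UL, buf, 0);   /* little endian: LSB first */
  assert (buf[0] == 0x04 && buf[3] == 0x01);
}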
7208 /* Subroutine of native_encode_expr. Encode the REAL_CST
7209 specified by EXPR into the buffer PTR of length LEN bytes.
7210 Return the number of bytes placed in the buffer, or zero
7211 upon failure. */
7213 static int
7214 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7216 tree type = TREE_TYPE (expr);
7217 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7218 int byte, offset, word, words, bitpos;
7219 unsigned char value;
7221 /* real_to_target puts 32 bits in each element of TMP, no matter
7222 the size of the host's long. We handle floating point
7223 representations with up to 192 bits. */
7224 long tmp[6];
7226 if (total_bytes > len)
7227 return 0;
7228 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7230 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7232 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7233 bitpos += BITS_PER_UNIT)
7235 byte = (bitpos / BITS_PER_UNIT) & 3;
7236 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7238 if (UNITS_PER_WORD < 4)
7240 word = byte / UNITS_PER_WORD;
7241 if (WORDS_BIG_ENDIAN)
7242 word = (words - 1) - word;
7243 offset = word * UNITS_PER_WORD;
7244 if (BYTES_BIG_ENDIAN)
7245 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7246 else
7247 offset += byte % UNITS_PER_WORD;
7249 else
7250 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7251 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7253 return total_bytes;
7256 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7257 specified by EXPR into the buffer PTR of length LEN bytes.
7258 Return the number of bytes placed in the buffer, or zero
7259 upon failure. */
7261 static int
7262 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7264 int rsize, isize;
7265 tree part;
7267 part = TREE_REALPART (expr);
7268 rsize = native_encode_expr (part, ptr, len);
7269 if (rsize == 0)
7270 return 0;
7271 part = TREE_IMAGPART (expr);
7272 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7273 if (isize != rsize)
7274 return 0;
7275 return rsize + isize;
7279 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7280 specified by EXPR into the buffer PTR of length LEN bytes.
7281 Return the number of bytes placed in the buffer, or zero
7282 upon failure. */
7284 static int
7285 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7287 int i, size, offset, count;
7288 tree itype, elem, elements;
7290 offset = 0;
7291 elements = TREE_VECTOR_CST_ELTS (expr);
7292 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7293 itype = TREE_TYPE (TREE_TYPE (expr));
7294 size = GET_MODE_SIZE (TYPE_MODE (itype));
7295 for (i = 0; i < count; i++)
7297 if (elements)
7299 elem = TREE_VALUE (elements);
7300 elements = TREE_CHAIN (elements);
7302 else
7303 elem = NULL_TREE;
7305 if (elem)
7307 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7308 return 0;
7310 else
7312 if (offset + size > len)
7313 return 0;
7314 memset (ptr+offset, 0, size);
7316 offset += size;
7318 return offset;
7322 /* Subroutine of native_encode_expr. Encode the STRING_CST
7323 specified by EXPR into the buffer PTR of length LEN bytes.
7324 Return the number of bytes placed in the buffer, or zero
7325 upon failure. */
7327 static int
7328 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7330 tree type = TREE_TYPE (expr);
7331 HOST_WIDE_INT total_bytes;
7333 if (TREE_CODE (type) != ARRAY_TYPE
7334 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7335 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7336 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7337 return 0;
7338 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7339 if (total_bytes > len)
7340 return 0;
7341 if (TREE_STRING_LENGTH (expr) < total_bytes)
7343 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7344 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7345 total_bytes - TREE_STRING_LENGTH (expr));
7347 else
7348 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7349 return total_bytes;
7353 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7354 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7355 buffer PTR of length LEN bytes. Return the number of bytes
7356 placed in the buffer, or zero upon failure. */
7358 int
7359 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7361 switch (TREE_CODE (expr))
7363 case INTEGER_CST:
7364 return native_encode_int (expr, ptr, len);
7366 case REAL_CST:
7367 return native_encode_real (expr, ptr, len);
7369 case COMPLEX_CST:
7370 return native_encode_complex (expr, ptr, len);
7372 case VECTOR_CST:
7373 return native_encode_vector (expr, ptr, len);
7375 case STRING_CST:
7376 return native_encode_string (expr, ptr, len);
7378 default:
7379 return 0;
7384 /* Subroutine of native_interpret_expr. Interpret the contents of
7385 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7386 If the buffer cannot be interpreted, return NULL_TREE. */
7388 static tree
7389 native_interpret_int (tree type, const unsigned char *ptr, int len)
7391 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7392 int byte, offset, word, words;
7393 unsigned char value;
7394 double_int result;
7396 if (total_bytes > len)
7397 return NULL_TREE;
7398 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7399 return NULL_TREE;
7401 result = double_int_zero;
7402 words = total_bytes / UNITS_PER_WORD;
7404 for (byte = 0; byte < total_bytes; byte++)
7406 int bitpos = byte * BITS_PER_UNIT;
7407 if (total_bytes > UNITS_PER_WORD)
7409 word = byte / UNITS_PER_WORD;
7410 if (WORDS_BIG_ENDIAN)
7411 word = (words - 1) - word;
7412 offset = word * UNITS_PER_WORD;
7413 if (BYTES_BIG_ENDIAN)
7414 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7415 else
7416 offset += byte % UNITS_PER_WORD;
7418 else
7419 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7420 value = ptr[offset];
7422 if (bitpos < HOST_BITS_PER_WIDE_INT)
7423 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7424 else
7425 result.high |= (unsigned HOST_WIDE_INT) value
7426 << (bitpos - HOST_BITS_PER_WIDE_INT);
7429 return double_int_to_tree (type, result);
7433 /* Subroutine of native_interpret_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7435 If the buffer cannot be interpreted, return NULL_TREE. */
7437 static tree
7438 native_interpret_real (tree type, const unsigned char *ptr, int len)
7440 enum machine_mode mode = TYPE_MODE (type);
7441 int total_bytes = GET_MODE_SIZE (mode);
7442 int byte, offset, word, words, bitpos;
7443 unsigned char value;
7444 /* There are always 32 bits in each long, no matter the size of
7445 the host's long. We handle floating point representations with
7446 up to 192 bits. */
7447 REAL_VALUE_TYPE r;
7448 long tmp[6];
7450 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7451 if (total_bytes > len || total_bytes > 24)
7452 return NULL_TREE;
7453 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7455 memset (tmp, 0, sizeof (tmp));
7456 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7457 bitpos += BITS_PER_UNIT)
7459 byte = (bitpos / BITS_PER_UNIT) & 3;
7460 if (UNITS_PER_WORD < 4)
7462 word = byte / UNITS_PER_WORD;
7463 if (WORDS_BIG_ENDIAN)
7464 word = (words - 1) - word;
7465 offset = word * UNITS_PER_WORD;
7466 if (BYTES_BIG_ENDIAN)
7467 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7468 else
7469 offset += byte % UNITS_PER_WORD;
7471 else
7472 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7473 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7475 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7478 real_from_target (&r, tmp, mode);
7479 return build_real (type, r);
7483 /* Subroutine of native_interpret_expr. Interpret the contents of
7484 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7485 If the buffer cannot be interpreted, return NULL_TREE. */
7487 static tree
7488 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7490 tree etype, rpart, ipart;
7491 int size;
7493 etype = TREE_TYPE (type);
7494 size = GET_MODE_SIZE (TYPE_MODE (etype));
7495 if (size * 2 > len)
7496 return NULL_TREE;
7497 rpart = native_interpret_expr (etype, ptr, size);
7498 if (!rpart)
7499 return NULL_TREE;
7500 ipart = native_interpret_expr (etype, ptr+size, size);
7501 if (!ipart)
7502 return NULL_TREE;
7503 return build_complex (type, rpart, ipart);
7507 /* Subroutine of native_interpret_expr. Interpret the contents of
7508 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7509 If the buffer cannot be interpreted, return NULL_TREE. */
7511 static tree
7512 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7514 tree etype, elem, elements;
7515 int i, size, count;
7517 etype = TREE_TYPE (type);
7518 size = GET_MODE_SIZE (TYPE_MODE (etype));
7519 count = TYPE_VECTOR_SUBPARTS (type);
7520 if (size * count > len)
7521 return NULL_TREE;
7523 elements = NULL_TREE;
7524 for (i = count - 1; i >= 0; i--)
7526 elem = native_interpret_expr (etype, ptr+(i*size), size);
7527 if (!elem)
7528 return NULL_TREE;
7529 elements = tree_cons (NULL_TREE, elem, elements);
7531 return build_vector (type, elements);
7535 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7536 the buffer PTR of length LEN as a constant of type TYPE. For
7537 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7538 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7539 return NULL_TREE. */
7541 tree
7542 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7544 switch (TREE_CODE (type))
7546 case INTEGER_TYPE:
7547 case ENUMERAL_TYPE:
7548 case BOOLEAN_TYPE:
7549 return native_interpret_int (type, ptr, len);
7551 case REAL_TYPE:
7552 return native_interpret_real (type, ptr, len);
7554 case COMPLEX_TYPE:
7555 return native_interpret_complex (type, ptr, len);
7557 case VECTOR_TYPE:
7558 return native_interpret_vector (type, ptr, len);
7560 default:
7561 return NULL_TREE;
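/* A minimal usage sketch (added for illustration; this helper is
   hypothetical and not part of the original file): the encode and
   interpret entry points above compose into a round trip that
   reinterprets a constant's target byte representation in a new
   type.  fold_view_convert_expr below is the real in-tree client.  */

static tree
round_trip_example (tree cst, tree as_type)
{
  unsigned char buf[64];	/* Room for up to 512-bit values.  */
  int len = native_encode_expr (cst, buf, sizeof (buf));
  if (len == 0)
    return NULL_TREE;		/* Unsupported constant or BUF too small.  */
  return native_interpret_expr (as_type, buf, len);
}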
7566 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7567 TYPE at compile-time. If we're unable to perform the conversion
7568 return NULL_TREE. */
7570 static tree
7571 fold_view_convert_expr (tree type, tree expr)
7573 /* We support up to 512-bit values (for V8DFmode). */
7574 unsigned char buffer[64];
7575 int len;
7577 /* Check that the host and target are sane. */
7578 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7579 return NULL_TREE;
7581 len = native_encode_expr (expr, buffer, sizeof (buffer));
7582 if (len == 0)
7583 return NULL_TREE;
7585 return native_interpret_expr (type, buffer, len);
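/* Worked example (added): folding VIEW_CONVERT_EXPR<int>(1.0f) first
   encodes the REAL_CST as the IEEE single bit pattern 0x3f800000
   (bytes 00 00 80 3f on a little-endian target) and then reads the
   buffer back as the INTEGER_CST 1065353216.  */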
7588 /* Build an expression for the address of T. Folds away INDIRECT_REF
7589 to avoid confusing the gimplify process. */
7591 tree
7592 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7594 /* The size of the object is not relevant when talking about its address. */
7595 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7596 t = TREE_OPERAND (t, 0);
7598 if (TREE_CODE (t) == INDIRECT_REF
7599 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7601 t = TREE_OPERAND (t, 0);
7603 if (TREE_TYPE (t) != ptrtype)
7605 t = build1 (NOP_EXPR, ptrtype, t);
7606 SET_EXPR_LOCATION (t, loc);
7609 else if (TREE_CODE (t) == MEM_REF
7610 && integer_zerop (TREE_OPERAND (t, 1)))
7611 return TREE_OPERAND (t, 0);
7612 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7614 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7616 if (TREE_TYPE (t) != ptrtype)
7617 t = fold_convert_loc (loc, ptrtype, t);
7619 else
7621 t = build1 (ADDR_EXPR, ptrtype, t);
7622 SET_EXPR_LOCATION (t, loc);
7625 return t;
7628 /* Build an expression for the address of T. */
7630 tree
7631 build_fold_addr_expr_loc (location_t loc, tree t)
7633 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7635 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7638 /* Fold a unary expression of code CODE and type TYPE with operand
7639 OP0. Return the folded expression if folding is successful.
7640 Otherwise, return NULL_TREE. */
7642 tree
7643 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7645 tree tem;
7646 tree arg0;
7647 enum tree_code_class kind = TREE_CODE_CLASS (code);
7649 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7650 && TREE_CODE_LENGTH (code) == 1);
7652 arg0 = op0;
7653 if (arg0)
7655 if (CONVERT_EXPR_CODE_P (code)
7656 || code == FLOAT_EXPR || code == ABS_EXPR)
7658 /* Don't use STRIP_NOPS, because signedness of argument type
7659 matters. */
7660 STRIP_SIGN_NOPS (arg0);
7662 else
7664 /* Strip any conversions that don't change the mode. This
7665 is safe for every expression, except for a comparison
7666 expression because its signedness is derived from its
7667 operands.
7669 Note that this is done as an internal manipulation within
7670 the constant folder, in order to find the simplest
7671 representation of the arguments so that their form can be
7672 studied. In any case, the appropriate type conversions
7673 should be put back in the tree that will get out of the
7674 constant folder. */
7675 STRIP_NOPS (arg0);
7679 if (TREE_CODE_CLASS (code) == tcc_unary)
7681 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7682 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7683 fold_build1_loc (loc, code, type,
7684 fold_convert_loc (loc, TREE_TYPE (op0),
7685 TREE_OPERAND (arg0, 1))));
7686 else if (TREE_CODE (arg0) == COND_EXPR)
7688 tree arg01 = TREE_OPERAND (arg0, 1);
7689 tree arg02 = TREE_OPERAND (arg0, 2);
7690 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7691 arg01 = fold_build1_loc (loc, code, type,
7692 fold_convert_loc (loc,
7693 TREE_TYPE (op0), arg01));
7694 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7695 arg02 = fold_build1_loc (loc, code, type,
7696 fold_convert_loc (loc,
7697 TREE_TYPE (op0), arg02));
7698 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7699 arg01, arg02);
7701 /* If this was a conversion, and all we did was to move it
7702 inside the COND_EXPR, bring it back out. But leave it if
7703 it is a conversion from integer to integer and the
7704 result precision is no wider than a word since such a
7705 conversion is cheap and may be optimized away by combine,
7706 while it couldn't if it were outside the COND_EXPR. Then return
7707 so we don't get into an infinite recursion loop taking the
7708 conversion out and then back in. */
7710 if ((CONVERT_EXPR_CODE_P (code)
7711 || code == NON_LVALUE_EXPR)
7712 && TREE_CODE (tem) == COND_EXPR
7713 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7714 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7715 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7716 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7717 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7718 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7719 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7720 && (INTEGRAL_TYPE_P
7721 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7722 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7723 || flag_syntax_only))
7725 tem = build1 (code, type,
7726 build3 (COND_EXPR,
7727 TREE_TYPE (TREE_OPERAND
7728 (TREE_OPERAND (tem, 1), 0)),
7729 TREE_OPERAND (tem, 0),
7730 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7731 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7732 SET_EXPR_LOCATION (tem, loc);
7734 return tem;
7736 else if (COMPARISON_CLASS_P (arg0))
7738 if (TREE_CODE (type) == BOOLEAN_TYPE)
7740 arg0 = copy_node (arg0);
7741 TREE_TYPE (arg0) = type;
7742 return arg0;
7744 else if (TREE_CODE (type) != INTEGER_TYPE)
7745 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7746 fold_build1_loc (loc, code, type,
7747 integer_one_node),
7748 fold_build1_loc (loc, code, type,
7749 integer_zero_node));
7753 switch (code)
7755 case PAREN_EXPR:
7756 /* Re-association barriers around constants and other re-association
7757 barriers can be removed. */
7758 if (CONSTANT_CLASS_P (op0)
7759 || TREE_CODE (op0) == PAREN_EXPR)
7760 return fold_convert_loc (loc, type, op0);
7761 return NULL_TREE;
7763 CASE_CONVERT:
7764 case FLOAT_EXPR:
7765 case FIX_TRUNC_EXPR:
7766 if (TREE_TYPE (op0) == type)
7767 return op0;
7769 /* If we have (type) (a CMP b) and type is an integral type, return
7770 new expression involving the new type. */
7771 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7772 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7773 TREE_OPERAND (op0, 1));
7775 /* Handle cases of two conversions in a row. */
7776 if (CONVERT_EXPR_P (op0))
7778 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7779 tree inter_type = TREE_TYPE (op0);
7780 int inside_int = INTEGRAL_TYPE_P (inside_type);
7781 int inside_ptr = POINTER_TYPE_P (inside_type);
7782 int inside_float = FLOAT_TYPE_P (inside_type);
7783 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7784 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7785 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7786 int inter_int = INTEGRAL_TYPE_P (inter_type);
7787 int inter_ptr = POINTER_TYPE_P (inter_type);
7788 int inter_float = FLOAT_TYPE_P (inter_type);
7789 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7790 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7791 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7792 int final_int = INTEGRAL_TYPE_P (type);
7793 int final_ptr = POINTER_TYPE_P (type);
7794 int final_float = FLOAT_TYPE_P (type);
7795 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7796 unsigned int final_prec = TYPE_PRECISION (type);
7797 int final_unsignedp = TYPE_UNSIGNED (type);
7799 /* In addition to the cases of two conversions in a row
7800 handled below, if we are converting something to its own
7801 type via an object of identical or wider precision, neither
7802 conversion is needed. */
7803 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7804 && (((inter_int || inter_ptr) && final_int)
7805 || (inter_float && final_float))
7806 && inter_prec >= final_prec)
7807 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7809 /* Likewise, if the intermediate and initial types are either both
7810 float or both integer, we don't need the middle conversion if the
7811 former is wider than the latter and doesn't change the signedness
7812 (for integers). Avoid this if the final type is a pointer since
7813 then we sometimes need the middle conversion. Likewise if the
7814 final type has a precision not equal to the size of its mode. */
7815 if (((inter_int && inside_int)
7816 || (inter_float && inside_float)
7817 || (inter_vec && inside_vec))
7818 && inter_prec >= inside_prec
7819 && (inter_float || inter_vec
7820 || inter_unsignedp == inside_unsignedp)
7821 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7822 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7823 && ! final_ptr
7824 && (! final_vec || inter_prec == inside_prec))
7825 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7827 /* If we have a sign-extension of a zero-extended value, we can
7828 replace that by a single zero-extension. */
7829 if (inside_int && inter_int && final_int
7830 && inside_prec < inter_prec && inter_prec < final_prec
7831 && inside_unsignedp && !inter_unsignedp)
7832 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
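/* Example (added): for unsigned char c, (int) (short) c zero-extends
   to 16 bits and then sign-extends to 32; the high bits are known to
   be zero, so the pair collapses to the single zero-extension
   (int) c.  */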
7834 /* Two conversions in a row are not needed unless:
7835 - some conversion is floating-point (overstrict for now), or
7836 - some conversion is a vector (overstrict for now), or
7837 - the intermediate type is narrower than both initial and
7838 final, or
7839 - the intermediate type and innermost type differ in signedness,
7840 and the outermost type is wider than the intermediate, or
7841 - the initial type is a pointer type and the precisions of the
7842 intermediate and final types differ, or
7843 - the final type is a pointer type and the precisions of the
7844 initial and intermediate types differ. */
7845 if (! inside_float && ! inter_float && ! final_float
7846 && ! inside_vec && ! inter_vec && ! final_vec
7847 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7848 && ! (inside_int && inter_int
7849 && inter_unsignedp != inside_unsignedp
7850 && inter_prec < final_prec)
7851 && ((inter_unsignedp && inter_prec > inside_prec)
7852 == (final_unsignedp && final_prec > inter_prec))
7853 && ! (inside_ptr && inter_prec != final_prec)
7854 && ! (final_ptr && inside_prec != inter_prec)
7855 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7856 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7857 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
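/* Example (added): with 32-bit int and 64-bit long, (int) (long) i
   for an int I satisfies all of the conditions above, since no
   conversion is floating-point or vector, the intermediate type is
   not narrower than both ends, and no pointer or signedness exception
   applies; the pair folds to plain I.  */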
7860 /* Handle (T *)&A.B.C for A being of type T and B and C
7861 living at offset zero. This occurs frequently in
7862 C++ upcasting and then accessing the base. */
7863 if (TREE_CODE (op0) == ADDR_EXPR
7864 && POINTER_TYPE_P (type)
7865 && handled_component_p (TREE_OPERAND (op0, 0)))
7867 HOST_WIDE_INT bitsize, bitpos;
7868 tree offset;
7869 enum machine_mode mode;
7870 int unsignedp, volatilep;
7871 tree base = TREE_OPERAND (op0, 0);
7872 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7873 &mode, &unsignedp, &volatilep, false);
7874 /* If the reference was to a (constant) zero offset, we can use
7875 the address of the base if it has the same base type
7876 as the result type and the pointer type is unqualified. */
7877 if (! offset && bitpos == 0
7878 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7879 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7880 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7881 return fold_convert_loc (loc, type,
7882 build_fold_addr_expr_loc (loc, base));
7885 if (TREE_CODE (op0) == MODIFY_EXPR
7886 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7887 /* Detect assigning a bitfield. */
7888 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7889 && DECL_BIT_FIELD
7890 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7892 /* Don't leave an assignment inside a conversion
7893 unless assigning a bitfield. */
7894 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7895 /* First do the assignment, then return converted constant. */
7896 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7897 TREE_NO_WARNING (tem) = 1;
7898 TREE_USED (tem) = 1;
7899 SET_EXPR_LOCATION (tem, loc);
7900 return tem;
7903 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7904 constant (if x has signed type, the sign bit cannot be set
7905 in c). This folds extension into the BIT_AND_EXPR.
7906 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7907 very likely don't have maximal range for their precision and this
7908 transformation effectively doesn't preserve non-maximal ranges. */
7909 if (TREE_CODE (type) == INTEGER_TYPE
7910 && TREE_CODE (op0) == BIT_AND_EXPR
7911 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7913 tree and_expr = op0;
7914 tree and0 = TREE_OPERAND (and_expr, 0);
7915 tree and1 = TREE_OPERAND (and_expr, 1);
7916 int change = 0;
7918 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7919 || (TYPE_PRECISION (type)
7920 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7921 change = 1;
7922 else if (TYPE_PRECISION (TREE_TYPE (and1))
7923 <= HOST_BITS_PER_WIDE_INT
7924 && host_integerp (and1, 1))
7926 unsigned HOST_WIDE_INT cst;
7928 cst = tree_low_cst (and1, 1);
7929 cst &= (HOST_WIDE_INT) -1
7930 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7931 change = (cst == 0);
7932 #ifdef LOAD_EXTEND_OP
7933 if (change
7934 && !flag_syntax_only
7935 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7936 == ZERO_EXTEND))
7938 tree uns = unsigned_type_for (TREE_TYPE (and0));
7939 and0 = fold_convert_loc (loc, uns, and0);
7940 and1 = fold_convert_loc (loc, uns, and1);
7942 #endif
7944 if (change)
7946 tem = force_fit_type_double (type, tree_to_double_int (and1),
7947 0, TREE_OVERFLOW (and1));
7948 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7949 fold_convert_loc (loc, type, and0), tem);
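/* Example (added): with unsigned short x, (int) (x & 0xff) becomes
   (int) x & 0xff; x is unsigned, so widening each operand first
   cannot alter any masked bit.  */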
7953 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7954 when one of the new casts will fold away. Conservatively we assume
7955 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7956 if (POINTER_TYPE_P (type)
7957 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7958 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7959 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7960 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7962 tree arg00 = TREE_OPERAND (arg0, 0);
7963 tree arg01 = TREE_OPERAND (arg0, 1);
7965 return fold_build2_loc (loc,
7966 TREE_CODE (arg0), type,
7967 fold_convert_loc (loc, type, arg00),
7968 fold_convert_loc (loc, sizetype, arg01));
7971 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7972 of the same precision, and X is an integer type not narrower than
7973 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7974 if (INTEGRAL_TYPE_P (type)
7975 && TREE_CODE (op0) == BIT_NOT_EXPR
7976 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7977 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7978 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7980 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7981 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7982 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7983 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7984 fold_convert_loc (loc, type, tem));
7987 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7988 type of X and Y (integer types only). */
7989 if (INTEGRAL_TYPE_P (type)
7990 && TREE_CODE (op0) == MULT_EXPR
7991 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7992 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7994 /* Be careful not to introduce new overflows. */
7995 tree mult_type;
7996 if (TYPE_OVERFLOW_WRAPS (type))
7997 mult_type = type;
7998 else
7999 mult_type = unsigned_type_for (type);
8001 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8003 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8004 fold_convert_loc (loc, mult_type,
8005 TREE_OPERAND (op0, 0)),
8006 fold_convert_loc (loc, mult_type,
8007 TREE_OPERAND (op0, 1)));
8008 return fold_convert_loc (loc, type, tem);
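/* Example (added, assuming short does not wrap, i.e. no -fwrapv):
   with 16-bit short and 32-bit int operands, (short) (x * y) becomes
   (short) ((unsigned short) x * (unsigned short) y); doing the
   multiplication in the narrower unsigned type cannot introduce new
   signed overflow.  */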
8012 tem = fold_convert_const (code, type, op0);
8013 return tem ? tem : NULL_TREE;
8015 case ADDR_SPACE_CONVERT_EXPR:
8016 if (integer_zerop (arg0))
8017 return fold_convert_const (code, type, arg0);
8018 return NULL_TREE;
8020 case FIXED_CONVERT_EXPR:
8021 tem = fold_convert_const (code, type, arg0);
8022 return tem ? tem : NULL_TREE;
8024 case VIEW_CONVERT_EXPR:
8025 if (TREE_TYPE (op0) == type)
8026 return op0;
8027 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8028 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8029 type, TREE_OPERAND (op0, 0));
8030 if (TREE_CODE (op0) == MEM_REF)
8031 return fold_build2_loc (loc, MEM_REF, type,
8032 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8034 /* For integral conversions with the same precision or pointer
8035 conversions use a NOP_EXPR instead. */
8036 if ((INTEGRAL_TYPE_P (type)
8037 || POINTER_TYPE_P (type))
8038 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8039 || POINTER_TYPE_P (TREE_TYPE (op0)))
8040 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8041 return fold_convert_loc (loc, type, op0);
8043 /* Strip inner integral conversions that do not change the precision. */
8044 if (CONVERT_EXPR_P (op0)
8045 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8046 || POINTER_TYPE_P (TREE_TYPE (op0)))
8047 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8048 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8049 && (TYPE_PRECISION (TREE_TYPE (op0))
8050 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8051 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8052 type, TREE_OPERAND (op0, 0));
8054 return fold_view_convert_expr (type, op0);
8056 case NEGATE_EXPR:
8057 tem = fold_negate_expr (loc, arg0);
8058 if (tem)
8059 return fold_convert_loc (loc, type, tem);
8060 return NULL_TREE;
8062 case ABS_EXPR:
8063 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8064 return fold_abs_const (arg0, type);
8065 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8066 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8067 /* Convert fabs((double)float) into (double)fabsf(float). */
8068 else if (TREE_CODE (arg0) == NOP_EXPR
8069 && TREE_CODE (type) == REAL_TYPE)
8071 tree targ0 = strip_float_extensions (arg0);
8072 if (targ0 != arg0)
8073 return fold_convert_loc (loc, type,
8074 fold_build1_loc (loc, ABS_EXPR,
8075 TREE_TYPE (targ0),
8076 targ0));
8078 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8079 else if (TREE_CODE (arg0) == ABS_EXPR)
8080 return arg0;
8081 else if (tree_expr_nonnegative_p (arg0))
8082 return arg0;
8084 /* Strip sign ops from argument. */
8085 if (TREE_CODE (type) == REAL_TYPE)
8087 tem = fold_strip_sign_ops (arg0);
8088 if (tem)
8089 return fold_build1_loc (loc, ABS_EXPR, type,
8090 fold_convert_loc (loc, type, tem));
8092 return NULL_TREE;
8094 case CONJ_EXPR:
8095 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8096 return fold_convert_loc (loc, type, arg0);
8097 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8099 tree itype = TREE_TYPE (type);
8100 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8101 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8102 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8103 negate_expr (ipart));
8105 if (TREE_CODE (arg0) == COMPLEX_CST)
8107 tree itype = TREE_TYPE (type);
8108 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8109 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8110 return build_complex (type, rpart, negate_expr (ipart));
8112 if (TREE_CODE (arg0) == CONJ_EXPR)
8113 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8114 return NULL_TREE;
8116 case BIT_NOT_EXPR:
8117 if (TREE_CODE (arg0) == INTEGER_CST)
8118 return fold_not_const (arg0, type);
8119 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8120 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8121 /* Convert ~ (-A) to A - 1. */
8122 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8123 return fold_build2_loc (loc, MINUS_EXPR, type,
8124 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8125 build_int_cst (type, 1));
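/* Example (added): in two's complement, ~X == -X - 1, so ~(-A)
   is A - 1; for A == 7, ~(-7) == 6.  */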
8126 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8127 else if (INTEGRAL_TYPE_P (type)
8128 && ((TREE_CODE (arg0) == MINUS_EXPR
8129 && integer_onep (TREE_OPERAND (arg0, 1)))
8130 || (TREE_CODE (arg0) == PLUS_EXPR
8131 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8132 return fold_build1_loc (loc, NEGATE_EXPR, type,
8133 fold_convert_loc (loc, type,
8134 TREE_OPERAND (arg0, 0)));
8135 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8136 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8137 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8138 fold_convert_loc (loc, type,
8139 TREE_OPERAND (arg0, 0)))))
8140 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8141 fold_convert_loc (loc, type,
8142 TREE_OPERAND (arg0, 1)));
8143 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8144 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8145 fold_convert_loc (loc, type,
8146 TREE_OPERAND (arg0, 1)))))
8147 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8148 fold_convert_loc (loc, type,
8149 TREE_OPERAND (arg0, 0)), tem);
8150 /* Perform BIT_NOT_EXPR on each element individually. */
8151 else if (TREE_CODE (arg0) == VECTOR_CST)
8153 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8154 int count = TYPE_VECTOR_SUBPARTS (type), i;
8156 for (i = 0; i < count; i++)
8158 if (elements)
8160 elem = TREE_VALUE (elements);
8161 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8162 if (elem == NULL_TREE)
8163 break;
8164 elements = TREE_CHAIN (elements);
8166 else
8167 elem = build_int_cst (TREE_TYPE (type), -1);
8168 list = tree_cons (NULL_TREE, elem, list);
8170 if (i == count)
8171 return build_vector (type, nreverse (list));
8174 return NULL_TREE;
8176 case TRUTH_NOT_EXPR:
8177 /* The argument to invert_truthvalue must have Boolean type. */
8178 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8179 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8181 /* Note that the operand of this must be an int
8182 and its values must be 0 or 1.
8183 ("true" is a fixed value perhaps depending on the language,
8184 but we don't handle values other than 1 correctly yet.) */
8185 tem = fold_truth_not_expr (loc, arg0);
8186 if (!tem)
8187 return NULL_TREE;
8188 return fold_convert_loc (loc, type, tem);
8190 case REALPART_EXPR:
8191 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8192 return fold_convert_loc (loc, type, arg0);
8193 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8194 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8195 TREE_OPERAND (arg0, 1));
8196 if (TREE_CODE (arg0) == COMPLEX_CST)
8197 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8198 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8200 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8201 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8202 fold_build1_loc (loc, REALPART_EXPR, itype,
8203 TREE_OPERAND (arg0, 0)),
8204 fold_build1_loc (loc, REALPART_EXPR, itype,
8205 TREE_OPERAND (arg0, 1)));
8206 return fold_convert_loc (loc, type, tem);
8208 if (TREE_CODE (arg0) == CONJ_EXPR)
8210 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8211 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8212 TREE_OPERAND (arg0, 0));
8213 return fold_convert_loc (loc, type, tem);
8215 if (TREE_CODE (arg0) == CALL_EXPR)
8217 tree fn = get_callee_fndecl (arg0);
8218 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8219 switch (DECL_FUNCTION_CODE (fn))
8221 CASE_FLT_FN (BUILT_IN_CEXPI):
8222 fn = mathfn_built_in (type, BUILT_IN_COS);
8223 if (fn)
8224 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8225 break;
8227 default:
8228 break;
8231 return NULL_TREE;
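/* Example (added): since cexpi (x) == cos (x) + i*sin (x), the fold
   above rewrites __real__ cexpi (x) as cos (x); the IMAGPART_EXPR
   case below does the same with sin.  */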
8233 case IMAGPART_EXPR:
8234 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8235 return fold_convert_loc (loc, type, integer_zero_node);
8236 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8237 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8238 TREE_OPERAND (arg0, 0));
8239 if (TREE_CODE (arg0) == COMPLEX_CST)
8240 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8241 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8243 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8244 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8245 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8246 TREE_OPERAND (arg0, 0)),
8247 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8248 TREE_OPERAND (arg0, 1)));
8249 return fold_convert_loc (loc, type, tem);
8251 if (TREE_CODE (arg0) == CONJ_EXPR)
8253 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8254 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8255 return fold_convert_loc (loc, type, negate_expr (tem));
8257 if (TREE_CODE (arg0) == CALL_EXPR)
8259 tree fn = get_callee_fndecl (arg0);
8260 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8261 switch (DECL_FUNCTION_CODE (fn))
8263 CASE_FLT_FN (BUILT_IN_CEXPI):
8264 fn = mathfn_built_in (type, BUILT_IN_SIN);
8265 if (fn)
8266 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8267 break;
8269 default:
8270 break;
8273 return NULL_TREE;
8275 case INDIRECT_REF:
8276 /* Fold *&X to X if X is an lvalue. */
8277 if (TREE_CODE (op0) == ADDR_EXPR)
8279 tree op00 = TREE_OPERAND (op0, 0);
8280 if ((TREE_CODE (op00) == VAR_DECL
8281 || TREE_CODE (op00) == PARM_DECL
8282 || TREE_CODE (op00) == RESULT_DECL)
8283 && !TREE_READONLY (op00))
8284 return op00;
8286 return NULL_TREE;
8288 default:
8289 return NULL_TREE;
8290 } /* switch (code) */
8294 /* If the operation was a conversion do _not_ mark a resulting constant
8295 with TREE_OVERFLOW if the original constant was not. These conversions
8296 have implementation defined behavior and retaining the TREE_OVERFLOW
8297 flag here would confuse later passes such as VRP. */
8298 tree
8299 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8300 tree type, tree op0)
8302 tree res = fold_unary_loc (loc, code, type, op0);
8303 if (res
8304 && TREE_CODE (res) == INTEGER_CST
8305 && TREE_CODE (op0) == INTEGER_CST
8306 && CONVERT_EXPR_CODE_P (code))
8307 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8309 return res;
8312 /* Fold a binary expression of code CODE and type TYPE with operands
8313 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8314 Return the folded expression if folding is successful. Otherwise,
8315 return NULL_TREE. */
8317 static tree
8318 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8320 enum tree_code compl_code;
8322 if (code == MIN_EXPR)
8323 compl_code = MAX_EXPR;
8324 else if (code == MAX_EXPR)
8325 compl_code = MIN_EXPR;
8326 else
8327 gcc_unreachable ();
8329 /* MIN (MAX (a, b), b) == b. */
8330 if (TREE_CODE (op0) == compl_code
8331 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8332 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8334 /* MIN (MAX (b, a), b) == b. */
8335 if (TREE_CODE (op0) == compl_code
8336 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8337 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8338 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8340 /* MIN (a, MAX (a, b)) == a. */
8341 if (TREE_CODE (op1) == compl_code
8342 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8343 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8344 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8346 /* MIN (a, MAX (b, a)) == a. */
8347 if (TREE_CODE (op1) == compl_code
8348 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8349 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8350 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8352 return NULL_TREE;
8355 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8356 by changing CODE to reduce the magnitude of constants involved in
8357 ARG0 of the comparison.
8358 Returns a canonicalized comparison tree if a simplification was
8359 possible, otherwise returns NULL_TREE.
8360 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8361 valid if signed overflow is undefined. */
8363 static tree
8364 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8365 tree arg0, tree arg1,
8366 bool *strict_overflow_p)
8368 enum tree_code code0 = TREE_CODE (arg0);
8369 tree t, cst0 = NULL_TREE;
8370 int sgn0;
8371 bool swap = false;
8373 /* Match A +- CST code arg1 and CST code arg1. We can change the
8374 first form only if overflow is undefined. */
8375 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8376 /* In principle pointers also have undefined overflow behavior,
8377 but that causes problems elsewhere. */
8378 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8379 && (code0 == MINUS_EXPR
8380 || code0 == PLUS_EXPR)
8381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8382 || code0 == INTEGER_CST))
8383 return NULL_TREE;
8385 /* Identify the constant in arg0 and its sign. */
8386 if (code0 == INTEGER_CST)
8387 cst0 = arg0;
8388 else
8389 cst0 = TREE_OPERAND (arg0, 1);
8390 sgn0 = tree_int_cst_sgn (cst0);
8392 /* Overflowed constants and zero will cause problems. */
8393 if (integer_zerop (cst0)
8394 || TREE_OVERFLOW (cst0))
8395 return NULL_TREE;
8397 /* See if we can reduce the magnitude of the constant in
8398 arg0 by changing the comparison code. */
8399 if (code0 == INTEGER_CST)
8401 /* CST <= arg1 -> CST-1 < arg1. */
8402 if (code == LE_EXPR && sgn0 == 1)
8403 code = LT_EXPR;
8404 /* -CST < arg1 -> -CST-1 <= arg1. */
8405 else if (code == LT_EXPR && sgn0 == -1)
8406 code = LE_EXPR;
8407 /* CST > arg1 -> CST-1 >= arg1. */
8408 else if (code == GT_EXPR && sgn0 == 1)
8409 code = GE_EXPR;
8410 /* -CST >= arg1 -> -CST-1 > arg1. */
8411 else if (code == GE_EXPR && sgn0 == -1)
8412 code = GT_EXPR;
8413 else
8414 return NULL_TREE;
8415 /* arg1 code' CST' might be more canonical. */
8416 swap = true;
8418 else
8420 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8421 if (code == LT_EXPR
8422 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8423 code = LE_EXPR;
8424 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8425 else if (code == GT_EXPR
8426 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8427 code = GE_EXPR;
8428 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8429 else if (code == LE_EXPR
8430 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8431 code = LT_EXPR;
8432 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8433 else if (code == GE_EXPR
8434 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8435 code = GT_EXPR;
8436 else
8437 return NULL_TREE;
8438 *strict_overflow_p = true;
8441 /* Now build the constant reduced in magnitude. But not if that
8442 would produce one outside of its type's range. */
8443 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8444 && ((sgn0 == 1
8445 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8446 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8447 || (sgn0 == -1
8448 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8449 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8450 /* We cannot swap the comparison here as that would cause us to
8451 endlessly recurse. */
8452 return NULL_TREE;
8454 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8455 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8456 if (code0 != INTEGER_CST)
8457 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8459 /* If swapping might yield a more canonical form, do so. */
8460 if (swap)
8461 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8462 else
8463 return fold_build2_loc (loc, code, type, t, arg1);
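/* Worked examples (added): for signed int A, the comparison
   A - 5 < B becomes A - 4 <= B, reducing the constant's magnitude;
   the constant form 5 <= B becomes 4 < B and is then swapped to the
   more canonical B > 4.  */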
8466 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8467 overflow further. Try to decrease the magnitude of constants involved
8468 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8469 and put sole constants at the second argument position.
8470 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8472 static tree
8473 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8474 tree arg0, tree arg1)
8476 tree t;
8477 bool strict_overflow_p;
8478 const char * const warnmsg = G_("assuming signed overflow does not occur "
8479 "when reducing constant in comparison");
8481 /* Try canonicalization by simplifying arg0. */
8482 strict_overflow_p = false;
8483 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8484 &strict_overflow_p);
8485 if (t)
8487 if (strict_overflow_p)
8488 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8489 return t;
8492 /* Try canonicalization by simplifying arg1 using the swapped
8493 comparison. */
8494 code = swap_tree_comparison (code);
8495 strict_overflow_p = false;
8496 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8497 &strict_overflow_p);
8498 if (t && strict_overflow_p)
8499 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8500 return t;
8503 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8504 space. This is used to avoid issuing overflow warnings for
8505 expressions like &p->x which cannot wrap. */
8507 static bool
8508 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8510 unsigned HOST_WIDE_INT offset_low, total_low;
8511 HOST_WIDE_INT size, offset_high, total_high;
8513 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8514 return true;
8516 if (bitpos < 0)
8517 return true;
8519 if (offset == NULL_TREE)
8521 offset_low = 0;
8522 offset_high = 0;
8524 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8525 return true;
8526 else
8528 offset_low = TREE_INT_CST_LOW (offset);
8529 offset_high = TREE_INT_CST_HIGH (offset);
8532 if (add_double_with_sign (offset_low, offset_high,
8533 bitpos / BITS_PER_UNIT, 0,
8534 &total_low, &total_high,
8535 true))
8536 return true;
8538 if (total_high != 0)
8539 return true;
8541 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8542 if (size <= 0)
8543 return true;
8545 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8546 array. */
8547 if (TREE_CODE (base) == ADDR_EXPR)
8549 HOST_WIDE_INT base_size;
8551 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8552 if (base_size > 0 && size < base_size)
8553 size = base_size;
8556 return total_low > (unsigned HOST_WIDE_INT) size;
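/* Example (added): for struct S { int x; } *p, the address &p->x has
   a zero BITPOS and no variable offset, so TOTAL_LOW is 0 while the
   size of S is positive; the function returns false, meaning the
   address cannot wrap and no overflow warning is needed.  */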
8559 /* Subroutine of fold_binary. This routine performs all of the
8560 transformations that are common to the equality/inequality
8561 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8562 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8563 fold_binary should call fold_binary. Fold a comparison with
8564 tree code CODE and type TYPE with operands OP0 and OP1. Return
8565 the folded comparison or NULL_TREE. */
8567 static tree
8568 fold_comparison (location_t loc, enum tree_code code, tree type,
8569 tree op0, tree op1)
8571 tree arg0, arg1, tem;
8573 arg0 = op0;
8574 arg1 = op1;
8576 STRIP_SIGN_NOPS (arg0);
8577 STRIP_SIGN_NOPS (arg1);
8579 tem = fold_relational_const (code, type, arg0, arg1);
8580 if (tem != NULL_TREE)
8581 return tem;
8583 /* If one arg is a real or integer constant, put it last. */
8584 if (tree_swap_operands_p (arg0, arg1, true))
8585 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8587 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8588 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8589 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8590 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8591 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8592 && (TREE_CODE (arg1) == INTEGER_CST
8593 && !TREE_OVERFLOW (arg1)))
8595 tree const1 = TREE_OPERAND (arg0, 1);
8596 tree const2 = arg1;
8597 tree variable = TREE_OPERAND (arg0, 0);
8598 tree lhs;
8599 int lhs_add;
8600 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8602 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8603 TREE_TYPE (arg1), const2, const1);
8605 /* If the constant operation overflowed, this can be
8606 simplified as a comparison against INT_MAX/INT_MIN. */
8607 if (TREE_CODE (lhs) == INTEGER_CST
8608 && TREE_OVERFLOW (lhs))
8610 int const1_sgn = tree_int_cst_sgn (const1);
8611 enum tree_code code2 = code;
8613 /* Get the sign of the constant on the lhs if the
8614 operation were VARIABLE + CONST1. */
8615 if (TREE_CODE (arg0) == MINUS_EXPR)
8616 const1_sgn = -const1_sgn;
8618 /* The sign of the constant determines if we overflowed
8619 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8620 Canonicalize to the INT_MIN overflow by swapping the comparison
8621 if necessary. */
8622 if (const1_sgn == -1)
8623 code2 = swap_tree_comparison (code);
8625 /* We can now look at the canonicalized case
8626 VARIABLE + 1 CODE2 INT_MIN
8627 and decide on the result. */
8628 if (code2 == LT_EXPR
8629 || code2 == LE_EXPR
8630 || code2 == EQ_EXPR)
8631 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8632 else if (code2 == NE_EXPR
8633 || code2 == GE_EXPR
8634 || code2 == GT_EXPR)
8635 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
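/* Worked example (added): for signed int X, X - 1 > INT_MAX computes
   LHS = INT_MAX + 1, which overflows; CONST1_SGN is negated for the
   MINUS_EXPR, the comparison is swapped into the INT_MIN form, and
   the test folds to constant false, since X - 1 can never exceed
   INT_MAX when signed overflow is undefined.  */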
8638 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8639 && (TREE_CODE (lhs) != INTEGER_CST
8640 || !TREE_OVERFLOW (lhs)))
8642 fold_overflow_warning ("assuming signed overflow does not occur "
8643 "when changing X +- C1 cmp C2 to "
8644 "X cmp C1 +- C2",
8645 WARN_STRICT_OVERFLOW_COMPARISON);
8646 return fold_build2_loc (loc, code, type, variable, lhs);
8650 /* For comparisons of pointers we can decompose them into a compile-time
8651 comparison of the base objects and the offsets into the object.
8652 This requires at least one operand being an ADDR_EXPR or a
8653 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8654 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8655 && (TREE_CODE (arg0) == ADDR_EXPR
8656 || TREE_CODE (arg1) == ADDR_EXPR
8657 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8658 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8660 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8661 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8662 enum machine_mode mode;
8663 int volatilep, unsignedp;
8664 bool indirect_base0 = false, indirect_base1 = false;
8666 /* Get base and offset for the access. Strip ADDR_EXPR for
8667 get_inner_reference, but put it back by stripping INDIRECT_REF
8668 off the base object if possible. indirect_baseN will be true
8669 if baseN is not an address but refers to the object itself. */
8670 base0 = arg0;
8671 if (TREE_CODE (arg0) == ADDR_EXPR)
8673 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8674 &bitsize, &bitpos0, &offset0, &mode,
8675 &unsignedp, &volatilep, false);
8676 if (TREE_CODE (base0) == INDIRECT_REF)
8677 base0 = TREE_OPERAND (base0, 0);
8678 else
8679 indirect_base0 = true;
8681 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8683 base0 = TREE_OPERAND (arg0, 0);
8684 if (TREE_CODE (base0) == ADDR_EXPR)
8686 base0 = TREE_OPERAND (base0, 0);
8687 indirect_base0 = true;
8689 offset0 = TREE_OPERAND (arg0, 1);
8692 base1 = arg1;
8693 if (TREE_CODE (arg1) == ADDR_EXPR)
8695 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8696 &bitsize, &bitpos1, &offset1, &mode,
8697 &unsignedp, &volatilep, false);
8698 if (TREE_CODE (base1) == INDIRECT_REF)
8699 base1 = TREE_OPERAND (base1, 0);
8700 else
8701 indirect_base1 = true;
8703 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8705 base1 = TREE_OPERAND (arg1, 0);
8706 if (TREE_CODE (base1) == ADDR_EXPR)
8708 base1 = TREE_OPERAND (base1, 0);
8709 indirect_base1 = true;
8711 offset1 = TREE_OPERAND (arg1, 1);
8714 /* A local variable can never be pointed to by
8715 the default SSA name of an incoming parameter. */
8716 if ((TREE_CODE (arg0) == ADDR_EXPR
8717 && indirect_base0
8718 && TREE_CODE (base0) == VAR_DECL
8719 && auto_var_in_fn_p (base0, current_function_decl)
8720 && !indirect_base1
8721 && TREE_CODE (base1) == SSA_NAME
8722 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8723 && SSA_NAME_IS_DEFAULT_DEF (base1))
8724 || (TREE_CODE (arg1) == ADDR_EXPR
8725 && indirect_base1
8726 && TREE_CODE (base1) == VAR_DECL
8727 && auto_var_in_fn_p (base1, current_function_decl)
8728 && !indirect_base0
8729 && TREE_CODE (base0) == SSA_NAME
8730 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8731 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8733 if (code == NE_EXPR)
8734 return constant_boolean_node (1, type);
8735 else if (code == EQ_EXPR)
8736 return constant_boolean_node (0, type);
8738 /* If we have equivalent bases we might be able to simplify. */
8739 else if (indirect_base0 == indirect_base1
8740 && operand_equal_p (base0, base1, 0))
8742 /* We can fold this expression to a constant if the non-constant
8743 offset parts are equal. */
8744 if ((offset0 == offset1
8745 || (offset0 && offset1
8746 && operand_equal_p (offset0, offset1, 0)))
8747 && (code == EQ_EXPR
8748 || code == NE_EXPR
8749 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8752 if (code != EQ_EXPR
8753 && code != NE_EXPR
8754 && bitpos0 != bitpos1
8755 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8756 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8757 fold_overflow_warning (("assuming pointer wraparound does not "
8758 "occur when comparing P +- C1 with "
8759 "P +- C2"),
8760 WARN_STRICT_OVERFLOW_CONDITIONAL);
8762 switch (code)
8764 case EQ_EXPR:
8765 return constant_boolean_node (bitpos0 == bitpos1, type);
8766 case NE_EXPR:
8767 return constant_boolean_node (bitpos0 != bitpos1, type);
8768 case LT_EXPR:
8769 return constant_boolean_node (bitpos0 < bitpos1, type);
8770 case LE_EXPR:
8771 return constant_boolean_node (bitpos0 <= bitpos1, type);
8772 case GE_EXPR:
8773 return constant_boolean_node (bitpos0 >= bitpos1, type);
8774 case GT_EXPR:
8775 return constant_boolean_node (bitpos0 > bitpos1, type);
8776 default:;
8779 /* We can simplify the comparison to a comparison of the variable
8780 offset parts if the constant offset parts are equal.
8781 Be careful to use signed size type here because otherwise we
8782 mess with array offsets in the wrong way. This is possible
8783 because pointer arithmetic is restricted to retain within an
8784 object and overflow on pointer differences is undefined as of
8785 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8786 else if (bitpos0 == bitpos1
8787 && ((code == EQ_EXPR || code == NE_EXPR)
8788 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8790 /* By converting to signed size type we cover middle-end pointer
8791 arithmetic which operates on unsigned pointer types of size
8792 type size and ARRAY_REF offsets which are properly sign or
8793 zero extended from their type in case it is narrower than
8794 size type. */
8795 if (offset0 == NULL_TREE)
8796 offset0 = build_int_cst (ssizetype, 0);
8797 else
8798 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8799 if (offset1 == NULL_TREE)
8800 offset1 = build_int_cst (ssizetype, 0);
8801 else
8802 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8804 if (code != EQ_EXPR
8805 && code != NE_EXPR
8806 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8807 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8808 fold_overflow_warning (("assuming pointer wraparound does not "
8809 "occur when comparing P +- C1 with "
8810 "P +- C2"),
8811 WARN_STRICT_OVERFLOW_COMPARISON);
8813 return fold_build2_loc (loc, code, type, offset0, offset1);
8816 /* For non-equal bases we can simplify if they are addresses
8817 of local binding decls or constants. */
8818 else if (indirect_base0 && indirect_base1
8819 /* We know that !operand_equal_p (base0, base1, 0)
8820 because the if condition was false. But make
8821 sure two decls are not the same. */
8822 && base0 != base1
8823 && TREE_CODE (arg0) == ADDR_EXPR
8824 && TREE_CODE (arg1) == ADDR_EXPR
8825 && (((TREE_CODE (base0) == VAR_DECL
8826 || TREE_CODE (base0) == PARM_DECL)
8827 && (targetm.binds_local_p (base0)
8828 || CONSTANT_CLASS_P (base1)))
8829 || CONSTANT_CLASS_P (base0))
8830 && (((TREE_CODE (base1) == VAR_DECL
8831 || TREE_CODE (base1) == PARM_DECL)
8832 && (targetm.binds_local_p (base1)
8833 || CONSTANT_CLASS_P (base0)))
8834 || CONSTANT_CLASS_P (base1)))
8836 if (code == EQ_EXPR)
8837 return omit_two_operands_loc (loc, type, boolean_false_node,
8838 arg0, arg1);
8839 else if (code == NE_EXPR)
8840 return omit_two_operands_loc (loc, type, boolean_true_node,
8841 arg0, arg1);
8843 /* For equal offsets we can simplify to a comparison of the
8844 base addresses. */
8845 else if (bitpos0 == bitpos1
8846 && (indirect_base0
8847 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8848 && (indirect_base1
8849 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8850 && ((offset0 == offset1)
8851 || (offset0 && offset1
8852 && operand_equal_p (offset0, offset1, 0))))
8854 if (indirect_base0)
8855 base0 = build_fold_addr_expr_loc (loc, base0);
8856 if (indirect_base1)
8857 base1 = build_fold_addr_expr_loc (loc, base1);
8858 return fold_build2_loc (loc, code, type, base0, base1);
8862 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8863 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8864 the resulting offset is smaller in absolute value than the
8865 original one. */
8866 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8867 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8868 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8869 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8870 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8871 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8872 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8874 tree const1 = TREE_OPERAND (arg0, 1);
8875 tree const2 = TREE_OPERAND (arg1, 1);
8876 tree variable1 = TREE_OPERAND (arg0, 0);
8877 tree variable2 = TREE_OPERAND (arg1, 0);
8878 tree cst;
8879 const char * const warnmsg = G_("assuming signed overflow does not "
8880 "occur when combining constants around "
8881 "a comparison");
8883 /* Put the constant on the side where it doesn't overflow and is
8884 of lower absolute value than before. */
8885 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8886 ? MINUS_EXPR : PLUS_EXPR,
8887 const2, const1, 0);
8888 if (!TREE_OVERFLOW (cst)
8889 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8891 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8892 return fold_build2_loc (loc, code, type,
8893 variable1,
8894 fold_build2_loc (loc,
8895 TREE_CODE (arg1), TREE_TYPE (arg1),
8896 variable2, cst));
8899 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8900 ? MINUS_EXPR : PLUS_EXPR,
8901 const1, const2, 0);
8902 if (!TREE_OVERFLOW (cst)
8903 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8905 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8906 return fold_build2_loc (loc, code, type,
8907 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8908 variable1, cst),
8909 variable2);
8913 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8914 signed arithmetic case. That form is created by the compiler
8915 often enough for folding it to be of value. One example is in
8916 computing loop trip counts after Operator Strength Reduction. */
8917 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8918 && TREE_CODE (arg0) == MULT_EXPR
8919 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8920 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8921 && integer_zerop (arg1))
8923 tree const1 = TREE_OPERAND (arg0, 1);
8924 tree const2 = arg1; /* zero */
8925 tree variable1 = TREE_OPERAND (arg0, 0);
8926 enum tree_code cmp_code = code;
8928 /* Handle unfolded multiplication by zero. */
8929 if (integer_zerop (const1))
8930 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8932 fold_overflow_warning (("assuming signed overflow does not occur when "
8933 "eliminating multiplication in comparison "
8934 "with zero"),
8935 WARN_STRICT_OVERFLOW_COMPARISON);
8937 /* If const1 is negative we swap the sense of the comparison. */
8938 if (tree_int_cst_sgn (const1) < 0)
8939 cmp_code = swap_tree_comparison (cmp_code);
8941 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
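/* Example (added): with undefined signed overflow, x * 4 < 0 folds
   to x < 0, while x * -4 < 0 swaps the sense and folds to x > 0.  */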
8944 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8945 if (tem)
8946 return tem;
8948 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8950 tree targ0 = strip_float_extensions (arg0);
8951 tree targ1 = strip_float_extensions (arg1);
8952 tree newtype = TREE_TYPE (targ0);
8954 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8955 newtype = TREE_TYPE (targ1);
8957 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8958 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8959 return fold_build2_loc (loc, code, type,
8960 fold_convert_loc (loc, newtype, targ0),
8961 fold_convert_loc (loc, newtype, targ1));
8963 /* (-a) CMP (-b) -> b CMP a */
8964 if (TREE_CODE (arg0) == NEGATE_EXPR
8965 && TREE_CODE (arg1) == NEGATE_EXPR)
8966 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8967 TREE_OPERAND (arg0, 0));
8969 if (TREE_CODE (arg1) == REAL_CST)
8971 REAL_VALUE_TYPE cst;
8972 cst = TREE_REAL_CST (arg1);
8974 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8975 if (TREE_CODE (arg0) == NEGATE_EXPR)
8976 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8977 TREE_OPERAND (arg0, 0),
8978 build_real (TREE_TYPE (arg1),
8979 real_value_negate (&cst)));
8981 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8982 /* a CMP (-0) -> a CMP 0 */
8983 if (REAL_VALUE_MINUS_ZERO (cst))
8984 return fold_build2_loc (loc, code, type, arg0,
8985 build_real (TREE_TYPE (arg1), dconst0));
8987 /* x != NaN is always true, other ops are always false. */
8988 if (REAL_VALUE_ISNAN (cst)
8989 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8991 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8992 return omit_one_operand_loc (loc, type, tem, arg0);
8995 /* Fold comparisons against infinity. */
8996 if (REAL_VALUE_ISINF (cst)
8997 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8999 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9000 if (tem != NULL_TREE)
9001 return tem;
9005 /* If this is a comparison of a real constant with a PLUS_EXPR
9006 or a MINUS_EXPR of a real constant, we can convert it into a
9007 comparison with a revised real constant as long as no overflow
9008 occurs when unsafe_math_optimizations are enabled. */
9009 if (flag_unsafe_math_optimizations
9010 && TREE_CODE (arg1) == REAL_CST
9011 && (TREE_CODE (arg0) == PLUS_EXPR
9012 || TREE_CODE (arg0) == MINUS_EXPR)
9013 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9014 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9015 ? MINUS_EXPR : PLUS_EXPR,
9016 arg1, TREE_OPERAND (arg0, 1)))
9017 && !TREE_OVERFLOW (tem))
9018 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9020 /* Likewise, we can simplify a comparison of a real constant with
9021 a MINUS_EXPR whose first operand is also a real constant, i.e.
9022 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9023 floating-point types only if -fassociative-math is set. */
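/* Illustrative example: with -fassociative-math,
   (10.0 - x) < 4.0 folds to x > 6.0, using tem = 10.0 - 4.0.  */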
9024 if (flag_associative_math
9025 && TREE_CODE (arg1) == REAL_CST
9026 && TREE_CODE (arg0) == MINUS_EXPR
9027 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9028 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9029 arg1))
9030 && !TREE_OVERFLOW (tem))
9031 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9032 TREE_OPERAND (arg0, 1), tem);
9034 /* Fold comparisons against built-in math functions. */
9035 if (TREE_CODE (arg1) == REAL_CST
9036 && flag_unsafe_math_optimizations
9037 && ! flag_errno_math)
9039 enum built_in_function fcode = builtin_mathfn_code (arg0);
9041 if (fcode != END_BUILTINS)
9043 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9044 if (tem != NULL_TREE)
9045 return tem;
9050 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9051 && CONVERT_EXPR_P (arg0))
9053 /* If we are widening one operand of an integer comparison,
9054 see if the other operand is similarly being widened. Perhaps we
9055 can do the comparison in the narrower type. */
9056 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9057 if (tem)
9058 return tem;
9060 /* Or if we are changing signedness. */
9061 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9062 if (tem)
9063 return tem;
9066 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9067 constant, we can simplify it. */
9068 if (TREE_CODE (arg1) == INTEGER_CST
9069 && (TREE_CODE (arg0) == MIN_EXPR
9070 || TREE_CODE (arg0) == MAX_EXPR)
9071 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9073 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9074 if (tem)
9075 return tem;
9078 /* Simplify comparison of something with itself. (For IEEE
9079 floating-point, we can only do some of these simplifications.) */
9080 if (operand_equal_p (arg0, arg1, 0))
9082 switch (code)
9084 case EQ_EXPR:
9085 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9086 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9087 return constant_boolean_node (1, type);
9088 break;
9090 case GE_EXPR:
9091 case LE_EXPR:
9092 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9093 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9094 return constant_boolean_node (1, type);
9095 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9097 case NE_EXPR:
9100 /* For NE, we can only do this simplification if the operands
9101 are integral or we don't honor IEEE floating point NaNs. */
9100 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9101 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9102 break;
9103 /* ... fall through ... */
9104 case GT_EXPR:
9105 case LT_EXPR:
9106 return constant_boolean_node (0, type);
9107 default:
9108 gcc_unreachable ();
9112 /* If we are comparing an expression that just has comparisons
9113 of two integer values, arithmetic expressions of those comparisons,
9114 and constants, we can simplify it. There are only three cases
9115 to check: the two values can either be equal, the first can be
9116 greater, or the second can be greater. Fold the expression for
9117 those three values. Since each value must be 0 or 1, we have
9118 eight possibilities, each of which corresponds to the constant 0
9119 or 1 or one of the six possible comparisons.
9121 This handles common cases like (a > b) == 0 but also handles
9122 expressions like ((x > y) - (y > x)) > 0, which supposedly
9123 occur in macroized code. */
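/* Worked example (illustrative): for ((x > y) - (y > x)) > 0 the
   three substitutions yield high_result = 1, equal_result = 0 and
   low_result = 0, i.e. mask value 4, so the whole expression folds
   to x > y.  */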
9125 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9127 tree cval1 = 0, cval2 = 0;
9128 int save_p = 0;
9130 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9131 /* Don't handle degenerate cases here; they should already
9132 have been handled anyway. */
9133 && cval1 != 0 && cval2 != 0
9134 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9135 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9136 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9137 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9138 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9139 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9140 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9142 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9143 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9145 /* We can't just pass T to eval_subst in case cval1 or cval2
9146 was the same as ARG1. */
9148 tree high_result
9149 = fold_build2_loc (loc, code, type,
9150 eval_subst (loc, arg0, cval1, maxval,
9151 cval2, minval),
9152 arg1);
9153 tree equal_result
9154 = fold_build2_loc (loc, code, type,
9155 eval_subst (loc, arg0, cval1, maxval,
9156 cval2, maxval),
9157 arg1);
9158 tree low_result
9159 = fold_build2_loc (loc, code, type,
9160 eval_subst (loc, arg0, cval1, minval,
9161 cval2, maxval),
9162 arg1);
9164 /* All three of these results should be 0 or 1. Confirm they are.
9165 Then use those values to select the proper code to use. */
9167 if (TREE_CODE (high_result) == INTEGER_CST
9168 && TREE_CODE (equal_result) == INTEGER_CST
9169 && TREE_CODE (low_result) == INTEGER_CST)
9171 /* Make a 3-bit mask with the high-order bit being the
9172 value for `>', the next for `=', and the low for `<'. */
9173 switch ((integer_onep (high_result) * 4)
9174 + (integer_onep (equal_result) * 2)
9175 + integer_onep (low_result))
9177 case 0:
9178 /* Always false. */
9179 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9180 case 1:
9181 code = LT_EXPR;
9182 break;
9183 case 2:
9184 code = EQ_EXPR;
9185 break;
9186 case 3:
9187 code = LE_EXPR;
9188 break;
9189 case 4:
9190 code = GT_EXPR;
9191 break;
9192 case 5:
9193 code = NE_EXPR;
9194 break;
9195 case 6:
9196 code = GE_EXPR;
9197 break;
9198 case 7:
9199 /* Always true. */
9200 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9203 if (save_p)
9205 tem = save_expr (build2 (code, type, cval1, cval2));
9206 SET_EXPR_LOCATION (tem, loc);
9207 return tem;
9209 return fold_build2_loc (loc, code, type, cval1, cval2);
9214 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9215 into a single range test. */
9216 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9217 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9218 && TREE_CODE (arg1) == INTEGER_CST
9219 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9220 && !integer_zerop (TREE_OPERAND (arg0, 1))
9221 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9222 && !TREE_OVERFLOW (arg1))
9224 tem = fold_div_compare (loc, code, type, arg0, arg1);
9225 if (tem != NULL_TREE)
9226 return tem;
9229 /* Fold ~X op ~Y as Y op X. */
9230 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9231 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9233 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9234 return fold_build2_loc (loc, code, type,
9235 fold_convert_loc (loc, cmp_type,
9236 TREE_OPERAND (arg1, 0)),
9237 TREE_OPERAND (arg0, 0));
9240 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9241 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9242 && TREE_CODE (arg1) == INTEGER_CST)
9244 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9245 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9246 TREE_OPERAND (arg0, 0),
9247 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9248 fold_convert_loc (loc, cmp_type, arg1)));
9251 return NULL_TREE;
9255 /* Subroutine of fold_binary. Optimize complex multiplications of the
9256 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9257 argument EXPR represents the expression "z" of type TYPE. */
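/* E.g. (illustrative): for z = a + b*i this produces
   __complex__ (a*a + b*b, 0), matching
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b.  */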
9259 static tree
9260 fold_mult_zconjz (location_t loc, tree type, tree expr)
9262 tree itype = TREE_TYPE (type);
9263 tree rpart, ipart, tem;
9265 if (TREE_CODE (expr) == COMPLEX_EXPR)
9267 rpart = TREE_OPERAND (expr, 0);
9268 ipart = TREE_OPERAND (expr, 1);
9270 else if (TREE_CODE (expr) == COMPLEX_CST)
9272 rpart = TREE_REALPART (expr);
9273 ipart = TREE_IMAGPART (expr);
9275 else
9277 expr = save_expr (expr);
9278 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9279 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9282 rpart = save_expr (rpart);
9283 ipart = save_expr (ipart);
9284 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9285 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9286 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9287 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9288 fold_convert_loc (loc, itype, integer_zero_node));
9292 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9293 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9294 guarantees that P and N have the same least significant log2(M) bits.
9295 N is not otherwise constrained. In particular, N is not normalized to
9296 0 <= N < M as is common. In general, the precise value of P is unknown.
9297 M is chosen as large as possible such that constant N can be determined.
9299 Returns M and sets *RESIDUE to N.
9301 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9302 account. This is not always possible due to PR 35705.
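/* Illustrative example (assuming a 16-byte-aligned char array buf):
   for EXPR == &buf[5] this returns M = 16 and sets *RESIDUE = 5,
   i.e. the pointer value is congruent to 5 modulo 16.  */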
9305 static unsigned HOST_WIDE_INT
9306 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9307 bool allow_func_align)
9309 enum tree_code code;
9311 *residue = 0;
9313 code = TREE_CODE (expr);
9314 if (code == ADDR_EXPR)
9316 expr = TREE_OPERAND (expr, 0);
9317 if (handled_component_p (expr))
9319 HOST_WIDE_INT bitsize, bitpos;
9320 tree offset;
9321 enum machine_mode mode;
9322 int unsignedp, volatilep;
9324 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9325 &mode, &unsignedp, &volatilep, false);
9326 *residue = bitpos / BITS_PER_UNIT;
9327 if (offset)
9329 if (TREE_CODE (offset) == INTEGER_CST)
9330 *residue += TREE_INT_CST_LOW (offset);
9331 else
9332 /* We don't handle more complicated offset expressions. */
9333 return 1;
9337 if (DECL_P (expr)
9338 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9339 return DECL_ALIGN_UNIT (expr);
9341 else if (code == POINTER_PLUS_EXPR)
9343 tree op0, op1;
9344 unsigned HOST_WIDE_INT modulus;
9345 enum tree_code inner_code;
9347 op0 = TREE_OPERAND (expr, 0);
9348 STRIP_NOPS (op0);
9349 modulus = get_pointer_modulus_and_residue (op0, residue,
9350 allow_func_align);
9352 op1 = TREE_OPERAND (expr, 1);
9353 STRIP_NOPS (op1);
9354 inner_code = TREE_CODE (op1);
9355 if (inner_code == INTEGER_CST)
9357 *residue += TREE_INT_CST_LOW (op1);
9358 return modulus;
9360 else if (inner_code == MULT_EXPR)
9362 op1 = TREE_OPERAND (op1, 1);
9363 if (TREE_CODE (op1) == INTEGER_CST)
9365 unsigned HOST_WIDE_INT align;
9367 /* Compute the greatest power-of-2 divisor of op1. */
9368 align = TREE_INT_CST_LOW (op1);
9369 align &= -align;
9371 /* If align is non-zero and less than modulus, replace
9372 modulus with align. If align is 0, then either op1 is 0
9373 or the greatest power-of-2 divisor of op1 doesn't fit in an
9374 unsigned HOST_WIDE_INT. In either case, no additional
9375 constraint is imposed. */
9376 if (align)
9377 modulus = MIN (modulus, align);
9379 return modulus;
9384 /* If we get here, we were unable to determine anything useful about the
9385 expression. */
9386 return 1;
9390 /* Fold a binary expression of code CODE and type TYPE with operands
9391 OP0 and OP1. LOC is the location of the resulting expression.
9392 Return the folded expression if folding is successful. Otherwise,
9393 return NULL_TREE. */
9395 tree
9396 fold_binary_loc (location_t loc,
9397 enum tree_code code, tree type, tree op0, tree op1)
9399 enum tree_code_class kind = TREE_CODE_CLASS (code);
9400 tree arg0, arg1, tem;
9401 tree t1 = NULL_TREE;
9402 bool strict_overflow_p;
9404 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9405 && TREE_CODE_LENGTH (code) == 2
9406 && op0 != NULL_TREE
9407 && op1 != NULL_TREE);
9409 arg0 = op0;
9410 arg1 = op1;
9412 /* Strip any conversions that don't change the mode. This is
9413 safe for every expression, except for a comparison expression
9414 because its signedness is derived from its operands. So, in
9415 the latter case, only strip conversions that don't change the
9416 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9417 preserved.
9419 Note that this is done as an internal manipulation within the
9420 constant folder, in order to find the simplest representation
9421 of the arguments so that their form can be studied. In any
9422 case, the appropriate type conversions should be put back in
9423 the tree that will get out of the constant folder. */
9425 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9427 STRIP_SIGN_NOPS (arg0);
9428 STRIP_SIGN_NOPS (arg1);
9430 else
9432 STRIP_NOPS (arg0);
9433 STRIP_NOPS (arg1);
9436 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9437 constant but we can't do arithmetic on them. */
9438 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9439 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9440 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9441 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9442 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9443 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9445 if (kind == tcc_binary)
9447 /* Make sure type and arg0 have the same saturating flag. */
9448 gcc_assert (TYPE_SATURATING (type)
9449 == TYPE_SATURATING (TREE_TYPE (arg0)));
9450 tem = const_binop (code, arg0, arg1);
9452 else if (kind == tcc_comparison)
9453 tem = fold_relational_const (code, type, arg0, arg1);
9454 else
9455 tem = NULL_TREE;
9457 if (tem != NULL_TREE)
9459 if (TREE_TYPE (tem) != type)
9460 tem = fold_convert_loc (loc, type, tem);
9461 return tem;
9465 /* If this is a commutative operation, and ARG0 is a constant, move it
9466 to ARG1 to reduce the number of tests below. */
9467 if (commutative_tree_code (code)
9468 && tree_swap_operands_p (arg0, arg1, true))
9469 return fold_build2_loc (loc, code, type, op1, op0);
9471 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9473 First check for cases where an arithmetic operation is applied to a
9474 compound, conditional, or comparison operation. Push the arithmetic
9475 operation inside the compound or conditional to see if any folding
9476 can then be done. Convert comparison to conditional for this purpose.
9477 This also optimizes non-constant cases that used to be done in
9478 expand_expr.
9480 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9481 one of the operands is a comparison and the other is a comparison, a
9482 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9483 code below would make the expression more complex. Change it to a
9484 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9485 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9487 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9488 || code == EQ_EXPR || code == NE_EXPR)
9489 && ((truth_value_p (TREE_CODE (arg0))
9490 && (truth_value_p (TREE_CODE (arg1))
9491 || (TREE_CODE (arg1) == BIT_AND_EXPR
9492 && integer_onep (TREE_OPERAND (arg1, 1)))))
9493 || (truth_value_p (TREE_CODE (arg1))
9494 && (truth_value_p (TREE_CODE (arg0))
9495 || (TREE_CODE (arg0) == BIT_AND_EXPR
9496 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9498 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9499 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9500 : TRUTH_XOR_EXPR,
9501 boolean_type_node,
9502 fold_convert_loc (loc, boolean_type_node, arg0),
9503 fold_convert_loc (loc, boolean_type_node, arg1));
9505 if (code == EQ_EXPR)
9506 tem = invert_truthvalue_loc (loc, tem);
9508 return fold_convert_loc (loc, type, tem);
9511 if (TREE_CODE_CLASS (code) == tcc_binary
9512 || TREE_CODE_CLASS (code) == tcc_comparison)
9514 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9516 tem = fold_build2_loc (loc, code, type,
9517 fold_convert_loc (loc, TREE_TYPE (op0),
9518 TREE_OPERAND (arg0, 1)), op1);
9519 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
9520 goto fold_binary_exit;
9522 if (TREE_CODE (arg1) == COMPOUND_EXPR
9523 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9525 tem = fold_build2_loc (loc, code, type, op0,
9526 fold_convert_loc (loc, TREE_TYPE (op1),
9527 TREE_OPERAND (arg1, 1)));
9528 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
9529 goto fold_binary_exit;
9532 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9534 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9535 arg0, arg1,
9536 /*cond_first_p=*/1);
9537 if (tem != NULL_TREE)
9538 return tem;
9541 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9543 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9544 arg1, arg0,
9545 /*cond_first_p=*/0);
9546 if (tem != NULL_TREE)
9547 return tem;
9551 switch (code)
9553 case MEM_REF:
9554 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9555 if (TREE_CODE (arg0) == ADDR_EXPR
9556 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9558 tree iref = TREE_OPERAND (arg0, 0);
9559 return fold_build2 (MEM_REF, type,
9560 TREE_OPERAND (iref, 0),
9561 int_const_binop (PLUS_EXPR, arg1,
9562 TREE_OPERAND (iref, 1), 0));
9565 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9566 if (TREE_CODE (arg0) == ADDR_EXPR
9567 && handled_component_p (TREE_OPERAND (arg0, 0)))
9569 tree base;
9570 HOST_WIDE_INT coffset;
9571 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9572 &coffset);
9573 if (!base)
9574 return NULL_TREE;
9575 return fold_build2 (MEM_REF, type,
9576 build_fold_addr_expr (base),
9577 int_const_binop (PLUS_EXPR, arg1,
9578 size_int (coffset), 0));
9581 return NULL_TREE;
9583 case POINTER_PLUS_EXPR:
9584 /* 0 +p index -> (type)index */
9585 if (integer_zerop (arg0))
9586 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9588 /* PTR +p 0 -> PTR */
9589 if (integer_zerop (arg1))
9590 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9592 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9593 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9594 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9595 return fold_convert_loc (loc, type,
9596 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9597 fold_convert_loc (loc, sizetype,
9598 arg1),
9599 fold_convert_loc (loc, sizetype,
9600 arg0)));
9602 /* index +p PTR -> PTR +p index */
9603 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9604 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9605 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9606 fold_convert_loc (loc, type, arg1),
9607 fold_convert_loc (loc, sizetype, arg0));
9609 /* (PTR +p B) +p A -> PTR +p (B + A) */
9610 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9612 tree inner;
9613 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9614 tree arg00 = TREE_OPERAND (arg0, 0);
9615 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9616 arg01, fold_convert_loc (loc, sizetype, arg1));
9617 return fold_convert_loc (loc, type,
9618 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9619 TREE_TYPE (arg00),
9620 arg00, inner));
9623 /* PTR_CST +p CST -> CST1 */
9624 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9625 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9626 fold_convert_loc (loc, type, arg1));
9628 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9629 of the array. The loop optimizer sometimes produces this type of
9630 expression. */
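/* E.g. (illustrative, assuming 4-byte int): for int a[N],
   &a[i] p+ 4 * j can become &a[i + j], 4 being the step
   (element size) of the array.  */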
9631 if (TREE_CODE (arg0) == ADDR_EXPR)
9633 tem = try_move_mult_to_index (loc, arg0,
9634 fold_convert_loc (loc, sizetype, arg1));
9635 if (tem)
9636 return fold_convert_loc (loc, type, tem);
9639 return NULL_TREE;
9641 case PLUS_EXPR:
9642 /* A + (-B) -> A - B */
9643 if (TREE_CODE (arg1) == NEGATE_EXPR)
9644 return fold_build2_loc (loc, MINUS_EXPR, type,
9645 fold_convert_loc (loc, type, arg0),
9646 fold_convert_loc (loc, type,
9647 TREE_OPERAND (arg1, 0)));
9648 /* (-A) + B -> B - A */
9649 if (TREE_CODE (arg0) == NEGATE_EXPR
9650 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9651 return fold_build2_loc (loc, MINUS_EXPR, type,
9652 fold_convert_loc (loc, type, arg1),
9653 fold_convert_loc (loc, type,
9654 TREE_OPERAND (arg0, 0)));
9656 if (INTEGRAL_TYPE_P (type))
9658 /* Convert ~A + 1 to -A. */
9659 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9660 && integer_onep (arg1))
9661 return fold_build1_loc (loc, NEGATE_EXPR, type,
9662 fold_convert_loc (loc, type,
9663 TREE_OPERAND (arg0, 0)));
9665 /* ~X + X is -1. */
9666 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9667 && !TYPE_OVERFLOW_TRAPS (type))
9669 tree tem = TREE_OPERAND (arg0, 0);
9671 STRIP_NOPS (tem);
9672 if (operand_equal_p (tem, arg1, 0))
9674 t1 = build_int_cst_type (type, -1);
9675 return omit_one_operand_loc (loc, type, t1, arg1);
9679 /* X + ~X is -1. */
9680 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9681 && !TYPE_OVERFLOW_TRAPS (type))
9683 tree tem = TREE_OPERAND (arg1, 0);
9685 STRIP_NOPS (tem);
9686 if (operand_equal_p (arg0, tem, 0))
9688 t1 = build_int_cst_type (type, -1);
9689 return omit_one_operand_loc (loc, type, t1, arg0);
9693 /* X + (X / CST) * -CST is X % CST. */
9694 if (TREE_CODE (arg1) == MULT_EXPR
9695 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9696 && operand_equal_p (arg0,
9697 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9699 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9700 tree cst1 = TREE_OPERAND (arg1, 1);
9701 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9702 cst1, cst0);
9703 if (sum && integer_zerop (sum))
9704 return fold_convert_loc (loc, type,
9705 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9706 TREE_TYPE (arg0), arg0,
9707 cst0));
9711 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9712 same or one. Make sure type is not saturating.
9713 fold_plusminus_mult_expr will re-associate. */
9714 if ((TREE_CODE (arg0) == MULT_EXPR
9715 || TREE_CODE (arg1) == MULT_EXPR)
9716 && !TYPE_SATURATING (type)
9717 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9719 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9720 if (tem)
9721 return tem;
9724 if (! FLOAT_TYPE_P (type))
9726 if (integer_zerop (arg1))
9727 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9729 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9730 with a constant, and the two constants have no bits in common,
9731 we should treat this as a BIT_IOR_EXPR since this may produce more
9732 simplifications. */
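/* E.g. (illustrative): (x & 0xF0) + (y & 0x0F) is handled as
   (x & 0xF0) | (y & 0x0F), since 0xF0 and 0x0F share no bits and
   the addition can therefore never carry.  */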
9733 if (TREE_CODE (arg0) == BIT_AND_EXPR
9734 && TREE_CODE (arg1) == BIT_AND_EXPR
9735 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9736 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9737 && integer_zerop (const_binop (BIT_AND_EXPR,
9738 TREE_OPERAND (arg0, 1),
9739 TREE_OPERAND (arg1, 1))))
9741 code = BIT_IOR_EXPR;
9742 goto bit_ior;
9745 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9746 (plus (plus (mult) (mult)) (foo)) so that we can
9747 take advantage of the factoring cases below. */
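/* E.g. (illustrative): (a*b + c) + d*e is rewritten as
   (a*b + d*e) + c so that the two products become adjacent for
   the factoring cases below.  */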
9748 if (((TREE_CODE (arg0) == PLUS_EXPR
9749 || TREE_CODE (arg0) == MINUS_EXPR)
9750 && TREE_CODE (arg1) == MULT_EXPR)
9751 || ((TREE_CODE (arg1) == PLUS_EXPR
9752 || TREE_CODE (arg1) == MINUS_EXPR)
9753 && TREE_CODE (arg0) == MULT_EXPR))
9755 tree parg0, parg1, parg, marg;
9756 enum tree_code pcode;
9758 if (TREE_CODE (arg1) == MULT_EXPR)
9759 parg = arg0, marg = arg1;
9760 else
9761 parg = arg1, marg = arg0;
9762 pcode = TREE_CODE (parg);
9763 parg0 = TREE_OPERAND (parg, 0);
9764 parg1 = TREE_OPERAND (parg, 1);
9765 STRIP_NOPS (parg0);
9766 STRIP_NOPS (parg1);
9768 if (TREE_CODE (parg0) == MULT_EXPR
9769 && TREE_CODE (parg1) != MULT_EXPR)
9770 return fold_build2_loc (loc, pcode, type,
9771 fold_build2_loc (loc, PLUS_EXPR, type,
9772 fold_convert_loc (loc, type,
9773 parg0),
9774 fold_convert_loc (loc, type,
9775 marg)),
9776 fold_convert_loc (loc, type, parg1));
9777 if (TREE_CODE (parg0) != MULT_EXPR
9778 && TREE_CODE (parg1) == MULT_EXPR)
9779 return
9780 fold_build2_loc (loc, PLUS_EXPR, type,
9781 fold_convert_loc (loc, type, parg0),
9782 fold_build2_loc (loc, pcode, type,
9783 fold_convert_loc (loc, type, marg),
9784 fold_convert_loc (loc, type,
9785 parg1)));
9788 else
9790 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9791 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9792 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9794 /* Likewise if the operands are reversed. */
9795 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9796 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9798 /* Convert X + -C into X - C. */
9799 if (TREE_CODE (arg1) == REAL_CST
9800 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9802 tem = fold_negate_const (arg1, type);
9803 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9804 return fold_build2_loc (loc, MINUS_EXPR, type,
9805 fold_convert_loc (loc, type, arg0),
9806 fold_convert_loc (loc, type, tem));
9809 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9810 to __complex__ ( x, y ). This is not the same for SNaNs or
9811 if signed zeros are involved. */
9812 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9813 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9814 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9816 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9817 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9818 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9819 bool arg0rz = false, arg0iz = false;
9820 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9821 || (arg0i && (arg0iz = real_zerop (arg0i))))
9823 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9824 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9825 if (arg0rz && arg1i && real_zerop (arg1i))
9827 tree rp = arg1r ? arg1r
9828 : build1 (REALPART_EXPR, rtype, arg1);
9829 tree ip = arg0i ? arg0i
9830 : build1 (IMAGPART_EXPR, rtype, arg0);
9831 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9833 else if (arg0iz && arg1r && real_zerop (arg1r))
9835 tree rp = arg0r ? arg0r
9836 : build1 (REALPART_EXPR, rtype, arg0);
9837 tree ip = arg1i ? arg1i
9838 : build1 (IMAGPART_EXPR, rtype, arg1);
9839 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9844 if (flag_unsafe_math_optimizations
9845 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9846 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9847 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9848 return tem;
9850 /* Convert x+x into x*2.0. */
9851 if (operand_equal_p (arg0, arg1, 0)
9852 && SCALAR_FLOAT_TYPE_P (type))
9853 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9854 build_real (type, dconst2));
9856 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9857 We associate floats only if the user has specified
9858 -fassociative-math. */
9859 if (flag_associative_math
9860 && TREE_CODE (arg1) == PLUS_EXPR
9861 && TREE_CODE (arg0) != MULT_EXPR)
9863 tree tree10 = TREE_OPERAND (arg1, 0);
9864 tree tree11 = TREE_OPERAND (arg1, 1);
9865 if (TREE_CODE (tree11) == MULT_EXPR
9866 && TREE_CODE (tree10) == MULT_EXPR)
9868 tree tree0;
9869 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9870 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9873 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9874 We associate floats only if the user has specified
9875 -fassociative-math. */
9876 if (flag_associative_math
9877 && TREE_CODE (arg0) == PLUS_EXPR
9878 && TREE_CODE (arg1) != MULT_EXPR)
9880 tree tree00 = TREE_OPERAND (arg0, 0);
9881 tree tree01 = TREE_OPERAND (arg0, 1);
9882 if (TREE_CODE (tree01) == MULT_EXPR
9883 && TREE_CODE (tree00) == MULT_EXPR)
9885 tree tree0;
9886 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9887 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9892 bit_rotate:
9893 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9894 is a rotate of A by C1 bits. */
9895 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9896 is a rotate of A by B bits. */
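/* E.g. (illustrative, for 32-bit unsigned int a):
   (a << 8) + (a >> 24) becomes a rotate-left of a by 8, and
   (a << b) + (a >> (32 - b)) becomes a rotate-left of a by b.  */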
9898 enum tree_code code0, code1;
9899 tree rtype;
9900 code0 = TREE_CODE (arg0);
9901 code1 = TREE_CODE (arg1);
9902 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9903 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9904 && operand_equal_p (TREE_OPERAND (arg0, 0),
9905 TREE_OPERAND (arg1, 0), 0)
9906 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9907 TYPE_UNSIGNED (rtype))
9908 /* Only create rotates in complete modes. Other cases are not
9909 expanded properly. */
9910 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9912 tree tree01, tree11;
9913 enum tree_code code01, code11;
9915 tree01 = TREE_OPERAND (arg0, 1);
9916 tree11 = TREE_OPERAND (arg1, 1);
9917 STRIP_NOPS (tree01);
9918 STRIP_NOPS (tree11);
9919 code01 = TREE_CODE (tree01);
9920 code11 = TREE_CODE (tree11);
9921 if (code01 == INTEGER_CST
9922 && code11 == INTEGER_CST
9923 && TREE_INT_CST_HIGH (tree01) == 0
9924 && TREE_INT_CST_HIGH (tree11) == 0
9925 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9926 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9928 tem = build2 (LROTATE_EXPR,
9929 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9930 TREE_OPERAND (arg0, 0),
9931 code0 == LSHIFT_EXPR
9932 ? tree01 : tree11);
9933 SET_EXPR_LOCATION (tem, loc);
9934 return fold_convert_loc (loc, type, tem);
9936 else if (code11 == MINUS_EXPR)
9938 tree tree110, tree111;
9939 tree110 = TREE_OPERAND (tree11, 0);
9940 tree111 = TREE_OPERAND (tree11, 1);
9941 STRIP_NOPS (tree110);
9942 STRIP_NOPS (tree111);
9943 if (TREE_CODE (tree110) == INTEGER_CST
9944 && 0 == compare_tree_int (tree110,
9945 TYPE_PRECISION
9946 (TREE_TYPE (TREE_OPERAND
9947 (arg0, 0))))
9948 && operand_equal_p (tree01, tree111, 0))
9949 return
9950 fold_convert_loc (loc, type,
9951 build2 ((code0 == LSHIFT_EXPR
9952 ? LROTATE_EXPR
9953 : RROTATE_EXPR),
9954 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9955 TREE_OPERAND (arg0, 0), tree01));
9957 else if (code01 == MINUS_EXPR)
9959 tree tree010, tree011;
9960 tree010 = TREE_OPERAND (tree01, 0);
9961 tree011 = TREE_OPERAND (tree01, 1);
9962 STRIP_NOPS (tree010);
9963 STRIP_NOPS (tree011);
9964 if (TREE_CODE (tree010) == INTEGER_CST
9965 && 0 == compare_tree_int (tree010,
9966 TYPE_PRECISION
9967 (TREE_TYPE (TREE_OPERAND
9968 (arg0, 0))))
9969 && operand_equal_p (tree11, tree011, 0))
9970 return fold_convert_loc
9971 (loc, type,
9972 build2 ((code0 != LSHIFT_EXPR
9973 ? LROTATE_EXPR
9974 : RROTATE_EXPR),
9975 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9976 TREE_OPERAND (arg0, 0), tree11));
9981 associate:
9982 /* In most languages, we can't associate operations on floats through
9983 parentheses. Rather than remember where the parentheses were, we
9984 don't associate floats at all, unless the user has specified
9985 -fassociative-math.
9986 And, we need to make sure type is not saturating. */
9988 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9989 && !TYPE_SATURATING (type))
9991 tree var0, con0, lit0, minus_lit0;
9992 tree var1, con1, lit1, minus_lit1;
9993 bool ok = true;
9995 /* Split both trees into variables, constants, and literals. Then
9996 associate each group together, the constants with literals,
9997 then the result with variables. This increases the chances of
9998 literals being recombined later and of generating relocatable
9999 expressions for the sum of a constant and literal. */
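/* E.g. (illustrative): for (x + 3) + (y + 5) the variables x, y and
   the literals 3, 5 are grouped separately, so the sum can fold to
   (x + y) + 8.  */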
10000 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10001 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10002 code == MINUS_EXPR);
10004 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10005 if (code == MINUS_EXPR)
10006 code = PLUS_EXPR;
10008 /* With undefined overflow we can only associate constants with one
10009 variable, and constants whose association doesn't overflow. */
10010 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10011 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10013 if (var0 && var1)
10015 tree tmp0 = var0;
10016 tree tmp1 = var1;
10018 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10019 tmp0 = TREE_OPERAND (tmp0, 0);
10020 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10021 tmp1 = TREE_OPERAND (tmp1, 0);
10022 /* The only case we can still associate with two variables
10023 is if they are the same, modulo negation. */
10024 if (!operand_equal_p (tmp0, tmp1, 0))
10025 ok = false;
10028 if (ok && lit0 && lit1)
10030 tree tmp0 = fold_convert (type, lit0);
10031 tree tmp1 = fold_convert (type, lit1);
10033 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10034 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10035 ok = false;
10039 /* Only do something if we found more than two objects. Otherwise,
10040 nothing has changed and we risk infinite recursion. */
10041 if (ok
10042 && (2 < ((var0 != 0) + (var1 != 0)
10043 + (con0 != 0) + (con1 != 0)
10044 + (lit0 != 0) + (lit1 != 0)
10045 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10047 var0 = associate_trees (loc, var0, var1, code, type);
10048 con0 = associate_trees (loc, con0, con1, code, type);
10049 lit0 = associate_trees (loc, lit0, lit1, code, type);
10050 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10052 /* Preserve the MINUS_EXPR if the negative part of the literal is
10053 greater than the positive part. Otherwise, the multiplicative
10054 folding code (i.e. extract_muldiv) may be fooled when
10055 unsigned constants are subtracted, as in the following
10056 example: ((X*2 + 4) - 8U)/2. */
10057 if (minus_lit0 && lit0)
10059 if (TREE_CODE (lit0) == INTEGER_CST
10060 && TREE_CODE (minus_lit0) == INTEGER_CST
10061 && tree_int_cst_lt (lit0, minus_lit0))
10063 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10064 MINUS_EXPR, type);
10065 lit0 = 0;
10067 else
10069 lit0 = associate_trees (loc, lit0, minus_lit0,
10070 MINUS_EXPR, type);
10071 minus_lit0 = 0;
10074 if (minus_lit0)
10076 if (con0 == 0)
10077 return
10078 fold_convert_loc (loc, type,
10079 associate_trees (loc, var0, minus_lit0,
10080 MINUS_EXPR, type));
10081 else
10083 con0 = associate_trees (loc, con0, minus_lit0,
10084 MINUS_EXPR, type);
10085 return
10086 fold_convert_loc (loc, type,
10087 associate_trees (loc, var0, con0,
10088 PLUS_EXPR, type));
10092 con0 = associate_trees (loc, con0, lit0, code, type);
10093 return
10094 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10095 code, type));
10099 return NULL_TREE;
10101 case MINUS_EXPR:
10102 /* Pointer simplifications for subtraction, simple reassociations. */
10103 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10105 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10106 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10107 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10109 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10110 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10111 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10112 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10113 return fold_build2_loc (loc, PLUS_EXPR, type,
10114 fold_build2_loc (loc, MINUS_EXPR, type,
10115 arg00, arg10),
10116 fold_build2_loc (loc, MINUS_EXPR, type,
10117 arg01, arg11));
10119 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10120 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10122 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10123 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10124 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10125 fold_convert_loc (loc, type, arg1));
10126 if (tmp)
10127 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10130 /* A - (-B) -> A + B */
10131 if (TREE_CODE (arg1) == NEGATE_EXPR)
10132 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10133 fold_convert_loc (loc, type,
10134 TREE_OPERAND (arg1, 0)));
10135 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10136 if (TREE_CODE (arg0) == NEGATE_EXPR
10137 && (FLOAT_TYPE_P (type)
10138 || INTEGRAL_TYPE_P (type))
10139 && negate_expr_p (arg1)
10140 && reorder_operands_p (arg0, arg1))
10141 return fold_build2_loc (loc, MINUS_EXPR, type,
10142 fold_convert_loc (loc, type,
10143 negate_expr (arg1)),
10144 fold_convert_loc (loc, type,
10145 TREE_OPERAND (arg0, 0)));
10146 /* Convert -A - 1 to ~A. */
10147 if (INTEGRAL_TYPE_P (type)
10148 && TREE_CODE (arg0) == NEGATE_EXPR
10149 && integer_onep (arg1)
10150 && !TYPE_OVERFLOW_TRAPS (type))
10151 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10152 fold_convert_loc (loc, type,
10153 TREE_OPERAND (arg0, 0)));
10155 /* Convert -1 - A to ~A. */
10156 if (INTEGRAL_TYPE_P (type)
10157 && integer_all_onesp (arg0))
10158 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10161 /* X - (X / CST) * CST is X % CST. */
10162 if (INTEGRAL_TYPE_P (type)
10163 && TREE_CODE (arg1) == MULT_EXPR
10164 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10165 && operand_equal_p (arg0,
10166 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10167 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10168 TREE_OPERAND (arg1, 1), 0))
10169 return
10170 fold_convert_loc (loc, type,
10171 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10172 arg0, TREE_OPERAND (arg1, 1)));
10174 if (! FLOAT_TYPE_P (type))
10176 if (integer_zerop (arg0))
10177 return negate_expr (fold_convert_loc (loc, type, arg1));
10178 if (integer_zerop (arg1))
10179 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10181 /* Fold A - (A & B) into ~B & A. */
10182 if (!TREE_SIDE_EFFECTS (arg0)
10183 && TREE_CODE (arg1) == BIT_AND_EXPR)
10185 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10187 tree arg10 = fold_convert_loc (loc, type,
10188 TREE_OPERAND (arg1, 0));
10189 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10190 fold_build1_loc (loc, BIT_NOT_EXPR,
10191 type, arg10),
10192 fold_convert_loc (loc, type, arg0));
10194 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10196 tree arg11 = fold_convert_loc (loc,
10197 type, TREE_OPERAND (arg1, 1));
10198 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10199 fold_build1_loc (loc, BIT_NOT_EXPR,
10200 type, arg11),
10201 fold_convert_loc (loc, type, arg0));
10205 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10206 any power of 2 minus 1. */
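/* Worked instance (illustrative): with B = 7 (a power of 2 minus 1)
   and A = 13, (13 & ~7) - (13 & 7) = 8 - 5 = 3, and likewise
   (13 ^ 7) - 7 = 10 - 7 = 3.  */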
10207 if (TREE_CODE (arg0) == BIT_AND_EXPR
10208 && TREE_CODE (arg1) == BIT_AND_EXPR
10209 && operand_equal_p (TREE_OPERAND (arg0, 0),
10210 TREE_OPERAND (arg1, 0), 0))
10212 tree mask0 = TREE_OPERAND (arg0, 1);
10213 tree mask1 = TREE_OPERAND (arg1, 1);
10214 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10216 if (operand_equal_p (tem, mask1, 0))
10218 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10219 TREE_OPERAND (arg0, 0), mask1);
10220 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10225 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10226 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10227 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10229 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10230 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10231 (-ARG1 + ARG0) reduces to -ARG1. */
10232 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10233 return negate_expr (fold_convert_loc (loc, type, arg1));
10235 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10236 __complex__ ( x, -y ). This is not the same for SNaNs or if
10237 signed zeros are involved. */
10238 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10239 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10240 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10242 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10243 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10244 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10245 bool arg0rz = false, arg0iz = false;
10246 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10247 || (arg0i && (arg0iz = real_zerop (arg0i))))
10249 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10250 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10251 if (arg0rz && arg1i && real_zerop (arg1i))
10253 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10254 arg1r ? arg1r
10255 : build1 (REALPART_EXPR, rtype, arg1));
10256 tree ip = arg0i ? arg0i
10257 : build1 (IMAGPART_EXPR, rtype, arg0);
10258 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10260 else if (arg0iz && arg1r && real_zerop (arg1r))
10262 tree rp = arg0r ? arg0r
10263 : build1 (REALPART_EXPR, rtype, arg0);
10264 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10265 arg1i ? arg1i
10266 : build1 (IMAGPART_EXPR, rtype, arg1));
10267 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10272 /* Fold &x - &x. This can happen from &x.foo - &x.
10273 This is unsafe for certain floats even in non-IEEE formats.
10274 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10275 Also note that operand_equal_p is always false if an operand
10276 is volatile. */
10278 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10279 && operand_equal_p (arg0, arg1, 0))
10280 return fold_convert_loc (loc, type, integer_zero_node);
10282 /* A - B -> A + (-B) if B is easily negatable. */
10283 if (negate_expr_p (arg1)
10284 && ((FLOAT_TYPE_P (type)
10285 /* Avoid this transformation if B is a positive REAL_CST. */
10286 && (TREE_CODE (arg1) != REAL_CST
10287 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10288 || INTEGRAL_TYPE_P (type)))
10289 return fold_build2_loc (loc, PLUS_EXPR, type,
10290 fold_convert_loc (loc, type, arg0),
10291 fold_convert_loc (loc, type,
10292 negate_expr (arg1)));
10294 /* Try folding difference of addresses. */
10296 HOST_WIDE_INT diff;
10298 if ((TREE_CODE (arg0) == ADDR_EXPR
10299 || TREE_CODE (arg1) == ADDR_EXPR)
10300 && ptr_difference_const (arg0, arg1, &diff))
10301 return build_int_cst_type (type, diff);
10304 /* Fold &a[i] - &a[j] to i-j. */
10305 if (TREE_CODE (arg0) == ADDR_EXPR
10306 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10307 && TREE_CODE (arg1) == ADDR_EXPR
10308 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10310 tree aref0 = TREE_OPERAND (arg0, 0);
10311 tree aref1 = TREE_OPERAND (arg1, 0);
10312 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10313 TREE_OPERAND (aref1, 0), 0))
10315 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10316 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10317 tree esz = array_ref_element_size (aref0);
10318 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10319 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10320 fold_convert_loc (loc, type, esz));
10325 if (FLOAT_TYPE_P (type)
10326 && flag_unsafe_math_optimizations
10327 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10328 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10329 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10330 return tem;
10332 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10333 same or one. Make sure type is not saturating.
10334 fold_plusminus_mult_expr will re-associate. */
10335 if ((TREE_CODE (arg0) == MULT_EXPR
10336 || TREE_CODE (arg1) == MULT_EXPR)
10337 && !TYPE_SATURATING (type)
10338 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10340 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10341 if (tem)
10342 return tem;
10345 goto associate;
10347 case MULT_EXPR:
10348 /* (-A) * (-B) -> A * B */
10349 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10350 return fold_build2_loc (loc, MULT_EXPR, type,
10351 fold_convert_loc (loc, type,
10352 TREE_OPERAND (arg0, 0)),
10353 fold_convert_loc (loc, type,
10354 negate_expr (arg1)));
10355 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10356 return fold_build2_loc (loc, MULT_EXPR, type,
10357 fold_convert_loc (loc, type,
10358 negate_expr (arg0)),
10359 fold_convert_loc (loc, type,
10360 TREE_OPERAND (arg1, 0)));
10362 if (! FLOAT_TYPE_P (type))
10364 if (integer_zerop (arg1))
10365 return omit_one_operand_loc (loc, type, arg1, arg0);
10366 if (integer_onep (arg1))
10367 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10368 /* Transform x * -1 into -x. Make sure to do the negation
10369 on the original operand with conversions not stripped
10370 because we can only strip non-sign-changing conversions. */
10371 if (integer_all_onesp (arg1))
10372 return fold_convert_loc (loc, type, negate_expr (op0));
10373 /* Transform x * -C into -x * C if x is easily negatable. */
10374 if (TREE_CODE (arg1) == INTEGER_CST
10375 && tree_int_cst_sgn (arg1) == -1
10376 && negate_expr_p (arg0)
10377 && (tem = negate_expr (arg1)) != arg1
10378 && !TREE_OVERFLOW (tem))
10379 return fold_build2_loc (loc, MULT_EXPR, type,
10380 fold_convert_loc (loc, type,
10381 negate_expr (arg0)),
10382 tem);
10384 /* (a * (1 << b)) is (a << b) */
10385 if (TREE_CODE (arg1) == LSHIFT_EXPR
10386 && integer_onep (TREE_OPERAND (arg1, 0)))
10387 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10388 TREE_OPERAND (arg1, 1));
10389 if (TREE_CODE (arg0) == LSHIFT_EXPR
10390 && integer_onep (TREE_OPERAND (arg0, 0)))
10391 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10392 TREE_OPERAND (arg0, 1));
10394 /* (A + A) * C -> A * 2 * C */
10395 if (TREE_CODE (arg0) == PLUS_EXPR
10396 && TREE_CODE (arg1) == INTEGER_CST
10397 && operand_equal_p (TREE_OPERAND (arg0, 0),
10398 TREE_OPERAND (arg0, 1), 0))
10399 return fold_build2_loc (loc, MULT_EXPR, type,
10400 omit_one_operand_loc (loc, type,
10401 TREE_OPERAND (arg0, 0),
10402 TREE_OPERAND (arg0, 1)),
10403 fold_build2_loc (loc, MULT_EXPR, type,
10404 build_int_cst (type, 2), arg1));
10406 strict_overflow_p = false;
10407 if (TREE_CODE (arg1) == INTEGER_CST
10408 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10409 &strict_overflow_p)))
10411 if (strict_overflow_p)
10412 fold_overflow_warning (("assuming signed overflow does not "
10413 "occur when simplifying "
10414 "multiplication"),
10415 WARN_STRICT_OVERFLOW_MISC);
10416 return fold_convert_loc (loc, type, tem);
10419 /* Optimize z * conj(z) for integer complex numbers. */
10420 if (TREE_CODE (arg0) == CONJ_EXPR
10421 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10422 return fold_mult_zconjz (loc, type, arg1);
10423 if (TREE_CODE (arg1) == CONJ_EXPR
10424 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10425 return fold_mult_zconjz (loc, type, arg0);
10427 else
10429 /* Maybe fold x * 0 to 0. The expressions aren't the same
10430 when x is NaN, since x * 0 is also NaN. Nor are they the
10431 same in modes with signed zeros, since multiplying a
10432 negative value by 0 gives -0, not +0. */
10433 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10434 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10435 && real_zerop (arg1))
10436 return omit_one_operand_loc (loc, type, arg1, arg0);
10437 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
10438 Likewise for complex arithmetic with signed zeros. */
10439 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10440 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10441 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10442 && real_onep (arg1))
10443 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10445 /* Transform x * -1.0 into -x. */
10446 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10447 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10448 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10449 && real_minus_onep (arg1))
10450 return fold_convert_loc (loc, type, negate_expr (arg0));
10452 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10453 the result for floating point types due to rounding, so it is applied
10454 only if -fassociative-math was specified. */
10455 if (flag_associative_math
10456 && TREE_CODE (arg0) == RDIV_EXPR
10457 && TREE_CODE (arg1) == REAL_CST
10458 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10460 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10461 arg1);
10462 if (tem)
10463 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10464 TREE_OPERAND (arg0, 1));
10467 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10468 if (operand_equal_p (arg0, arg1, 0))
10470 tree tem = fold_strip_sign_ops (arg0);
10471 if (tem != NULL_TREE)
10473 tem = fold_convert_loc (loc, type, tem);
10474 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10478 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10479 This is not the same for NaNs or if signed zeros are
10480 involved. */
10481 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10482 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10483 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10484 && TREE_CODE (arg1) == COMPLEX_CST
10485 && real_zerop (TREE_REALPART (arg1)))
10487 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10488 if (real_onep (TREE_IMAGPART (arg1)))
10489 return
10490 fold_build2_loc (loc, COMPLEX_EXPR, type,
10491 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10492 rtype, arg0)),
10493 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10494 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10495 return
10496 fold_build2_loc (loc, COMPLEX_EXPR, type,
10497 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10498 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10499 rtype, arg0)));
10502 /* Optimize z * conj(z) for floating point complex numbers.
10503 Guarded by flag_unsafe_math_optimizations as non-finite
10504 imaginary components don't produce scalar results. */
10505 if (flag_unsafe_math_optimizations
10506 && TREE_CODE (arg0) == CONJ_EXPR
10507 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10508 return fold_mult_zconjz (loc, type, arg1);
10509 if (flag_unsafe_math_optimizations
10510 && TREE_CODE (arg1) == CONJ_EXPR
10511 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10512 return fold_mult_zconjz (loc, type, arg0);
10514 if (flag_unsafe_math_optimizations)
10516 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10517 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10519 /* Optimizations of root(...)*root(...). */
10520 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10522 tree rootfn, arg;
10523 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10524 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10526 /* Optimize sqrt(x)*sqrt(x) as x. */
10527 if (BUILTIN_SQRT_P (fcode0)
10528 && operand_equal_p (arg00, arg10, 0)
10529 && ! HONOR_SNANS (TYPE_MODE (type)))
10530 return arg00;
10532 /* Optimize root(x)*root(y) as root(x*y). */
10533 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10534 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10535 return build_call_expr_loc (loc, rootfn, 1, arg);
10538 /* Optimize expN(x)*expN(y) as expN(x+y). */
10539 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10541 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10542 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10543 CALL_EXPR_ARG (arg0, 0),
10544 CALL_EXPR_ARG (arg1, 0));
10545 return build_call_expr_loc (loc, expfn, 1, arg);
10548 /* Optimizations of pow(...)*pow(...). */
10549 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10550 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10551 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10553 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10554 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10555 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10556 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10558 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10559 if (operand_equal_p (arg01, arg11, 0))
10561 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10562 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10563 arg00, arg10);
10564 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10567 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10568 if (operand_equal_p (arg00, arg10, 0))
10570 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10571 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10572 arg01, arg11);
10573 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10577 /* Optimize tan(x)*cos(x) as sin(x). */
10578 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10579 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10580 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10581 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10582 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10583 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10584 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10585 CALL_EXPR_ARG (arg1, 0), 0))
10587 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10589 if (sinfn != NULL_TREE)
10590 return build_call_expr_loc (loc, sinfn, 1,
10591 CALL_EXPR_ARG (arg0, 0));
10594 /* Optimize x*pow(x,c) as pow(x,c+1). */
10595 if (fcode1 == BUILT_IN_POW
10596 || fcode1 == BUILT_IN_POWF
10597 || fcode1 == BUILT_IN_POWL)
10599 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10600 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10601 if (TREE_CODE (arg11) == REAL_CST
10602 && !TREE_OVERFLOW (arg11)
10603 && operand_equal_p (arg0, arg10, 0))
10605 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10606 REAL_VALUE_TYPE c;
10607 tree arg;
10609 c = TREE_REAL_CST (arg11);
10610 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10611 arg = build_real (type, c);
10612 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10616 /* Optimize pow(x,c)*x as pow(x,c+1). */
10617 if (fcode0 == BUILT_IN_POW
10618 || fcode0 == BUILT_IN_POWF
10619 || fcode0 == BUILT_IN_POWL)
10621 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10622 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10623 if (TREE_CODE (arg01) == REAL_CST
10624 && !TREE_OVERFLOW (arg01)
10625 && operand_equal_p (arg1, arg00, 0))
10627 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10628 REAL_VALUE_TYPE c;
10629 tree arg;
10631 c = TREE_REAL_CST (arg01);
10632 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10633 arg = build_real (type, c);
10634 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10638 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10639 if (optimize_function_for_speed_p (cfun)
10640 && operand_equal_p (arg0, arg1, 0))
10642 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10644 if (powfn)
10646 tree arg = build_real (type, dconst2);
10647 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10652 goto associate;
10654 case BIT_IOR_EXPR:
10655 bit_ior:
10656 if (integer_all_onesp (arg1))
10657 return omit_one_operand_loc (loc, type, arg1, arg0);
10658 if (integer_zerop (arg1))
10659 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10660 if (operand_equal_p (arg0, arg1, 0))
10661 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10663 /* ~X | X is -1. */
10664 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10665 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10667 t1 = fold_convert_loc (loc, type, integer_zero_node);
10668 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10669 return omit_one_operand_loc (loc, type, t1, arg1);
10672 /* X | ~X is -1. */
10673 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10674 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10676 t1 = fold_convert_loc (loc, type, integer_zero_node);
10677 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10678 return omit_one_operand_loc (loc, type, t1, arg0);
10681 /* Canonicalize (X & C1) | C2. */
10682 if (TREE_CODE (arg0) == BIT_AND_EXPR
10683 && TREE_CODE (arg1) == INTEGER_CST
10684 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10686 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10687 int width = TYPE_PRECISION (type), w;
10688 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10689 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10690 hi2 = TREE_INT_CST_HIGH (arg1);
10691 lo2 = TREE_INT_CST_LOW (arg1);
10693 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10694 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10695 return omit_one_operand_loc (loc, type, arg1,
10696 TREE_OPERAND (arg0, 0));
10698 if (width > HOST_BITS_PER_WIDE_INT)
10700 mhi = (unsigned HOST_WIDE_INT) -1
10701 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10702 mlo = -1;
10704 else
10706 mhi = 0;
10707 mlo = (unsigned HOST_WIDE_INT) -1
10708 >> (HOST_BITS_PER_WIDE_INT - width);
10711 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10712 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10713 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10714 TREE_OPERAND (arg0, 0), arg1);
10716 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10717 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10718 mode which allows further optimizations. */
10719 hi1 &= mhi;
10720 lo1 &= mlo;
10721 hi2 &= mhi;
10722 lo2 &= mlo;
10723 hi3 = hi1 & ~hi2;
10724 lo3 = lo1 & ~lo2;
10725 for (w = BITS_PER_UNIT;
10726 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10727 w <<= 1)
10729 unsigned HOST_WIDE_INT mask
10730 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10731 if (((lo1 | lo2) & mask) == mask
10732 && (lo1 & ~mask) == 0 && hi1 == 0)
10734 hi3 = 0;
10735 lo3 = mask;
10736 break;
10739 if (hi3 != hi1 || lo3 != lo1)
10740 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10741 fold_build2_loc (loc, BIT_AND_EXPR, type,
10742 TREE_OPERAND (arg0, 0),
10743 build_int_cst_wide (type,
10744 lo3, hi3)),
10745 arg1);
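	  /* Worked example (a sketch): (X & 0x0f0f) | 0x00ff has
	     C1 & ~C2 == 0x0f00 and the loop above finds no mode-sized
	     mask, so the result is (X & 0x0f00) | 0x00ff.  By contrast,
	     (X & 0xff) | 0x0f keeps C1 == 0xff, because 0xff is itself
	     a mode mask worth preserving.  */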
10748 /* (X & Y) | Y is (X, Y). */
10749 if (TREE_CODE (arg0) == BIT_AND_EXPR
10750 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10751 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10752 /* (X & Y) | X is (Y, X). */
10753 if (TREE_CODE (arg0) == BIT_AND_EXPR
10754 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10755 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10756 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10757 /* X | (X & Y) is (Y, X). */
10758 if (TREE_CODE (arg1) == BIT_AND_EXPR
10759 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10760 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10761 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10762 /* X | (Y & X) is (Y, X). */
10763 if (TREE_CODE (arg1) == BIT_AND_EXPR
10764 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10765 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10766 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10768 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10769 if (t1 != NULL_TREE)
10770 return t1;
10772 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10774 This results in more efficient code for machines without a NAND
10775 instruction. Combine will canonicalize to the first form
10776 which will allow use of NAND instructions provided by the
10777 backend if they exist. */
10778 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10779 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10781 return
10782 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10783 build2 (BIT_AND_EXPR, type,
10784 fold_convert_loc (loc, type,
10785 TREE_OPERAND (arg0, 0)),
10786 fold_convert_loc (loc, type,
10787 TREE_OPERAND (arg1, 0))));
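      /* De Morgan in action: e.g. ~a | ~b becomes ~(a & b), saving a
	 BIT_NOT and exposing a NAND where the target has one.  */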
10790 /* See if this can be simplified into a rotate first. If that
10791 is unsuccessful continue in the association code. */
10792 goto bit_rotate;
10794 case BIT_XOR_EXPR:
10795 if (integer_zerop (arg1))
10796 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10797 if (integer_all_onesp (arg1))
10798 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10799 if (operand_equal_p (arg0, arg1, 0))
10800 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10802 /* ~X ^ X is -1. */
10803 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10804 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10806 t1 = fold_convert_loc (loc, type, integer_zero_node);
10807 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10808 return omit_one_operand_loc (loc, type, t1, arg1);
10811 /* X ^ ~X is -1. */
10812 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10813 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10815 t1 = fold_convert_loc (loc, type, integer_zero_node);
10816 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10817 return omit_one_operand_loc (loc, type, t1, arg0);
10820 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10821 with a constant, and the two constants have no bits in common,
10822 we should treat this as a BIT_IOR_EXPR since this may produce more
10823 simplifications. */
10824 if (TREE_CODE (arg0) == BIT_AND_EXPR
10825 && TREE_CODE (arg1) == BIT_AND_EXPR
10826 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10827 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10828 && integer_zerop (const_binop (BIT_AND_EXPR,
10829 TREE_OPERAND (arg0, 1),
10830 TREE_OPERAND (arg1, 1))))
10832 code = BIT_IOR_EXPR;
10833 goto bit_ior;
10836 /* (X | Y) ^ X -> Y & ~X.  */
10837 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10838 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10840 tree t2 = TREE_OPERAND (arg0, 1);
10841 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10842 arg1);
10843 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10844 fold_convert_loc (loc, type, t2),
10845 fold_convert_loc (loc, type, t1));
10846 return t1;
10849 /* (Y | X) ^ X -> Y & ~X.  */
10850 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10851 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10853 tree t2 = TREE_OPERAND (arg0, 0);
10854 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10855 arg1);
10856 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10857 fold_convert_loc (loc, type, t2),
10858 fold_convert_loc (loc, type, t1));
10859 return t1;
10862 /* X ^ (X | Y) -> Y & ~X.  */
10863 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10864 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10866 tree t2 = TREE_OPERAND (arg1, 1);
10867 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10868 arg0);
10869 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10870 fold_convert_loc (loc, type, t2),
10871 fold_convert_loc (loc, type, t1));
10872 return t1;
10875 /* X ^ (Y | X) -> Y & ~X.  */
10876 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10877 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10879 tree t2 = TREE_OPERAND (arg1, 0);
10880 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10881 arg0);
10882 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10883 fold_convert_loc (loc, type, t2),
10884 fold_convert_loc (loc, type, t1));
10885 return t1;
10888 /* Convert ~X ^ ~Y to X ^ Y. */
10889 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10890 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10891 return fold_build2_loc (loc, code, type,
10892 fold_convert_loc (loc, type,
10893 TREE_OPERAND (arg0, 0)),
10894 fold_convert_loc (loc, type,
10895 TREE_OPERAND (arg1, 0)));
10897 /* Convert ~X ^ C to X ^ ~C. */
10898 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10899 && TREE_CODE (arg1) == INTEGER_CST)
10900 return fold_build2_loc (loc, code, type,
10901 fold_convert_loc (loc, type,
10902 TREE_OPERAND (arg0, 0)),
10903 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10905 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10906 if (TREE_CODE (arg0) == BIT_AND_EXPR
10907 && integer_onep (TREE_OPERAND (arg0, 1))
10908 && integer_onep (arg1))
10909 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10910 build_int_cst (TREE_TYPE (arg0), 0));
10912 /* Fold (X & Y) ^ Y as ~X & Y. */
10913 if (TREE_CODE (arg0) == BIT_AND_EXPR
10914 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10916 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10917 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10918 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10919 fold_convert_loc (loc, type, arg1));
10921 /* Fold (X & Y) ^ X as ~Y & X. */
10922 if (TREE_CODE (arg0) == BIT_AND_EXPR
10923 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10924 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10926 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10927 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10928 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10929 fold_convert_loc (loc, type, arg1));
10931 /* Fold X ^ (X & Y) as X & ~Y. */
10932 if (TREE_CODE (arg1) == BIT_AND_EXPR
10933 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10935 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10936 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10937 fold_convert_loc (loc, type, arg0),
10938 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10940 /* Fold X ^ (Y & X) as ~Y & X. */
10941 if (TREE_CODE (arg1) == BIT_AND_EXPR
10942 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10943 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10945 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10946 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10947 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10948 fold_convert_loc (loc, type, arg0));
10951 /* See if this can be simplified into a rotate first. If that
10952 is unsuccessful continue in the association code. */
10953 goto bit_rotate;
10955 case BIT_AND_EXPR:
10956 if (integer_all_onesp (arg1))
10957 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10958 if (integer_zerop (arg1))
10959 return omit_one_operand_loc (loc, type, arg1, arg0);
10960 if (operand_equal_p (arg0, arg1, 0))
10961 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10963 /* ~X & X is always zero. */
10964 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10965 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10966 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10968 /* X & ~X is always zero. */
10969 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10970 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10971 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10973 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10974 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10975 && TREE_CODE (arg1) == INTEGER_CST
10976 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10978 tree tmp1 = fold_convert_loc (loc, type, arg1);
10979 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10980 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10981 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10982 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10983 return
10984 fold_convert_loc (loc, type,
10985 fold_build2_loc (loc, BIT_IOR_EXPR,
10986 type, tmp2, tmp3));
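	  /* E.g. (X | 3) & 5 becomes (X & 5) | (3 & 5), and the constant
	     half immediately folds to give (X & 5) | 1.  */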
10989 /* (X | Y) & Y is (X, Y). */
10990 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10991 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10992 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10993 /* (X | Y) & X is (Y, X). */
10994 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10995 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10996 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10997 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10998 /* X & (X | Y) is (Y, X). */
10999 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11000 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11001 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11002 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11003 /* X & (Y | X) is (Y, X). */
11004 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11005 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11006 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11007 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11009 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11010 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11011 && integer_onep (TREE_OPERAND (arg0, 1))
11012 && integer_onep (arg1))
11014 tem = TREE_OPERAND (arg0, 0);
11015 return fold_build2_loc (loc, EQ_EXPR, type,
11016 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11017 build_int_cst (TREE_TYPE (tem), 1)),
11018 build_int_cst (TREE_TYPE (tem), 0));
11020 /* Fold ~X & 1 as (X & 1) == 0. */
11021 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11022 && integer_onep (arg1))
11024 tem = TREE_OPERAND (arg0, 0);
11025 return fold_build2_loc (loc, EQ_EXPR, type,
11026 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11027 build_int_cst (TREE_TYPE (tem), 1)),
11028 build_int_cst (TREE_TYPE (tem), 0));
11031 /* Fold (X ^ Y) & Y as ~X & Y. */
11032 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11033 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11035 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11036 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11037 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11038 fold_convert_loc (loc, type, arg1));
11040 /* Fold (X ^ Y) & X as ~Y & X. */
11041 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11042 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11043 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11045 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11046 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11047 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11048 fold_convert_loc (loc, type, arg1));
11050 /* Fold X & (X ^ Y) as X & ~Y. */
11051 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11052 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11054 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11055 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11056 fold_convert_loc (loc, type, arg0),
11057 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11059 /* Fold X & (Y ^ X) as ~Y & X. */
11060 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11061 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11062 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11064 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11065 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11066 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11067 fold_convert_loc (loc, type, arg0));
11070 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11071 if (t1 != NULL_TREE)
11072 return t1;
11073 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11074 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11075 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11077 unsigned int prec
11078 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11080 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11081 && (~TREE_INT_CST_LOW (arg1)
11082 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11083 return
11084 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
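	  /* E.g. for unsigned char c, ((int) c & 0xff) is just (int) c:
	     the zero-extension has already cleared the high bits.  */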
11087 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11089 This results in more efficient code for machines without a NOR
11090 instruction. Combine will canonicalize to the first form
11091 which will allow use of NOR instructions provided by the
11092 backend if they exist. */
11093 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11094 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11096 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11097 build2 (BIT_IOR_EXPR, type,
11098 fold_convert_loc (loc, type,
11099 TREE_OPERAND (arg0, 0)),
11100 fold_convert_loc (loc, type,
11101 TREE_OPERAND (arg1, 0))));
11104 /* If arg0 is derived from the address of an object or function, we may
11105 be able to fold this expression using the object or function's
11106 alignment. */
11107 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11109 unsigned HOST_WIDE_INT modulus, residue;
11110 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11112 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11113 integer_onep (arg1));
11115 /* This works because modulus is a power of 2. If this weren't the
11116 case, we'd have to replace it by its greatest power-of-2
11117 divisor: modulus & -modulus. */
11118 if (low < modulus)
11119 return build_int_cst (type, residue & low);
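	  /* Sketch: if ARG0 is known to be 8-byte aligned (modulus 8,
	     residue 0), then ARG0 & 7 folds to the constant 0.  */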
11122 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11123 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11124 if the new mask might be further optimized. */
11125 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11126 || TREE_CODE (arg0) == RSHIFT_EXPR)
11127 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11128 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11129 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11130 < TYPE_PRECISION (TREE_TYPE (arg0))
11131 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11132 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11134 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11135 unsigned HOST_WIDE_INT mask
11136 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11137 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11138 tree shift_type = TREE_TYPE (arg0);
11140 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11141 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11142 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11143 && TYPE_PRECISION (TREE_TYPE (arg0))
11144 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11146 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11147 tree arg00 = TREE_OPERAND (arg0, 0);
11148 /* See if more bits can be proven as zero because of
11149 zero extension. */
11150 if (TREE_CODE (arg00) == NOP_EXPR
11151 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11153 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11154 if (TYPE_PRECISION (inner_type)
11155 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11156 && TYPE_PRECISION (inner_type) < prec)
11158 prec = TYPE_PRECISION (inner_type);
11159 /* See if we can shorten the right shift. */
11160 if (shiftc < prec)
11161 shift_type = inner_type;
11164 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11165 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11166 zerobits <<= prec - shiftc;
11167 /* For an arithmetic shift, if the sign bit could be set, zerobits
11168 can actually contain sign bits, so no transformation is
11169 possible unless MASK masks them all away. In that
11170 case the shift needs to be converted into a logical shift. */
11171 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11172 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11174 if ((mask & zerobits) == 0)
11175 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11176 else
11177 zerobits = 0;
11181 /* ((X << 16) & 0xff00) is (X, 0). */
11182 if ((mask & zerobits) == mask)
11183 return omit_one_operand_loc (loc, type,
11184 build_int_cst (type, 0), arg0);
11186 newmask = mask | zerobits;
11187 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11189 unsigned int prec;
11191 /* Only do the transformation if NEWMASK is some integer
11192 mode's mask. */
11193 for (prec = BITS_PER_UNIT;
11194 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11195 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11196 break;
11197 if (prec < HOST_BITS_PER_WIDE_INT
11198 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11200 tree newmaskt;
11202 if (shift_type != TREE_TYPE (arg0))
11204 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11205 fold_convert_loc (loc, shift_type,
11206 TREE_OPERAND (arg0, 0)),
11207 TREE_OPERAND (arg0, 1));
11208 tem = fold_convert_loc (loc, type, tem);
11210 else
11211 tem = op0;
11212 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11213 if (!tree_int_cst_equal (newmaskt, arg1))
11214 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
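	      /* Sketch: (X << 4) & 0xf3 has ZEROBITS == 0x0f, so NEWMASK
		 is 0xff, an 8-bit mode mask, and the expression is
		 rewritten as (X << 4) & 0xff, which is easier to
		 optimize further.  */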
11219 goto associate;
11221 case RDIV_EXPR:
11222 /* Don't touch a floating-point divide by zero unless the mode
11223 of the constant can represent infinity. */
11224 if (TREE_CODE (arg1) == REAL_CST
11225 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11226 && real_zerop (arg1))
11227 return NULL_TREE;
11229 /* Optimize A / A to 1.0 if we don't care about
11230 NaNs or Infinities. Skip the transformation
11231 for non-real operands. */
11232 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11233 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11234 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11235 && operand_equal_p (arg0, arg1, 0))
11237 tree r = build_real (TREE_TYPE (arg0), dconst1);
11239 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11242 /* The complex version of the above A / A optimization. */
11243 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11244 && operand_equal_p (arg0, arg1, 0))
11246 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11247 if (! HONOR_NANS (TYPE_MODE (elem_type))
11248 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11250 tree r = build_real (elem_type, dconst1);
11251 /* omit_two_operands will call fold_convert for us. */
11252 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11256 /* (-A) / (-B) -> A / B */
11257 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11258 return fold_build2_loc (loc, RDIV_EXPR, type,
11259 TREE_OPERAND (arg0, 0),
11260 negate_expr (arg1));
11261 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11262 return fold_build2_loc (loc, RDIV_EXPR, type,
11263 negate_expr (arg0),
11264 TREE_OPERAND (arg1, 0));
11266 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11267 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11268 && real_onep (arg1))
11269 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11271 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11272 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11273 && real_minus_onep (arg1))
11274 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11275 negate_expr (arg0)));
11277 /* If ARG1 is a constant, we can convert this to a multiply by the
11278 reciprocal. This does not have the same rounding properties,
11279 so only do this if -freciprocal-math. We can actually
11280 always safely do it if ARG1 is a power of two, but it's hard to
11281 tell if it is or not in a portable manner. */
11282 if (TREE_CODE (arg1) == REAL_CST)
11284 if (flag_reciprocal_math
11285 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11286 arg1)))
11287 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11288 /* Find the reciprocal if optimizing and the result is exact. */
11289 if (optimize)
11291 REAL_VALUE_TYPE r;
11292 r = TREE_REAL_CST (arg1);
11293 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11295 tem = build_real (type, r);
11296 return fold_build2_loc (loc, MULT_EXPR, type,
11297 fold_convert_loc (loc, type, arg0), tem);
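	  /* E.g. x / 2.0 becomes x * 0.5 whenever optimizing, because the
	     inverse of 2.0 is exact; inexact cases such as x / 3.0 become
	     x * (1.0/3.0) only under -freciprocal-math.  */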
11301 /* Convert A/B/C to A/(B*C). */
11302 if (flag_reciprocal_math
11303 && TREE_CODE (arg0) == RDIV_EXPR)
11304 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11305 fold_build2_loc (loc, MULT_EXPR, type,
11306 TREE_OPERAND (arg0, 1), arg1));
11308 /* Convert A/(B/C) to (A/B)*C. */
11309 if (flag_reciprocal_math
11310 && TREE_CODE (arg1) == RDIV_EXPR)
11311 return fold_build2_loc (loc, MULT_EXPR, type,
11312 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11313 TREE_OPERAND (arg1, 0)),
11314 TREE_OPERAND (arg1, 1));
11316 /* Convert C1/(X*C2) into (C1/C2)/X. */
11317 if (flag_reciprocal_math
11318 && TREE_CODE (arg1) == MULT_EXPR
11319 && TREE_CODE (arg0) == REAL_CST
11320 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11322 tree tem = const_binop (RDIV_EXPR, arg0,
11323 TREE_OPERAND (arg1, 1));
11324 if (tem)
11325 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11326 TREE_OPERAND (arg1, 0));
11329 if (flag_unsafe_math_optimizations)
11331 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11332 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11334 /* Optimize sin(x)/cos(x) as tan(x). */
11335 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11336 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11337 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11338 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11339 CALL_EXPR_ARG (arg1, 0), 0))
11341 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11343 if (tanfn != NULL_TREE)
11344 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11347 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11348 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11349 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11350 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11351 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11352 CALL_EXPR_ARG (arg1, 0), 0))
11354 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11356 if (tanfn != NULL_TREE)
11358 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11359 CALL_EXPR_ARG (arg0, 0));
11360 return fold_build2_loc (loc, RDIV_EXPR, type,
11361 build_real (type, dconst1), tmp);
11365 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11366 NaNs or Infinities. */
11367 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11368 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11369 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11371 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11372 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11374 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11375 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11376 && operand_equal_p (arg00, arg01, 0))
11378 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11380 if (cosfn != NULL_TREE)
11381 return build_call_expr_loc (loc, cosfn, 1, arg00);
11385 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11386 NaNs or Infinities. */
11387 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11388 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11389 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11391 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11392 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11394 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11395 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11396 && operand_equal_p (arg00, arg01, 0))
11398 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11400 if (cosfn != NULL_TREE)
11402 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11403 return fold_build2_loc (loc, RDIV_EXPR, type,
11404 build_real (type, dconst1),
11405 tmp);
11410 /* Optimize pow(x,c)/x as pow(x,c-1). */
11411 if (fcode0 == BUILT_IN_POW
11412 || fcode0 == BUILT_IN_POWF
11413 || fcode0 == BUILT_IN_POWL)
11415 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11416 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11417 if (TREE_CODE (arg01) == REAL_CST
11418 && !TREE_OVERFLOW (arg01)
11419 && operand_equal_p (arg1, arg00, 0))
11421 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11422 REAL_VALUE_TYPE c;
11423 tree arg;
11425 c = TREE_REAL_CST (arg01);
11426 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11427 arg = build_real (type, c);
11428 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11432 /* Optimize a/root(b/c) into a*root(c/b). */
11433 if (BUILTIN_ROOT_P (fcode1))
11435 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11437 if (TREE_CODE (rootarg) == RDIV_EXPR)
11439 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11440 tree b = TREE_OPERAND (rootarg, 0);
11441 tree c = TREE_OPERAND (rootarg, 1);
11443 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11445 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11446 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11450 /* Optimize x/expN(y) into x*expN(-y). */
11451 if (BUILTIN_EXPONENT_P (fcode1))
11453 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11454 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11455 arg1 = build_call_expr_loc (loc,
11456 expfn, 1,
11457 fold_convert_loc (loc, type, arg));
11458 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11461 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11462 if (fcode1 == BUILT_IN_POW
11463 || fcode1 == BUILT_IN_POWF
11464 || fcode1 == BUILT_IN_POWL)
11466 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11467 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11468 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11469 tree neg11 = fold_convert_loc (loc, type,
11470 negate_expr (arg11));
11471 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11472 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11475 return NULL_TREE;
11477 case TRUNC_DIV_EXPR:
11478 case FLOOR_DIV_EXPR:
11479 /* Simplify A / (B << N) where A and B are positive and B is
11480 a power of 2, to A >> (N + log2(B)). */
11481 strict_overflow_p = false;
11482 if (TREE_CODE (arg1) == LSHIFT_EXPR
11483 && (TYPE_UNSIGNED (type)
11484 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11486 tree sval = TREE_OPERAND (arg1, 0);
11487 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11489 tree sh_cnt = TREE_OPERAND (arg1, 1);
11490 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11492 if (strict_overflow_p)
11493 fold_overflow_warning (("assuming signed overflow does not "
11494 "occur when simplifying A / (B << N)"),
11495 WARN_STRICT_OVERFLOW_MISC);
11497 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11498 sh_cnt, build_int_cst (NULL_TREE, pow2));
11499 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11500 fold_convert_loc (loc, type, arg0), sh_cnt);
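	      /* E.g. for unsigned A, A / (4 << n) becomes A >> (n + 2),
		 since log2 (4) == 2; the shift is cheaper than the
		 divide.  */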
11504 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11505 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11506 if (INTEGRAL_TYPE_P (type)
11507 && TYPE_UNSIGNED (type)
11508 && code == FLOOR_DIV_EXPR)
11509 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11511 /* Fall thru */
11513 case ROUND_DIV_EXPR:
11514 case CEIL_DIV_EXPR:
11515 case EXACT_DIV_EXPR:
11516 if (integer_onep (arg1))
11517 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11518 if (integer_zerop (arg1))
11519 return NULL_TREE;
11520 /* X / -1 is -X. */
11521 if (!TYPE_UNSIGNED (type)
11522 && TREE_CODE (arg1) == INTEGER_CST
11523 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11524 && TREE_INT_CST_HIGH (arg1) == -1)
11525 return fold_convert_loc (loc, type, negate_expr (arg0));
11527 /* Convert -A / -B to A / B when the type is signed and overflow is
11528 undefined. */
11529 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11530 && TREE_CODE (arg0) == NEGATE_EXPR
11531 && negate_expr_p (arg1))
11533 if (INTEGRAL_TYPE_P (type))
11534 fold_overflow_warning (("assuming signed overflow does not occur "
11535 "when distributing negation across "
11536 "division"),
11537 WARN_STRICT_OVERFLOW_MISC);
11538 return fold_build2_loc (loc, code, type,
11539 fold_convert_loc (loc, type,
11540 TREE_OPERAND (arg0, 0)),
11541 fold_convert_loc (loc, type,
11542 negate_expr (arg1)));
11544 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11545 && TREE_CODE (arg1) == NEGATE_EXPR
11546 && negate_expr_p (arg0))
11548 if (INTEGRAL_TYPE_P (type))
11549 fold_overflow_warning (("assuming signed overflow does not occur "
11550 "when distributing negation across "
11551 "division"),
11552 WARN_STRICT_OVERFLOW_MISC);
11553 return fold_build2_loc (loc, code, type,
11554 fold_convert_loc (loc, type,
11555 negate_expr (arg0)),
11556 fold_convert_loc (loc, type,
11557 TREE_OPERAND (arg1, 0)));
11560 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11561 operation, EXACT_DIV_EXPR.
11563 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11564 At one time others generated faster code, but it's not clear if they do
11565 after the last round of changes to the DIV code in expmed.c. */
11566 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11567 && multiple_of_p (type, arg0, arg1))
11568 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11570 strict_overflow_p = false;
11571 if (TREE_CODE (arg1) == INTEGER_CST
11572 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11573 &strict_overflow_p)))
11575 if (strict_overflow_p)
11576 fold_overflow_warning (("assuming signed overflow does not occur "
11577 "when simplifying division"),
11578 WARN_STRICT_OVERFLOW_MISC);
11579 return fold_convert_loc (loc, type, tem);
11582 return NULL_TREE;
11584 case CEIL_MOD_EXPR:
11585 case FLOOR_MOD_EXPR:
11586 case ROUND_MOD_EXPR:
11587 case TRUNC_MOD_EXPR:
11588 /* X % 1 is always zero, but be sure to preserve any side
11589 effects in X. */
11590 if (integer_onep (arg1))
11591 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11593 /* For X % 0, return X % 0 unchanged so that we get the
11594 proper warnings and errors. */
11595 if (integer_zerop (arg1))
11596 return NULL_TREE;
11598 /* 0 % X is always zero, but be sure to preserve any side
11599 effects in X. Place this after checking for X == 0. */
11600 if (integer_zerop (arg0))
11601 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11603 /* X % -1 is zero. */
11604 if (!TYPE_UNSIGNED (type)
11605 && TREE_CODE (arg1) == INTEGER_CST
11606 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11607 && TREE_INT_CST_HIGH (arg1) == -1)
11608 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11610 /* X % -C is the same as X % C. */
11611 if (code == TRUNC_MOD_EXPR
11612 && !TYPE_UNSIGNED (type)
11613 && TREE_CODE (arg1) == INTEGER_CST
11614 && !TREE_OVERFLOW (arg1)
11615 && TREE_INT_CST_HIGH (arg1) < 0
11616 && !TYPE_OVERFLOW_TRAPS (type)
11617 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11618 && !sign_bit_p (arg1, arg1))
11619 return fold_build2_loc (loc, code, type,
11620 fold_convert_loc (loc, type, arg0),
11621 fold_convert_loc (loc, type,
11622 negate_expr (arg1)));
11624 /* X % -Y is the same as X % Y. */
11625 if (code == TRUNC_MOD_EXPR
11626 && !TYPE_UNSIGNED (type)
11627 && TREE_CODE (arg1) == NEGATE_EXPR
11628 && !TYPE_OVERFLOW_TRAPS (type))
11629 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11630 fold_convert_loc (loc, type,
11631 TREE_OPERAND (arg1, 0)));
11633 strict_overflow_p = false;
11634 if (TREE_CODE (arg1) == INTEGER_CST
11635 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11636 &strict_overflow_p)))
11638 if (strict_overflow_p)
11639 fold_overflow_warning (("assuming signed overflow does not occur "
11640 "when simplifying modulus"),
11641 WARN_STRICT_OVERFLOW_MISC);
11642 return fold_convert_loc (loc, type, tem);
11645 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11646 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11647 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11648 && (TYPE_UNSIGNED (type)
11649 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11651 tree c = arg1;
11652 /* Also optimize A % (C << N) where C is a power of 2,
11653 to A & ((C << N) - 1). */
11654 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11655 c = TREE_OPERAND (arg1, 0);
11657 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11659 tree mask
11660 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11661 build_int_cst (TREE_TYPE (arg1), 1));
11662 if (strict_overflow_p)
11663 fold_overflow_warning (("assuming signed overflow does not "
11664 "occur when simplifying "
11665 "X % (power of two)"),
11666 WARN_STRICT_OVERFLOW_MISC);
11667 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11668 fold_convert_loc (loc, type, arg0),
11669 fold_convert_loc (loc, type, mask));
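	      /* E.g. unsigned x % 8 becomes x & 7, and x % (2 << n)
		 becomes x & ((2 << n) - 1).  */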
11673 return NULL_TREE;
11675 case LROTATE_EXPR:
11676 case RROTATE_EXPR:
11677 if (integer_all_onesp (arg0))
11678 return omit_one_operand_loc (loc, type, arg0, arg1);
11679 goto shift;
11681 case RSHIFT_EXPR:
11682 /* Optimize -1 >> x for arithmetic right shifts. */
11683 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11684 && tree_expr_nonnegative_p (arg1))
11685 return omit_one_operand_loc (loc, type, arg0, arg1);
11686 /* ... fall through ... */
11688 case LSHIFT_EXPR:
11689 shift:
11690 if (integer_zerop (arg1))
11691 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11692 if (integer_zerop (arg0))
11693 return omit_one_operand_loc (loc, type, arg0, arg1);
11695 /* Since a negative shift count is not well-defined,
11696 don't try to compute it in the compiler. */
11697 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11698 return NULL_TREE;
11700 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11701 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11702 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11703 && host_integerp (TREE_OPERAND (arg0, 1), false)
11704 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11706 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11707 + TREE_INT_CST_LOW (arg1));
11709 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11710 being well defined. */
11711 if (low >= TYPE_PRECISION (type))
11713 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11714 low = low % TYPE_PRECISION (type);
11715 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11716 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11717 TREE_OPERAND (arg0, 0));
11718 else
11719 low = TYPE_PRECISION (type) - 1;
11722 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11723 build_int_cst (type, low));
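	  /* E.g. (x << 3) << 5 becomes x << 8; if the combined count
	     reaches the precision, shifts fold to 0 and rotates reduce
	     the count modulo the precision, as handled above.  */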
11726 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11727 into x & ((unsigned)-1 >> c) for unsigned types. */
11728 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11729 || (TYPE_UNSIGNED (type)
11730 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11731 && host_integerp (arg1, false)
11732 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11733 && host_integerp (TREE_OPERAND (arg0, 1), false)
11734 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11736 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11737 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11738 tree lshift;
11739 tree arg00;
11741 if (low0 == low1)
11743 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11745 lshift = build_int_cst (type, -1);
11746 lshift = int_const_binop (code, lshift, arg1, 0);
11748 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11752 /* Rewrite an LROTATE_EXPR by a constant into an
11753 RROTATE_EXPR by a new constant. */
11754 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11756 tree tem = build_int_cst (TREE_TYPE (arg1),
11757 TYPE_PRECISION (type));
11758 tem = const_binop (MINUS_EXPR, tem, arg1);
11759 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
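	  /* E.g. a left-rotate of a 32-bit value by 8 becomes a
	     right-rotate by 24.  */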
11762 /* If we have a rotate of a bit operation with the rotate count and
11763 the second operand of the bit operation both constant,
11764 permute the two operations. */
11765 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11766 && (TREE_CODE (arg0) == BIT_AND_EXPR
11767 || TREE_CODE (arg0) == BIT_IOR_EXPR
11768 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11769 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11770 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11771 fold_build2_loc (loc, code, type,
11772 TREE_OPERAND (arg0, 0), arg1),
11773 fold_build2_loc (loc, code, type,
11774 TREE_OPERAND (arg0, 1), arg1));
11776 /* Two consecutive rotates adding up to the precision of the
11777 type can be ignored. */
11778 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11779 && TREE_CODE (arg0) == RROTATE_EXPR
11780 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11781 && TREE_INT_CST_HIGH (arg1) == 0
11782 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11783 && ((TREE_INT_CST_LOW (arg1)
11784 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11785 == (unsigned int) TYPE_PRECISION (type)))
11786 return TREE_OPERAND (arg0, 0);
11788 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11789 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11790 if the latter can be further optimized. */
11791 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11792 && TREE_CODE (arg0) == BIT_AND_EXPR
11793 && TREE_CODE (arg1) == INTEGER_CST
11794 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11796 tree mask = fold_build2_loc (loc, code, type,
11797 fold_convert_loc (loc, type,
11798 TREE_OPERAND (arg0, 1)),
11799 arg1);
11800 tree shift = fold_build2_loc (loc, code, type,
11801 fold_convert_loc (loc, type,
11802 TREE_OPERAND (arg0, 0)),
11803 arg1);
11804 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11805 if (tem)
11806 return tem;
11809 return NULL_TREE;
11811 case MIN_EXPR:
11812 if (operand_equal_p (arg0, arg1, 0))
11813 return omit_one_operand_loc (loc, type, arg0, arg1);
11814 if (INTEGRAL_TYPE_P (type)
11815 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11816 return omit_one_operand_loc (loc, type, arg1, arg0);
11817 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11818 if (tem)
11819 return tem;
11820 goto associate;
11822 case MAX_EXPR:
11823 if (operand_equal_p (arg0, arg1, 0))
11824 return omit_one_operand_loc (loc, type, arg0, arg1);
11825 if (INTEGRAL_TYPE_P (type)
11826 && TYPE_MAX_VALUE (type)
11827 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11828 return omit_one_operand_loc (loc, type, arg1, arg0);
11829 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11830 if (tem)
11831 return tem;
11832 goto associate;
11834 case TRUTH_ANDIF_EXPR:
11835 /* Note that the operands of this must be ints
11836 and their values must be 0 or 1.
11837 ("true" is a fixed value perhaps depending on the language.) */
11838 /* If first arg is constant zero, return it. */
11839 if (integer_zerop (arg0))
11840 return fold_convert_loc (loc, type, arg0);
11841 case TRUTH_AND_EXPR:
11842 /* If either arg is constant true, drop it. */
11843 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11844 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11845 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11846 /* Preserve sequence points. */
11847 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11848 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11849 /* If second arg is constant zero, result is zero, but first arg
11850 must be evaluated. */
11851 if (integer_zerop (arg1))
11852 return omit_one_operand_loc (loc, type, arg1, arg0);
11853 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11854 case will be handled here. */
11855 if (integer_zerop (arg0))
11856 return omit_one_operand_loc (loc, type, arg0, arg1);
11858 /* !X && X is always false. */
11859 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11860 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11861 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11862 /* X && !X is always false. */
11863 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11864 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11865 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11867 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11868 means A >= Y && A != MAX, but in this case we know that
11869 A < X <= MAX. */
11871 if (!TREE_SIDE_EFFECTS (arg0)
11872 && !TREE_SIDE_EFFECTS (arg1))
11874 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11875 if (tem && !operand_equal_p (tem, arg0, 0))
11876 return fold_build2_loc (loc, code, type, tem, arg1);
11878 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11879 if (tem && !operand_equal_p (tem, arg1, 0))
11880 return fold_build2_loc (loc, code, type, arg0, tem);
11883 truth_andor:
11884 /* We only do these simplifications if we are optimizing. */
11885 if (!optimize)
11886 return NULL_TREE;
11888 /* Check for things like (A || B) && (A || C). We can convert this
11889 to A || (B && C). Note that either operator can be any of the four
11890 truth and/or operations and the transformation will still be
11891 valid. Also note that we only care about order for the
11892 ANDIF and ORIF operators. If B contains side effects, this
11893 might change the truth-value of A. */
11894 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11895 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11896 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11897 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11898 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11899 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11901 tree a00 = TREE_OPERAND (arg0, 0);
11902 tree a01 = TREE_OPERAND (arg0, 1);
11903 tree a10 = TREE_OPERAND (arg1, 0);
11904 tree a11 = TREE_OPERAND (arg1, 1);
11905 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11906 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11907 && (code == TRUTH_AND_EXPR
11908 || code == TRUTH_OR_EXPR));
11910 if (operand_equal_p (a00, a10, 0))
11911 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11912 fold_build2_loc (loc, code, type, a01, a11));
11913 else if (commutative && operand_equal_p (a00, a11, 0))
11914 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11915 fold_build2_loc (loc, code, type, a01, a10));
11916 else if (commutative && operand_equal_p (a01, a10, 0))
11917 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11918 fold_build2_loc (loc, code, type, a00, a11));
11920 /* This case is tricky because we must either have commutative
11921 operators or else A10 must not have side-effects. */
11923 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11924 && operand_equal_p (a01, a11, 0))
11925 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11926 fold_build2_loc (loc, code, type, a00, a10),
11927 a01);
11930 /* See if we can build a range comparison. */
11931 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
11932 return tem;
11934 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
11935 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
11937 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
11938 if (tem)
11939 return fold_build2_loc (loc, code, type, tem, arg1);
11942 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
11943 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
11945 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
11946 if (tem)
11947 return fold_build2_loc (loc, code, type, arg0, tem);
11950 /* Check for the possibility of merging component references. If our
11951 lhs is another similar operation, try to merge its rhs with our
11952 rhs. Then try to merge our lhs and rhs. */
11953 if (TREE_CODE (arg0) == code
11954 && 0 != (tem = fold_truthop (loc, code, type,
11955 TREE_OPERAND (arg0, 1), arg1)))
11956 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
11958 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
11959 return tem;
11961 return NULL_TREE;
11963 case TRUTH_ORIF_EXPR:
11964 /* Note that the operands of this must be ints
11965 and their values must be 0 or true.
11966 ("true" is a fixed value perhaps depending on the language.) */
11967 /* If first arg is constant true, return it. */
11968 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11969 return fold_convert_loc (loc, type, arg0);
11970 case TRUTH_OR_EXPR:
11971 /* If either arg is constant zero, drop it. */
11972 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11973 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11974 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11975 /* Preserve sequence points. */
11976 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11977 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11978 /* If second arg is constant true, result is true, but we must
11979 evaluate first arg. */
11980 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11981 return omit_one_operand_loc (loc, type, arg1, arg0);
11982 /* Likewise for first arg, but note this only occurs here for
11983 TRUTH_OR_EXPR. */
11984 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11985 return omit_one_operand_loc (loc, type, arg0, arg1);
11987 /* !X || X is always true. */
11988 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11989 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11990 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11991 /* X || !X is always true. */
11992 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11993 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11994 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11996 goto truth_andor;
11998 case TRUTH_XOR_EXPR:
11999 /* If the second arg is constant zero, drop it. */
12000 if (integer_zerop (arg1))
12001 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12002 /* If the second arg is constant true, this is a logical inversion. */
12003 if (integer_onep (arg1))
12005 /* Only call invert_truthvalue if operand is a truth value. */
12006 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12007 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12008 else
12009 tem = invert_truthvalue_loc (loc, arg0);
12010 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12012 /* Identical arguments cancel to zero. */
12013 if (operand_equal_p (arg0, arg1, 0))
12014 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12016 /* !X ^ X is always true. */
12017 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12018 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12019 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12021 /* X ^ !X is always true. */
12022 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12023 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12024 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12026 return NULL_TREE;
12028 case EQ_EXPR:
12029 case NE_EXPR:
12030 tem = fold_comparison (loc, code, type, op0, op1);
12031 if (tem != NULL_TREE)
12032 return tem;
12034 /* bool_var != 0 becomes bool_var. */
12035 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12036 && code == NE_EXPR)
12037 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12039 /* bool_var == 1 becomes bool_var. */
12040 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12041 && code == EQ_EXPR)
12042 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12044 /* bool_var != 1 becomes !bool_var. */
12045 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12046 && code == NE_EXPR)
12047 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12048 fold_convert_loc (loc, type, arg0));
12050 /* bool_var == 0 becomes !bool_var. */
12051 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12052 && code == EQ_EXPR)
12053 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12054 fold_convert_loc (loc, type, arg0));
12056 /* !exp != 0 becomes !exp. */
12057 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12058 && code == NE_EXPR)
12059 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12061 /* If this is an equality comparison of the address of two non-weak,
12062 unaliased symbols neither of which are extern (since we do not
12063 have access to attributes for externs), then we know the result. */
12064 if (TREE_CODE (arg0) == ADDR_EXPR
12065 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12066 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12067 && ! lookup_attribute ("alias",
12068 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12069 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12070 && TREE_CODE (arg1) == ADDR_EXPR
12071 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12072 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12073 && ! lookup_attribute ("alias",
12074 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12075 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12077 /* We know that we're looking at the address of two
12078 non-weak, unaliased, static _DECL nodes.
12080 It is both wasteful and incorrect to call operand_equal_p
12081 to compare the two ADDR_EXPR nodes. It is wasteful in that
12082 all we need to do is test pointer equality for the arguments
12083 to the two ADDR_EXPR nodes. It is incorrect to use
12084 operand_equal_p as that function is NOT equivalent to a
12085 C equality test. It can in fact return false for two
12086 objects which would test as equal using the C equality
12087 operator. */
12088 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12089 return constant_boolean_node (equal
12090 ? code == EQ_EXPR : code != EQ_EXPR,
12091 type);
12094 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12095 a MINUS_EXPR of a constant, we can convert it into a comparison with
12096 a revised constant as long as no overflow occurs. */
12097 if (TREE_CODE (arg1) == INTEGER_CST
12098 && (TREE_CODE (arg0) == PLUS_EXPR
12099 || TREE_CODE (arg0) == MINUS_EXPR)
12100 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12101 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12102 ? MINUS_EXPR : PLUS_EXPR,
12103 fold_convert_loc (loc, TREE_TYPE (arg0),
12104 arg1),
12105 TREE_OPERAND (arg0, 1)))
12106 && !TREE_OVERFLOW (tem))
12107 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12109 /* Similarly for a NEGATE_EXPR. */
12110 if (TREE_CODE (arg0) == NEGATE_EXPR
12111 && TREE_CODE (arg1) == INTEGER_CST
12112 && 0 != (tem = negate_expr (arg1))
12113 && TREE_CODE (tem) == INTEGER_CST
12114 && !TREE_OVERFLOW (tem))
12115 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12117 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12118 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12119 && TREE_CODE (arg1) == INTEGER_CST
12120 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12121 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12122 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12123 fold_convert_loc (loc,
12124 TREE_TYPE (arg0),
12125 arg1),
12126 TREE_OPERAND (arg0, 1)));
12128 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12129 if ((TREE_CODE (arg0) == PLUS_EXPR
12130 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12131 || TREE_CODE (arg0) == MINUS_EXPR)
12132 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12133 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12134 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12136 tree val = TREE_OPERAND (arg0, 1);
12137 return omit_two_operands_loc (loc, type,
12138 fold_build2_loc (loc, code, type,
12139 val,
12140 build_int_cst (TREE_TYPE (val),
12141 0)),
12142 TREE_OPERAND (arg0, 0), arg1);
12145 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12146 if (TREE_CODE (arg0) == MINUS_EXPR
12147 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12148 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12149 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12151 return omit_two_operands_loc (loc, type,
12152 code == NE_EXPR
12153 ? boolean_true_node : boolean_false_node,
12154 TREE_OPERAND (arg0, 1), arg1);
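	  /* E.g. 5 - x == x would require 2*x == 5, which no integer
	     satisfies, even modulo 2^N, since the left side is even and
	     the right side odd; so the comparison folds to a constant
	     while preserving any side effects of X.  */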
12157 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12158 for !=. Don't do this for ordered comparisons due to overflow. */
12159 if (TREE_CODE (arg0) == MINUS_EXPR
12160 && integer_zerop (arg1))
12161 return fold_build2_loc (loc, code, type,
12162 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12164 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12165 if (TREE_CODE (arg0) == ABS_EXPR
12166 && (integer_zerop (arg1) || real_zerop (arg1)))
12167 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12169 /* If this is an EQ or NE comparison with zero and ARG0 is
12170 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12171 two operations, but the latter can be done in one less insn
12172 on machines that have only two-operand insns or on which a
12173 constant cannot be the first operand. */
12174 if (TREE_CODE (arg0) == BIT_AND_EXPR
12175 && integer_zerop (arg1))
12177 tree arg00 = TREE_OPERAND (arg0, 0);
12178 tree arg01 = TREE_OPERAND (arg0, 1);
12179 if (TREE_CODE (arg00) == LSHIFT_EXPR
12180 && integer_onep (TREE_OPERAND (arg00, 0)))
12182 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12183 arg01, TREE_OPERAND (arg00, 1));
12184 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12185 build_int_cst (TREE_TYPE (arg0), 1));
12186 return fold_build2_loc (loc, code, type,
12187 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12188 arg1);
12190 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12191 && integer_onep (TREE_OPERAND (arg01, 0)))
12193 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12194 arg00, TREE_OPERAND (arg01, 1));
12195 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12196 build_int_cst (TREE_TYPE (arg0), 1));
12197 return fold_build2_loc (loc, code, type,
12198 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12199 arg1);
12203 /* If this is an NE or EQ comparison of zero against the result of a
12204 signed MOD operation whose second operand is a power of 2, make
12205 the MOD operation unsigned since it is simpler and equivalent. */
12206 if (integer_zerop (arg1)
12207 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12208 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12209 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12210 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12211 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12212 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12214 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12215 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12216 fold_convert_loc (loc, newtype,
12217 TREE_OPERAND (arg0, 0)),
12218 fold_convert_loc (loc, newtype,
12219 TREE_OPERAND (arg0, 1)));
12221 return fold_build2_loc (loc, code, type, newmod,
12222 fold_convert_loc (loc, newtype, arg1));
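/* Illustrative sketch (hypothetical signed int x, two's complement
   assumed): x % 8 == 0 folds to (unsigned int) x % 8U == 0; both are
   zero exactly when the low three bits of x are zero.  */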
12225 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12226 C1 is a valid shift constant, and C2 is a power of two, i.e.
12227 a single bit. */
12228 if (TREE_CODE (arg0) == BIT_AND_EXPR
12229 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12230 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12231 == INTEGER_CST
12232 && integer_pow2p (TREE_OPERAND (arg0, 1))
12233 && integer_zerop (arg1))
12235 tree itype = TREE_TYPE (arg0);
12236 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12237 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12239 /* Check for a valid shift count. */
12240 if (TREE_INT_CST_HIGH (arg001) == 0
12241 && TREE_INT_CST_LOW (arg001) < prec)
12243 tree arg01 = TREE_OPERAND (arg0, 1);
12244 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12245 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12246 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12247 can be rewritten as (X & (C2 << C1)) != 0. */
12248 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12250 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12251 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12252 return fold_build2_loc (loc, code, type, tem, arg1);
12254 /* Otherwise, for signed (arithmetic) shifts,
12255 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12256 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12257 else if (!TYPE_UNSIGNED (itype))
12258 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12259 arg000, build_int_cst (itype, 0));
12260 /* Otherwise, for unsigned (logical) shifts,
12261 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12262 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12263 else
12264 return omit_one_operand_loc (loc, type,
12265 code == EQ_EXPR ? integer_one_node
12266 : integer_zero_node,
12267 arg000);
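/* Illustrative sketch (hypothetical 32-bit int x):
   ((x >> 3) & 4) != 0 folds to (x & 32) != 0, since 4 << 3 == 32
   does not overflow the precision.  */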
12271 /* If this is an NE comparison of zero with an AND of one, remove the
12272 comparison since the AND will give the correct value. */
12273 if (code == NE_EXPR
12274 && integer_zerop (arg1)
12275 && TREE_CODE (arg0) == BIT_AND_EXPR
12276 && integer_onep (TREE_OPERAND (arg0, 1)))
12277 return fold_convert_loc (loc, type, arg0);
12279 /* If we have (A & C) == C where C is a power of 2, convert this into
12280 (A & C) != 0. Similarly for NE_EXPR. */
12281 if (TREE_CODE (arg0) == BIT_AND_EXPR
12282 && integer_pow2p (TREE_OPERAND (arg0, 1))
12283 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12284 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12285 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12286 integer_zero_node));
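/* Illustrative sketch (hypothetical operand a): (a & 8) == 8 folds
   to (a & 8) != 0, 8 being a single bit.  */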
12288 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12289 bit, then fold the expression into A < 0 or A >= 0. */
12290 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12291 if (tem)
12292 return tem;
12294 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12295 Similarly for NE_EXPR. */
12296 if (TREE_CODE (arg0) == BIT_AND_EXPR
12297 && TREE_CODE (arg1) == INTEGER_CST
12298 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12300 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12301 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12302 TREE_OPERAND (arg0, 1));
12303 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12304 arg1, notc);
12305 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12306 if (integer_nonzerop (dandnotc))
12307 return omit_one_operand_loc (loc, type, rslt, arg0);
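/* Illustrative sketch (hypothetical operand a): (a & 12) == 1 folds
   to false, because 1 & ~12 == 1 is nonzero: bit 0 can never be set
   in a & 12.  */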
12310 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12311 Similarly for NE_EXPR. */
12312 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12313 && TREE_CODE (arg1) == INTEGER_CST
12314 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12316 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12317 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12318 TREE_OPERAND (arg0, 1), notd);
12319 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12320 if (integer_nonzerop (candnotd))
12321 return omit_one_operand_loc (loc, type, rslt, arg0);
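/* Illustrative sketch (hypothetical operand a): (a | 4) == 3 folds
   to false, because 4 & ~3 == 4 is nonzero: bit 2 is always set
   in a | 4.  */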
12324 /* If this is a comparison of a field, we may be able to simplify it. */
12325 if ((TREE_CODE (arg0) == COMPONENT_REF
12326 || TREE_CODE (arg0) == BIT_FIELD_REF)
12327 /* Handle the constant case even without -O
12328 to make sure the warnings are given. */
12329 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12331 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12332 if (t1)
12333 return t1;
12336 /* Optimize comparisons of strlen vs zero to a compare of the
12337 first character of the string vs zero. To wit,
12338 strlen(ptr) == 0 => *ptr == 0
12339 strlen(ptr) != 0 => *ptr != 0
12340 Other cases should reduce to one of these two (or a constant)
12341 due to the return value of strlen being unsigned. */
12342 if (TREE_CODE (arg0) == CALL_EXPR
12343 && integer_zerop (arg1))
12345 tree fndecl = get_callee_fndecl (arg0);
12347 if (fndecl
12348 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12349 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12350 && call_expr_nargs (arg0) == 1
12351 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12353 tree iref = build_fold_indirect_ref_loc (loc,
12354 CALL_EXPR_ARG (arg0, 0));
12355 return fold_build2_loc (loc, code, type, iref,
12356 build_int_cst (TREE_TYPE (iref), 0));
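/* Illustrative sketch (hypothetical char *s): strlen (s) == 0 folds
   to *s == 0, replacing a library call with a single byte load.  */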
12360 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12361 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12362 if (TREE_CODE (arg0) == RSHIFT_EXPR
12363 && integer_zerop (arg1)
12364 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12366 tree arg00 = TREE_OPERAND (arg0, 0);
12367 tree arg01 = TREE_OPERAND (arg0, 1);
12368 tree itype = TREE_TYPE (arg00);
12369 if (TREE_INT_CST_HIGH (arg01) == 0
12370 && TREE_INT_CST_LOW (arg01)
12371 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12373 if (TYPE_UNSIGNED (itype))
12375 itype = signed_type_for (itype);
12376 arg00 = fold_convert_loc (loc, itype, arg00);
12378 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12379 type, arg00, build_int_cst (itype, 0));
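/* Illustrative sketch (hypothetical 32-bit int x): (x >> 31) != 0
   folds to x < 0; an unsigned x is first converted to the
   corresponding signed type.  */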
12383 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12384 if (integer_zerop (arg1)
12385 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12386 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12387 TREE_OPERAND (arg0, 1));
12389 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12390 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12391 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12392 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12393 build_int_cst (TREE_TYPE (arg1), 0));
12394 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12395 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12396 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12397 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12398 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12399 build_int_cst (TREE_TYPE (arg1), 0));
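/* Illustrative sketch (hypothetical operands x and y): the two folds
   above turn (x ^ y) == y into x == 0 and (x ^ y) == x into y == 0.  */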
12401 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12402 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12403 && TREE_CODE (arg1) == INTEGER_CST
12404 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12405 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12406 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12407 TREE_OPERAND (arg0, 1), arg1));
12409 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12410 (X & C) == 0 when C is a single bit. */
12411 if (TREE_CODE (arg0) == BIT_AND_EXPR
12412 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12413 && integer_zerop (arg1)
12414 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12416 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12417 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12418 TREE_OPERAND (arg0, 1));
12419 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12420 type, tem, arg1);
12423 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12424 constant C is a power of two, i.e. a single bit. */
12425 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12426 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12427 && integer_zerop (arg1)
12428 && integer_pow2p (TREE_OPERAND (arg0, 1))
12429 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12430 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12432 tree arg00 = TREE_OPERAND (arg0, 0);
12433 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12434 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12437 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12438 when C is a power of two, i.e. a single bit. */
12439 if (TREE_CODE (arg0) == BIT_AND_EXPR
12440 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12441 && integer_zerop (arg1)
12442 && integer_pow2p (TREE_OPERAND (arg0, 1))
12443 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12444 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12446 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12447 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12448 arg000, TREE_OPERAND (arg0, 1));
12449 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12450 tem, build_int_cst (TREE_TYPE (tem), 0));
12453 if (integer_zerop (arg1)
12454 && tree_expr_nonzero_p (arg0))
12456 tree res = constant_boolean_node (code == NE_EXPR, type);
12457 return omit_one_operand_loc (loc, type, res, arg0);
12460 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12461 if (TREE_CODE (arg0) == NEGATE_EXPR
12462 && TREE_CODE (arg1) == NEGATE_EXPR)
12463 return fold_build2_loc (loc, code, type,
12464 TREE_OPERAND (arg0, 0),
12465 TREE_OPERAND (arg1, 0));
12467 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12468 if (TREE_CODE (arg0) == BIT_AND_EXPR
12469 && TREE_CODE (arg1) == BIT_AND_EXPR)
12471 tree arg00 = TREE_OPERAND (arg0, 0);
12472 tree arg01 = TREE_OPERAND (arg0, 1);
12473 tree arg10 = TREE_OPERAND (arg1, 0);
12474 tree arg11 = TREE_OPERAND (arg1, 1);
12475 tree itype = TREE_TYPE (arg0);
12477 if (operand_equal_p (arg01, arg11, 0))
12478 return fold_build2_loc (loc, code, type,
12479 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12480 fold_build2_loc (loc,
12481 BIT_XOR_EXPR, itype,
12482 arg00, arg10),
12483 arg01),
12484 build_int_cst (itype, 0));
12486 if (operand_equal_p (arg01, arg10, 0))
12487 return fold_build2_loc (loc, code, type,
12488 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12489 fold_build2_loc (loc,
12490 BIT_XOR_EXPR, itype,
12491 arg00, arg11),
12492 arg01),
12493 build_int_cst (itype, 0));
12495 if (operand_equal_p (arg00, arg11, 0))
12496 return fold_build2_loc (loc, code, type,
12497 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12498 fold_build2_loc (loc,
12499 BIT_XOR_EXPR, itype,
12500 arg01, arg10),
12501 arg00),
12502 build_int_cst (itype, 0));
12504 if (operand_equal_p (arg00, arg10, 0))
12505 return fold_build2_loc (loc, code, type,
12506 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12507 fold_build2_loc (loc,
12508 BIT_XOR_EXPR, itype,
12509 arg01, arg11),
12510 arg00),
12511 build_int_cst (itype, 0));
12514 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12515 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12517 tree arg00 = TREE_OPERAND (arg0, 0);
12518 tree arg01 = TREE_OPERAND (arg0, 1);
12519 tree arg10 = TREE_OPERAND (arg1, 0);
12520 tree arg11 = TREE_OPERAND (arg1, 1);
12521 tree itype = TREE_TYPE (arg0);
12523 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12524 operand_equal_p guarantees no side-effects so we don't need
12525 to use omit_one_operand on Z. */
12526 if (operand_equal_p (arg01, arg11, 0))
12527 return fold_build2_loc (loc, code, type, arg00, arg10);
12528 if (operand_equal_p (arg01, arg10, 0))
12529 return fold_build2_loc (loc, code, type, arg00, arg11);
12530 if (operand_equal_p (arg00, arg11, 0))
12531 return fold_build2_loc (loc, code, type, arg01, arg10);
12532 if (operand_equal_p (arg00, arg10, 0))
12533 return fold_build2_loc (loc, code, type, arg01, arg11);
12535 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12536 if (TREE_CODE (arg01) == INTEGER_CST
12537 && TREE_CODE (arg11) == INTEGER_CST)
12538 return fold_build2_loc (loc, code, type,
12539 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12540 fold_build2_loc (loc,
12541 BIT_XOR_EXPR, itype,
12542 arg01, arg11)),
12543 arg10);
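/* Illustrative sketch (hypothetical operands x and y):
   (x ^ 3) == (y ^ 5) folds to (x ^ 6) == y, since 3 ^ 5 == 6.  */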
12546 /* Attempt to simplify equality/inequality comparisons of complex
12547 values. Only lower the comparison if the result is known or
12548 can be simplified to a single scalar comparison. */
12549 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12550 || TREE_CODE (arg0) == COMPLEX_CST)
12551 && (TREE_CODE (arg1) == COMPLEX_EXPR
12552 || TREE_CODE (arg1) == COMPLEX_CST))
12554 tree real0, imag0, real1, imag1;
12555 tree rcond, icond;
12557 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12559 real0 = TREE_OPERAND (arg0, 0);
12560 imag0 = TREE_OPERAND (arg0, 1);
12562 else
12564 real0 = TREE_REALPART (arg0);
12565 imag0 = TREE_IMAGPART (arg0);
12568 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12570 real1 = TREE_OPERAND (arg1, 0);
12571 imag1 = TREE_OPERAND (arg1, 1);
12573 else
12575 real1 = TREE_REALPART (arg1);
12576 imag1 = TREE_IMAGPART (arg1);
12579 rcond = fold_binary_loc (loc, code, type, real0, real1);
12580 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12582 if (integer_zerop (rcond))
12584 if (code == EQ_EXPR)
12585 return omit_two_operands_loc (loc, type, boolean_false_node,
12586 imag0, imag1);
12587 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12589 else
12591 if (code == NE_EXPR)
12592 return omit_two_operands_loc (loc, type, boolean_true_node,
12593 imag0, imag1);
12594 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12598 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12599 if (icond && TREE_CODE (icond) == INTEGER_CST)
12601 if (integer_zerop (icond))
12603 if (code == EQ_EXPR)
12604 return omit_two_operands_loc (loc, type, boolean_false_node,
12605 real0, real1);
12606 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12608 else
12610 if (code == NE_EXPR)
12611 return omit_two_operands_loc (loc, type, boolean_true_node,
12612 real0, real1);
12613 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
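/* Illustrative sketch: COMPLEX_EXPR <a, b> == COMPLEX_EXPR <a, c>
   lowers to b == c once the real parts are known to compare equal.  */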
12618 return NULL_TREE;
12620 case LT_EXPR:
12621 case GT_EXPR:
12622 case LE_EXPR:
12623 case GE_EXPR:
12624 tem = fold_comparison (loc, code, type, op0, op1);
12625 if (tem != NULL_TREE)
12626 return tem;
12628 /* Transform comparisons of the form X +- C CMP X. */
12629 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12630 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12631 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12632 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12633 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12634 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12636 tree arg01 = TREE_OPERAND (arg0, 1);
12637 enum tree_code code0 = TREE_CODE (arg0);
12638 int is_positive;
12640 if (TREE_CODE (arg01) == REAL_CST)
12641 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12642 else
12643 is_positive = tree_int_cst_sgn (arg01);
12645 /* (X - c) > X becomes false. */
12646 if (code == GT_EXPR
12647 && ((code0 == MINUS_EXPR && is_positive >= 0)
12648 || (code0 == PLUS_EXPR && is_positive <= 0)))
12650 if (TREE_CODE (arg01) == INTEGER_CST
12651 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12652 fold_overflow_warning (("assuming signed overflow does not "
12653 "occur when assuming that (X - c) > X "
12654 "is always false"),
12655 WARN_STRICT_OVERFLOW_ALL);
12656 return constant_boolean_node (0, type);
12659 /* Likewise (X + c) < X becomes false. */
12660 if (code == LT_EXPR
12661 && ((code0 == PLUS_EXPR && is_positive >= 0)
12662 || (code0 == MINUS_EXPR && is_positive <= 0)))
12664 if (TREE_CODE (arg01) == INTEGER_CST
12665 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12666 fold_overflow_warning (("assuming signed overflow does not "
12667 "occur when assuming that "
12668 "(X + c) < X is always false"),
12669 WARN_STRICT_OVERFLOW_ALL);
12670 return constant_boolean_node (0, type);
12673 /* Convert (X - c) <= X to true. */
12674 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12675 && code == LE_EXPR
12676 && ((code0 == MINUS_EXPR && is_positive >= 0)
12677 || (code0 == PLUS_EXPR && is_positive <= 0)))
12679 if (TREE_CODE (arg01) == INTEGER_CST
12680 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12681 fold_overflow_warning (("assuming signed overflow does not "
12682 "occur when assuming that "
12683 "(X - c) <= X is always true"),
12684 WARN_STRICT_OVERFLOW_ALL);
12685 return constant_boolean_node (1, type);
12688 /* Convert (X + c) >= X to true. */
12689 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12690 && code == GE_EXPR
12691 && ((code0 == PLUS_EXPR && is_positive >= 0)
12692 || (code0 == MINUS_EXPR && is_positive <= 0)))
12694 if (TREE_CODE (arg01) == INTEGER_CST
12695 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12696 fold_overflow_warning (("assuming signed overflow does not "
12697 "occur when assuming that "
12698 "(X + c) >= X is always true"),
12699 WARN_STRICT_OVERFLOW_ALL);
12700 return constant_boolean_node (1, type);
12703 if (TREE_CODE (arg01) == INTEGER_CST)
12705 /* Convert X + c > X and X - c < X to true for integers. */
12706 if (code == GT_EXPR
12707 && ((code0 == PLUS_EXPR && is_positive > 0)
12708 || (code0 == MINUS_EXPR && is_positive < 0)))
12710 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12711 fold_overflow_warning (("assuming signed overflow does "
12712 "not occur when assuming that "
12713 "(X + c) > X is always true"),
12714 WARN_STRICT_OVERFLOW_ALL);
12715 return constant_boolean_node (1, type);
12718 if (code == LT_EXPR
12719 && ((code0 == MINUS_EXPR && is_positive > 0)
12720 || (code0 == PLUS_EXPR && is_positive < 0)))
12722 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12723 fold_overflow_warning (("assuming signed overflow does "
12724 "not occur when assuming that "
12725 "(X - c) < X is always true"),
12726 WARN_STRICT_OVERFLOW_ALL);
12727 return constant_boolean_node (1, type);
12730 /* Convert X + c <= X and X - c >= X to false for integers. */
12731 if (code == LE_EXPR
12732 && ((code0 == PLUS_EXPR && is_positive > 0)
12733 || (code0 == MINUS_EXPR && is_positive < 0)))
12735 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12736 fold_overflow_warning (("assuming signed overflow does "
12737 "not occur when assuming that "
12738 "(X + c) <= X is always false"),
12739 WARN_STRICT_OVERFLOW_ALL);
12740 return constant_boolean_node (0, type);
12743 if (code == GE_EXPR
12744 && ((code0 == MINUS_EXPR && is_positive > 0)
12745 || (code0 == PLUS_EXPR && is_positive < 0)))
12747 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12748 fold_overflow_warning (("assuming signed overflow does "
12749 "not occur when assuming that "
12750 "(X - c) >= X is always false"),
12751 WARN_STRICT_OVERFLOW_ALL);
12752 return constant_boolean_node (0, type);
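/* Illustrative sketch (hypothetical signed int x, signed overflow
   treated as undefined): x + 1 > x folds to true here, and
   fold_overflow_warning records the assumption that was made.  */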
12757 /* Comparisons with the highest or lowest possible integer of
12758 the specified precision will have known values. */
12760 tree arg1_type = TREE_TYPE (arg1);
12761 unsigned int width = TYPE_PRECISION (arg1_type);
12763 if (TREE_CODE (arg1) == INTEGER_CST
12764 && width <= 2 * HOST_BITS_PER_WIDE_INT
12765 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12767 HOST_WIDE_INT signed_max_hi;
12768 unsigned HOST_WIDE_INT signed_max_lo;
12769 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12771 if (width <= HOST_BITS_PER_WIDE_INT)
12773 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12774 - 1;
12775 signed_max_hi = 0;
12776 max_hi = 0;
12778 if (TYPE_UNSIGNED (arg1_type))
12780 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12781 min_lo = 0;
12782 min_hi = 0;
12784 else
12786 max_lo = signed_max_lo;
12787 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12788 min_hi = -1;
12791 else
12793 width -= HOST_BITS_PER_WIDE_INT;
12794 signed_max_lo = -1;
12795 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12796 - 1;
12797 max_lo = -1;
12798 min_lo = 0;
12800 if (TYPE_UNSIGNED (arg1_type))
12802 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12803 min_hi = 0;
12805 else
12807 max_hi = signed_max_hi;
12808 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12812 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12813 && TREE_INT_CST_LOW (arg1) == max_lo)
12814 switch (code)
12816 case GT_EXPR:
12817 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12819 case GE_EXPR:
12820 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12822 case LE_EXPR:
12823 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12825 case LT_EXPR:
12826 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12828 /* The GE_EXPR and LT_EXPR cases above are not normally
12829 reached because of previous transformations. */
12831 default:
12832 break;
12834 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12835 == max_hi
12836 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12837 switch (code)
12839 case GT_EXPR:
12840 arg1 = const_binop (PLUS_EXPR, arg1,
12841 build_int_cst (TREE_TYPE (arg1), 1));
12842 return fold_build2_loc (loc, EQ_EXPR, type,
12843 fold_convert_loc (loc,
12844 TREE_TYPE (arg1), arg0),
12845 arg1);
12846 case LE_EXPR:
12847 arg1 = const_binop (PLUS_EXPR, arg1,
12848 build_int_cst (TREE_TYPE (arg1), 1));
12849 return fold_build2_loc (loc, NE_EXPR, type,
12850 fold_convert_loc (loc, TREE_TYPE (arg1),
12851 arg0),
12852 arg1);
12853 default:
12854 break;
12856 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12857 == min_hi
12858 && TREE_INT_CST_LOW (arg1) == min_lo)
12859 switch (code)
12861 case LT_EXPR:
12862 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12864 case LE_EXPR:
12865 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12867 case GE_EXPR:
12868 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12870 case GT_EXPR:
12871 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12873 default:
12874 break;
12876 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12877 == min_hi
12878 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12879 switch (code)
12881 case GE_EXPR:
12882 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12883 return fold_build2_loc (loc, NE_EXPR, type,
12884 fold_convert_loc (loc,
12885 TREE_TYPE (arg1), arg0),
12886 arg1);
12887 case LT_EXPR:
12888 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12889 return fold_build2_loc (loc, EQ_EXPR, type,
12890 fold_convert_loc (loc, TREE_TYPE (arg1),
12891 arg0),
12892 arg1);
12893 default:
12894 break;
12897 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12898 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12899 && TYPE_UNSIGNED (arg1_type)
12900 /* We will flip the signedness of the comparison operator
12901 associated with the mode of arg1, so the sign bit is
12902 specified by this mode. Check that arg1 is the signed
12903 max associated with this sign bit. */
12904 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12905 /* signed_type does not work on pointer types. */
12906 && INTEGRAL_TYPE_P (arg1_type))
12908 /* The following case also applies to X < signed_max+1
12909 and X >= signed_max+1 because of previous transformations. */
12910 if (code == LE_EXPR || code == GT_EXPR)
12912 tree st;
12913 st = signed_type_for (TREE_TYPE (arg1));
12914 return fold_build2_loc (loc,
12915 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12916 type, fold_convert_loc (loc, st, arg0),
12917 build_int_cst (st, 0));
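/* Illustrative sketch (hypothetical 32-bit unsigned x):
   x > 0x7fffffffU folds to (int) x < 0, testing the sign bit via
   the flipped-signedness comparison.  */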
12923 /* If we are comparing an ABS_EXPR with a constant, we can
12924 convert all the cases into explicit comparisons, but they may
12925 well not be faster than doing the ABS and one comparison.
12926 But ABS (X) <= C is a range comparison, which becomes a subtraction
12927 and a comparison, and is probably faster. */
12928 if (code == LE_EXPR
12929 && TREE_CODE (arg1) == INTEGER_CST
12930 && TREE_CODE (arg0) == ABS_EXPR
12931 && ! TREE_SIDE_EFFECTS (arg0)
12932 && (0 != (tem = negate_expr (arg1)))
12933 && TREE_CODE (tem) == INTEGER_CST
12934 && !TREE_OVERFLOW (tem))
12935 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12936 build2 (GE_EXPR, type,
12937 TREE_OPERAND (arg0, 0), tem),
12938 build2 (LE_EXPR, type,
12939 TREE_OPERAND (arg0, 0), arg1));
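/* Illustrative sketch (hypothetical int x): ABS_EXPR <x> <= 7 folds
   to the range check x >= -7 && x <= 7.  */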
12941 /* Convert ABS_EXPR<x> >= 0 to true. */
12942 strict_overflow_p = false;
12943 if (code == GE_EXPR
12944 && (integer_zerop (arg1)
12945 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12946 && real_zerop (arg1)))
12947 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12949 if (strict_overflow_p)
12950 fold_overflow_warning (("assuming signed overflow does not occur "
12951 "when simplifying comparison of "
12952 "absolute value and zero"),
12953 WARN_STRICT_OVERFLOW_CONDITIONAL);
12954 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12957 /* Convert ABS_EXPR<x> < 0 to false. */
12958 strict_overflow_p = false;
12959 if (code == LT_EXPR
12960 && (integer_zerop (arg1) || real_zerop (arg1))
12961 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12963 if (strict_overflow_p)
12964 fold_overflow_warning (("assuming signed overflow does not occur "
12965 "when simplifying comparison of "
12966 "absolute value and zero"),
12967 WARN_STRICT_OVERFLOW_CONDITIONAL);
12968 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12971 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12972 and similarly for >= into !=. */
12973 if ((code == LT_EXPR || code == GE_EXPR)
12974 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12975 && TREE_CODE (arg1) == LSHIFT_EXPR
12976 && integer_onep (TREE_OPERAND (arg1, 0)))
12978 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12979 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12980 TREE_OPERAND (arg1, 1)),
12981 build_int_cst (TREE_TYPE (arg0), 0));
12982 goto fold_binary_exit;
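/* Illustrative sketch (hypothetical unsigned x and y): x < (1U << y)
   folds to (x >> y) == 0, since x is below 1 << y exactly when no
   bit at or above position y is set.  */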
12985 if ((code == LT_EXPR || code == GE_EXPR)
12986 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12987 && CONVERT_EXPR_P (arg1)
12988 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12989 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12991 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12992 fold_convert_loc (loc, TREE_TYPE (arg0),
12993 build2 (RSHIFT_EXPR,
12994 TREE_TYPE (arg0), arg0,
12995 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12996 1))),
12997 build_int_cst (TREE_TYPE (arg0), 0));
12998 goto fold_binary_exit;
13001 return NULL_TREE;
13003 case UNORDERED_EXPR:
13004 case ORDERED_EXPR:
13005 case UNLT_EXPR:
13006 case UNLE_EXPR:
13007 case UNGT_EXPR:
13008 case UNGE_EXPR:
13009 case UNEQ_EXPR:
13010 case LTGT_EXPR:
13011 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13013 t1 = fold_relational_const (code, type, arg0, arg1);
13014 if (t1 != NULL_TREE)
13015 return t1;
13018 /* If the first operand is NaN, the result is constant. */
13019 if (TREE_CODE (arg0) == REAL_CST
13020 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13021 && (code != LTGT_EXPR || ! flag_trapping_math))
13023 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13024 ? integer_zero_node
13025 : integer_one_node;
13026 return omit_one_operand_loc (loc, type, t1, arg1);
13029 /* If the second operand is NaN, the result is constant. */
13030 if (TREE_CODE (arg1) == REAL_CST
13031 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13032 && (code != LTGT_EXPR || ! flag_trapping_math))
13034 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13035 ? integer_zero_node
13036 : integer_one_node;
13037 return omit_one_operand_loc (loc, type, t1, arg0);
13040 /* Simplify unordered comparison of something with itself. */
13041 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13042 && operand_equal_p (arg0, arg1, 0))
13043 return constant_boolean_node (1, type);
13045 if (code == LTGT_EXPR
13046 && !flag_trapping_math
13047 && operand_equal_p (arg0, arg1, 0))
13048 return constant_boolean_node (0, type);
13050 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13052 tree targ0 = strip_float_extensions (arg0);
13053 tree targ1 = strip_float_extensions (arg1);
13054 tree newtype = TREE_TYPE (targ0);
13056 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13057 newtype = TREE_TYPE (targ1);
13059 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13060 return fold_build2_loc (loc, code, type,
13061 fold_convert_loc (loc, newtype, targ0),
13062 fold_convert_loc (loc, newtype, targ1));
13065 return NULL_TREE;
13067 case COMPOUND_EXPR:
13068 /* When pedantic, a compound expression can be neither an lvalue
13069 nor an integer constant expression. */
13070 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13071 return NULL_TREE;
13072 /* Don't let (0, 0) be a null pointer constant. */
13073 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13074 : fold_convert_loc (loc, type, arg1);
13075 return pedantic_non_lvalue_loc (loc, tem);
13077 case COMPLEX_EXPR:
13078 if ((TREE_CODE (arg0) == REAL_CST
13079 && TREE_CODE (arg1) == REAL_CST)
13080 || (TREE_CODE (arg0) == INTEGER_CST
13081 && TREE_CODE (arg1) == INTEGER_CST))
13082 return build_complex (type, arg0, arg1);
13083 return NULL_TREE;
13085 case ASSERT_EXPR:
13086 /* An ASSERT_EXPR should never be passed to fold_binary. */
13087 gcc_unreachable ();
13089 default:
13090 return NULL_TREE;
13091 } /* switch (code) */
13092 fold_binary_exit:
13093 protected_set_expr_location (tem, loc);
13094 return tem;
13097 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13098 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13099 of GOTO_EXPR. */
13101 static tree
13102 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13104 switch (TREE_CODE (*tp))
13106 case LABEL_EXPR:
13107 return *tp;
13109 case GOTO_EXPR:
13110 *walk_subtrees = 0;
13112 /* ... fall through ... */
13114 default:
13115 return NULL_TREE;
13119 /* Return whether the sub-tree ST contains a label which is accessible from
13120 outside the sub-tree. */
13122 static bool
13123 contains_label_p (tree st)
13125 return
13126 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13129 /* Fold a ternary expression of code CODE and type TYPE with operands
13130 OP0, OP1, and OP2. Return the folded expression if folding is
13131 successful. Otherwise, return NULL_TREE. */
13133 tree
13134 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13135 tree op0, tree op1, tree op2)
13137 tree tem;
13138 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13139 enum tree_code_class kind = TREE_CODE_CLASS (code);
13141 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13142 && TREE_CODE_LENGTH (code) == 3);
13144 /* Strip any conversions that don't change the mode. This is safe
13145 for every expression, except for a comparison expression because
13146 its signedness is derived from its operands. So, in the latter
13147 case, only strip conversions that don't change the signedness.
13149 Note that this is done as an internal manipulation within the
13150 constant folder, in order to find the simplest representation of
13151 the arguments so that their form can be studied. In any case,
13152 the appropriate type conversions should be put back in the tree
13153 that will get out of the constant folder. */
13154 if (op0)
13156 arg0 = op0;
13157 STRIP_NOPS (arg0);
13160 if (op1)
13162 arg1 = op1;
13163 STRIP_NOPS (arg1);
13166 switch (code)
13168 case COMPONENT_REF:
13169 if (TREE_CODE (arg0) == CONSTRUCTOR
13170 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13172 unsigned HOST_WIDE_INT idx;
13173 tree field, value;
13174 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13175 if (field == arg1)
13176 return value;
13178 return NULL_TREE;
13180 case COND_EXPR:
13181 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13182 so all simple results must be passed through pedantic_non_lvalue. */
13183 if (TREE_CODE (arg0) == INTEGER_CST)
13185 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13186 tem = integer_zerop (arg0) ? op2 : op1;
13187 /* Only optimize constant conditions when the selected branch
13188 has the same type as the COND_EXPR. This avoids optimizing
13189 away "c ? x : throw", where the throw has a void type.
13190 Avoid throwing away the operand that contains a label. */
13191 if ((!TREE_SIDE_EFFECTS (unused_op)
13192 || !contains_label_p (unused_op))
13193 && (! VOID_TYPE_P (TREE_TYPE (tem))
13194 || VOID_TYPE_P (type)))
13195 return pedantic_non_lvalue_loc (loc, tem);
13196 return NULL_TREE;
13198 if (operand_equal_p (arg1, op2, 0))
13199 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13201 /* If we have A op B ? A : C, we may be able to convert this to a
13202 simpler expression, depending on the operation and the values
13203 of B and C. Signed zeros prevent all of these transformations,
13204 for reasons given above each one.
13206 Also try swapping the arguments and inverting the conditional. */
13207 if (COMPARISON_CLASS_P (arg0)
13208 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13209 arg1, TREE_OPERAND (arg0, 1))
13210 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13212 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13213 if (tem)
13214 return tem;
13217 if (COMPARISON_CLASS_P (arg0)
13218 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13219 op2,
13220 TREE_OPERAND (arg0, 1))
13221 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13223 tem = fold_truth_not_expr (loc, arg0);
13224 if (tem && COMPARISON_CLASS_P (tem))
13226 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13227 if (tem)
13228 return tem;
13232 /* If the second operand is simpler than the third, swap them
13233 since that produces better jump optimization results. */
13234 if (truth_value_p (TREE_CODE (arg0))
13235 && tree_swap_operands_p (op1, op2, false))
13237 /* See if this can be inverted. If it can't, possibly because
13238 it was a floating-point inequality comparison, don't do
13239 anything. */
13240 tem = fold_truth_not_expr (loc, arg0);
13241 if (tem)
13242 return fold_build3_loc (loc, code, type, tem, op2, op1);
13245 /* Convert A ? 1 : 0 to simply A. */
13246 if (integer_onep (op1)
13247 && integer_zerop (op2)
13248 /* If we try to convert OP0 to our type, the
13249 call to fold will try to move the conversion inside
13250 a COND, which will recurse. In that case, the COND_EXPR
13251 is probably the best choice, so leave it alone. */
13252 && type == TREE_TYPE (arg0))
13253 return pedantic_non_lvalue_loc (loc, arg0);
13255 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13256 over COND_EXPR in cases such as floating point comparisons. */
13257 if (integer_zerop (op1)
13258 && integer_onep (op2)
13259 && truth_value_p (TREE_CODE (arg0)))
13260 return pedantic_non_lvalue_loc (loc,
13261 fold_convert_loc (loc, type,
13262 invert_truthvalue_loc (loc,
13263 arg0)));
13265 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13266 if (TREE_CODE (arg0) == LT_EXPR
13267 && integer_zerop (TREE_OPERAND (arg0, 1))
13268 && integer_zerop (op2)
13269 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13271 /* sign_bit_p only checks ARG1 bits within A's precision.
13272 If <sign bit of A> has wider type than A, bits outside
13273 of A's precision in <sign bit of A> need to be checked.
13274 If they are all 0, this optimization needs to be done
13275 in unsigned A's type; if they are all 1, in signed A's type;
13276 otherwise this can't be done. */
13277 if (TYPE_PRECISION (TREE_TYPE (tem))
13278 < TYPE_PRECISION (TREE_TYPE (arg1))
13279 && TYPE_PRECISION (TREE_TYPE (tem))
13280 < TYPE_PRECISION (type))
13282 unsigned HOST_WIDE_INT mask_lo;
13283 HOST_WIDE_INT mask_hi;
13284 int inner_width, outer_width;
13285 tree tem_type;
13287 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13288 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13289 if (outer_width > TYPE_PRECISION (type))
13290 outer_width = TYPE_PRECISION (type);
13292 if (outer_width > HOST_BITS_PER_WIDE_INT)
13294 mask_hi = ((unsigned HOST_WIDE_INT) -1
13295 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13296 mask_lo = -1;
13298 else
13300 mask_hi = 0;
13301 mask_lo = ((unsigned HOST_WIDE_INT) -1
13302 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13304 if (inner_width > HOST_BITS_PER_WIDE_INT)
13306 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13307 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13308 mask_lo = 0;
13310 else
13311 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13312 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13314 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13315 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13317 tem_type = signed_type_for (TREE_TYPE (tem));
13318 tem = fold_convert_loc (loc, tem_type, tem);
13320 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13321 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13323 tem_type = unsigned_type_for (TREE_TYPE (tem));
13324 tem = fold_convert_loc (loc, tem_type, tem);
13326 else
13327 tem = NULL;
13330 if (tem)
13331 return
13332 fold_convert_loc (loc, type,
13333 fold_build2_loc (loc, BIT_AND_EXPR,
13334 TREE_TYPE (tem), tem,
13335 fold_convert_loc (loc,
13336 TREE_TYPE (tem),
13337 arg1)));
13340 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13341 already handled above. */
13342 if (TREE_CODE (arg0) == BIT_AND_EXPR
13343 && integer_onep (TREE_OPERAND (arg0, 1))
13344 && integer_zerop (op2)
13345 && integer_pow2p (arg1))
13347 tree tem = TREE_OPERAND (arg0, 0);
13348 STRIP_NOPS (tem);
13349 if (TREE_CODE (tem) == RSHIFT_EXPR
13350 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13351 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13352 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13353 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13354 TREE_OPERAND (tem, 0), arg1);
13357 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13358 is probably obsolete because the first operand should be a
13359 truth value (that's why we have the two cases above), but let's
13360 leave it in until we can confirm this for all front-ends. */
13361 if (integer_zerop (op2)
13362 && TREE_CODE (arg0) == NE_EXPR
13363 && integer_zerop (TREE_OPERAND (arg0, 1))
13364 && integer_pow2p (arg1)
13365 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13366 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13367 arg1, OEP_ONLY_CONST))
13368 return pedantic_non_lvalue_loc (loc,
13369 fold_convert_loc (loc, type,
13370 TREE_OPERAND (arg0, 0)));
13372 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13373 if (integer_zerop (op2)
13374 && truth_value_p (TREE_CODE (arg0))
13375 && truth_value_p (TREE_CODE (arg1)))
13376 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13377 fold_convert_loc (loc, type, arg0),
13378 arg1);
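/* Illustrative sketch (hypothetical truth values a and b):
   a ? b : 0 folds to a && b by the conversion above.  */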
13380 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13381 if (integer_onep (op2)
13382 && truth_value_p (TREE_CODE (arg0))
13383 && truth_value_p (TREE_CODE (arg1)))
13385 /* Only perform transformation if ARG0 is easily inverted. */
13386 tem = fold_truth_not_expr (loc, arg0);
13387 if (tem)
13388 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13389 fold_convert_loc (loc, type, tem),
13390 arg1);
13393 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13394 if (integer_zerop (arg1)
13395 && truth_value_p (TREE_CODE (arg0))
13396 && truth_value_p (TREE_CODE (op2)))
13398 /* Only perform transformation if ARG0 is easily inverted. */
13399 tem = fold_truth_not_expr (loc, arg0);
13400 if (tem)
13401 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13402 fold_convert_loc (loc, type, tem),
13403 op2);
13406 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13407 if (integer_onep (arg1)
13408 && truth_value_p (TREE_CODE (arg0))
13409 && truth_value_p (TREE_CODE (op2)))
13410 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13411 fold_convert_loc (loc, type, arg0),
13412 op2);
13414 return NULL_TREE;
13416 case CALL_EXPR:
13417 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13418 of fold_ternary on them. */
13419 gcc_unreachable ();
13421 case BIT_FIELD_REF:
13422 if ((TREE_CODE (arg0) == VECTOR_CST
13423 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13424 && type == TREE_TYPE (TREE_TYPE (arg0)))
13426 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13427 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13429 if (width != 0
13430 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13431 && (idx % width) == 0
13432 && (idx = idx / width)
13433 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13435 tree elements = NULL_TREE;
13437 if (TREE_CODE (arg0) == VECTOR_CST)
13438 elements = TREE_VECTOR_CST_ELTS (arg0);
13439 else
13441 unsigned HOST_WIDE_INT idx;
13442 tree value;
13444 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13445 elements = tree_cons (NULL_TREE, value, elements);
13447 while (idx-- > 0 && elements)
13448 elements = TREE_CHAIN (elements);
13449 if (elements)
13450 return TREE_VALUE (elements);
13451 else
13452 return fold_convert_loc (loc, type, integer_zero_node);
13456 /* A bit-field-ref that referenced the full argument can be stripped. */
13457 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13458 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13459 && integer_zerop (op2))
13460 return fold_convert_loc (loc, type, arg0);
13462 return NULL_TREE;
13464 default:
13465 return NULL_TREE;
13466 } /* switch (code) */
13469 /* Perform constant folding and related simplification of EXPR.
13470 The related simplifications include x*1 => x, x*0 => 0, etc.,
13471 and application of the associative law.
13472 NOP_EXPR conversions may be removed freely (as long as we
13473 are careful not to change the type of the overall expression).
13474 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13475 but we can constant-fold them if they have constant operands. */
13477 #ifdef ENABLE_FOLD_CHECKING
13478 # define fold(x) fold_1 (x)
13479 static tree fold_1 (tree);
13480 static
13481 #endif
13482 tree
13483 fold (tree expr)
13485 const tree t = expr;
13486 enum tree_code code = TREE_CODE (t);
13487 enum tree_code_class kind = TREE_CODE_CLASS (code);
13488 tree tem;
13489 location_t loc = EXPR_LOCATION (expr);
13491 /* Return right away if a constant. */
13492 if (kind == tcc_constant)
13493 return t;
13495 /* CALL_EXPR-like objects with variable numbers of operands are
13496 treated specially. */
13497 if (kind == tcc_vl_exp)
13499 if (code == CALL_EXPR)
13501 tem = fold_call_expr (loc, expr, false);
13502 return tem ? tem : expr;
13504 return expr;
13507 if (IS_EXPR_CODE_CLASS (kind))
13509 tree type = TREE_TYPE (t);
13510 tree op0, op1, op2;
13512 switch (TREE_CODE_LENGTH (code))
13514 case 1:
13515 op0 = TREE_OPERAND (t, 0);
13516 tem = fold_unary_loc (loc, code, type, op0);
13517 return tem ? tem : expr;
13518 case 2:
13519 op0 = TREE_OPERAND (t, 0);
13520 op1 = TREE_OPERAND (t, 1);
13521 tem = fold_binary_loc (loc, code, type, op0, op1);
13522 return tem ? tem : expr;
13523 case 3:
13524 op0 = TREE_OPERAND (t, 0);
13525 op1 = TREE_OPERAND (t, 1);
13526 op2 = TREE_OPERAND (t, 2);
13527 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13528 return tem ? tem : expr;
13529 default:
13530 break;
13534 switch (code)
13536 case ARRAY_REF:
13538 tree op0 = TREE_OPERAND (t, 0);
13539 tree op1 = TREE_OPERAND (t, 1);
13541 if (TREE_CODE (op1) == INTEGER_CST
13542 && TREE_CODE (op0) == CONSTRUCTOR
13543 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13545 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13546 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13547 unsigned HOST_WIDE_INT begin = 0;
13549 /* Find a matching index by means of a binary search. */
13550 while (begin != end)
13552 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13553 tree index = VEC_index (constructor_elt, elts, middle)->index;
13555 if (TREE_CODE (index) == INTEGER_CST
13556 && tree_int_cst_lt (index, op1))
13557 begin = middle + 1;
13558 else if (TREE_CODE (index) == INTEGER_CST
13559 && tree_int_cst_lt (op1, index))
13560 end = middle;
13561 else if (TREE_CODE (index) == RANGE_EXPR
13562 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13563 begin = middle + 1;
13564 else if (TREE_CODE (index) == RANGE_EXPR
13565 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13566 end = middle;
13567 else
13568 return VEC_index (constructor_elt, elts, middle)->value;
13572 return t;
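/* Illustrative sketch: reading arr[2] from a constant CONSTRUCTOR
   {10, 20, 30} folds to 30; the binary search above relies on the
   constructor's indexes being sorted.  */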
13575 case CONST_DECL:
13576 return fold (DECL_INITIAL (t));
13578 default:
13579 return t;
13580 } /* switch (code) */
13583 #ifdef ENABLE_FOLD_CHECKING
13584 #undef fold
13586 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13587 static void fold_check_failed (const_tree, const_tree);
13588 void print_fold_checksum (const_tree);
13590 /* When --enable-checking=fold, compute a digest of expr before
13591 and after the actual fold call to verify that fold did not
13592 accidentally change the original expr. */
13594 tree
13595 fold (tree expr)
13597 tree ret;
13598 struct md5_ctx ctx;
13599 unsigned char checksum_before[16], checksum_after[16];
13600 htab_t ht;
13602 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13603 md5_init_ctx (&ctx);
13604 fold_checksum_tree (expr, &ctx, ht);
13605 md5_finish_ctx (&ctx, checksum_before);
13606 htab_empty (ht);
13608 ret = fold_1 (expr);
13610 md5_init_ctx (&ctx);
13611 fold_checksum_tree (expr, &ctx, ht);
13612 md5_finish_ctx (&ctx, checksum_after);
13613 htab_delete (ht);
13615 if (memcmp (checksum_before, checksum_after, 16))
13616 fold_check_failed (expr, ret);
13618 return ret;
13621 void
13622 print_fold_checksum (const_tree expr)
13624 struct md5_ctx ctx;
13625 unsigned char checksum[16], cnt;
13626 htab_t ht;
13628 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13629 md5_init_ctx (&ctx);
13630 fold_checksum_tree (expr, &ctx, ht);
13631 md5_finish_ctx (&ctx, checksum);
13632 htab_delete (ht);
13633 for (cnt = 0; cnt < 16; ++cnt)
13634 fprintf (stderr, "%02x", checksum[cnt]);
13635 putc ('\n', stderr);
13638 static void
13639 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13641 internal_error ("fold check: original tree changed by fold");
13644 static void
13645 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13647 const void **slot;
13648 enum tree_code code;
13649 union tree_node buf;
13650 int i, len;
13652 recursive_label:
13654 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13655 <= sizeof (struct tree_function_decl))
13656 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13657 if (expr == NULL)
13658 return;
13659 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13660 if (*slot != NULL)
13661 return;
13662 *slot = expr;
13663 code = TREE_CODE (expr);
13664 if (TREE_CODE_CLASS (code) == tcc_declaration
13665 && DECL_ASSEMBLER_NAME_SET_P (expr))
13667 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13668 memcpy ((char *) &buf, expr, tree_size (expr));
13669 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13670 expr = (tree) &buf;
13672 else if (TREE_CODE_CLASS (code) == tcc_type
13673 && (TYPE_POINTER_TO (expr)
13674 || TYPE_REFERENCE_TO (expr)
13675 || TYPE_CACHED_VALUES_P (expr)
13676 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13677 || TYPE_NEXT_VARIANT (expr)))
13679 /* Allow these fields to be modified. */
13680 tree tmp;
13681 memcpy ((char *) &buf, expr, tree_size (expr));
13682 expr = tmp = (tree) &buf;
13683 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13684 TYPE_POINTER_TO (tmp) = NULL;
13685 TYPE_REFERENCE_TO (tmp) = NULL;
13686 TYPE_NEXT_VARIANT (tmp) = NULL;
13687 if (TYPE_CACHED_VALUES_P (tmp))
13689 TYPE_CACHED_VALUES_P (tmp) = 0;
13690 TYPE_CACHED_VALUES (tmp) = NULL;
13693 md5_process_bytes (expr, tree_size (expr), ctx);
13694 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13695 if (TREE_CODE_CLASS (code) != tcc_type
13696 && TREE_CODE_CLASS (code) != tcc_declaration
13697 && code != TREE_LIST
13698 && code != SSA_NAME)
13699 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13700 switch (TREE_CODE_CLASS (code))
13702 case tcc_constant:
13703 switch (code)
13705 case STRING_CST:
13706 md5_process_bytes (TREE_STRING_POINTER (expr),
13707 TREE_STRING_LENGTH (expr), ctx);
13708 break;
13709 case COMPLEX_CST:
13710 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13711 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13712 break;
13713 case VECTOR_CST:
13714 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13715 break;
13716 default:
13717 break;
13719 break;
13720 case tcc_exceptional:
13721 switch (code)
13723 case TREE_LIST:
13724 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13725 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13726 expr = TREE_CHAIN (expr);
13727 goto recursive_label;
13728 break;
13729 case TREE_VEC:
13730 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13731 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13732 break;
13733 default:
13734 break;
13736 break;
13737 case tcc_expression:
13738 case tcc_reference:
13739 case tcc_comparison:
13740 case tcc_unary:
13741 case tcc_binary:
13742 case tcc_statement:
13743 case tcc_vl_exp:
13744 len = TREE_OPERAND_LENGTH (expr);
13745 for (i = 0; i < len; ++i)
13746 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13747 break;
13748 case tcc_declaration:
13749 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13750 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13751 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13753 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13754 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13755 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13756 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13757 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13759 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13760 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13762 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13764 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13765 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13766 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13768 break;
13769 case tcc_type:
13770 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13771 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13772 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13773 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13774 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13775 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13776 if (INTEGRAL_TYPE_P (expr)
13777 || SCALAR_FLOAT_TYPE_P (expr))
13779 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13780 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13782 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13783 if (TREE_CODE (expr) == RECORD_TYPE
13784 || TREE_CODE (expr) == UNION_TYPE
13785 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13786 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13787 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13788 break;
13789 default:
13790 break;
13794 /* Helper function for outputting the checksum of a tree T. When
13795 debugging with gdb, you can "define mynext" to be "next" followed
13796 by "call debug_fold_checksum (op0)", then just trace down till the
13797 outputs differ. */
13799 DEBUG_FUNCTION void
13800 debug_fold_checksum (const_tree t)
13802 int i;
13803 unsigned char checksum[16];
13804 struct md5_ctx ctx;
13805 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13807 md5_init_ctx (&ctx);
13808 fold_checksum_tree (t, &ctx, ht);
13809 md5_finish_ctx (&ctx, checksum);
13810 htab_empty (ht);
13812 for (i = 0; i < 16; i++)
13813 fprintf (stderr, "%d ", checksum[i]);
13815 fprintf (stderr, "\n");
13818 #endif
13820 /* Fold a unary tree expression with code CODE of type TYPE with an
13821 operand OP0. LOC is the location of the resulting expression.
13822 Return a folded expression if successful. Otherwise, return a tree
13823 expression with code CODE of type TYPE with an operand OP0. */
13825 tree
13826 fold_build1_stat_loc (location_t loc,
13827 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13829 tree tem;
13830 #ifdef ENABLE_FOLD_CHECKING
13831 unsigned char checksum_before[16], checksum_after[16];
13832 struct md5_ctx ctx;
13833 htab_t ht;
13835 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13836 md5_init_ctx (&ctx);
13837 fold_checksum_tree (op0, &ctx, ht);
13838 md5_finish_ctx (&ctx, checksum_before);
13839 htab_empty (ht);
13840 #endif
13842 tem = fold_unary_loc (loc, code, type, op0);
13843 if (!tem)
13845 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13846 SET_EXPR_LOCATION (tem, loc);
13849 #ifdef ENABLE_FOLD_CHECKING
13850 md5_init_ctx (&ctx);
13851 fold_checksum_tree (op0, &ctx, ht);
13852 md5_finish_ctx (&ctx, checksum_after);
13853 htab_delete (ht);
13855 if (memcmp (checksum_before, checksum_after, 16))
13856 fold_check_failed (op0, tem);
13857 #endif
13858 return tem;
13861 /* Fold a binary tree expression with code CODE of type TYPE with
13862 operands OP0 and OP1. LOC is the location of the resulting
13863 expression. Return a folded expression if successful. Otherwise,
13864 return a tree expression with code CODE of type TYPE with operands
13865 OP0 and OP1. */
13867 tree
13868 fold_build2_stat_loc (location_t loc,
13869 enum tree_code code, tree type, tree op0, tree op1
13870 MEM_STAT_DECL)
13872 tree tem;
13873 #ifdef ENABLE_FOLD_CHECKING
13874 unsigned char checksum_before_op0[16],
13875 checksum_before_op1[16],
13876 checksum_after_op0[16],
13877 checksum_after_op1[16];
13878 struct md5_ctx ctx;
13879 htab_t ht;
13881 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13882 md5_init_ctx (&ctx);
13883 fold_checksum_tree (op0, &ctx, ht);
13884 md5_finish_ctx (&ctx, checksum_before_op0);
13885 htab_empty (ht);
13887 md5_init_ctx (&ctx);
13888 fold_checksum_tree (op1, &ctx, ht);
13889 md5_finish_ctx (&ctx, checksum_before_op1);
13890 htab_empty (ht);
13891 #endif
13893 tem = fold_binary_loc (loc, code, type, op0, op1);
13894 if (!tem)
13896 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13897 SET_EXPR_LOCATION (tem, loc);
13900 #ifdef ENABLE_FOLD_CHECKING
13901 md5_init_ctx (&ctx);
13902 fold_checksum_tree (op0, &ctx, ht);
13903 md5_finish_ctx (&ctx, checksum_after_op0);
13904 htab_empty (ht);
13906 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13907 fold_check_failed (op0, tem);
13909 md5_init_ctx (&ctx);
13910 fold_checksum_tree (op1, &ctx, ht);
13911 md5_finish_ctx (&ctx, checksum_after_op1);
13912 htab_delete (ht);
13914 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13915 fold_check_failed (op1, tem);
13916 #endif
13917 return tem;
13920 /* Fold a ternary tree expression with code CODE of type TYPE with
13921 operands OP0, OP1, and OP2. Return a folded expression if
13922 successful. Otherwise, return a tree expression with code CODE of
13923 type TYPE with operands OP0, OP1, and OP2. */
13925 tree
13926 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13927 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13929 tree tem;
13930 #ifdef ENABLE_FOLD_CHECKING
13931 unsigned char checksum_before_op0[16],
13932 checksum_before_op1[16],
13933 checksum_before_op2[16],
13934 checksum_after_op0[16],
13935 checksum_after_op1[16],
13936 checksum_after_op2[16];
13937 struct md5_ctx ctx;
13938 htab_t ht;
13940 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13941 md5_init_ctx (&ctx);
13942 fold_checksum_tree (op0, &ctx, ht);
13943 md5_finish_ctx (&ctx, checksum_before_op0);
13944 htab_empty (ht);
13946 md5_init_ctx (&ctx);
13947 fold_checksum_tree (op1, &ctx, ht);
13948 md5_finish_ctx (&ctx, checksum_before_op1);
13949 htab_empty (ht);
13951 md5_init_ctx (&ctx);
13952 fold_checksum_tree (op2, &ctx, ht);
13953 md5_finish_ctx (&ctx, checksum_before_op2);
13954 htab_empty (ht);
13955 #endif
13957 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13958 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13959 if (!tem)
13961 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13962 SET_EXPR_LOCATION (tem, loc);
13965 #ifdef ENABLE_FOLD_CHECKING
13966 md5_init_ctx (&ctx);
13967 fold_checksum_tree (op0, &ctx, ht);
13968 md5_finish_ctx (&ctx, checksum_after_op0);
13969 htab_empty (ht);
13971 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13972 fold_check_failed (op0, tem);
13974 md5_init_ctx (&ctx);
13975 fold_checksum_tree (op1, &ctx, ht);
13976 md5_finish_ctx (&ctx, checksum_after_op1);
13977 htab_empty (ht);
13979 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13980 fold_check_failed (op1, tem);
13982 md5_init_ctx (&ctx);
13983 fold_checksum_tree (op2, &ctx, ht);
13984 md5_finish_ctx (&ctx, checksum_after_op2);
13985 htab_delete (ht);
13987 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13988 fold_check_failed (op2, tem);
13989 #endif
13990 return tem;
13993 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13994 arguments in ARGARRAY, and a null static chain.
13995 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13996 of type TYPE from the given operands as constructed by build_call_array. */
13998 tree
13999 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14000 int nargs, tree *argarray)
14002 tree tem;
14003 #ifdef ENABLE_FOLD_CHECKING
14004 unsigned char checksum_before_fn[16],
14005 checksum_before_arglist[16],
14006 checksum_after_fn[16],
14007 checksum_after_arglist[16];
14008 struct md5_ctx ctx;
14009 htab_t ht;
14010 int i;
14012 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14013 md5_init_ctx (&ctx);
14014 fold_checksum_tree (fn, &ctx, ht);
14015 md5_finish_ctx (&ctx, checksum_before_fn);
14016 htab_empty (ht);
14018 md5_init_ctx (&ctx);
14019 for (i = 0; i < nargs; i++)
14020 fold_checksum_tree (argarray[i], &ctx, ht);
14021 md5_finish_ctx (&ctx, checksum_before_arglist);
14022 htab_empty (ht);
14023 #endif
14025 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14027 #ifdef ENABLE_FOLD_CHECKING
14028 md5_init_ctx (&ctx);
14029 fold_checksum_tree (fn, &ctx, ht);
14030 md5_finish_ctx (&ctx, checksum_after_fn);
14031 htab_empty (ht);
14033 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14034 fold_check_failed (fn, tem);
14036 md5_init_ctx (&ctx);
14037 for (i = 0; i < nargs; i++)
14038 fold_checksum_tree (argarray[i], &ctx, ht);
14039 md5_finish_ctx (&ctx, checksum_after_arglist);
14040 htab_delete (ht);
14042 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14043 fold_check_failed (NULL_TREE, tem);
14044 #endif
14045 return tem;
14048 /* Perform constant folding and related simplification of an initializer
14049 expression. These variants behave identically to "fold_buildN" but ignore
14050 potential run-time traps and exceptions that fold must preserve. */
14052 #define START_FOLD_INIT \
14053 int saved_signaling_nans = flag_signaling_nans;\
14054 int saved_trapping_math = flag_trapping_math;\
14055 int saved_rounding_math = flag_rounding_math;\
14056 int saved_trapv = flag_trapv;\
14057 int saved_folding_initializer = folding_initializer;\
14058 flag_signaling_nans = 0;\
14059 flag_trapping_math = 0;\
14060 flag_rounding_math = 0;\
14061 flag_trapv = 0;\
14062 folding_initializer = 1;
14064 #define END_FOLD_INIT \
14065 flag_signaling_nans = saved_signaling_nans;\
14066 flag_trapping_math = saved_trapping_math;\
14067 flag_rounding_math = saved_rounding_math;\
14068 flag_trapv = saved_trapv;\
14069 folding_initializer = saved_folding_initializer;
14071 tree
14072 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14073 tree type, tree op)
14075 tree result;
14076 START_FOLD_INIT;
14078 result = fold_build1_loc (loc, code, type, op);
14080 END_FOLD_INIT;
14081 return result;
14084 tree
14085 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14086 tree type, tree op0, tree op1)
14088 tree result;
14089 START_FOLD_INIT;
14091 result = fold_build2_loc (loc, code, type, op0, op1);
14093 END_FOLD_INIT;
14094 return result;
14097 tree
14098 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14099 tree type, tree op0, tree op1, tree op2)
14101 tree result;
14102 START_FOLD_INIT;
14104 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14106 END_FOLD_INIT;
14107 return result;
14110 tree
14111 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14112 int nargs, tree *argarray)
14114 tree result;
14115 START_FOLD_INIT;
14117 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14119 END_FOLD_INIT;
14120 return result;
14123 #undef START_FOLD_INIT
14124 #undef END_FOLD_INIT
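/* Illustrative sketch (editorial addition, not GCC code): the initializer
   variants matter when command-line flags would otherwise suppress a fold.
   Under -frounding-math, fold refuses to fold the inexact 1.0 / 3.0 since
   the result depends on the dynamic rounding mode, but a static
   initializer has no run time, so a front end can instead use:

     tree t = fold_build2_initializer_loc (input_location, RDIV_EXPR,
                                           double_type_node, one, third);

   where one and third stand for REAL_CSTs built elsewhere (hypothetical
   names).  START_FOLD_INIT clears flag_rounding_math and the trap flags
   around the call, so the division folds to a REAL_CST.  */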
14126 /* Determine if first argument is a multiple of second argument. Return 0 if
14127 it is not, or we cannot easily determine it to be.
14129 An example of the sort of thing we care about (at this point; this routine
14130 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14131 fold cases do now) is discovering that
14133 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14135 is a multiple of
14137 SAVE_EXPR (J * 8)
14139 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14141 This code also handles discovering that
14143 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14145 is a multiple of 8 so we don't have to worry about dealing with a
14146 possible remainder.
14148 Note that we *look* inside a SAVE_EXPR only to determine how it was
14149 calculated; it is not safe for fold to do much of anything else with the
14150 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14151 at run time. For example, the latter example above *cannot* be implemented
14152 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14153 evaluation time of the original SAVE_EXPR is not necessarily the same at
14154 the time the new expression is evaluated. The only optimization of this
14155 sort that would be valid is changing
14157 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14159 divided by 8 to
14161 SAVE_EXPR (I) * SAVE_EXPR (J)
14163 (where the same SAVE_EXPR (J) is used in the original and the
14164 transformed version). */
14166 int
14167 multiple_of_p (tree type, const_tree top, const_tree bottom)
14169 if (operand_equal_p (top, bottom, 0))
14170 return 1;
14172 if (TREE_CODE (type) != INTEGER_TYPE)
14173 return 0;
14175 switch (TREE_CODE (top))
14177 case BIT_AND_EXPR:
14178 /* Bitwise and provides a power of two multiple. If the mask is
14179 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14180 if (!integer_pow2p (bottom))
14181 return 0;
14182 /* FALLTHRU */
14184 case MULT_EXPR:
14185 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14186 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14188 case PLUS_EXPR:
14189 case MINUS_EXPR:
14190 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14191 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14193 case LSHIFT_EXPR:
14194 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14196 tree op1, t1;
14198 op1 = TREE_OPERAND (top, 1);
14199 /* const_binop may not detect overflow correctly,
14200 so check for it explicitly here. */
14201 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14202 > TREE_INT_CST_LOW (op1)
14203 && TREE_INT_CST_HIGH (op1) == 0
14204 && 0 != (t1 = fold_convert (type,
14205 const_binop (LSHIFT_EXPR,
14206 size_one_node,
14207 op1)))
14208 && !TREE_OVERFLOW (t1))
14209 return multiple_of_p (type, t1, bottom);
14211 return 0;
14213 case NOP_EXPR:
14214 /* Can't handle conversions from non-integral or wider integral type. */
14215 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14216 || (TYPE_PRECISION (type)
14217 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14218 return 0;
14220 /* .. fall through ... */
14222 case SAVE_EXPR:
14223 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14225 case COND_EXPR:
14226 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14227 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14229 case INTEGER_CST:
14230 if (TREE_CODE (bottom) != INTEGER_CST
14231 || integer_zerop (bottom)
14232 || (TYPE_UNSIGNED (type)
14233 && (tree_int_cst_sgn (top) < 0
14234 || tree_int_cst_sgn (bottom) < 0)))
14235 return 0;
14236 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14237 top, bottom, 0));
14239 default:
14240 return 0;
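/* Worked example (editorial addition, not GCC code): for TOP = i * 8 and
   BOTTOM = 4 in sizetype, the MULT_EXPR case asks whether either operand
   is a multiple of 4, and the INTEGER_CST case then computes 8 % 4 == 0,
   so the whole call returns 1 without looking at i:

     tree top = fold_build2 (MULT_EXPR, sizetype, i,
                             build_int_cst (sizetype, 8));
     int ok = multiple_of_p (sizetype, top, build_int_cst (sizetype, 4));

   Here i stands for an arbitrary sizetype expression (hypothetical).  */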
14244 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
14246 static bool
14247 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14249 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14250 && truth_value_p (code))
14251 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14252 have a signed:1 type (where the values are -1 and 0). */
14253 return true;
14254 return false;
14257 /* Return true if (CODE OP0) is known to be non-negative. If the return
14258 value is based on the assumption that signed overflow is undefined,
14259 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14260 *STRICT_OVERFLOW_P. */
14262 bool
14263 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14264 bool *strict_overflow_p)
14266 if (TYPE_UNSIGNED (type))
14267 return true;
14269 switch (code)
14271 case ABS_EXPR:
14272 /* We can't return 1 if flag_wrapv is set because
14273 ABS_EXPR<INT_MIN> = INT_MIN. */
14274 if (!INTEGRAL_TYPE_P (type))
14275 return true;
14276 if (TYPE_OVERFLOW_UNDEFINED (type))
14278 *strict_overflow_p = true;
14279 return true;
14281 break;
14283 case NON_LVALUE_EXPR:
14284 case FLOAT_EXPR:
14285 case FIX_TRUNC_EXPR:
14286 return tree_expr_nonnegative_warnv_p (op0,
14287 strict_overflow_p);
14289 case NOP_EXPR:
14291 tree inner_type = TREE_TYPE (op0);
14292 tree outer_type = type;
14294 if (TREE_CODE (outer_type) == REAL_TYPE)
14296 if (TREE_CODE (inner_type) == REAL_TYPE)
14297 return tree_expr_nonnegative_warnv_p (op0,
14298 strict_overflow_p);
14299 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14301 if (TYPE_UNSIGNED (inner_type))
14302 return true;
14303 return tree_expr_nonnegative_warnv_p (op0,
14304 strict_overflow_p);
14307 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14309 if (TREE_CODE (inner_type) == REAL_TYPE)
14310 return tree_expr_nonnegative_warnv_p (op0,
14311 strict_overflow_p);
14312 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14313 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14314 && TYPE_UNSIGNED (inner_type);
14317 break;
14319 default:
14320 return tree_simple_nonnegative_warnv_p (code, type);
14323 /* We don't know sign of `t', so be conservative and return false. */
14324 return false;
14327 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14328 value is based on the assumption that signed overflow is undefined,
14329 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14330 *STRICT_OVERFLOW_P. */
14332 bool
14333 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14334 tree op1, bool *strict_overflow_p)
14336 if (TYPE_UNSIGNED (type))
14337 return true;
14339 switch (code)
14341 case POINTER_PLUS_EXPR:
14342 case PLUS_EXPR:
14343 if (FLOAT_TYPE_P (type))
14344 return (tree_expr_nonnegative_warnv_p (op0,
14345 strict_overflow_p)
14346 && tree_expr_nonnegative_warnv_p (op1,
14347 strict_overflow_p));
14349 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14350 both unsigned and at least 2 bits shorter than the result. */
14351 if (TREE_CODE (type) == INTEGER_TYPE
14352 && TREE_CODE (op0) == NOP_EXPR
14353 && TREE_CODE (op1) == NOP_EXPR)
14355 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14356 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14357 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14358 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14360 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14361 TYPE_PRECISION (inner2)) + 1;
14362 return prec < TYPE_PRECISION (type);
14365 break;
14367 case MULT_EXPR:
14368 if (FLOAT_TYPE_P (type))
14370 /* x * x for floating point x is always non-negative. */
14371 if (operand_equal_p (op0, op1, 0))
14372 return true;
14373 return (tree_expr_nonnegative_warnv_p (op0,
14374 strict_overflow_p)
14375 && tree_expr_nonnegative_warnv_p (op1,
14376 strict_overflow_p));
14379 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14380 both unsigned and their total bits is shorter than the result. */
14381 if (TREE_CODE (type) == INTEGER_TYPE
14382 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14383 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14385 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14386 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14387 : TREE_TYPE (op0);
14388 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14389 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14390 : TREE_TYPE (op1);
14392 bool unsigned0 = TYPE_UNSIGNED (inner0);
14393 bool unsigned1 = TYPE_UNSIGNED (inner1);
14395 if (TREE_CODE (op0) == INTEGER_CST)
14396 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14398 if (TREE_CODE (op1) == INTEGER_CST)
14399 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14401 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14402 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14404 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14405 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14406 : TYPE_PRECISION (inner0);
14408 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14409 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14410 : TYPE_PRECISION (inner1);
14412 return precision0 + precision1 < TYPE_PRECISION (type);
14415 return false;
14417 case BIT_AND_EXPR:
14418 case MAX_EXPR:
14419 return (tree_expr_nonnegative_warnv_p (op0,
14420 strict_overflow_p)
14421 || tree_expr_nonnegative_warnv_p (op1,
14422 strict_overflow_p));
14424 case BIT_IOR_EXPR:
14425 case BIT_XOR_EXPR:
14426 case MIN_EXPR:
14427 case RDIV_EXPR:
14428 case TRUNC_DIV_EXPR:
14429 case CEIL_DIV_EXPR:
14430 case FLOOR_DIV_EXPR:
14431 case ROUND_DIV_EXPR:
14432 return (tree_expr_nonnegative_warnv_p (op0,
14433 strict_overflow_p)
14434 && tree_expr_nonnegative_warnv_p (op1,
14435 strict_overflow_p));
14437 case TRUNC_MOD_EXPR:
14438 case CEIL_MOD_EXPR:
14439 case FLOOR_MOD_EXPR:
14440 case ROUND_MOD_EXPR:
14441 return tree_expr_nonnegative_warnv_p (op0,
14442 strict_overflow_p);
14443 default:
14444 return tree_simple_nonnegative_warnv_p (code, type);
14447 /* We don't know sign of `t', so be conservative and return false. */
14448 return false;
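/* Worked example for the PLUS_EXPR rule above (editorial addition): with
   32-bit int and 16-bit unsigned short, (int) a + (int) b for unsigned
   short a and b gives inner precisions of 16 and 16, so
   prec = MAX (16, 16) + 1 = 17 < 32 and the sum is provably nonnegative;
   the largest possible value 65535 + 65535 = 131070 indeed fits in 17
   bits.  Widening unsigned int operands to a 64-bit type likewise gives
   prec = 33 < 64, whereas int + int computed in int does not qualify.  */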
14451 /* Return true if T is known to be non-negative. If the return
14452 value is based on the assumption that signed overflow is undefined,
14453 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14454 *STRICT_OVERFLOW_P. */
14456 bool
14457 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14459 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14460 return true;
14462 switch (TREE_CODE (t))
14464 case INTEGER_CST:
14465 return tree_int_cst_sgn (t) >= 0;
14467 case REAL_CST:
14468 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14470 case FIXED_CST:
14471 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14473 case COND_EXPR:
14474 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14475 strict_overflow_p)
14476 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14477 strict_overflow_p));
14478 default:
14479 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14480 TREE_TYPE (t));
14482 /* We don't know sign of `t', so be conservative and return false. */
14483 return false;
14486 /* Return true if T is known to be non-negative. If the return
14487 value is based on the assumption that signed overflow is undefined,
14488 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14489 *STRICT_OVERFLOW_P. */
14491 bool
14492 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14493 tree arg0, tree arg1, bool *strict_overflow_p)
14495 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14496 switch (DECL_FUNCTION_CODE (fndecl))
14498 CASE_FLT_FN (BUILT_IN_ACOS):
14499 CASE_FLT_FN (BUILT_IN_ACOSH):
14500 CASE_FLT_FN (BUILT_IN_CABS):
14501 CASE_FLT_FN (BUILT_IN_COSH):
14502 CASE_FLT_FN (BUILT_IN_ERFC):
14503 CASE_FLT_FN (BUILT_IN_EXP):
14504 CASE_FLT_FN (BUILT_IN_EXP10):
14505 CASE_FLT_FN (BUILT_IN_EXP2):
14506 CASE_FLT_FN (BUILT_IN_FABS):
14507 CASE_FLT_FN (BUILT_IN_FDIM):
14508 CASE_FLT_FN (BUILT_IN_HYPOT):
14509 CASE_FLT_FN (BUILT_IN_POW10):
14510 CASE_INT_FN (BUILT_IN_FFS):
14511 CASE_INT_FN (BUILT_IN_PARITY):
14512 CASE_INT_FN (BUILT_IN_POPCOUNT):
14513 case BUILT_IN_BSWAP32:
14514 case BUILT_IN_BSWAP64:
14515 /* Always true. */
14516 return true;
14518 CASE_FLT_FN (BUILT_IN_SQRT):
14519 /* sqrt(-0.0) is -0.0. */
14520 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14521 return true;
14522 return tree_expr_nonnegative_warnv_p (arg0,
14523 strict_overflow_p);
14525 CASE_FLT_FN (BUILT_IN_ASINH):
14526 CASE_FLT_FN (BUILT_IN_ATAN):
14527 CASE_FLT_FN (BUILT_IN_ATANH):
14528 CASE_FLT_FN (BUILT_IN_CBRT):
14529 CASE_FLT_FN (BUILT_IN_CEIL):
14530 CASE_FLT_FN (BUILT_IN_ERF):
14531 CASE_FLT_FN (BUILT_IN_EXPM1):
14532 CASE_FLT_FN (BUILT_IN_FLOOR):
14533 CASE_FLT_FN (BUILT_IN_FMOD):
14534 CASE_FLT_FN (BUILT_IN_FREXP):
14535 CASE_FLT_FN (BUILT_IN_LCEIL):
14536 CASE_FLT_FN (BUILT_IN_LDEXP):
14537 CASE_FLT_FN (BUILT_IN_LFLOOR):
14538 CASE_FLT_FN (BUILT_IN_LLCEIL):
14539 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14540 CASE_FLT_FN (BUILT_IN_LLRINT):
14541 CASE_FLT_FN (BUILT_IN_LLROUND):
14542 CASE_FLT_FN (BUILT_IN_LRINT):
14543 CASE_FLT_FN (BUILT_IN_LROUND):
14544 CASE_FLT_FN (BUILT_IN_MODF):
14545 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14546 CASE_FLT_FN (BUILT_IN_RINT):
14547 CASE_FLT_FN (BUILT_IN_ROUND):
14548 CASE_FLT_FN (BUILT_IN_SCALB):
14549 CASE_FLT_FN (BUILT_IN_SCALBLN):
14550 CASE_FLT_FN (BUILT_IN_SCALBN):
14551 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14552 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14553 CASE_FLT_FN (BUILT_IN_SINH):
14554 CASE_FLT_FN (BUILT_IN_TANH):
14555 CASE_FLT_FN (BUILT_IN_TRUNC):
14556 /* True if the 1st argument is nonnegative. */
14557 return tree_expr_nonnegative_warnv_p (arg0,
14558 strict_overflow_p);
14560 CASE_FLT_FN (BUILT_IN_FMAX):
14561 /* True if either the 1st or the 2nd argument is nonnegative. */
14562 return (tree_expr_nonnegative_warnv_p (arg0,
14563 strict_overflow_p)
14564 || (tree_expr_nonnegative_warnv_p (arg1,
14565 strict_overflow_p)));
14567 CASE_FLT_FN (BUILT_IN_FMIN):
14568 /* True if the 1st AND 2nd arguments are nonnegative. */
14569 return (tree_expr_nonnegative_warnv_p (arg0,
14570 strict_overflow_p)
14571 && (tree_expr_nonnegative_warnv_p (arg1,
14572 strict_overflow_p)));
14574 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14575 /* True if the 2nd argument is nonnegative. */
14576 return tree_expr_nonnegative_warnv_p (arg1,
14577 strict_overflow_p);
14579 CASE_FLT_FN (BUILT_IN_POWI):
14580 /* True if the 1st argument is nonnegative or the second
14581 argument is an even integer. */
14582 if (TREE_CODE (arg1) == INTEGER_CST
14583 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14584 return true;
14585 return tree_expr_nonnegative_warnv_p (arg0,
14586 strict_overflow_p);
14588 CASE_FLT_FN (BUILT_IN_POW):
14589 /* True if the 1st argument is nonnegative or the second
14590 argument is an even integer valued real. */
14591 if (TREE_CODE (arg1) == REAL_CST)
14593 REAL_VALUE_TYPE c;
14594 HOST_WIDE_INT n;
14596 c = TREE_REAL_CST (arg1);
14597 n = real_to_integer (&c);
14598 if ((n & 1) == 0)
14600 REAL_VALUE_TYPE cint;
14601 real_from_integer (&cint, VOIDmode, n,
14602 n < 0 ? -1 : 0, 0);
14603 if (real_identical (&c, &cint))
14604 return true;
14607 return tree_expr_nonnegative_warnv_p (arg0,
14608 strict_overflow_p);
14610 default:
14611 break;
14613 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14614 type);
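/* Worked example for the BUILT_IN_POW case (editorial addition): for
   pow (x, 2.0) the REAL_CST 2.0 converts to n = 2; n is even, and
   converting 2 back with real_from_integer compares identical to the
   original constant, so the call is nonnegative for any x.  For
   pow (x, 2.5) the trip through real_to_integer drops the fraction,
   real_identical fails, and the result falls back to whether x itself
   is provably nonnegative.  */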
14617 /* Return true if T is known to be non-negative. If the return
14618 value is based on the assumption that signed overflow is undefined,
14619 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14620 *STRICT_OVERFLOW_P. */
14622 bool
14623 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14625 enum tree_code code = TREE_CODE (t);
14626 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14627 return true;
14629 switch (code)
14631 case TARGET_EXPR:
14633 tree temp = TARGET_EXPR_SLOT (t);
14634 t = TARGET_EXPR_INITIAL (t);
14636 /* If the initializer is non-void, then it's a normal expression
14637 that will be assigned to the slot. */
14638 if (!VOID_TYPE_P (t))
14639 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14641 /* Otherwise, the initializer sets the slot in some way. One common
14642 way is an assignment statement at the end of the initializer. */
14643 while (1)
14645 if (TREE_CODE (t) == BIND_EXPR)
14646 t = expr_last (BIND_EXPR_BODY (t));
14647 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14648 || TREE_CODE (t) == TRY_CATCH_EXPR)
14649 t = expr_last (TREE_OPERAND (t, 0));
14650 else if (TREE_CODE (t) == STATEMENT_LIST)
14651 t = expr_last (t);
14652 else
14653 break;
14655 if (TREE_CODE (t) == MODIFY_EXPR
14656 && TREE_OPERAND (t, 0) == temp)
14657 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14658 strict_overflow_p);
14660 return false;
14663 case CALL_EXPR:
14665 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14666 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14668 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14669 get_callee_fndecl (t),
14670 arg0,
14671 arg1,
14672 strict_overflow_p);
14674 case COMPOUND_EXPR:
14675 case MODIFY_EXPR:
14676 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14677 strict_overflow_p);
14678 case BIND_EXPR:
14679 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14680 strict_overflow_p);
14681 case SAVE_EXPR:
14682 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14683 strict_overflow_p);
14685 default:
14686 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14687 TREE_TYPE (t));
14690 /* We don't know sign of `t', so be conservative and return false. */
14691 return false;
14694 /* Return true if T is known to be non-negative. If the return
14695 value is based on the assumption that signed overflow is undefined,
14696 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14697 *STRICT_OVERFLOW_P. */
14699 bool
14700 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14702 enum tree_code code;
14703 if (t == error_mark_node)
14704 return false;
14706 code = TREE_CODE (t);
14707 switch (TREE_CODE_CLASS (code))
14709 case tcc_binary:
14710 case tcc_comparison:
14711 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14712 TREE_TYPE (t),
14713 TREE_OPERAND (t, 0),
14714 TREE_OPERAND (t, 1),
14715 strict_overflow_p);
14717 case tcc_unary:
14718 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14719 TREE_TYPE (t),
14720 TREE_OPERAND (t, 0),
14721 strict_overflow_p);
14723 case tcc_constant:
14724 case tcc_declaration:
14725 case tcc_reference:
14726 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14728 default:
14729 break;
14732 switch (code)
14734 case TRUTH_AND_EXPR:
14735 case TRUTH_OR_EXPR:
14736 case TRUTH_XOR_EXPR:
14737 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14738 TREE_TYPE (t),
14739 TREE_OPERAND (t, 0),
14740 TREE_OPERAND (t, 1),
14741 strict_overflow_p);
14742 case TRUTH_NOT_EXPR:
14743 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14744 TREE_TYPE (t),
14745 TREE_OPERAND (t, 0),
14746 strict_overflow_p);
14748 case COND_EXPR:
14749 case CONSTRUCTOR:
14750 case OBJ_TYPE_REF:
14751 case ASSERT_EXPR:
14752 case ADDR_EXPR:
14753 case WITH_SIZE_EXPR:
14754 case SSA_NAME:
14755 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14757 default:
14758 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14762 /* Return true if `t' is known to be non-negative. Handle warnings
14763 about undefined signed overflow. */
14765 bool
14766 tree_expr_nonnegative_p (tree t)
14768 bool ret, strict_overflow_p;
14770 strict_overflow_p = false;
14771 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14772 if (strict_overflow_p)
14773 fold_overflow_warning (("assuming signed overflow does not occur when "
14774 "determining that expression is always "
14775 "non-negative"),
14776 WARN_STRICT_OVERFLOW_MISC);
14777 return ret;
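/* Usage sketch (editorial addition, not GCC code): callers that do not
   track overflow assumptions themselves use this wrapper, e.g.

     if (tree_expr_nonnegative_p (arg))
       ... simplify abs (arg) to arg ...

   If the answer relied on signed overflow being undefined, the wrapper
   emits the -Wstrict-overflow diagnostic on the caller's behalf.  */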
14781 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14782 For floating point we further ensure that OP0 is not denormal.
14783 Similar logic is present in nonzero_address in rtlanal.c.
14785 If the return value is based on the assumption that signed overflow
14786 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14787 change *STRICT_OVERFLOW_P. */
14789 bool
14790 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14791 bool *strict_overflow_p)
14793 switch (code)
14795 case ABS_EXPR:
14796 return tree_expr_nonzero_warnv_p (op0,
14797 strict_overflow_p);
14799 case NOP_EXPR:
14801 tree inner_type = TREE_TYPE (op0);
14802 tree outer_type = type;
14804 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14805 && tree_expr_nonzero_warnv_p (op0,
14806 strict_overflow_p));
14808 break;
14810 case NON_LVALUE_EXPR:
14811 return tree_expr_nonzero_warnv_p (op0,
14812 strict_overflow_p);
14814 default:
14815 break;
14818 return false;
14821 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14822 For floating point we further ensure that the operands are not denormal.
14823 Similar logic is present in nonzero_address in rtlanal.c.
14825 If the return value is based on the assumption that signed overflow
14826 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14827 change *STRICT_OVERFLOW_P. */
14829 bool
14830 tree_binary_nonzero_warnv_p (enum tree_code code,
14831 tree type,
14832 tree op0,
14833 tree op1, bool *strict_overflow_p)
14835 bool sub_strict_overflow_p;
14836 switch (code)
14838 case POINTER_PLUS_EXPR:
14839 case PLUS_EXPR:
14840 if (TYPE_OVERFLOW_UNDEFINED (type))
14842 /* In the presence of negative values it is hard
14843 to say anything. */
14844 sub_strict_overflow_p = false;
14845 if (!tree_expr_nonnegative_warnv_p (op0,
14846 &sub_strict_overflow_p)
14847 || !tree_expr_nonnegative_warnv_p (op1,
14848 &sub_strict_overflow_p))
14849 return false;
14850 /* One of the operands must be positive and the other non-negative. */
14851 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14852 overflows, on a twos-complement machine the sum of two
14853 nonnegative numbers can never be zero. */
14854 return (tree_expr_nonzero_warnv_p (op0,
14855 strict_overflow_p)
14856 || tree_expr_nonzero_warnv_p (op1,
14857 strict_overflow_p));
14859 break;
14861 case MULT_EXPR:
14862 if (TYPE_OVERFLOW_UNDEFINED (type))
14864 if (tree_expr_nonzero_warnv_p (op0,
14865 strict_overflow_p)
14866 && tree_expr_nonzero_warnv_p (op1,
14867 strict_overflow_p))
14869 *strict_overflow_p = true;
14870 return true;
14873 break;
14875 case MIN_EXPR:
14876 sub_strict_overflow_p = false;
14877 if (tree_expr_nonzero_warnv_p (op0,
14878 &sub_strict_overflow_p)
14879 && tree_expr_nonzero_warnv_p (op1,
14880 &sub_strict_overflow_p))
14882 if (sub_strict_overflow_p)
14883 *strict_overflow_p = true;
14885 break;
14887 case MAX_EXPR:
14888 sub_strict_overflow_p = false;
14889 if (tree_expr_nonzero_warnv_p (op0,
14890 &sub_strict_overflow_p))
14892 if (sub_strict_overflow_p)
14893 *strict_overflow_p = true;
14895 /* When both operands are nonzero, then MAX must be too. */
14896 if (tree_expr_nonzero_warnv_p (op1,
14897 strict_overflow_p))
14898 return true;
14900 /* MAX where operand 0 is positive is positive. */
14901 return tree_expr_nonnegative_warnv_p (op0,
14902 strict_overflow_p);
14904 /* MAX where operand 1 is positive is positive. */
14905 else if (tree_expr_nonzero_warnv_p (op1,
14906 &sub_strict_overflow_p)
14907 && tree_expr_nonnegative_warnv_p (op1,
14908 &sub_strict_overflow_p))
14910 if (sub_strict_overflow_p)
14911 *strict_overflow_p = true;
14912 return true;
14914 break;
14916 case BIT_IOR_EXPR:
14917 return (tree_expr_nonzero_warnv_p (op1,
14918 strict_overflow_p)
14919 || tree_expr_nonzero_warnv_p (op0,
14920 strict_overflow_p));
14922 default:
14923 break;
14926 return false;
14929 /* Return true when T is an address and is known to be nonzero.
14930 For floating point we further ensure that T is not denormal.
14931 Similar logic is present in nonzero_address in rtlanal.c.
14933 If the return value is based on the assumption that signed overflow
14934 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14935 change *STRICT_OVERFLOW_P. */
14937 bool
14938 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14940 bool sub_strict_overflow_p;
14941 switch (TREE_CODE (t))
14943 case INTEGER_CST:
14944 return !integer_zerop (t);
14946 case ADDR_EXPR:
14948 tree base = TREE_OPERAND (t, 0);
14949 if (!DECL_P (base))
14950 base = get_base_address (base);
14952 if (!base)
14953 return false;
14955 /* Weak declarations may link to NULL. Other things may also be NULL
14956 so protect with -fdelete-null-pointer-checks; but not variables
14957 allocated on the stack. */
14958 if (DECL_P (base)
14959 && (flag_delete_null_pointer_checks
14960 || (DECL_CONTEXT (base)
14961 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14962 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
14963 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
14965 /* Constants are never weak. */
14966 if (CONSTANT_CLASS_P (base))
14967 return true;
14969 return false;
14972 case COND_EXPR:
14973 sub_strict_overflow_p = false;
14974 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14975 &sub_strict_overflow_p)
14976 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14977 &sub_strict_overflow_p))
14979 if (sub_strict_overflow_p)
14980 *strict_overflow_p = true;
14981 return true;
14983 break;
14985 default:
14986 break;
14988 return false;
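/* Worked example for the ADDR_EXPR case above (editorial addition): given
   "int g;" and "__attribute__((weak)) int w;", the address &g is known
   nonzero under -fdelete-null-pointer-checks, while &w is not, because an
   undefined weak symbol may resolve to address zero and DECL_WEAK makes
   the test return false.  The address of a local (auto) variable passes
   the auto_var_in_fn_p test and is nonzero even without the flag.  */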
14991 /* Return true when T is an address and is known to be nonzero.
14992 For floating point we further ensure that T is not denormal.
14993 Similar logic is present in nonzero_address in rtlanal.c.
14995 If the return value is based on the assumption that signed overflow
14996 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14997 change *STRICT_OVERFLOW_P. */
14999 bool
15000 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15002 tree type = TREE_TYPE (t);
15003 enum tree_code code;
15005 /* Doing something useful for floating point would need more work. */
15006 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15007 return false;
15009 code = TREE_CODE (t);
15010 switch (TREE_CODE_CLASS (code))
15012 case tcc_unary:
15013 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15014 strict_overflow_p);
15015 case tcc_binary:
15016 case tcc_comparison:
15017 return tree_binary_nonzero_warnv_p (code, type,
15018 TREE_OPERAND (t, 0),
15019 TREE_OPERAND (t, 1),
15020 strict_overflow_p);
15021 case tcc_constant:
15022 case tcc_declaration:
15023 case tcc_reference:
15024 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15026 default:
15027 break;
15030 switch (code)
15032 case TRUTH_NOT_EXPR:
15033 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15034 strict_overflow_p);
15036 case TRUTH_AND_EXPR:
15037 case TRUTH_OR_EXPR:
15038 case TRUTH_XOR_EXPR:
15039 return tree_binary_nonzero_warnv_p (code, type,
15040 TREE_OPERAND (t, 0),
15041 TREE_OPERAND (t, 1),
15042 strict_overflow_p);
15044 case COND_EXPR:
15045 case CONSTRUCTOR:
15046 case OBJ_TYPE_REF:
15047 case ASSERT_EXPR:
15048 case ADDR_EXPR:
15049 case WITH_SIZE_EXPR:
15050 case SSA_NAME:
15051 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15053 case COMPOUND_EXPR:
15054 case MODIFY_EXPR:
15055 case BIND_EXPR:
15056 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15057 strict_overflow_p);
15059 case SAVE_EXPR:
15060 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15061 strict_overflow_p);
15063 case CALL_EXPR:
15064 return alloca_call_p (t);
15066 default:
15067 break;
15069 return false;
15072 /* Return true when T is an address and is known to be nonzero.
15073 Handle warnings about undefined signed overflow. */
15075 bool
15076 tree_expr_nonzero_p (tree t)
15078 bool ret, strict_overflow_p;
15080 strict_overflow_p = false;
15081 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15082 if (strict_overflow_p)
15083 fold_overflow_warning (("assuming signed overflow does not occur when "
15084 "determining that expression is always "
15085 "non-zero"),
15086 WARN_STRICT_OVERFLOW_MISC);
15087 return ret;
15090 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15091 attempt to fold the expression to a constant without modifying TYPE,
15092 OP0 or OP1.
15094 If the expression could be simplified to a constant, then return
15095 the constant. If the expression would not be simplified to a
15096 constant, then return NULL_TREE. */
15098 tree
15099 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15101 tree tem = fold_binary (code, type, op0, op1);
15102 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
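/* Usage sketch (editorial addition, not GCC code): unlike fold_build2,
   this never builds a new non-constant node.

     tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                       build_int_cst (integer_type_node, 3),
                                       build_int_cst (integer_type_node, 4));

   Here c is the INTEGER_CST 7; with a variable operand instead, the
   result would be NULL_TREE rather than a freshly built PLUS_EXPR.  */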
15105 /* Given the components of a unary expression CODE, TYPE and OP0,
15106 attempt to fold the expression to a constant without modifying
15107 TYPE or OP0.
15109 If the expression could be simplified to a constant, then return
15110 the constant. If the expression would not be simplified to a
15111 constant, then return NULL_TREE. */
15113 tree
15114 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15116 tree tem = fold_unary (code, type, op0);
15117 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15120 /* If EXP represents referencing an element in a constant string
15121 (either via pointer arithmetic or array indexing), return the
15122 tree representing the value accessed, otherwise return NULL. */
15124 tree
15125 fold_read_from_constant_string (tree exp)
15127 if ((TREE_CODE (exp) == INDIRECT_REF
15128 || TREE_CODE (exp) == ARRAY_REF)
15129 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15131 tree exp1 = TREE_OPERAND (exp, 0);
15132 tree index;
15133 tree string;
15134 location_t loc = EXPR_LOCATION (exp);
15136 if (TREE_CODE (exp) == INDIRECT_REF)
15137 string = string_constant (exp1, &index);
15138 else
15140 tree low_bound = array_ref_low_bound (exp);
15141 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15143 /* Optimize the special-case of a zero lower bound.
15145 We convert the low_bound to sizetype to avoid some problems
15146 with constant folding. (E.g. suppose the lower bound is 1,
15147 and its mode is QI. Without the conversion, (ARRAY
15148 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15149 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15150 if (! integer_zerop (low_bound))
15151 index = size_diffop_loc (loc, index,
15152 fold_convert_loc (loc, sizetype, low_bound));
15154 string = exp1;
15157 if (string
15158 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15159 && TREE_CODE (string) == STRING_CST
15160 && TREE_CODE (index) == INTEGER_CST
15161 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15162 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15163 == MODE_INT)
15164 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15165 return build_int_cst_type (TREE_TYPE (exp),
15166 (TREE_STRING_POINTER (string)
15167 [TREE_INT_CST_LOW (index)]));
15169 return NULL;
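/* Worked example (editorial addition): for the C expression "hello"[1],
   EXP is an ARRAY_REF of a STRING_CST with constant index 1; the checks
   above confirm a one-byte integer mode and an index below
   TREE_STRING_LENGTH, so the result is the character constant 'e' built
   by build_int_cst_type.  An index of 99 fails the compare_tree_int
   bound check and the function returns NULL.  */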
15172 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15173 an integer constant, real, or fixed-point constant.
15175 TYPE is the type of the result. */
15177 static tree
15178 fold_negate_const (tree arg0, tree type)
15180 tree t = NULL_TREE;
15182 switch (TREE_CODE (arg0))
15184 case INTEGER_CST:
15186 double_int val = tree_to_double_int (arg0);
15187 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15189 t = force_fit_type_double (type, val, 1,
15190 (overflow | TREE_OVERFLOW (arg0))
15191 && !TYPE_UNSIGNED (type));
15192 break;
15195 case REAL_CST:
15196 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15197 break;
15199 case FIXED_CST:
15201 FIXED_VALUE_TYPE f;
15202 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15203 &(TREE_FIXED_CST (arg0)), NULL,
15204 TYPE_SATURATING (type));
15205 t = build_fixed (type, f);
15206 /* Propagate overflow flags. */
15207 if (overflow_p | TREE_OVERFLOW (arg0))
15208 TREE_OVERFLOW (t) = 1;
15209 break;
15212 default:
15213 gcc_unreachable ();
15216 return t;
15219 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15220 an integer constant or real constant.
15222 TYPE is the type of the result. */
15224 tree
15225 fold_abs_const (tree arg0, tree type)
15227 tree t = NULL_TREE;
15229 switch (TREE_CODE (arg0))
15231 case INTEGER_CST:
15233 double_int val = tree_to_double_int (arg0);
15235 /* If the value is unsigned or non-negative, then the absolute value
15236 is the same as the ordinary value. */
15237 if (TYPE_UNSIGNED (type)
15238 || !double_int_negative_p (val))
15239 t = arg0;
15241 /* If the value is negative, then the absolute value is
15242 its negation. */
15243 else
15245 int overflow;
15247 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15248 t = force_fit_type_double (type, val, -1,
15249 overflow | TREE_OVERFLOW (arg0));
15252 break;
15254 case REAL_CST:
15255 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15256 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15257 else
15258 t = arg0;
15259 break;
15261 default:
15262 gcc_unreachable ();
15265 return t;
15268 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15269 constant. TYPE is the type of the result. */
15271 static tree
15272 fold_not_const (const_tree arg0, tree type)
15274 double_int val;
15276 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15278 val = double_int_not (tree_to_double_int (arg0));
15279 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
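/* Worked examples for the three constant folders above (editorial
   addition): fold_negate_const on INT_MIN in int wraps back to INT_MIN
   and sets TREE_OVERFLOW on the result via force_fit_type_double;
   fold_abs_const on -5 yields 5 and returns an unsigned or nonnegative
   constant unchanged; fold_not_const on 0 of type unsigned char yields
   255, the bitwise complement truncated to the type's precision.  */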
15282 /* Given CODE, a relational operator, the target type, TYPE and two
15283 constant operands OP0 and OP1, return the result of the
15284 relational operation. If the result is not a compile time
15285 constant, then return NULL_TREE. */
15287 static tree
15288 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15290 int result, invert;
15292 /* From here on, the only cases we handle are when the result is
15293 known to be a constant. */
15295 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15297 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15298 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15300 /* Handle the cases where either operand is a NaN. */
15301 if (real_isnan (c0) || real_isnan (c1))
15303 switch (code)
15305 case EQ_EXPR:
15306 case ORDERED_EXPR:
15307 result = 0;
15308 break;
15310 case NE_EXPR:
15311 case UNORDERED_EXPR:
15312 case UNLT_EXPR:
15313 case UNLE_EXPR:
15314 case UNGT_EXPR:
15315 case UNGE_EXPR:
15316 case UNEQ_EXPR:
15317 result = 1;
15318 break;
15320 case LT_EXPR:
15321 case LE_EXPR:
15322 case GT_EXPR:
15323 case GE_EXPR:
15324 case LTGT_EXPR:
15325 if (flag_trapping_math)
15326 return NULL_TREE;
15327 result = 0;
15328 break;
15330 default:
15331 gcc_unreachable ();
15334 return constant_boolean_node (result, type);
15337 return constant_boolean_node (real_compare (code, c0, c1), type);
15340 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15342 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15343 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15344 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15347 /* Handle equality/inequality of complex constants. */
15348 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15350 tree rcond = fold_relational_const (code, type,
15351 TREE_REALPART (op0),
15352 TREE_REALPART (op1));
15353 tree icond = fold_relational_const (code, type,
15354 TREE_IMAGPART (op0),
15355 TREE_IMAGPART (op1));
15356 if (code == EQ_EXPR)
15357 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15358 else if (code == NE_EXPR)
15359 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15360 else
15361 return NULL_TREE;
15364 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15366 To compute GT, swap the arguments and do LT.
15367 To compute GE, do LT and invert the result.
15368 To compute LE, swap the arguments, do LT and invert the result.
15369 To compute NE, do EQ and invert the result.
15371 Therefore, the code below must handle only EQ and LT. */
15373 if (code == LE_EXPR || code == GT_EXPR)
15375 tree tem = op0;
15376 op0 = op1;
15377 op1 = tem;
15378 code = swap_tree_comparison (code);
15381 /* Note that it is safe to invert for real values here because we
15382 have already handled the one case where it matters. */
15384 invert = 0;
15385 if (code == NE_EXPR || code == GE_EXPR)
15387 invert = 1;
15388 code = invert_tree_comparison (code, false);
15391 /* Compute a result for LT or EQ if args permit;
15392 Otherwise return NULL_TREE. */
15393 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15395 if (code == EQ_EXPR)
15396 result = tree_int_cst_equal (op0, op1);
15397 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15398 result = INT_CST_LT_UNSIGNED (op0, op1);
15399 else
15400 result = INT_CST_LT (op0, op1);
15402 else
15403 return NULL_TREE;
15405 if (invert)
15406 result ^= 1;
15407 return constant_boolean_node (result, type);
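/* Worked example for the NaN handling above (editorial addition): with
   REAL_CST operands 1.0 and NaN, EQ_EXPR and ORDERED_EXPR fold to false
   and NE_EXPR and the UN* codes fold to true unconditionally, but
   LT_EXPR folds to false only when !flag_trapping_math: a signaling
   comparison against NaN must otherwise survive to run time so it can
   raise the invalid-operand exception.  */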
15410 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15411 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15412 itself. */
15414 tree
15415 fold_build_cleanup_point_expr (tree type, tree expr)
15417 /* If the expression does not have side effects then we don't have to wrap
15418 it with a cleanup point expression. */
15419 if (!TREE_SIDE_EFFECTS (expr))
15420 return expr;
15422 /* If the expression is a return, check the expression inside the return:
15423 if either it or the right-hand side of the modify expression it contains
15424 has no side effects, we don't need to wrap the expression in a cleanup
15425 point expression. Note we don't check the left-hand side of the modify
15426 because it should always be the return decl. */
15427 if (TREE_CODE (expr) == RETURN_EXPR)
15429 tree op = TREE_OPERAND (expr, 0);
15430 if (!op || !TREE_SIDE_EFFECTS (op))
15431 return expr;
15432 op = TREE_OPERAND (op, 1);
15433 if (!TREE_SIDE_EFFECTS (op))
15434 return expr;
15437 return build1 (CLEANUP_POINT_EXPR, type, expr);
15440 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15441 of an indirection through OP0, or NULL_TREE if no simplification is
15442 possible. */
15444 tree
15445 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15447 tree sub = op0;
15448 tree subtype;
15450 STRIP_NOPS (sub);
15451 subtype = TREE_TYPE (sub);
15452 if (!POINTER_TYPE_P (subtype))
15453 return NULL_TREE;
15455 if (TREE_CODE (sub) == ADDR_EXPR)
15457 tree op = TREE_OPERAND (sub, 0);
15458 tree optype = TREE_TYPE (op);
15459 /* *&CONST_DECL -> the value of the const decl. */
15460 if (TREE_CODE (op) == CONST_DECL)
15461 return DECL_INITIAL (op);
15462 /* *&p => p; make sure to handle *&"str"[cst] here. */
15463 if (type == optype)
15465 tree fop = fold_read_from_constant_string (op);
15466 if (fop)
15467 return fop;
15468 else
15469 return op;
15471 /* *(foo *)&fooarray => fooarray[0] */
15472 else if (TREE_CODE (optype) == ARRAY_TYPE
15473 && type == TREE_TYPE (optype))
15475 tree type_domain = TYPE_DOMAIN (optype);
15476 tree min_val = size_zero_node;
15477 if (type_domain && TYPE_MIN_VALUE (type_domain))
15478 min_val = TYPE_MIN_VALUE (type_domain);
15479 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15480 SET_EXPR_LOCATION (op0, loc);
15481 return op0;
15483 /* *(foo *)&complexfoo => __real__ complexfoo */
15484 else if (TREE_CODE (optype) == COMPLEX_TYPE
15485 && type == TREE_TYPE (optype))
15486 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15487 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15488 else if (TREE_CODE (optype) == VECTOR_TYPE
15489 && type == TREE_TYPE (optype))
15491 tree part_width = TYPE_SIZE (type);
15492 tree index = bitsize_int (0);
15493 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15497 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15498 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15499 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15501 tree op00 = TREE_OPERAND (sub, 0);
15502 tree op01 = TREE_OPERAND (sub, 1);
15503 tree op00type;
15505 STRIP_NOPS (op00);
15506 op00type = TREE_TYPE (op00);
15507 if (TREE_CODE (op00) == ADDR_EXPR
15508 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15509 && type == TREE_TYPE (TREE_TYPE (op00type)))
15511 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15512 tree part_width = TYPE_SIZE (type);
15513 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15514 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15515 tree index = bitsize_int (indexi);
15517 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15518 return fold_build3_loc (loc,
15519 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15520 part_width, index);
15526 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15527 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15528 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15530 tree op00 = TREE_OPERAND (sub, 0);
15531 tree op01 = TREE_OPERAND (sub, 1);
15532 tree op00type;
15534 STRIP_NOPS (op00);
15535 op00type = TREE_TYPE (op00);
15536 if (TREE_CODE (op00) == ADDR_EXPR
15537 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15538 && type == TREE_TYPE (TREE_TYPE (op00type)))
15540 tree size = TYPE_SIZE_UNIT (type);
15541 if (tree_int_cst_equal (size, op01))
15542 return fold_build1_loc (loc, IMAGPART_EXPR, type,
15543 TREE_OPERAND (op00, 0));
15547 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15548 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15549 && type == TREE_TYPE (TREE_TYPE (subtype)))
15551 tree type_domain;
15552 tree min_val = size_zero_node;
15553 sub = build_fold_indirect_ref_loc (loc, sub);
15554 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15555 if (type_domain && TYPE_MIN_VALUE (type_domain))
15556 min_val = TYPE_MIN_VALUE (type_domain);
15557 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15558 SET_EXPR_LOCATION (op0, loc);
15559 return op0;
15562 return NULL_TREE;
15565 /* Builds an expression for an indirection through T, simplifying some
15566 cases. */
15568 tree
15569 build_fold_indirect_ref_loc (location_t loc, tree t)
15571 tree type = TREE_TYPE (TREE_TYPE (t));
15572 tree sub = fold_indirect_ref_1 (loc, type, t);
15574 if (sub)
15575 return sub;
15577 t = build1 (INDIRECT_REF, type, t);
15578 SET_EXPR_LOCATION (t, loc);
15579 return t;
15582 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15584 tree
15585 fold_indirect_ref_loc (location_t loc, tree t)
15587 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15589 if (sub)
15590 return sub;
15591 else
15592 return t;
15595 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15596 whose result is ignored. The type of the returned tree need not be
15597 the same as the original expression. */
15599 tree
15600 fold_ignored_result (tree t)
15602 if (!TREE_SIDE_EFFECTS (t))
15603 return integer_zero_node;
15605 for (;;)
15606 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15608 case tcc_unary:
15609 t = TREE_OPERAND (t, 0);
15610 break;
15612 case tcc_binary:
15613 case tcc_comparison:
15614 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15615 t = TREE_OPERAND (t, 0);
15616 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15617 t = TREE_OPERAND (t, 1);
15618 else
15619 return t;
15620 break;
15622 case tcc_expression:
15623 switch (TREE_CODE (t))
15625 case COMPOUND_EXPR:
15626 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15627 return t;
15628 t = TREE_OPERAND (t, 0);
15629 break;
15631 case COND_EXPR:
15632 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15633 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15634 return t;
15635 t = TREE_OPERAND (t, 0);
15636 break;
15638 default:
15639 return t;
15641 break;
15643 default:
15644 return t;
15648 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15649 This can only be applied to objects of a sizetype. */
15651 tree
15652 round_up_loc (location_t loc, tree value, int divisor)
15654 tree div = NULL_TREE;
15656 gcc_assert (divisor > 0);
15657 if (divisor == 1)
15658 return value;
15660 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15661 have to do anything. Only do this when we are not given a const,
15662 because for a constant this check is more expensive than just
15663 doing the rounding. */
15664 if (TREE_CODE (value) != INTEGER_CST)
15666 div = build_int_cst (TREE_TYPE (value), divisor);
15668 if (multiple_of_p (TREE_TYPE (value), value, div))
15669 return value;
15672 /* If divisor is a power of two, simplify this to bit manipulation. */
15673 if (divisor == (divisor & -divisor))
15675 if (TREE_CODE (value) == INTEGER_CST)
15677 double_int val = tree_to_double_int (value);
15678 bool overflow_p;
15680 if ((val.low & (divisor - 1)) == 0)
15681 return value;
15683 overflow_p = TREE_OVERFLOW (value);
15684 val.low &= ~(divisor - 1);
15685 val.low += divisor;
15686 if (val.low == 0)
15688 val.high++;
15689 if (val.high == 0)
15690 overflow_p = true;
15693 return force_fit_type_double (TREE_TYPE (value), val,
15694 -1, overflow_p);
15696 else
15698 tree t;
15700 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15701 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15702 t = build_int_cst (TREE_TYPE (value), -divisor);
15703 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15706 else
15708 if (!div)
15709 div = build_int_cst (TREE_TYPE (value), divisor);
15710 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15711 value = size_binop_loc (loc, MULT_EXPR, value, div);
15714 return value;
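/* Worked example for the power-of-two path above (editorial addition):
   rounding VALUE up to a multiple of 8 becomes (VALUE + 7) & -8, i.e.
   two size_binop_loc calls for a non-constant VALUE, while the constant
   13 takes the double_int arm: (13 & ~7) + 8 = 16.  A non-power divisor
   such as 12 takes the CEIL_DIV_EXPR / MULT_EXPR path instead, so 13
   rounds up to ceil (13 / 12) * 12 = 24.  */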
15717 /* Likewise, but round down. */
15719 tree
15720 round_down_loc (location_t loc, tree value, int divisor)
15722 tree div = NULL_TREE;
15724 gcc_assert (divisor > 0);
15725 if (divisor == 1)
15726 return value;
15728 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15729 have to do anything. Only do this when we are not given a const,
15730 because for a constant this check is more expensive than just
15731 doing the rounding. */
15732 if (TREE_CODE (value) != INTEGER_CST)
15734 div = build_int_cst (TREE_TYPE (value), divisor);
15736 if (multiple_of_p (TREE_TYPE (value), value, div))
15737 return value;
15740 /* If divisor is a power of two, simplify this to bit manipulation. */
15741 if (divisor == (divisor & -divisor))
15743 tree t;
15745 t = build_int_cst (TREE_TYPE (value), -divisor);
15746 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15748 else
15750 if (!div)
15751 div = build_int_cst (TREE_TYPE (value), divisor);
15752 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15753 value = size_binop_loc (loc, MULT_EXPR, value, div);
15756 return value;
15759 /* Returns the pointer to the base of the object addressed by EXP and
15760 extracts the information about the offset of the access, storing it
15761 in *PBITPOS and *POFFSET. */
15763 static tree
15764 split_address_to_core_and_offset (tree exp,
15765 HOST_WIDE_INT *pbitpos, tree *poffset)
15767 tree core;
15768 enum machine_mode mode;
15769 int unsignedp, volatilep;
15770 HOST_WIDE_INT bitsize;
15771 location_t loc = EXPR_LOCATION (exp);
15773 if (TREE_CODE (exp) == ADDR_EXPR)
15775 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15776 poffset, &mode, &unsignedp, &volatilep,
15777 false);
15778 core = build_fold_addr_expr_loc (loc, core);
15780 else
15782 core = exp;
15783 *pbitpos = 0;
15784 *poffset = NULL_TREE;
15787 return core;
15790 /* Returns true if addresses of E1 and E2 differ by a constant, false
15791 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15793 bool
15794 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15796 tree core1, core2;
15797 HOST_WIDE_INT bitpos1, bitpos2;
15798 tree toffset1, toffset2, tdiff, type;
15800 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15801 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15803 if (bitpos1 % BITS_PER_UNIT != 0
15804 || bitpos2 % BITS_PER_UNIT != 0
15805 || !operand_equal_p (core1, core2, 0))
15806 return false;
15808 if (toffset1 && toffset2)
15810 type = TREE_TYPE (toffset1);
15811 if (type != TREE_TYPE (toffset2))
15812 toffset2 = fold_convert (type, toffset2);
15814 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15815 if (!cst_and_fits_in_hwi (tdiff))
15816 return false;
15818 *diff = int_cst_value (tdiff);
15820 else if (toffset1 || toffset2)
15822 /* If only one of the offsets is non-constant, the difference cannot
15823 be a constant. */
15824 return false;
15826 else
15827 *diff = 0;
15829 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15830 return true;
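/* Worked example (editorial addition): for "int a[10];", e1 = &a[5] and
   e2 = &a[2] split into the same core &a with bit positions 160 and 64,
   so *diff becomes (160 - 64) / BITS_PER_UNIT = 12 on a 4-byte-int
   target.  For e1 = &a[i] with variable i the offset comes back in
   *poffset as a non-constant tree, and the function returns false.  */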
15833 /* Simplify the floating point expression EXP when the sign of the
15834 result is not significant. Return NULL_TREE if no simplification
15835 is possible. */
15837 tree
15838 fold_strip_sign_ops (tree exp)
15840 tree arg0, arg1;
15841 location_t loc = EXPR_LOCATION (exp);
15843 switch (TREE_CODE (exp))
15845 case ABS_EXPR:
15846 case NEGATE_EXPR:
15847 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15848 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15850 case MULT_EXPR:
15851 case RDIV_EXPR:
15852 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15853 return NULL_TREE;
15854 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15855 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15856 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15857 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15858 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15859 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15860 break;
15862 case COMPOUND_EXPR:
15863 arg0 = TREE_OPERAND (exp, 0);
15864 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15865 if (arg1)
15866 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15867 break;
15869 case COND_EXPR:
15870 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15871 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15872 if (arg0 || arg1)
15873 return fold_build3_loc (loc,
15874 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15875 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15876 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15877 break;
15879 case CALL_EXPR:
15881 const enum built_in_function fcode = builtin_mathfn_code (exp);
15882 switch (fcode)
15884 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15885 /* Strip copysign function call, return the 1st argument. */
15886 arg0 = CALL_EXPR_ARG (exp, 0);
15887 arg1 = CALL_EXPR_ARG (exp, 1);
15888 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15890 default:
15891 /* Strip sign ops from the argument of "odd" math functions. */
15892 if (negate_mathfn_p (fcode))
15894 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15895 if (arg0)
15896 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
15898 break;
15901 break;
15903 default:
15904 break;
15906 return NULL_TREE;
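/* Usage sketch (editorial addition, not GCC code): callers such as the
   pow and hypot folders in builtins.c use this when the sign of a
   subexpression cannot affect the final value:

     tree stripped = fold_strip_sign_ops (arg);
     if (stripped)
       ... use stripped in place of arg ...

   Under that contract copysign (x, y) reduces to x (keeping y only for
   its side effects via omit_one_operand_loc), and sin (-x) may become
   sin (x), since sin is one of negate_mathfn_p's odd functions whose
   result sign the caller has promised to ignore.  */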