/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);

static tree fold_negate_const (tree, tree);
static tree fold_abs_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
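
/* Illustrative note (added): the encoding uses one bit per primitive
   outcome: COMPCODE_LT == 1, COMPCODE_EQ == 2, COMPCODE_GT == 4.
   Compound codes are unions of those bits, e.g. COMPCODE_LE ==
   COMPCODE_LT | COMPCODE_EQ == 3 and COMPCODE_NE == COMPCODE_LT
   | COMPCODE_GT == 5, so ANDing or ORing two comparison codes
   composes them bitwise.  */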

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
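
/* Worked example (illustrative), using 4-bit two's complement for
   brevity: a = 7 (0111), b = 1 (0001), sum = -8 (1000).  Then
   ~(a ^ b) = ~(0110) = 1001 and (a ^ sum) = 1111; their AND is 1001,
   whose sign bit is set, so the macro reports the overflow of 7 + 1.  */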

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
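
/* Worked example (illustrative), assuming HOST_BITS_PER_WIDE_INT == 32:
   encode splits low = 0x89ABCDEF, hi = 0x01234567 into the half-word
   digits {0xCDEF, 0x89AB, 0x4567, 0x0123}; decode reassembles
   words[0] + words[1] * BASE == 0x89ABCDEF, so the two are exact
   inverses.  */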

/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
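
/* Example (illustrative): for T == 128 in an 8-bit signed type,
   prec == 8; bit 7 survives the masking and is the sign bit, so the
   value is sign-extended to -128.  The low word changes from 128 to
   (unsigned HOST_WIDE_INT) -128, making the final XOR test nonzero:
   +128 did not fit, and signed overflow is reported.  */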

/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
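
/* Note (illustrative): the carry out of the low word is recovered from
   unsigned wraparound: l = l1 + l2 wraps iff l < l1.  For example
   l1 == ~(unsigned HOST_WIDE_INT) 0 and l2 == 1 give l == 0 < l1, so
   a carry of 1 is added into the high word.  */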

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
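
/* Note (illustrative): this is -x == ~x + 1 split across two words.
   When l1 != 0 the `+ 1' is absorbed by the low word (-l1 == ~l1 + 1
   with no carry out), leaving *hv == ~h1; when l1 == 0 the carry
   ripples into the high word, so *hv == -h1, and the only overflow is
   negating the most negative doubleword value, caught by
   (*hv & h1) < 0.  */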

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
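
/* Note (illustrative): this is schoolbook multiplication in base
   2^(HOST_BITS_PER_WIDE_INT / 2), one half-word digit at a time.  The
   overflow test relies on the fact that a signed product fits in the
   low doubleword iff the (sign-adjusted) top doubleword is all zero
   bits for a non-negative result, or all one bits for a negative one,
   which is exactly what the final return expression checks.  */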

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
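
/* Note (illustrative): the expression
   l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1 equals
   l1 >> (HOST_BITS_PER_WIDE_INT - count), but stays well defined when
   count == 0, where a single shift by the full word width would be
   undefined behavior in C.  */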

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
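
/* Note (illustrative): both rotate helpers rely on the identity
   rot(x, count) == (x shifted one way by count)
                    | (x shifted the other way by prec - count),
   built from the logical doubleword shifts above; the `count %= prec'
   normalization keeps both shift counts within [0, prec).  */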

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;	/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
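
/* Worked example (illustrative): dividing -7 by 2 gives a trial
   quotient of -3 with remainder -1.  TRUNC_DIV_EXPR keeps -3;
   FLOOR_DIV_EXPR adjusts down to -4 (remainder 1); CEIL_DIV_EXPR keeps
   -3; ROUND_DIV_EXPR sees 2 * |rem| == 2 >= |den| == 2 and, the
   quotient being negative, also adjusts to -4.  */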

/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
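
/* Note (illustrative): every function listed above is odd, i.e.
   f(-x) == -f(x), for example sin(-x) == -sin(x).  An even function
   such as cos, where cos(-x) == cos(x), must not appear here.  */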

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
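
/* Note (illustrative): with -ftrapv the INTEGER_CST case must reject
   exactly the one value whose negation overflows, the most negative
   value of the type (e.g. -2147483648 for 32-bit int).  The code masks
   the constant down to PREC bits and compares it with 1 << (prec - 1),
   the bit pattern of that value.  */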

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 1)),
                                              TREE_OPERAND (t, 0))));
          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 0)),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
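
/* Worked example (illustrative) for the RSHIFT_EXPR case, with a
   32-bit int x: (int) x >> 31 is 0 or -1 (arithmetic shift of the sign
   bit), so its negation is 0 or 1; (unsigned) x >> 31 is likewise 0 or
   1, so the negation can be dropped by toggling the signedness of the
   shift.  */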

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
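
/* Worked example (illustrative): splitting IN == x - 5 with
   CODE == PLUS_EXPR takes the MINUS_EXPR branch with neg1_p == 1; the
   literal 5 is recorded with neg_litp_p set, so it ends up in
   *MINUS_LITP, *LITP and *CONP stay null, and x is returned as the
   variable part.  */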

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t2),
                          fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t1),
                          fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, fold_convert (type, t1),
                    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
                      fold_convert (type, t2)));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
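
/* Note (illustrative) on the MIN_EXPR/MAX_EXPR case above: LOW
   temporarily holds the doubleword comparison arg1 < arg2, and
   `low == (code == MIN_EXPR)' then selects arg1 exactly when it is the
   smaller operand of a MIN or the not-smaller operand of a MAX.  */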

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
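
/* Worked equation (added for illustration): the RDIV_EXPR case above
   is the textbook formula
     (a + bi) / (c + di) == ((ac + bd) + (bc - ad)i) / (c^2 + d^2),
   with MAGSQUARED holding c^2 + d^2.  */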

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that represented by *Y.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}

/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TYPE_UNSIGNED (type)
                                     < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;

          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            case FIX_ROUND_EXPR:
              real_round (&r, VOIDmode, &x);
              break;

            default:
              abort ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
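
/* Example (illustrative) of the Java-style saturating semantics above,
   converting to a 32-bit signed type: NaN folds to 0 with the overflow
   flag set, 1e30 folds to INT_MAX (2147483647), and -1e30 folds to
   INT_MIN, in each case recording TREE_OVERFLOW on the result.  */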
1878 /* Convert expression ARG to type TYPE. Used by the middle-end for
1879 simple conversions in preference to calling the front-end's convert. */
1881 tree
1882 fold_convert (tree type, tree arg)
1884 tree orig = TREE_TYPE (arg);
1885 tree tem;
1887 if (type == orig)
1888 return arg;
1890 if (TREE_CODE (arg) == ERROR_MARK
1891 || TREE_CODE (type) == ERROR_MARK
1892 || TREE_CODE (orig) == ERROR_MARK)
1893 return error_mark_node;
1895 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1896 return fold (build1 (NOP_EXPR, type, arg));
1898 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1900 if (TREE_CODE (arg) == INTEGER_CST)
1902 tem = fold_convert_const (NOP_EXPR, type, arg);
1903 if (tem != NULL_TREE)
1904 return tem;
1906 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1907 return fold (build1 (NOP_EXPR, type, arg));
1908 if (TREE_CODE (orig) == COMPLEX_TYPE)
1910 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1911 return fold_convert (type, tem);
1913 if (TREE_CODE (orig) == VECTOR_TYPE
1914 && GET_MODE_SIZE (TYPE_MODE (type))
1915 == GET_MODE_SIZE (TYPE_MODE (orig)))
1916 return fold (build1 (NOP_EXPR, type, arg));
1918 else if (TREE_CODE (type) == REAL_TYPE)
1920 if (TREE_CODE (arg) == INTEGER_CST)
1922 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1923 if (tem != NULL_TREE)
1924 return tem;
1926 else if (TREE_CODE (arg) == REAL_CST)
1928 tem = fold_convert_const (NOP_EXPR, type, arg);
1929 if (tem != NULL_TREE)
1930 return tem;
1933 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1934 return fold (build1 (FLOAT_EXPR, type, arg));
1935 if (TREE_CODE (orig) == REAL_TYPE)
1936 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1937 type, arg));
1938 if (TREE_CODE (orig) == COMPLEX_TYPE)
1940 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1941 return fold_convert (type, tem);
1944 else if (TREE_CODE (type) == COMPLEX_TYPE)
1946 if (INTEGRAL_TYPE_P (orig)
1947 || POINTER_TYPE_P (orig)
1948 || TREE_CODE (orig) == REAL_TYPE)
1949 return build (COMPLEX_EXPR, type,
1950 fold_convert (TREE_TYPE (type), arg),
1951 fold_convert (TREE_TYPE (type), integer_zero_node));
1952 if (TREE_CODE (orig) == COMPLEX_TYPE)
1954 tree rpart, ipart;
1956 if (TREE_CODE (arg) == COMPLEX_EXPR)
1958 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1959 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1960 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1963 arg = save_expr (arg);
1964 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1965 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1966 rpart = fold_convert (TREE_TYPE (type), rpart);
1967 ipart = fold_convert (TREE_TYPE (type), ipart);
1968 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1971 else if (TREE_CODE (type) == VECTOR_TYPE)
1973 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1974 && GET_MODE_SIZE (TYPE_MODE (type))
1975 == GET_MODE_SIZE (TYPE_MODE (orig)))
1976 return fold (build1 (NOP_EXPR, type, arg));
1977 if (TREE_CODE (orig) == VECTOR_TYPE
1978 && GET_MODE_SIZE (TYPE_MODE (type))
1979 == GET_MODE_SIZE (TYPE_MODE (orig)))
1980 return fold (build1 (NOP_EXPR, type, arg));
1982 else if (VOID_TYPE_P (type))
1983 return fold (build1 (CONVERT_EXPR, type, arg));
1984 abort ();
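/* A minimal standalone illustration (plain C99, not GCC internals) of
   the conversion semantics fold_convert implements above: a scalar
   converted to a complex type gets a zero imaginary part, and a
   complex value converted to a scalar type keeps only its real part,
   matching the COMPLEX_EXPR and REALPART_EXPR cases. */
#include <assert.h>
#include <complex.h>

int
main (void)
{
  double _Complex z = 3.0 + 4.0 * I;
  int i = (int) z;              /* complex -> scalar: real part only */
  double _Complex w = 7;        /* scalar -> complex: zero imag part */
  assert (i == 3);
  assert (creal (w) == 7.0 && cimag (w) == 0.0);
  return 0;
}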
1987 /* Return an expr equal to X but certainly not valid as an lvalue. */
1989 tree
1990 non_lvalue (tree x)
1992 tree result;
1994 /* These things are certainly not lvalues. */
1995 if (TREE_CODE (x) == NON_LVALUE_EXPR
1996 || TREE_CODE (x) == INTEGER_CST
1997 || TREE_CODE (x) == REAL_CST
1998 || TREE_CODE (x) == STRING_CST
1999 || TREE_CODE (x) == ADDR_EXPR)
2000 return x;
2002 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2003 TREE_CONSTANT (result) = TREE_CONSTANT (x);
2004 return result;
2007 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2008 Zero means allow extended lvalues. */
2010 int pedantic_lvalues;
2012 /* When pedantic, return an expr equal to X but certainly not valid as a
2013 pedantic lvalue. Otherwise, return X. */
2015 tree
2016 pedantic_non_lvalue (tree x)
2018 if (pedantic_lvalues)
2019 return non_lvalue (x);
2020 else
2021 return x;
2024 /* Given a tree comparison code, return the code that is the logical inverse
2025 of the given code. It is not safe to do this for floating-point
2026 comparisons, except for NE_EXPR and EQ_EXPR. */
2028 static enum tree_code
2029 invert_tree_comparison (enum tree_code code)
2031 switch (code)
2033 case EQ_EXPR:
2034 return NE_EXPR;
2035 case NE_EXPR:
2036 return EQ_EXPR;
2037 case GT_EXPR:
2038 return LE_EXPR;
2039 case GE_EXPR:
2040 return LT_EXPR;
2041 case LT_EXPR:
2042 return GE_EXPR;
2043 case LE_EXPR:
2044 return GT_EXPR;
2045 default:
2046 abort ();
2050 /* Similar, but return the comparison that results if the operands are
2051 swapped. This is safe for floating-point. */
2053 static enum tree_code
2054 swap_tree_comparison (enum tree_code code)
2056 switch (code)
2058 case EQ_EXPR:
2059 case NE_EXPR:
2060 return code;
2061 case GT_EXPR:
2062 return LT_EXPR;
2063 case GE_EXPR:
2064 return LE_EXPR;
2065 case LT_EXPR:
2066 return GT_EXPR;
2067 case LE_EXPR:
2068 return GE_EXPR;
2069 default:
2070 abort ();
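/* A standalone illustration (plain C, IEEE semantics assumed) of why
   the comments above allow swapping but not inverting floating-point
   comparisons: with a NaN operand every ordered comparison is false,
   so !(a < b) is not the same as (a >= b), while (a < b) and (b > a)
   agree for all inputs, NaNs included. */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double a = NAN, b = 1.0;
  assert (!(a < b) && !(a >= b)); /* inverting LT to GE would be wrong */
  assert ((a < b) == (b > a));    /* swapping the operands is safe */
  return 0;
}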
2075 /* Convert a comparison tree code from an enum tree_code representation
2076 into a compcode bit-based encoding. This function is the inverse of
2077 compcode_to_comparison. */
2079 static int
2080 comparison_to_compcode (enum tree_code code)
2082 switch (code)
2084 case LT_EXPR:
2085 return COMPCODE_LT;
2086 case EQ_EXPR:
2087 return COMPCODE_EQ;
2088 case LE_EXPR:
2089 return COMPCODE_LE;
2090 case GT_EXPR:
2091 return COMPCODE_GT;
2092 case NE_EXPR:
2093 return COMPCODE_NE;
2094 case GE_EXPR:
2095 return COMPCODE_GE;
2096 default:
2097 abort ();
2101 /* Convert a compcode bit-based encoding of a comparison operator back
2102 to GCC's enum tree_code representation. This function is the
2103 inverse of comparison_to_compcode. */
2105 static enum tree_code
2106 compcode_to_comparison (int code)
2108 switch (code)
2110 case COMPCODE_LT:
2111 return LT_EXPR;
2112 case COMPCODE_EQ:
2113 return EQ_EXPR;
2114 case COMPCODE_LE:
2115 return LE_EXPR;
2116 case COMPCODE_GT:
2117 return GT_EXPR;
2118 case COMPCODE_NE:
2119 return NE_EXPR;
2120 case COMPCODE_GE:
2121 return GE_EXPR;
2122 default:
2123 abort ();
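/* A standalone sketch of why the compcode encoding pays off.  The
   CC_* values below mirror the COMPCODE_* macros defined earlier in
   this file (bit 0 = less, bit 1 = equal, bit 2 = greater): combining
   two comparisons of the same operands reduces to bitwise arithmetic
   on their codes. */
#include <assert.h>

enum compcode { CC_LT = 1, CC_EQ = 2, CC_LE = 3, CC_GT = 4, CC_NE = 5, CC_GE = 6 };

int
main (void)
{
  assert ((CC_LT | CC_EQ) == CC_LE); /* a < b || a == b   =>  a <= b */
  assert ((CC_LT | CC_GT) == CC_NE); /* a < b || a > b    =>  a != b */
  assert ((CC_LE & CC_GE) == CC_EQ); /* a <= b && a >= b  =>  a == b */
  return 0;
}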
2127 /* Return nonzero if CODE is a tree code that represents a truth value. */
2129 static int
2130 truth_value_p (enum tree_code code)
2132 return (TREE_CODE_CLASS (code) == '<'
2133 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2134 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2135 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2138 /* Return nonzero if two operands (typically of the same tree node)
2139 are necessarily equal. If either argument has side-effects this
2140 function returns zero.
2142 If ONLY_CONST is nonzero, only return nonzero for constants.
2143 This function tests whether the operands are indistinguishable;
2144 it does not test whether they are equal using C's == operation.
2145 The distinction is important for IEEE floating point, because
2146 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2147 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2149 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2150 even though it may hold multiple values during a function.
2151 This is because a GCC tree node guarantees that nothing else is
2152 executed between the evaluation of its "operands" (which may often
2153 be evaluated in arbitrary order). Hence if the operands themselves
2154 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2155 same value in each operand/subexpression. Hence a zero value for
2156 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2157 If comparing arbitrary expression trees, such as from different
2158 statements, ONLY_CONST must usually be nonzero. */
2160 int
2161 operand_equal_p (tree arg0, tree arg1, int only_const)
2163 tree fndecl;
2165 /* If either is ERROR_MARK, they aren't equal. */
2166 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2167 return 0;
2169 /* If the two types don't have the same signedness, then we can't consider
2170 them equal. We must check this before the STRIP_NOPS calls
2171 because they may change the signedness of the arguments. */
2172 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2173 return 0;
2175 STRIP_NOPS (arg0);
2176 STRIP_NOPS (arg1);
2178 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2179 /* This is needed for conversions and for COMPONENT_REF.
2180 Might as well play it safe and always test this. */
2181 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2182 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2183 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2184 return 0;
2186 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2187 We don't care about side effects in that case because the SAVE_EXPR
2188 takes care of that for us. In all other cases, two expressions are
2189 equal if they have no side effects. If we have two identical
2190 expressions with side effects that should be treated the same due
2191 to the only side effects being identical SAVE_EXPR's, that will
2192 be detected in the recursive calls below. */
2193 if (arg0 == arg1 && ! only_const
2194 && (TREE_CODE (arg0) == SAVE_EXPR
2195 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2196 return 1;
2198 /* Next handle constant cases, those for which we can return 1 even
2199 if ONLY_CONST is set. */
2200 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2201 switch (TREE_CODE (arg0))
2203 case INTEGER_CST:
2204 return (! TREE_CONSTANT_OVERFLOW (arg0)
2205 && ! TREE_CONSTANT_OVERFLOW (arg1)
2206 && tree_int_cst_equal (arg0, arg1));
2208 case REAL_CST:
2209 return (! TREE_CONSTANT_OVERFLOW (arg0)
2210 && ! TREE_CONSTANT_OVERFLOW (arg1)
2211 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2212 TREE_REAL_CST (arg1)));
2214 case VECTOR_CST:
2216 tree v1, v2;
2218 if (TREE_CONSTANT_OVERFLOW (arg0)
2219 || TREE_CONSTANT_OVERFLOW (arg1))
2220 return 0;
2222 v1 = TREE_VECTOR_CST_ELTS (arg0);
2223 v2 = TREE_VECTOR_CST_ELTS (arg1);
2224 while (v1 && v2)
2226 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2227 only_const))
2228 return 0;
2229 v1 = TREE_CHAIN (v1);
2230 v2 = TREE_CHAIN (v2);
2233 return 1;
2236 case COMPLEX_CST:
2237 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2238 only_const)
2239 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2240 only_const));
2242 case STRING_CST:
2243 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2244 && ! memcmp (TREE_STRING_POINTER (arg0),
2245 TREE_STRING_POINTER (arg1),
2246 TREE_STRING_LENGTH (arg0)));
2248 case ADDR_EXPR:
2249 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2250 0);
2251 default:
2252 break;
2255 if (only_const)
2256 return 0;
2258 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2260 case '1':
2261 /* Two conversions are equal only if signedness and modes match. */
2262 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2263 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2264 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2265 return 0;
2267 return operand_equal_p (TREE_OPERAND (arg0, 0),
2268 TREE_OPERAND (arg1, 0), 0);
2270 case '<':
2271 case '2':
2272 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2273 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2274 0))
2275 return 1;
2277 /* For commutative ops, allow the other order. */
2278 return (commutative_tree_code (TREE_CODE (arg0))
2279 && operand_equal_p (TREE_OPERAND (arg0, 0),
2280 TREE_OPERAND (arg1, 1), 0)
2281 && operand_equal_p (TREE_OPERAND (arg0, 1),
2282 TREE_OPERAND (arg1, 0), 0));
2284 case 'r':
2285 /* If either of the pointer (or reference) expressions we are
2286 dereferencing contain a side effect, these cannot be equal. */
2287 if (TREE_SIDE_EFFECTS (arg0)
2288 || TREE_SIDE_EFFECTS (arg1))
2289 return 0;
2291 switch (TREE_CODE (arg0))
2293 case INDIRECT_REF:
2294 return operand_equal_p (TREE_OPERAND (arg0, 0),
2295 TREE_OPERAND (arg1, 0), 0);
2297 case COMPONENT_REF:
2298 case ARRAY_REF:
2299 case ARRAY_RANGE_REF:
2300 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2301 TREE_OPERAND (arg1, 0), 0)
2302 && operand_equal_p (TREE_OPERAND (arg0, 1),
2303 TREE_OPERAND (arg1, 1), 0));
2305 case BIT_FIELD_REF:
2306 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2307 TREE_OPERAND (arg1, 0), 0)
2308 && operand_equal_p (TREE_OPERAND (arg0, 1),
2309 TREE_OPERAND (arg1, 1), 0)
2310 && operand_equal_p (TREE_OPERAND (arg0, 2),
2311 TREE_OPERAND (arg1, 2), 0));
2312 default:
2313 return 0;
2316 case 'e':
2317 switch (TREE_CODE (arg0))
2319 case ADDR_EXPR:
2320 case TRUTH_NOT_EXPR:
2321 return operand_equal_p (TREE_OPERAND (arg0, 0),
2322 TREE_OPERAND (arg1, 0), 0);
2324 case RTL_EXPR:
2325 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2327 case CALL_EXPR:
2328 /* If the CALL_EXPRs call different functions, then they
2329 clearly can not be equal. */
2330 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2331 TREE_OPERAND (arg1, 0), 0))
2332 return 0;
2334 /* Only consider const functions equivalent. */
2335 fndecl = get_callee_fndecl (arg0);
2336 if (fndecl == NULL_TREE
2337 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2338 return 0;
2340 /* Now see if all the arguments are the same. operand_equal_p
2341 does not handle TREE_LIST, so we walk the operands here
2342 feeding them to operand_equal_p. */
2343 arg0 = TREE_OPERAND (arg0, 1);
2344 arg1 = TREE_OPERAND (arg1, 1);
2345 while (arg0 && arg1)
2347 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2348 return 0;
2350 arg0 = TREE_CHAIN (arg0);
2351 arg1 = TREE_CHAIN (arg1);
2354 /* If we get here and both argument lists are exhausted
2355 then the CALL_EXPRs are equal. */
2356 return ! (arg0 || arg1);
2358 default:
2359 return 0;
2362 case 'd':
2363 /* Consider __builtin_sqrt equal to sqrt. */
2364 return TREE_CODE (arg0) == FUNCTION_DECL
2365 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2366 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2367 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2369 default:
2370 return 0;
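/* A standalone illustration (plain C, IEEE semantics assumed) of the
   distinction drawn in the comment before operand_equal_p: C's ==
   cannot separate -0.0 from 0.0 and calls a NaN unequal to itself,
   which is why REAL_CSTs are compared with REAL_VALUES_IDENTICAL
   rather than with ==. */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0, n = NAN;
  assert (pz == nz);                     /* == says they are equal */
  assert (signbit (pz) != signbit (nz)); /* yet they are distinguishable */
  assert (n != n);                       /* identical NaN, still != */
  return 0;
}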
2374 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2375 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2377 When in doubt, return 0. */
2379 static int
2380 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2382 int unsignedp1, unsignedpo;
2383 tree primarg0, primarg1, primother;
2384 unsigned int correct_width;
2386 if (operand_equal_p (arg0, arg1, 0))
2387 return 1;
2389 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2390 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2391 return 0;
2393 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2394 and see if the inner values are the same. This removes any
2395 signedness comparison, which doesn't matter here. */
2396 primarg0 = arg0, primarg1 = arg1;
2397 STRIP_NOPS (primarg0);
2398 STRIP_NOPS (primarg1);
2399 if (operand_equal_p (primarg0, primarg1, 0))
2400 return 1;
2402 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2403 actual comparison operand, ARG0.
2405 First throw away any conversions to wider types
2406 already present in the operands. */
2408 primarg1 = get_narrower (arg1, &unsignedp1);
2409 primother = get_narrower (other, &unsignedpo);
2411 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2412 if (unsignedp1 == unsignedpo
2413 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2414 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2416 tree type = TREE_TYPE (arg0);
2418 /* Make sure shorter operand is extended the right way
2419 to match the longer operand. */
2420 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2421 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2423 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2424 return 1;
2427 return 0;
2430 /* See if ARG is an expression that is either a comparison or is performing
2431 arithmetic on comparisons. The comparisons must only be comparing
2432 two different values, which will be stored in *CVAL1 and *CVAL2; if
2433 they are nonzero it means that some operands have already been found.
2434 No variables may be used anywhere else in the expression except in the
2435 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2436 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2438 If this is true, return 1. Otherwise, return zero. */
2440 static int
2441 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2443 enum tree_code code = TREE_CODE (arg);
2444 char class = TREE_CODE_CLASS (code);
2446 /* We can handle some of the 'e' cases here. */
2447 if (class == 'e' && code == TRUTH_NOT_EXPR)
2448 class = '1';
2449 else if (class == 'e'
2450 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2451 || code == COMPOUND_EXPR))
2452 class = '2';
2454 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2455 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2457 /* If we've already found a CVAL1 or CVAL2, this expression is
2458 too complex to handle. */
2459 if (*cval1 || *cval2)
2460 return 0;
2462 class = '1';
2463 *save_p = 1;
2466 switch (class)
2468 case '1':
2469 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2471 case '2':
2472 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2473 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2474 cval1, cval2, save_p));
2476 case 'c':
2477 return 1;
2479 case 'e':
2480 if (code == COND_EXPR)
2481 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2482 cval1, cval2, save_p)
2483 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2484 cval1, cval2, save_p)
2485 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2486 cval1, cval2, save_p));
2487 return 0;
2489 case '<':
2490 /* First see if we can handle the first operand, then the second. For
2491 the second operand, we know *CVAL1 can't be zero. It must be that
2492 one side of the comparison is each of the values; test for the
2493 case where this isn't true by failing if the two operands
2494 are the same. */
2496 if (operand_equal_p (TREE_OPERAND (arg, 0),
2497 TREE_OPERAND (arg, 1), 0))
2498 return 0;
2500 if (*cval1 == 0)
2501 *cval1 = TREE_OPERAND (arg, 0);
2502 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2504 else if (*cval2 == 0)
2505 *cval2 = TREE_OPERAND (arg, 0);
2506 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2508 else
2509 return 0;
2511 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2513 else if (*cval2 == 0)
2514 *cval2 = TREE_OPERAND (arg, 1);
2515 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2517 else
2518 return 0;
2520 return 1;
2522 default:
2523 return 0;
2527 /* ARG is a tree that is known to contain just arithmetic operations and
2528 comparisons. Evaluate the operations in the tree substituting NEW0 for
2529 any occurrence of OLD0 as an operand of a comparison and likewise for
2530 NEW1 and OLD1. */
2532 static tree
2533 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2535 tree type = TREE_TYPE (arg);
2536 enum tree_code code = TREE_CODE (arg);
2537 char class = TREE_CODE_CLASS (code);
2539 /* We can handle some of the 'e' cases here. */
2540 if (class == 'e' && code == TRUTH_NOT_EXPR)
2541 class = '1';
2542 else if (class == 'e'
2543 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2544 class = '2';
2546 switch (class)
2548 case '1':
2549 return fold (build1 (code, type,
2550 eval_subst (TREE_OPERAND (arg, 0),
2551 old0, new0, old1, new1)));
2553 case '2':
2554 return fold (build (code, type,
2555 eval_subst (TREE_OPERAND (arg, 0),
2556 old0, new0, old1, new1),
2557 eval_subst (TREE_OPERAND (arg, 1),
2558 old0, new0, old1, new1)));
2560 case 'e':
2561 switch (code)
2563 case SAVE_EXPR:
2564 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2566 case COMPOUND_EXPR:
2567 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2569 case COND_EXPR:
2570 return fold (build (code, type,
2571 eval_subst (TREE_OPERAND (arg, 0),
2572 old0, new0, old1, new1),
2573 eval_subst (TREE_OPERAND (arg, 1),
2574 old0, new0, old1, new1),
2575 eval_subst (TREE_OPERAND (arg, 2),
2576 old0, new0, old1, new1)));
2577 default:
2578 break;
2580 /* Fall through - ??? */
2582 case '<':
2584 tree arg0 = TREE_OPERAND (arg, 0);
2585 tree arg1 = TREE_OPERAND (arg, 1);
2587 /* We need to check both for exact equality and tree equality. The
2588 former will be true if the operand has a side-effect. In that
2589 case, we know the operand occurred exactly once. */
2591 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2592 arg0 = new0;
2593 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2594 arg0 = new1;
2596 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2597 arg1 = new0;
2598 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2599 arg1 = new1;
2601 return fold (build (code, type, arg0, arg1));
2604 default:
2605 return arg;
2609 /* Return a tree for the case when the result of an expression is RESULT
2610 converted to TYPE and OMITTED was previously an operand of the expression
2611 but is now not needed (e.g., we folded OMITTED * 0).
2613 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2614 the conversion of RESULT to TYPE. */
2616 tree
2617 omit_one_operand (tree type, tree result, tree omitted)
2619 tree t = fold_convert (type, result);
2621 if (TREE_SIDE_EFFECTS (omitted))
2622 return build (COMPOUND_EXPR, type, omitted, t);
2624 return non_lvalue (t);
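/* A source-level sketch (plain C, hypothetical function f) of what
   omit_one_operand preserves: folding "f () * 0" to zero must still
   evaluate the call when it has side effects, so the result is the
   equivalent of a COMPOUND_EXPR, "(f (), 0)", not a bare "0". */
extern int f (void);

int
folded (void)
{
  /* return f () * 0;  folds to: */
  return (f (), 0);
}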
2627 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2629 static tree
2630 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2632 tree t = fold_convert (type, result);
2634 if (TREE_SIDE_EFFECTS (omitted))
2635 return build (COMPOUND_EXPR, type, omitted, t);
2637 return pedantic_non_lvalue (t);
2640 /* Return a simplified tree node for the truth-negation of ARG. This
2641 never alters ARG itself. We assume that ARG is an operation that
2642 returns a truth value (0 or 1). */
2644 tree
2645 invert_truthvalue (tree arg)
2647 tree type = TREE_TYPE (arg);
2648 enum tree_code code = TREE_CODE (arg);
2650 if (code == ERROR_MARK)
2651 return arg;
2653 /* If this is a comparison, we can simply invert it, except for
2654 floating-point non-equality comparisons, in which case we just
2655 enclose a TRUTH_NOT_EXPR around what we have. */
2657 if (TREE_CODE_CLASS (code) == '<')
2659 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2660 && !flag_unsafe_math_optimizations
2661 && code != NE_EXPR
2662 && code != EQ_EXPR)
2663 return build1 (TRUTH_NOT_EXPR, type, arg);
2664 else if (code == UNORDERED_EXPR
2665 || code == ORDERED_EXPR
2666 || code == UNEQ_EXPR
2667 || code == UNLT_EXPR
2668 || code == UNLE_EXPR
2669 || code == UNGT_EXPR
2670 || code == UNGE_EXPR)
2671 return build1 (TRUTH_NOT_EXPR, type, arg);
2672 else
2673 return build (invert_tree_comparison (code), type,
2674 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2677 switch (code)
2679 case INTEGER_CST:
2680 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2682 case TRUTH_AND_EXPR:
2683 return build (TRUTH_OR_EXPR, type,
2684 invert_truthvalue (TREE_OPERAND (arg, 0)),
2685 invert_truthvalue (TREE_OPERAND (arg, 1)));
2687 case TRUTH_OR_EXPR:
2688 return build (TRUTH_AND_EXPR, type,
2689 invert_truthvalue (TREE_OPERAND (arg, 0)),
2690 invert_truthvalue (TREE_OPERAND (arg, 1)));
2692 case TRUTH_XOR_EXPR:
2693 /* Here we can invert either operand. We invert the first operand
2694 unless the second operand is a TRUTH_NOT_EXPR in which case our
2695 result is the XOR of the first operand with the inside of the
2696 negation of the second operand. */
2698 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2699 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2700 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2701 else
2702 return build (TRUTH_XOR_EXPR, type,
2703 invert_truthvalue (TREE_OPERAND (arg, 0)),
2704 TREE_OPERAND (arg, 1));
2706 case TRUTH_ANDIF_EXPR:
2707 return build (TRUTH_ORIF_EXPR, type,
2708 invert_truthvalue (TREE_OPERAND (arg, 0)),
2709 invert_truthvalue (TREE_OPERAND (arg, 1)));
2711 case TRUTH_ORIF_EXPR:
2712 return build (TRUTH_ANDIF_EXPR, type,
2713 invert_truthvalue (TREE_OPERAND (arg, 0)),
2714 invert_truthvalue (TREE_OPERAND (arg, 1)));
2716 case TRUTH_NOT_EXPR:
2717 return TREE_OPERAND (arg, 0);
2719 case COND_EXPR:
2720 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2721 invert_truthvalue (TREE_OPERAND (arg, 1)),
2722 invert_truthvalue (TREE_OPERAND (arg, 2)));
2724 case COMPOUND_EXPR:
2725 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2726 invert_truthvalue (TREE_OPERAND (arg, 1)));
2728 case NON_LVALUE_EXPR:
2729 return invert_truthvalue (TREE_OPERAND (arg, 0));
2731 case NOP_EXPR:
2732 case CONVERT_EXPR:
2733 case FLOAT_EXPR:
2734 return build1 (TREE_CODE (arg), type,
2735 invert_truthvalue (TREE_OPERAND (arg, 0)));
2737 case BIT_AND_EXPR:
2738 if (!integer_onep (TREE_OPERAND (arg, 1)))
2739 break;
2740 return build (EQ_EXPR, type, arg,
2741 fold_convert (type, integer_zero_node));
2743 case SAVE_EXPR:
2744 return build1 (TRUTH_NOT_EXPR, type, arg);
2746 case CLEANUP_POINT_EXPR:
2747 return build1 (CLEANUP_POINT_EXPR, type,
2748 invert_truthvalue (TREE_OPERAND (arg, 0)));
2750 default:
2751 break;
2753 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2754 abort ();
2755 return build1 (TRUTH_NOT_EXPR, type, arg);
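/* A standalone check (plain C) of two rewrites performed above: the
   De Morgan transform for TRUTH_AND_EXPR and the single-operand
   inversion for TRUTH_XOR_EXPR, exhaustive over boolean inputs. */
#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
        assert (!(a && b) == (!a || !b)); /* !(A && B) -> !A || !B */
        assert (!(a ^ b) == ((!a) ^ b));  /* !(A ^ B) -> (!A) ^ B */
      }
  return 0;
}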
2758 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2759 operands are another bit-wise operation with a common input. If so,
2760 distribute the bit operations to save an operation and possibly two if
2761 constants are involved. For example, convert
2762 (A | B) & (A | C) into A | (B & C)
2763 Further simplification will occur if B and C are constants.
2765 If this optimization cannot be done, 0 will be returned. */
2767 static tree
2768 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2770 tree common;
2771 tree left, right;
2773 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2774 || TREE_CODE (arg0) == code
2775 || (TREE_CODE (arg0) != BIT_AND_EXPR
2776 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2777 return 0;
2779 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2781 common = TREE_OPERAND (arg0, 0);
2782 left = TREE_OPERAND (arg0, 1);
2783 right = TREE_OPERAND (arg1, 1);
2785 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2787 common = TREE_OPERAND (arg0, 0);
2788 left = TREE_OPERAND (arg0, 1);
2789 right = TREE_OPERAND (arg1, 0);
2791 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2793 common = TREE_OPERAND (arg0, 1);
2794 left = TREE_OPERAND (arg0, 0);
2795 right = TREE_OPERAND (arg1, 1);
2797 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2799 common = TREE_OPERAND (arg0, 1);
2800 left = TREE_OPERAND (arg0, 0);
2801 right = TREE_OPERAND (arg1, 0);
2803 else
2804 return 0;
2806 return fold (build (TREE_CODE (arg0), type, common,
2807 fold (build (code, type, left, right))));
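/* A standalone check (plain C) of the distribution identity
   implemented above, in both directions, exhaustive over small
   values: (A | B) & (A | C) == A | (B & C), and dually with & and |
   exchanged. */
#include <assert.h>

int
main (void)
{
  unsigned a, b, c;
  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      for (c = 0; c < 8; c++)
        {
          assert (((a | b) & (a | c)) == (a | (b & c)));
          assert (((a & b) | (a & c)) == (a & (b | c)));
        }
  return 0;
}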
2810 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2811 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2813 static tree
2814 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2815 int unsignedp)
2817 tree result = build (BIT_FIELD_REF, type, inner,
2818 size_int (bitsize), bitsize_int (bitpos));
2820 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
2822 return result;
2825 /* Optimize a bit-field compare.
2827 There are two cases: First is a compare against a constant and the
2828 second is a comparison of two items where the fields are at the same
2829 bit position relative to the start of a chunk (byte, halfword, word)
2830 large enough to contain it. In these cases we can avoid the shift
2831 implicit in bitfield extractions.
2833 For constants, we emit a compare of the shifted constant with the
2834 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2835 compared. For two fields at the same position, we do the ANDs with the
2836 similar mask and compare the result of the ANDs.
2838 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2839 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2840 are the left and right operands of the comparison, respectively.
2842 If the optimization described above can be done, we return the resulting
2843 tree. Otherwise we return zero. */
2845 static tree
2846 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2847 tree lhs, tree rhs)
2849 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2850 tree type = TREE_TYPE (lhs);
2851 tree signed_type, unsigned_type;
2852 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2853 enum machine_mode lmode, rmode, nmode;
2854 int lunsignedp, runsignedp;
2855 int lvolatilep = 0, rvolatilep = 0;
2856 tree linner, rinner = NULL_TREE;
2857 tree mask;
2858 tree offset;
2860 /* Get all the information about the extractions being done. If the bit size
2861 is the same as the size of the underlying object, we aren't doing an
2862 extraction at all and so can do nothing. We also don't want to
2863 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2864 then will no longer be able to replace it. */
2865 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2866 &lunsignedp, &lvolatilep);
2867 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2868 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2869 return 0;
2871 if (!const_p)
2873 /* If this is not a constant, we can only do something if bit positions,
2874 sizes, and signedness are the same. */
2875 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2876 &runsignedp, &rvolatilep);
2878 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2879 || lunsignedp != runsignedp || offset != 0
2880 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2881 return 0;
2884 /* See if we can find a mode to refer to this field. We should be able to,
2885 but fail if we can't. */
2886 nmode = get_best_mode (lbitsize, lbitpos,
2887 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2888 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2889 TYPE_ALIGN (TREE_TYPE (rinner))),
2890 word_mode, lvolatilep || rvolatilep);
2891 if (nmode == VOIDmode)
2892 return 0;
2894 /* Set signed and unsigned types of the precision of this mode for the
2895 shifts below. */
2896 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
2897 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
2899 /* Compute the bit position and size for the new reference and our offset
2900 within it. If the new reference is the same size as the original, we
2901 won't optimize anything, so return zero. */
2902 nbitsize = GET_MODE_BITSIZE (nmode);
2903 nbitpos = lbitpos & ~ (nbitsize - 1);
2904 lbitpos -= nbitpos;
2905 if (nbitsize == lbitsize)
2906 return 0;
2908 if (BYTES_BIG_ENDIAN)
2909 lbitpos = nbitsize - lbitsize - lbitpos;
2911 /* Make the mask to be used against the extracted field. */
2912 mask = build_int_2 (~0, ~0);
2913 TREE_TYPE (mask) = unsigned_type;
2914 force_fit_type (mask, 0);
2915 mask = fold_convert (unsigned_type, mask);
2916 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2917 mask = const_binop (RSHIFT_EXPR, mask,
2918 size_int (nbitsize - lbitsize - lbitpos), 0);
2920 if (! const_p)
2921 /* If not comparing with constant, just rework the comparison
2922 and return. */
2923 return build (code, compare_type,
2924 build (BIT_AND_EXPR, unsigned_type,
2925 make_bit_field_ref (linner, unsigned_type,
2926 nbitsize, nbitpos, 1),
2927 mask),
2928 build (BIT_AND_EXPR, unsigned_type,
2929 make_bit_field_ref (rinner, unsigned_type,
2930 nbitsize, nbitpos, 1),
2931 mask));
2933 /* Otherwise, we are handling the constant case. See if the constant is too
2934 big for the field. Warn and return a tree for 0 (false) if so. We do
2935 this not only for its own sake, but to avoid having to test for this
2936 error case below. If we didn't, we might generate wrong code.
2938 For unsigned fields, the constant shifted right by the field length should
2939 be all zero. For signed fields, the high-order bits should agree with
2940 the sign bit. */
2942 if (lunsignedp)
2944 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2945 fold_convert (unsigned_type, rhs),
2946 size_int (lbitsize), 0)))
2948 warning ("comparison is always %d due to width of bit-field",
2949 code == NE_EXPR);
2950 return fold_convert (compare_type,
2951 (code == NE_EXPR
2952 ? integer_one_node : integer_zero_node));
2955 else
2957 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2958 size_int (lbitsize - 1), 0);
2959 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2961 warning ("comparison is always %d due to width of bit-field",
2962 code == NE_EXPR);
2963 return fold_convert (compare_type,
2964 (code == NE_EXPR
2965 ? integer_one_node : integer_zero_node));
2969 /* Single-bit compares should always be against zero. */
2970 if (lbitsize == 1 && ! integer_zerop (rhs))
2972 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2973 rhs = fold_convert (type, integer_zero_node);
2976 /* Make a new bitfield reference, shift the constant over the
2977 appropriate number of bits and mask it with the computed mask
2978 (in case this was a signed field). If we changed it, make a new one. */
2979 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2980 if (lvolatilep)
2982 TREE_SIDE_EFFECTS (lhs) = 1;
2983 TREE_THIS_VOLATILE (lhs) = 1;
2986 rhs = fold (const_binop (BIT_AND_EXPR,
2987 const_binop (LSHIFT_EXPR,
2988 fold_convert (unsigned_type, rhs),
2989 size_int (lbitpos), 0),
2990 mask, 0));
2992 return build (code, compare_type,
2993 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2994 rhs);
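/* A sketch of the mask construction used above, in host arithmetic
   rather than GCC trees (assumes the usual 32-bit unsigned int):
   shift all-ones left by nbitsize - lbitsize, then right by
   nbitsize - lbitsize - lbitpos, leaving lbitsize one bits starting
   at bit lbitpos. */
#include <assert.h>

static unsigned int
field_mask (int nbitsize, int lbitsize, int lbitpos)
{
  unsigned int mask = ~0u;
  mask <<= nbitsize - lbitsize;           /* the LSHIFT_EXPR step */
  mask >>= nbitsize - lbitsize - lbitpos; /* the RSHIFT_EXPR step */
  return mask;
}

int
main (void)
{
  /* A 3-bit field at bit position 4 of a 32-bit unit: mask 0x70. */
  assert (field_mask (32, 3, 4) == 0x70);
  return 0;
}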
2997 /* Subroutine for fold_truthop: decode a field reference.
2999 If EXP is a comparison reference, we return the innermost reference.
3001 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3002 set to the starting bit number.
3004 If the innermost field can be completely contained in a mode-sized
3005 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3007 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3008 otherwise it is not changed.
3010 *PUNSIGNEDP is set to the signedness of the field.
3012 *PMASK is set to the mask used. This is either contained in a
3013 BIT_AND_EXPR or derived from the width of the field.
3015 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3017 Return 0 if this is not a component reference or is one that we can't
3018 do anything with. */
3020 static tree
3021 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3022 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3023 int *punsignedp, int *pvolatilep,
3024 tree *pmask, tree *pand_mask)
3026 tree outer_type = 0;
3027 tree and_mask = 0;
3028 tree mask, inner, offset;
3029 tree unsigned_type;
3030 unsigned int precision;
3032 /* All the optimizations using this function assume integer fields.
3033 There are problems with FP fields since the type_for_size call
3034 below can fail for, e.g., XFmode. */
3035 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3036 return 0;
3038 /* We are interested in the bare arrangement of bits, so strip everything
3039 that doesn't affect the machine mode. However, record the type of the
3040 outermost expression if it may matter below. */
3041 if (TREE_CODE (exp) == NOP_EXPR
3042 || TREE_CODE (exp) == CONVERT_EXPR
3043 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3044 outer_type = TREE_TYPE (exp);
3045 STRIP_NOPS (exp);
3047 if (TREE_CODE (exp) == BIT_AND_EXPR)
3049 and_mask = TREE_OPERAND (exp, 1);
3050 exp = TREE_OPERAND (exp, 0);
3051 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3052 if (TREE_CODE (and_mask) != INTEGER_CST)
3053 return 0;
3056 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3057 punsignedp, pvolatilep);
3058 if ((inner == exp && and_mask == 0)
3059 || *pbitsize < 0 || offset != 0
3060 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3061 return 0;
3063 /* If the number of bits in the reference is the same as the bitsize of
3064 the outer type, then the outer type gives the signedness. Otherwise
3065 (in case of a small bitfield) the signedness is unchanged. */
3066 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3067 *punsignedp = TYPE_UNSIGNED (outer_type);
3069 /* Compute the mask to access the bitfield. */
3070 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3071 precision = TYPE_PRECISION (unsigned_type);
3073 mask = build_int_2 (~0, ~0);
3074 TREE_TYPE (mask) = unsigned_type;
3075 force_fit_type (mask, 0);
3076 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3077 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3079 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3080 if (and_mask != 0)
3081 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3082 fold_convert (unsigned_type, and_mask), mask));
3084 *pmask = mask;
3085 *pand_mask = and_mask;
3086 return inner;
3089 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3090 bit positions. */
3092 static int
3093 all_ones_mask_p (tree mask, int size)
3095 tree type = TREE_TYPE (mask);
3096 unsigned int precision = TYPE_PRECISION (type);
3097 tree tmask;
3099 tmask = build_int_2 (~0, ~0);
3100 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3101 force_fit_type (tmask, 0);
3102 return
3103 tree_int_cst_equal (mask,
3104 const_binop (RSHIFT_EXPR,
3105 const_binop (LSHIFT_EXPR, tmask,
3106 size_int (precision - size),
3107 0),
3108 size_int (precision - size), 0));
3111 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3112 represents the sign bit of EXP's type. If EXP represents a sign
3113 or zero extension, also test VAL against the unextended type.
3114 The return value is the (sub)expression whose sign bit is VAL,
3115 or NULL_TREE otherwise. */
3117 static tree
3118 sign_bit_p (tree exp, tree val)
3120 unsigned HOST_WIDE_INT mask_lo, lo;
3121 HOST_WIDE_INT mask_hi, hi;
3122 int width;
3123 tree t;
3125 /* Tree EXP must have an integral type. */
3126 t = TREE_TYPE (exp);
3127 if (! INTEGRAL_TYPE_P (t))
3128 return NULL_TREE;
3130 /* Tree VAL must be an integer constant. */
3131 if (TREE_CODE (val) != INTEGER_CST
3132 || TREE_CONSTANT_OVERFLOW (val))
3133 return NULL_TREE;
3135 width = TYPE_PRECISION (t);
3136 if (width > HOST_BITS_PER_WIDE_INT)
3138 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3139 lo = 0;
3141 mask_hi = ((unsigned HOST_WIDE_INT) -1
3142 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3143 mask_lo = -1;
3145 else
3147 hi = 0;
3148 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3150 mask_hi = 0;
3151 mask_lo = ((unsigned HOST_WIDE_INT) -1
3152 >> (HOST_BITS_PER_WIDE_INT - width));
3155 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3156 treat VAL as if it were unsigned. */
3157 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3158 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3159 return exp;
3161 /* Handle extension from a narrower type. */
3162 if (TREE_CODE (exp) == NOP_EXPR
3163 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3164 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3166 return NULL_TREE;
3169 /* Subroutine for fold_truthop: determine if an operand is simple enough
3170 to be evaluated unconditionally. */
3172 static int
3173 simple_operand_p (tree exp)
3175 /* Strip any conversions that don't change the machine mode. */
3176 while ((TREE_CODE (exp) == NOP_EXPR
3177 || TREE_CODE (exp) == CONVERT_EXPR)
3178 && (TYPE_MODE (TREE_TYPE (exp))
3179 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3180 exp = TREE_OPERAND (exp, 0);
3182 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3183 || (DECL_P (exp)
3184 && ! TREE_ADDRESSABLE (exp)
3185 && ! TREE_THIS_VOLATILE (exp)
3186 && ! DECL_NONLOCAL (exp)
3187 /* Don't regard global variables as simple. They may be
3188 allocated in ways unknown to the compiler (shared memory,
3189 #pragma weak, etc). */
3190 && ! TREE_PUBLIC (exp)
3191 && ! DECL_EXTERNAL (exp)
3192 /* Loading a static variable is unduly expensive, but global
3193 registers aren't expensive. */
3194 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3197 /* The following functions are subroutines to fold_range_test and allow it to
3198 try to change a logical combination of comparisons into a range test.
3200 For example, both
3201 X == 2 || X == 3 || X == 4 || X == 5
3202 and
3203 X >= 2 && X <= 5
3204 are converted to
3205 (unsigned) (X - 2) <= 3
3207 We describe each set of comparisons as being either inside or outside
3208 a range, using a variable named like IN_P, and then describe the
3209 range with a lower and upper bound. If one of the bounds is omitted,
3210 it represents either the highest or lowest value of the type.
3212 In the comments below, we represent a range by two numbers in brackets
3213 preceded by a "+" to designate being inside that range, or a "-" to
3214 designate being outside that range, so the condition can be inverted by
3215 flipping the prefix. An omitted bound is represented by a "-". For
3216 example, "- [-, 10]" means being outside the range starting at the lowest
3217 possible value and ending at 10, in other words, being greater than 10.
3218 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3219 always false.
3221 We set up things so that the missing bounds are handled in a consistent
3222 manner so neither a missing bound nor "true" and "false" need to be
3223 handled using a special case. */
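/* A standalone check (plain C) of the example transformation above:
   the chained equality test, the && range test, and the single
   unsigned comparison agree for every int X, because the unsigned
   subtraction wraps each out-of-range value to something above 3. */
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      int by_cases = (x == 2 || x == 3 || x == 4 || x == 5);
      int by_range = (x >= 2 && x <= 5);
      int by_wrap = ((unsigned) (x - 2) <= 3);
      assert (by_cases == by_range && by_range == by_wrap);
    }
  return 0;
}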
3225 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3226 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3227 and UPPER1_P are nonzero if the respective argument is an upper bound
3228 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3229 must be specified for a comparison. ARG1 will be converted to ARG0's
3230 type if both are specified. */
3232 static tree
3233 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3234 tree arg1, int upper1_p)
3236 tree tem;
3237 int result;
3238 int sgn0, sgn1;
3240 /* If neither arg represents infinity, do the normal operation.
3241 Else, if not a comparison, return infinity. Else handle the special
3242 comparison rules. Note that most of the cases below won't occur, but
3243 are handled for consistency. */
3245 if (arg0 != 0 && arg1 != 0)
3247 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3248 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3249 STRIP_NOPS (tem);
3250 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3253 if (TREE_CODE_CLASS (code) != '<')
3254 return 0;
3256 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3257 for neither. In real maths, we cannot assume open ended ranges are
3258 the same. But, this is computer arithmetic, where numbers are finite.
3259 We can therefore make the transformation of any unbounded range with
3260 the value Z, Z being greater than any representable number. This permits
3261 us to treat unbounded ranges as equal. */
3262 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3263 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3264 switch (code)
3266 case EQ_EXPR:
3267 result = sgn0 == sgn1;
3268 break;
3269 case NE_EXPR:
3270 result = sgn0 != sgn1;
3271 break;
3272 case LT_EXPR:
3273 result = sgn0 < sgn1;
3274 break;
3275 case LE_EXPR:
3276 result = sgn0 <= sgn1;
3277 break;
3278 case GT_EXPR:
3279 result = sgn0 > sgn1;
3280 break;
3281 case GE_EXPR:
3282 result = sgn0 >= sgn1;
3283 break;
3284 default:
3285 abort ();
3288 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3291 /* Given EXP, a logical expression, set the range it is testing into
3292 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3293 actually being tested. *PLOW and *PHIGH will be made of the same type
3294 as the returned expression. If EXP is not a comparison, we will most
3295 likely not be returning a useful value and range. */
3297 static tree
3298 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3300 enum tree_code code;
3301 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3302 tree orig_type = NULL_TREE;
3303 int in_p, n_in_p;
3304 tree low, high, n_low, n_high;
3306 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3307 and see if we can refine the range. Some of the cases below may not
3308 happen, but it doesn't seem worth worrying about this. We "continue"
3309 the outer loop when we've changed something; otherwise we "break"
3310 the switch, which will "break" the while. */
3312 in_p = 0;
3313 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3315 while (1)
3317 code = TREE_CODE (exp);
3319 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3321 if (first_rtl_op (code) > 0)
3322 arg0 = TREE_OPERAND (exp, 0);
3323 if (TREE_CODE_CLASS (code) == '<'
3324 || TREE_CODE_CLASS (code) == '1'
3325 || TREE_CODE_CLASS (code) == '2')
3326 type = TREE_TYPE (arg0);
3327 if (TREE_CODE_CLASS (code) == '2'
3328 || TREE_CODE_CLASS (code) == '<'
3329 || (TREE_CODE_CLASS (code) == 'e'
3330 && TREE_CODE_LENGTH (code) > 1))
3331 arg1 = TREE_OPERAND (exp, 1);
3334 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3335 lose a cast by accident. */
3336 if (type != NULL_TREE && orig_type == NULL_TREE)
3337 orig_type = type;
3339 switch (code)
3341 case TRUTH_NOT_EXPR:
3342 in_p = ! in_p, exp = arg0;
3343 continue;
3345 case EQ_EXPR: case NE_EXPR:
3346 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3347 /* We can only do something if the range is testing for zero
3348 and if the second operand is an integer constant. Note that
3349 saying something is "in" the range we make is done by
3350 complementing IN_P since it will set in the initial case of
3351 being not equal to zero; "out" is leaving it alone. */
3352 if (low == 0 || high == 0
3353 || ! integer_zerop (low) || ! integer_zerop (high)
3354 || TREE_CODE (arg1) != INTEGER_CST)
3355 break;
3357 switch (code)
3359 case NE_EXPR: /* - [c, c] */
3360 low = high = arg1;
3361 break;
3362 case EQ_EXPR: /* + [c, c] */
3363 in_p = ! in_p, low = high = arg1;
3364 break;
3365 case GT_EXPR: /* - [-, c] */
3366 low = 0, high = arg1;
3367 break;
3368 case GE_EXPR: /* + [c, -] */
3369 in_p = ! in_p, low = arg1, high = 0;
3370 break;
3371 case LT_EXPR: /* - [c, -] */
3372 low = arg1, high = 0;
3373 break;
3374 case LE_EXPR: /* + [-, c] */
3375 in_p = ! in_p, low = 0, high = arg1;
3376 break;
3377 default:
3378 abort ();
3381 exp = arg0;
3383 /* If this is an unsigned comparison, we also know that EXP is
3384 greater than or equal to zero. We base the range tests we make
3385 on that fact, so we record it here so we can parse existing
3386 range tests. */
3387 if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
3389 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3390 1, fold_convert (type, integer_zero_node),
3391 NULL_TREE))
3392 break;
3394 in_p = n_in_p, low = n_low, high = n_high;
3396 /* If the high bound is missing, but we have a nonzero low
3397 bound, reverse the range so it goes from zero to the low bound
3398 minus 1. */
3399 if (high == 0 && low && ! integer_zerop (low))
3401 in_p = ! in_p;
3402 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3403 integer_one_node, 0);
3404 low = fold_convert (type, integer_zero_node);
3407 continue;
3409 case NEGATE_EXPR:
3410 /* (-x) IN [a,b] -> x in [-b, -a] */
3411 n_low = range_binop (MINUS_EXPR, type,
3412 fold_convert (type, integer_zero_node),
3413 0, high, 1);
3414 n_high = range_binop (MINUS_EXPR, type,
3415 fold_convert (type, integer_zero_node),
3416 0, low, 0);
3417 low = n_low, high = n_high;
3418 exp = arg0;
3419 continue;
3421 case BIT_NOT_EXPR:
3422 /* ~ X -> -X - 1 */
3423 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3424 fold_convert (type, integer_one_node));
3425 continue;
3427 case PLUS_EXPR: case MINUS_EXPR:
3428 if (TREE_CODE (arg1) != INTEGER_CST)
3429 break;
3431 /* If EXP is signed, any overflow in the computation is undefined,
3432 so we don't worry about it so long as our computations on
3433 the bounds don't overflow. For unsigned, overflow is defined
3434 and this is exactly the right thing. */
3435 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3436 type, low, 0, arg1, 0);
3437 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3438 type, high, 1, arg1, 0);
3439 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3440 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3441 break;
3443 /* Check for an unsigned range which has wrapped around the maximum
3444 value thus making n_high < n_low, and normalize it. */
3445 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3447 low = range_binop (PLUS_EXPR, type, n_high, 0,
3448 integer_one_node, 0);
3449 high = range_binop (MINUS_EXPR, type, n_low, 0,
3450 integer_one_node, 0);
3452 /* If the range is of the form +/- [ x+1, x ], we won't
3453 be able to normalize it. But then, it represents the
3454 whole range or the empty set, so make it
3455 +/- [ -, - ]. */
3456 if (tree_int_cst_equal (n_low, low)
3457 && tree_int_cst_equal (n_high, high))
3458 low = high = 0;
3459 else
3460 in_p = ! in_p;
3462 else
3463 low = n_low, high = n_high;
3465 exp = arg0;
3466 continue;
3468 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3469 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3470 break;
3472 if (! INTEGRAL_TYPE_P (type)
3473 || (low != 0 && ! int_fits_type_p (low, type))
3474 || (high != 0 && ! int_fits_type_p (high, type)))
3475 break;
3477 n_low = low, n_high = high;
3479 if (n_low != 0)
3480 n_low = fold_convert (type, n_low);
3482 if (n_high != 0)
3483 n_high = fold_convert (type, n_high);
3485 /* If we're converting from an unsigned to a signed type,
3486 we will be doing the comparison as unsigned. The tests above
3487 have already verified that LOW and HIGH are both positive.
3489 So we have to make sure that the original unsigned value will
3490 be interpreted as positive. */
3491 if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
3493 tree equiv_type = lang_hooks.types.type_for_mode
3494 (TYPE_MODE (type), 1);
3495 tree high_positive;
3497 /* A range without an upper bound is, naturally, unbounded.
3498 Since convert would have cropped a very large value, use
3499 the max value for the destination type. */
3500 high_positive
3501 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3502 : TYPE_MAX_VALUE (type);
3504 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3505 high_positive = fold (build (RSHIFT_EXPR, type,
3506 fold_convert (type,
3507 high_positive),
3508 fold_convert (type,
3509 integer_one_node)));
3511 /* If the low bound is specified, "and" the range with the
3512 range for which the original unsigned value will be
3513 positive. */
3514 if (low != 0)
3516 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3517 1, n_low, n_high, 1,
3518 fold_convert (type, integer_zero_node),
3519 high_positive))
3520 break;
3522 in_p = (n_in_p == in_p);
3524 else
3526 /* Otherwise, "or" the range with the range of the input
3527 that will be interpreted as negative. */
3528 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3529 0, n_low, n_high, 1,
3530 fold_convert (type, integer_zero_node),
3531 high_positive))
3532 break;
3534 in_p = (in_p != n_in_p);
3538 exp = arg0;
3539 low = n_low, high = n_high;
3540 continue;
3542 default:
3543 break;
3546 break;
3549 /* If EXP is a constant, we can evaluate whether this is true or false. */
3550 if (TREE_CODE (exp) == INTEGER_CST)
3552 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3553 exp, 0, low, 0))
3554 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3555 exp, 1, high, 1)));
3556 low = high = 0;
3557 exp = 0;
3560 *pin_p = in_p, *plow = low, *phigh = high;
3561 return exp;
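/* A standalone check (plain C, two's complement assumed) of the
   BIT_NOT_EXPR identity make_range relies on above: ~X equals
   -X - 1, so a range test on ~X can be restated as one on X. */
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    assert (~x == -x - 1);
  return 0;
}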
3564 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3565 type, TYPE, return an expression to test if EXP is in (or out of, depending
3566 on IN_P) the range. */
3568 static tree
3569 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3571 tree etype = TREE_TYPE (exp);
3572 tree value;
3574 if (! in_p
3575 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3576 return invert_truthvalue (value);
3578 if (low == 0 && high == 0)
3579 return fold_convert (type, integer_one_node);
3581 if (low == 0)
3582 return fold (build (LE_EXPR, type, exp, high));
3584 if (high == 0)
3585 return fold (build (GE_EXPR, type, exp, low));
3587 if (operand_equal_p (low, high, 0))
3588 return fold (build (EQ_EXPR, type, exp, low));
3590 if (integer_zerop (low))
3592 if (! TYPE_UNSIGNED (etype))
3594 etype = lang_hooks.types.unsigned_type (etype);
3595 high = fold_convert (etype, high);
3596 exp = fold_convert (etype, exp);
3598 return build_range_check (type, exp, 1, 0, high);
3601 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3602 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3604 unsigned HOST_WIDE_INT lo;
3605 HOST_WIDE_INT hi;
3606 int prec;
3608 prec = TYPE_PRECISION (etype);
3609 if (prec <= HOST_BITS_PER_WIDE_INT)
3611 hi = 0;
3612 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3614 else
3616 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3617 lo = (unsigned HOST_WIDE_INT) -1;
3620 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3622 if (TYPE_UNSIGNED (etype))
3624 etype = lang_hooks.types.signed_type (etype);
3625 exp = fold_convert (etype, exp);
3627 return fold (build (GT_EXPR, type, exp,
3628 fold_convert (etype, integer_zero_node)));
3632 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3633 && ! TREE_OVERFLOW (value))
3634 return build_range_check (type,
3635 fold (build (MINUS_EXPR, etype, exp, low)),
3636 1, fold_convert (etype, integer_zero_node),
3637 value);
3639 return 0;
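/* A standalone check (plain C) of the special case above that turns
   (c >= 1) && (c <= 127) into (signed char) c > 0, assuming the
   usual 8-bit two's complement signed char with wrapping conversion. */
#include <assert.h>

int
main (void)
{
  unsigned int v;
  for (v = 0; v < 256; v++)
    {
      unsigned char c = (unsigned char) v;
      assert (((c >= 1) && (c <= 127)) == ((signed char) c > 0));
    }
  return 0;
}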
3642 /* Given two ranges, see if we can merge them into one. Return 1 if we
3643 can, 0 if we can't. Set the output range into the specified parameters. */
3645 static int
3646 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3647 tree high0, int in1_p, tree low1, tree high1)
3649 int no_overlap;
3650 int subset;
3651 int temp;
3652 tree tem;
3653 int in_p;
3654 tree low, high;
3655 int lowequal = ((low0 == 0 && low1 == 0)
3656 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3657 low0, 0, low1, 0)));
3658 int highequal = ((high0 == 0 && high1 == 0)
3659 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3660 high0, 1, high1, 1)));
3662 /* Make range 0 be the range that starts first, or ends last if they
3663 start at the same value. Swap them if that isn't the case. */
3664 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3665 low0, 0, low1, 0))
3666 || (lowequal
3667 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3668 high1, 1, high0, 1))))
3670 temp = in0_p, in0_p = in1_p, in1_p = temp;
3671 tem = low0, low0 = low1, low1 = tem;
3672 tem = high0, high0 = high1, high1 = tem;
3675 /* Now flag two cases, whether the ranges are disjoint or whether the
3676 second range is totally subsumed in the first. Note that the tests
3677 below are simplified by the ones above. */
3678 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3679 high0, 1, low1, 0));
3680 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3681 high1, 1, high0, 1));
3683 /* We now have four cases, depending on whether we are including or
3684 excluding the two ranges. */
3685 if (in0_p && in1_p)
3687 /* If they don't overlap, the result is false. If the second range
3688 is a subset it is the result. Otherwise, the range is from the start
3689 of the second to the end of the first. */
3690 if (no_overlap)
3691 in_p = 0, low = high = 0;
3692 else if (subset)
3693 in_p = 1, low = low1, high = high1;
3694 else
3695 in_p = 1, low = low1, high = high0;
3698 else if (in0_p && ! in1_p)
3700 /* If they don't overlap, the result is the first range. If they are
3701 equal, the result is false. If the second range is a subset of the
3702 first, and the ranges begin at the same place, we go from just after
3703 the end of the first range to the end of the second. If the second
3704 range is not a subset of the first, or if it is a subset and both
3705 ranges end at the same place, the range starts at the start of the
3706 first range and ends just before the second range.
3707 Otherwise, we can't describe this as a single range. */
3708 if (no_overlap)
3709 in_p = 1, low = low0, high = high0;
3710 else if (lowequal && highequal)
3711 in_p = 0, low = high = 0;
3712 else if (subset && lowequal)
3714 in_p = 1, high = high0;
3715 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3716 integer_one_node, 0);
3718 else if (! subset || highequal)
3720 in_p = 1, low = low0;
3721 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3722 integer_one_node, 0);
3724 else
3725 return 0;
3728 else if (! in0_p && in1_p)
3730 /* If they don't overlap, the result is the second range. If the second
3731 is a subset of the first, the result is false. Otherwise,
3732 the range starts just after the first range and ends at the
3733 end of the second. */
3734 if (no_overlap)
3735 in_p = 1, low = low1, high = high1;
3736 else if (subset || highequal)
3737 in_p = 0, low = high = 0;
3738 else
3740 in_p = 1, high = high1;
3741 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3742 integer_one_node, 0);
3746 else
3748 /* The case where we are excluding both ranges. Here the complex case
3749 is if they don't overlap. In that case, the only time we have a
3750 range is if they are adjacent. If the second is a subset of the
3751 first, the result is the first. Otherwise, the range to exclude
3752 starts at the beginning of the first range and ends at the end of the
3753 second. */
3754 if (no_overlap)
3756 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3757 range_binop (PLUS_EXPR, NULL_TREE,
3758 high0, 1,
3759 integer_one_node, 1),
3760 1, low1, 0)))
3761 in_p = 0, low = low0, high = high1;
3762 else
3763 return 0;
3765 else if (subset)
3766 in_p = 0, low = low0, high = high0;
3767 else
3768 in_p = 0, low = low0, high = high1;
3771 *pin_p = in_p, *plow = low, *phigh = high;
3772 return 1;
3775 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3776 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3777 #endif
3779 /* EXP is some logical combination of boolean tests. See if we can
3780 merge it into some range test. Return the new tree if so. */
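/* An illustrative sketch (operand types assumed for the example): for
   a signed int CH, the two comparisons in

     ch >= '0' && ch <= '9'

   each become a range, merge_ranges combines them into the single
   range ['0', '9'], and build_range_check emits the one unsigned test

     (unsigned int) (ch - '0') <= 9U

   replacing both branches.  */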
3782 static tree
3783 fold_range_test (tree exp)
3785 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3786 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3787 int in0_p, in1_p, in_p;
3788 tree low0, low1, low, high0, high1, high;
3789 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3790 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3791 tree tem;
3793 /* If this is an OR operation, invert both sides; we will invert
3794 again at the end. */
3795 if (or_op)
3796 in0_p = ! in0_p, in1_p = ! in1_p;
3798 /* If both expressions are the same, if we can merge the ranges, and we
3799 can build the range test, return it or it inverted. If one of the
3800 ranges is always true or always false, consider it to be the same
3801 expression as the other. */
3802 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3803 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3804 in1_p, low1, high1)
3805 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3806 lhs != 0 ? lhs
3807 : rhs != 0 ? rhs : integer_zero_node,
3808 in_p, low, high))))
3809 return or_op ? invert_truthvalue (tem) : tem;
3811 /* On machines where branches are expensive, if this is a
3812 short-circuited branch and the underlying object on both sides
3813 is the same, make a non-short-circuit operation. */
3814 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3815 && lhs != 0 && rhs != 0
3816 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3817 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3818 && operand_equal_p (lhs, rhs, 0))
3820 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3821 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3822 which cases we can't do this. */
3823 if (simple_operand_p (lhs))
3824 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3825 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3826 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3827 TREE_OPERAND (exp, 1));
3829 else if (lang_hooks.decls.global_bindings_p () == 0
3830 && ! CONTAINS_PLACEHOLDER_P (lhs))
3832 tree common = save_expr (lhs);
3834 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3835 or_op ? ! in0_p : in0_p,
3836 low0, high0))
3837 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3838 or_op ? ! in1_p : in1_p,
3839 low1, high1))))
3840 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3841 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3842 TREE_TYPE (exp), lhs, rhs);
3846 return 0;
3849 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3850 bit value. Arrange things so the extra bits will be set to zero if and
3851 only if C is sign-extended to its full width. If MASK is nonzero,
3852 it is an INTEGER_CST that should be AND'ed with the extra bits. */
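/* A worked example (mode width illustrative): in a 16-bit mode with
   P == 8, UNSIGNEDP == 0, MASK == 0 and C == 0x00ff, the sign bit of
   the 8-bit field is 1; TEMP becomes 0xff00 after the shifts below,
   and C ^ TEMP == 0xffff, i.e. the 8-bit value -1 sign-extended to
   16 bits.  With C == 0x007f the sign bit is 0, TEMP is 0, and C is
   returned unchanged.  */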
3854 static tree
3855 unextend (tree c, int p, int unsignedp, tree mask)
3857 tree type = TREE_TYPE (c);
3858 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3859 tree temp;
3861 if (p == modesize || unsignedp)
3862 return c;
3864 /* We work by getting just the sign bit into the low-order bit, then
3865 into the high-order bit, then sign-extend. We then XOR that value
3866 with C. */
3867 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3868 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3870 /* We must use a signed type in order to get an arithmetic right shift.
3871 However, we must also avoid introducing accidental overflows, so that
3872 a subsequent call to integer_zerop will work. Hence we must
3873 do the type conversion here. At this point, the constant is either
3874 zero or one, and the conversion to a signed type can never overflow.
3875 We could get an overflow if this conversion is done anywhere else. */
3876 if (TYPE_UNSIGNED (type))
3877 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
3879 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3880 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3881 if (mask != 0)
3882 temp = const_binop (BIT_AND_EXPR, temp,
3883 fold_convert (TREE_TYPE (c), mask), 0);
3884 /* If necessary, convert the type back to match the type of C. */
3885 if (TYPE_UNSIGNED (type))
3886 temp = fold_convert (type, temp);
3888 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3891 /* Find ways of folding logical expressions of LHS and RHS:
3892 Try to merge two comparisons to the same innermost item.
3893 Look for range tests like "ch >= '0' && ch <= '9'".
3894 Look for combinations of simple terms on machines with expensive branches
3895 and evaluate the RHS unconditionally.
3897 For example, if we have p->a == 2 && p->b == 4 and we can make an
3898 object large enough to span both A and B, we can do this with a comparison
3899 against the object ANDed with a mask.
3901 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3902 operations to do this with one comparison.
3904 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3905 function and the one above.
3907 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3908 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3910 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3911 two operands.
3913 We return the simplified tree or 0 if no optimization is possible. */
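/* A sketch of the merge on a hypothetical little-endian layout (field
   positions for illustration only):

     struct s { unsigned a : 4; unsigned b : 4; } *p;

     p->a == 2 && p->b == 4

   can become one load and compare of the containing byte,

     *(unsigned char *) p == 0x42

   with the mask dropped because it covers every bit; partially
   covered bytes keep a BIT_AND_EXPR with the combined mask.  */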
3915 static tree
3916 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3918 /* If this is the "or" of two comparisons, we can do something if
3919 the comparisons are NE_EXPR. If this is the "and", we can do something
3920 if the comparisons are EQ_EXPR. I.e.,
3921 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3923 WANTED_CODE is this operation code. For single bit fields, we can
3924 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3925 comparison for one-bit fields. */
3927 enum tree_code wanted_code;
3928 enum tree_code lcode, rcode;
3929 tree ll_arg, lr_arg, rl_arg, rr_arg;
3930 tree ll_inner, lr_inner, rl_inner, rr_inner;
3931 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3932 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3933 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3934 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3935 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3936 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3937 enum machine_mode lnmode, rnmode;
3938 tree ll_mask, lr_mask, rl_mask, rr_mask;
3939 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3940 tree l_const, r_const;
3941 tree lntype, rntype, result;
3942 int first_bit, end_bit;
3943 int volatilep;
3945 /* Start by getting the comparison codes. Fail if anything is volatile.
3946 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3947 it were surrounded with a NE_EXPR. */
3949 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3950 return 0;
3952 lcode = TREE_CODE (lhs);
3953 rcode = TREE_CODE (rhs);
3955 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3956 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3958 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3959 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3961 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3962 return 0;
3964 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3965 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3967 ll_arg = TREE_OPERAND (lhs, 0);
3968 lr_arg = TREE_OPERAND (lhs, 1);
3969 rl_arg = TREE_OPERAND (rhs, 0);
3970 rr_arg = TREE_OPERAND (rhs, 1);
3972 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3973 if (simple_operand_p (ll_arg)
3974 && simple_operand_p (lr_arg)
3975 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3977 int compcode;
3979 if (operand_equal_p (ll_arg, rl_arg, 0)
3980 && operand_equal_p (lr_arg, rr_arg, 0))
3982 int lcompcode, rcompcode;
3984 lcompcode = comparison_to_compcode (lcode);
3985 rcompcode = comparison_to_compcode (rcode);
3986 compcode = (code == TRUTH_AND_EXPR)
3987 ? lcompcode & rcompcode
3988 : lcompcode | rcompcode;
3990 else if (operand_equal_p (ll_arg, rr_arg, 0)
3991 && operand_equal_p (lr_arg, rl_arg, 0))
3993 int lcompcode, rcompcode;
3995 rcode = swap_tree_comparison (rcode);
3996 lcompcode = comparison_to_compcode (lcode);
3997 rcompcode = comparison_to_compcode (rcode);
3998 compcode = (code == TRUTH_AND_EXPR)
3999 ? lcompcode & rcompcode
4000 : lcompcode | rcompcode;
4002 else
4003 compcode = -1;
4005 if (compcode == COMPCODE_TRUE)
4006 return fold_convert (truth_type, integer_one_node);
4007 else if (compcode == COMPCODE_FALSE)
4008 return fold_convert (truth_type, integer_zero_node);
4009 else if (compcode != -1)
4010 return build (compcode_to_comparison (compcode),
4011 truth_type, ll_arg, lr_arg);
4014 /* If the RHS can be evaluated unconditionally and its operands are
4015 simple, it wins to evaluate the RHS unconditionally on machines
4016 with expensive branches. In this case, this isn't a comparison
4017 that can be merged. Avoid doing this if the RHS is a floating-point
4018 comparison since those can trap. */
4020 if (BRANCH_COST >= 2
4021 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4022 && simple_operand_p (rl_arg)
4023 && simple_operand_p (rr_arg))
4025 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4026 if (code == TRUTH_OR_EXPR
4027 && lcode == NE_EXPR && integer_zerop (lr_arg)
4028 && rcode == NE_EXPR && integer_zerop (rr_arg)
4029 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4030 return build (NE_EXPR, truth_type,
4031 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4032 ll_arg, rl_arg),
4033 integer_zero_node);
4035 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4036 if (code == TRUTH_AND_EXPR
4037 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4038 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4039 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4040 return build (EQ_EXPR, truth_type,
4041 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4042 ll_arg, rl_arg),
4043 integer_zero_node);
4045 return build (code, truth_type, lhs, rhs);
4048 /* See if the comparisons can be merged. Then get all the parameters for
4049 each side. */
4051 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4052 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4053 return 0;
4055 volatilep = 0;
4056 ll_inner = decode_field_reference (ll_arg,
4057 &ll_bitsize, &ll_bitpos, &ll_mode,
4058 &ll_unsignedp, &volatilep, &ll_mask,
4059 &ll_and_mask);
4060 lr_inner = decode_field_reference (lr_arg,
4061 &lr_bitsize, &lr_bitpos, &lr_mode,
4062 &lr_unsignedp, &volatilep, &lr_mask,
4063 &lr_and_mask);
4064 rl_inner = decode_field_reference (rl_arg,
4065 &rl_bitsize, &rl_bitpos, &rl_mode,
4066 &rl_unsignedp, &volatilep, &rl_mask,
4067 &rl_and_mask);
4068 rr_inner = decode_field_reference (rr_arg,
4069 &rr_bitsize, &rr_bitpos, &rr_mode,
4070 &rr_unsignedp, &volatilep, &rr_mask,
4071 &rr_and_mask);
4073 /* The inner operation on the lhs of each comparison must be the
4074 same if we are to be able to do anything.
4075 Then see if we have constants. If not, the same must be true for
4076 the rhs's. */
4077 if (volatilep || ll_inner == 0 || rl_inner == 0
4078 || ! operand_equal_p (ll_inner, rl_inner, 0))
4079 return 0;
4081 if (TREE_CODE (lr_arg) == INTEGER_CST
4082 && TREE_CODE (rr_arg) == INTEGER_CST)
4083 l_const = lr_arg, r_const = rr_arg;
4084 else if (lr_inner == 0 || rr_inner == 0
4085 || ! operand_equal_p (lr_inner, rr_inner, 0))
4086 return 0;
4087 else
4088 l_const = r_const = 0;
4090 /* If either comparison code is not correct for our logical operation,
4091 fail. However, we can convert a one-bit comparison against zero into
4092 the opposite comparison against that bit being set in the field. */
4094 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4095 if (lcode != wanted_code)
4097 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4099 /* Make the left operand unsigned, since we are only interested
4100 in the value of one bit. Otherwise we are doing the wrong
4101 thing below. */
4102 ll_unsignedp = 1;
4103 l_const = ll_mask;
4105 else
4106 return 0;
4109 /* This is analogous to the code for l_const above. */
4110 if (rcode != wanted_code)
4112 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4114 rl_unsignedp = 1;
4115 r_const = rl_mask;
4117 else
4118 return 0;
4121 /* After this point all optimizations will generate bit-field
4122 references, which we might not want. */
4123 if (! lang_hooks.can_use_bit_fields_p ())
4124 return 0;
4126 /* See if we can find a mode that contains both fields being compared on
4127 the left. If we can't, fail. Otherwise, update all constants and masks
4128 to be relative to a field of that size. */
4129 first_bit = MIN (ll_bitpos, rl_bitpos);
4130 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4131 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4132 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4133 volatilep);
4134 if (lnmode == VOIDmode)
4135 return 0;
4137 lnbitsize = GET_MODE_BITSIZE (lnmode);
4138 lnbitpos = first_bit & ~ (lnbitsize - 1);
4139 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4140 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4142 if (BYTES_BIG_ENDIAN)
4144 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4145 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4148 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4149 size_int (xll_bitpos), 0);
4150 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4151 size_int (xrl_bitpos), 0);
4153 if (l_const)
4155 l_const = fold_convert (lntype, l_const);
4156 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4157 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4158 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4159 fold (build1 (BIT_NOT_EXPR,
4160 lntype, ll_mask)),
4161 0)))
4163 warning ("comparison is always %d", wanted_code == NE_EXPR);
4165 return fold_convert (truth_type,
4166 wanted_code == NE_EXPR
4167 ? integer_one_node : integer_zero_node);
4170 if (r_const)
4172 r_const = fold_convert (lntype, r_const);
4173 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4174 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4175 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4176 fold (build1 (BIT_NOT_EXPR,
4177 lntype, rl_mask)),
4178 0)))
4180 warning ("comparison is always %d", wanted_code == NE_EXPR);
4182 return fold_convert (truth_type,
4183 wanted_code == NE_EXPR
4184 ? integer_one_node : integer_zero_node);
4188 /* If the right sides are not constant, do the same for them. Also,
4189 disallow this optimization if a size or signedness mismatch occurs
4190 between the left and right sides. */
4191 if (l_const == 0)
4193 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4194 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4195 /* Make sure the two fields on the right
4196 correspond to the left without being swapped. */
4197 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4198 return 0;
4200 first_bit = MIN (lr_bitpos, rr_bitpos);
4201 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4202 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4203 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4204 volatilep);
4205 if (rnmode == VOIDmode)
4206 return 0;
4208 rnbitsize = GET_MODE_BITSIZE (rnmode);
4209 rnbitpos = first_bit & ~ (rnbitsize - 1);
4210 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4211 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4213 if (BYTES_BIG_ENDIAN)
4215 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4216 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4219 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4220 size_int (xlr_bitpos), 0);
4221 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4222 size_int (xrr_bitpos), 0);
4224 /* Make a mask that corresponds to both fields being compared.
4225 Do this for both items being compared. If the operands are the
4226 same size and the bits being compared are in the same position
4227 then we can do this by masking both and comparing the masked
4228 results. */
4229 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4230 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4231 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4233 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4234 ll_unsignedp || rl_unsignedp);
4235 if (! all_ones_mask_p (ll_mask, lnbitsize))
4236 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4238 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4239 lr_unsignedp || rr_unsignedp);
4240 if (! all_ones_mask_p (lr_mask, rnbitsize))
4241 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4243 return build (wanted_code, truth_type, lhs, rhs);
4246 /* There is still another way we can do something: If both pairs of
4247 fields being compared are adjacent, we may be able to make a wider
4248 field containing them both.
4250 Note that we still must mask the lhs/rhs expressions. Furthermore,
4251 the mask must be shifted to account for the shift done by
4252 make_bit_field_ref. */
4253 if ((ll_bitsize + ll_bitpos == rl_bitpos
4254 && lr_bitsize + lr_bitpos == rr_bitpos)
4255 || (ll_bitpos == rl_bitpos + rl_bitsize
4256 && lr_bitpos == rr_bitpos + rr_bitsize))
4258 tree type;
4260 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4261 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4262 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4263 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4265 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4266 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4267 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4268 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4270 /* Convert to the smaller type before masking out unwanted bits. */
4271 type = lntype;
4272 if (lntype != rntype)
4274 if (lnbitsize > rnbitsize)
4276 lhs = fold_convert (rntype, lhs);
4277 ll_mask = fold_convert (rntype, ll_mask);
4278 type = rntype;
4280 else if (lnbitsize < rnbitsize)
4282 rhs = fold_convert (lntype, rhs);
4283 lr_mask = fold_convert (lntype, lr_mask);
4284 type = lntype;
4288 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4289 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4291 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4292 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4294 return build (wanted_code, truth_type, lhs, rhs);
4297 return 0;
4300 /* Handle the case of comparisons with constants. If there is something in
4301 common between the masks, those bits of the constants must be the same.
4302 If not, the condition is always false. Test for this to avoid generating
4303 incorrect code below. */
4304 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4305 if (! integer_zerop (result)
4306 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4307 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4309 if (wanted_code == NE_EXPR)
4311 warning ("`or' of unmatched not-equal tests is always 1");
4312 return fold_convert (truth_type, integer_one_node);
4314 else
4316 warning ("`and' of mutually exclusive equal-tests is always 0");
4317 return fold_convert (truth_type, integer_zero_node);
4321 /* Construct the expression we will return. First get the component
4322 reference we will make. Unless the mask is all ones the width of
4323 that field, perform the mask operation. Then compare with the
4324 merged constant. */
4325 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4326 ll_unsignedp || rl_unsignedp);
4328 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4329 if (! all_ones_mask_p (ll_mask, lnbitsize))
4330 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4332 return build (wanted_code, truth_type, result,
4333 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4336 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4337 constant. */
4339 static tree
4340 optimize_minmax_comparison (tree t)
4342 tree type = TREE_TYPE (t);
4343 tree arg0 = TREE_OPERAND (t, 0);
4344 enum tree_code op_code;
4345 tree comp_const = TREE_OPERAND (t, 1);
4346 tree minmax_const;
4347 int consts_equal, consts_lt;
4348 tree inner;
4350 STRIP_SIGN_NOPS (arg0);
4352 op_code = TREE_CODE (arg0);
4353 minmax_const = TREE_OPERAND (arg0, 1);
4354 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4355 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4356 inner = TREE_OPERAND (arg0, 0);
4358 /* If something does not permit us to optimize, return the original tree. */
4359 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4360 || TREE_CODE (comp_const) != INTEGER_CST
4361 || TREE_CONSTANT_OVERFLOW (comp_const)
4362 || TREE_CODE (minmax_const) != INTEGER_CST
4363 || TREE_CONSTANT_OVERFLOW (minmax_const))
4364 return t;
4366 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4367 and GT_EXPR, doing the rest with recursive calls using logical
4368 simplifications. */
4369 switch (TREE_CODE (t))
4371 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4372 return
4373 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4375 case GE_EXPR:
4376 return
4377 fold (build (TRUTH_ORIF_EXPR, type,
4378 optimize_minmax_comparison
4379 (build (EQ_EXPR, type, arg0, comp_const)),
4380 optimize_minmax_comparison
4381 (build (GT_EXPR, type, arg0, comp_const))));
4383 case EQ_EXPR:
4384 if (op_code == MAX_EXPR && consts_equal)
4385 /* MAX (X, 0) == 0 -> X <= 0 */
4386 return fold (build (LE_EXPR, type, inner, comp_const));
4388 else if (op_code == MAX_EXPR && consts_lt)
4389 /* MAX (X, 0) == 5 -> X == 5 */
4390 return fold (build (EQ_EXPR, type, inner, comp_const));
4392 else if (op_code == MAX_EXPR)
4393 /* MAX (X, 0) == -1 -> false */
4394 return omit_one_operand (type, integer_zero_node, inner);
4396 else if (consts_equal)
4397 /* MIN (X, 0) == 0 -> X >= 0 */
4398 return fold (build (GE_EXPR, type, inner, comp_const));
4400 else if (consts_lt)
4401 /* MIN (X, 0) == 5 -> false */
4402 return omit_one_operand (type, integer_zero_node, inner);
4404 else
4405 /* MIN (X, 0) == -1 -> X == -1 */
4406 return fold (build (EQ_EXPR, type, inner, comp_const));
4408 case GT_EXPR:
4409 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4410 /* MAX (X, 0) > 0 -> X > 0
4411 MAX (X, 0) > 5 -> X > 5 */
4412 return fold (build (GT_EXPR, type, inner, comp_const));
4414 else if (op_code == MAX_EXPR)
4415 /* MAX (X, 0) > -1 -> true */
4416 return omit_one_operand (type, integer_one_node, inner);
4418 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4419 /* MIN (X, 0) > 0 -> false
4420 MIN (X, 0) > 5 -> false */
4421 return omit_one_operand (type, integer_zero_node, inner);
4423 else
4424 /* MIN (X, 0) > -1 -> X > -1 */
4425 return fold (build (GT_EXPR, type, inner, comp_const));
4427 default:
4428 return t;
4432 /* T is an integer expression that is being multiplied, divided, or taken a
4433 modulus (CODE says which and what kind of divide or modulus) by a
4434 constant C. See if we can eliminate that operation by folding it with
4435 other operations already in T. WIDE_TYPE, if non-null, is a type that
4436 should be used for the computation if wider than our type.
4438 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4439 (X * 2) + (Y * 4). We must, however, be assured that either the original
4440 expression would not overflow or that overflow is undefined for the type
4441 in the language in question.
4443 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4444 the machine has a multiply-accumulate insn or that this is part of an
4445 addressing calculation.
4447 If we return a non-null expression, it is an equivalent form of the
4448 original computation, but need not be in the original type. */
4450 static tree
4451 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4453 /* To avoid exponential search depth, refuse to allow recursion past
4454 three levels. Beyond that (1) it's highly unlikely that we'll find
4455 something interesting and (2) we've probably processed it before
4456 when we built the inner expression. */
4458 static int depth;
4459 tree ret;
4461 if (depth > 3)
4462 return NULL;
4464 depth++;
4465 ret = extract_muldiv_1 (t, c, code, wide_type);
4466 depth--;
4468 return ret;
4471 static tree
4472 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4474 tree type = TREE_TYPE (t);
4475 enum tree_code tcode = TREE_CODE (t);
4476 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4477 > GET_MODE_SIZE (TYPE_MODE (type)))
4478 ? wide_type : type);
4479 tree t1, t2;
4480 int same_p = tcode == code;
4481 tree op0 = NULL_TREE, op1 = NULL_TREE;
4483 /* Don't deal with constants of zero here; they confuse the code below. */
4484 if (integer_zerop (c))
4485 return NULL_TREE;
4487 if (TREE_CODE_CLASS (tcode) == '1')
4488 op0 = TREE_OPERAND (t, 0);
4490 if (TREE_CODE_CLASS (tcode) == '2')
4491 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4493 /* Note that we need not handle conditional operations here since fold
4494 already handles those cases. So just do arithmetic here. */
4495 switch (tcode)
4497 case INTEGER_CST:
4498 /* For a constant, we can always simplify if we are a multiply
4499 or (for divide and modulus) if it is a multiple of our constant. */
4500 if (code == MULT_EXPR
4501 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4502 return const_binop (code, fold_convert (ctype, t),
4503 fold_convert (ctype, c), 0);
4504 break;
4506 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4507 /* If op0 is an expression ... */
4508 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4509 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4510 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4511 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4512 /* ... and is unsigned, and its type is smaller than ctype,
4513 then we cannot pass through this widening.
4514 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
4515 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4516 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4517 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4518 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4519 /* ... or its type is larger than ctype,
4520 then we cannot pass through this truncation. */
4521 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4522 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4523 /* ... or signedness changes for division or modulus,
4524 then we cannot pass through this conversion. */
4525 || (code != MULT_EXPR
4526 && (TYPE_UNSIGNED (ctype)
4527 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
4528 break;
4530 /* Pass the constant down and see if we can make a simplification. If
4531 we can, replace this expression with the inner simplification for
4532 possible later conversion to our or some other type. */
4533 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4534 && TREE_CODE (t2) == INTEGER_CST
4535 && ! TREE_CONSTANT_OVERFLOW (t2)
4536 && (0 != (t1 = extract_muldiv (op0, t2, code,
4537 code == MULT_EXPR
4538 ? ctype : NULL_TREE))))
4539 return t1;
4540 break;
4542 case NEGATE_EXPR: case ABS_EXPR:
4543 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4544 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4545 break;
4547 case MIN_EXPR: case MAX_EXPR:
4548 /* If widening the type changes the signedness, then we can't perform
4549 this optimization as that changes the result. */
4550 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
4551 break;
4553 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4554 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4555 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4557 if (tree_int_cst_sgn (c) < 0)
4558 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4560 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4561 fold_convert (ctype, t2)));
4563 break;
4565 case LSHIFT_EXPR: case RSHIFT_EXPR:
4566 /* If the second operand is constant, this is a multiplication
4567 or floor division by a power of two, so we can treat it that
4568 way unless the multiplier or divisor overflows. */
4569 if (TREE_CODE (op1) == INTEGER_CST
4570 /* const_binop may not detect overflow correctly,
4571 so check for it explicitly here. */
4572 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4573 && TREE_INT_CST_HIGH (op1) == 0
4574 && 0 != (t1 = fold_convert (ctype,
4575 const_binop (LSHIFT_EXPR,
4576 size_one_node,
4577 op1, 0)))
4578 && ! TREE_OVERFLOW (t1))
4579 return extract_muldiv (build (tcode == LSHIFT_EXPR
4580 ? MULT_EXPR : FLOOR_DIV_EXPR,
4581 ctype, fold_convert (ctype, op0), t1),
4582 c, code, wide_type);
4583 break;
4585 case PLUS_EXPR: case MINUS_EXPR:
4586 /* See if we can eliminate the operation on both sides. If we can, we
4587 can return a new PLUS or MINUS. If we can't, the only remaining
4588 cases where we can do anything are if the second operand is a
4589 constant. */
4590 t1 = extract_muldiv (op0, c, code, wide_type);
4591 t2 = extract_muldiv (op1, c, code, wide_type);
4592 if (t1 != 0 && t2 != 0
4593 && (code == MULT_EXPR
4594 /* If not multiplication, we can only do this if both operands
4595 are divisible by c. */
4596 || (multiple_of_p (ctype, op0, c)
4597 && multiple_of_p (ctype, op1, c))))
4598 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4599 fold_convert (ctype, t2)));
4601 /* If this was a subtraction, negate OP1 and set it to be an addition.
4602 This simplifies the logic below. */
4603 if (tcode == MINUS_EXPR)
4604 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4606 if (TREE_CODE (op1) != INTEGER_CST)
4607 break;
4609 /* If either OP1 or C are negative, this optimization is not safe for
4610 some of the division and remainder types, while for others we need
4611 to change the code. */
4612 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4614 if (code == CEIL_DIV_EXPR)
4615 code = FLOOR_DIV_EXPR;
4616 else if (code == FLOOR_DIV_EXPR)
4617 code = CEIL_DIV_EXPR;
4618 else if (code != MULT_EXPR
4619 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4620 break;
4623 /* If it's a multiply or a division/modulus operation of a multiple
4624 of our constant, do the operation and verify it doesn't overflow. */
4625 if (code == MULT_EXPR
4626 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4628 op1 = const_binop (code, fold_convert (ctype, op1),
4629 fold_convert (ctype, c), 0);
4630 /* We allow the constant to overflow with wrapping semantics. */
4631 if (op1 == 0
4632 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4633 break;
4635 else
4636 break;
4638 /* If we have an unsigned type that is not a sizetype, we cannot widen
4639 the operation since it will change the result if the original
4640 computation overflowed. */
4641 if (TYPE_UNSIGNED (ctype)
4642 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4643 && ctype != type)
4644 break;
4646 /* If we were able to eliminate our operation from the first side,
4647 apply our operation to the second side and reform the PLUS. */
4648 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4649 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4651 /* The last case is if we are a multiply. In that case, we can
4652 apply the distributive law to commute the multiply and addition
4653 if the multiplication of the constants doesn't overflow. */
4654 if (code == MULT_EXPR)
4655 return fold (build (tcode, ctype,
4656 fold (build (code, ctype,
4657 fold_convert (ctype, op0),
4658 fold_convert (ctype, c))),
4659 op1));
4661 break;
4663 case MULT_EXPR:
4664 /* We have a special case here if we are doing something like
4665 (C * 8) % 4 since we know that's zero. */
4666 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4667 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4668 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4669 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4670 return omit_one_operand (type, integer_zero_node, op0);
4672 /* ... fall through ... */
4674 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4675 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4676 /* If we can extract our operation from the LHS, do so and return a
4677 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4678 do something only if the second operand is a constant. */
4679 if (same_p
4680 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4681 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4682 fold_convert (ctype, op1)));
4683 else if (tcode == MULT_EXPR && code == MULT_EXPR
4684 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4685 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4686 fold_convert (ctype, t1)));
4687 else if (TREE_CODE (op1) != INTEGER_CST)
4688 return 0;
4690 /* If these are the same operation types, we can associate them
4691 assuming no overflow. */
4692 if (tcode == code
4693 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4694 fold_convert (ctype, c), 0))
4695 && ! TREE_OVERFLOW (t1))
4696 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4698 /* If these operations "cancel" each other, we have the main
4699 optimizations of this pass, which occur when either constant is a
4700 multiple of the other, in which case we replace this with either an
4701 operation of CODE or TCODE.
4703 If we have an unsigned type that is not a sizetype, we cannot do
4704 this since it will change the result if the original computation
4705 overflowed. */
4706 if ((! TYPE_UNSIGNED (ctype)
4707 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4708 && ! flag_wrapv
4709 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4710 || (tcode == MULT_EXPR
4711 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4712 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4714 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4715 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4716 fold_convert (ctype,
4717 const_binop (TRUNC_DIV_EXPR,
4718 op1, c, 0))));
4719 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4720 return fold (build (code, ctype, fold_convert (ctype, op0),
4721 fold_convert (ctype,
4722 const_binop (TRUNC_DIV_EXPR,
4723 c, op1, 0))));
4725 break;
4727 default:
4728 break;
4731 return 0;
4734 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4735 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4736 that we may sometimes modify the tree. */
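/* For example, if T is ((void) S, X), built only to force the
   SAVE_EXPR S to be evaluated, this returns X.  COND_EXPRs and simple
   unary, binary and comparison operands are walked recursively.  */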
4738 static tree
4739 strip_compound_expr (tree t, tree s)
4741 enum tree_code code = TREE_CODE (t);
4743 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4744 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4745 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4746 return TREE_OPERAND (t, 1);
4748 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4749 don't bother handling any other types. */
4750 else if (code == COND_EXPR)
4752 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4753 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4754 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4756 else if (TREE_CODE_CLASS (code) == '1')
4757 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4758 else if (TREE_CODE_CLASS (code) == '<'
4759 || TREE_CODE_CLASS (code) == '2')
4761 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4762 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4765 return t;
4768 /* Return a node which has the indicated constant VALUE (either 0 or
4769 1), and is of the indicated TYPE. */
4771 static tree
4772 constant_boolean_node (int value, tree type)
4774 if (type == integer_type_node)
4775 return value ? integer_one_node : integer_zero_node;
4776 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4777 return lang_hooks.truthvalue_conversion (value ? integer_one_node
4778 : integer_zero_node);
4779 else
4781 tree t = build_int_2 (value, 0);
4783 TREE_TYPE (t) = type;
4784 return t;
4788 /* Utility function for the following routine, to see how complex a nesting of
4789 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4790 we don't care (to avoid spending too much time on complex expressions). */
4792 static int
4793 count_cond (tree expr, int lim)
4795 int ctrue, cfalse;
4797 if (TREE_CODE (expr) != COND_EXPR)
4798 return 0;
4799 else if (lim <= 0)
4800 return 0;
4802 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4803 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4804 return MIN (lim, 1 + ctrue + cfalse);
4807 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4808 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4809 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4810 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4811 COND is the first argument to CODE; otherwise (as in the example
4812 given here), it is the second argument. TYPE is the type of the
4813 original expression. Return NULL_TREE if no simplification is
4814 possible. */
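/* An illustrative case: for `(b ? 2 : 3) + 1' with COND_FIRST_P
   nonzero, both arms fold to constants and the result is simply
   `b ? 3 : 4'.  Only when neither arm folds to a constant and ARG is
   nontrivial is ARG wrapped in a SAVE_EXPR, as explained in the body
   below, so that it is evaluated exactly once.  */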
4816 static tree
4817 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4818 tree cond, tree arg, int cond_first_p)
4820 tree test, true_value, false_value;
4821 tree lhs = NULL_TREE;
4822 tree rhs = NULL_TREE;
4823 /* In the end, we'll produce a COND_EXPR. Both arms of the
4824 conditional expression will be binary operations. The left-hand
4825 side of the expression to be executed if the condition is true
4826 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4827 of the expression to be executed if the condition is true will be
4828 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4829 but apply to the expression to be executed if the conditional is
4830 false. */
4831 tree *true_lhs;
4832 tree *true_rhs;
4833 tree *false_lhs;
4834 tree *false_rhs;
4835 /* These are the codes to use for the left-hand side and right-hand
4836 side of the COND_EXPR. Normally, they are the same as CODE. */
4837 enum tree_code lhs_code = code;
4838 enum tree_code rhs_code = code;
4839 /* And these are the types of the expressions. */
4840 tree lhs_type = type;
4841 tree rhs_type = type;
4842 int save = 0;
4844 if (TREE_CODE (cond) != COND_EXPR
4845 && TREE_CODE_CLASS (code) == '<')
4846 return NULL_TREE;
4848 if (TREE_CODE (arg) == COND_EXPR
4849 && count_cond (cond, 25) + count_cond (arg, 25) > 25)
4850 return NULL_TREE;
4852 if (TREE_SIDE_EFFECTS (arg)
4853 && (lang_hooks.decls.global_bindings_p () != 0
4854 || CONTAINS_PLACEHOLDER_P (arg)))
4855 return NULL_TREE;
4857 if (cond_first_p)
4859 true_rhs = false_rhs = &arg;
4860 true_lhs = &true_value;
4861 false_lhs = &false_value;
4863 else
4865 true_lhs = false_lhs = &arg;
4866 true_rhs = &true_value;
4867 false_rhs = &false_value;
4870 if (TREE_CODE (cond) == COND_EXPR)
4872 test = TREE_OPERAND (cond, 0);
4873 true_value = TREE_OPERAND (cond, 1);
4874 false_value = TREE_OPERAND (cond, 2);
4875 /* If this operand throws an exception, then it does not make
4876 sense to try to perform a logical or arithmetic operation
4877 involving it. Instead of building `a + throw 3' for example,
4878 we simply build `a, throw 3'. */
4879 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4881 if (! cond_first_p)
4883 lhs_code = COMPOUND_EXPR;
4884 lhs_type = void_type_node;
4886 else
4887 lhs = true_value;
4889 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4891 if (! cond_first_p)
4893 rhs_code = COMPOUND_EXPR;
4894 rhs_type = void_type_node;
4896 else
4897 rhs = false_value;
4900 else
4902 tree testtype = TREE_TYPE (cond);
4903 test = cond;
4904 true_value = fold_convert (testtype, integer_one_node);
4905 false_value = fold_convert (testtype, integer_zero_node);
4908 /* If ARG is complex we want to make sure we only evaluate it once. Though
4909 this is only required if it is volatile, it might be more efficient even
4910 if it is not. However, if we succeed in folding one part to a constant,
4911 we do not need to make this SAVE_EXPR. Since we do this optimization
4912 primarily to see if we do end up with a constant and this SAVE_EXPR
4913 interferes with later optimizations, suppressing it when we can is
4914 important.
4916 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4917 do so. Don't try to see if the result is a constant if an arm is a
4918 COND_EXPR since we get exponential behavior in that case. */
4920 if (saved_expr_p (arg))
4921 save = 1;
4922 else if (lhs == 0 && rhs == 0
4923 && !TREE_CONSTANT (arg)
4924 && lang_hooks.decls.global_bindings_p () == 0
4925 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4926 || TREE_SIDE_EFFECTS (arg)))
4928 if (TREE_CODE (true_value) != COND_EXPR)
4929 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4931 if (TREE_CODE (false_value) != COND_EXPR)
4932 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4934 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4935 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4937 arg = save_expr (arg);
4938 lhs = rhs = 0;
4939 save = saved_expr_p (arg);
4943 if (lhs == 0)
4944 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4945 if (rhs == 0)
4946 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4948 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4950 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4951 ahead of the COND_EXPR we made. Otherwise we would have it only
4952 evaluated in one branch, with the other branch using the result
4953 but missing the evaluation code. Beware that the save_expr call
4954 above might not return a SAVE_EXPR, so testing the TREE_CODE
4955 of ARG is not enough to decide here.  */
4956 if (save)
4957 return build (COMPOUND_EXPR, type,
4958 fold_convert (void_type_node, arg),
4959 strip_compound_expr (test, arg));
4960 else
4961 return fold_convert (type, test);
4965 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4967 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4968 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4969 ADDEND is the same as X.
4971 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4972 and finite. The problematic cases are when X is zero, and its mode
4973 has signed zeros. In the case of rounding towards -infinity,
4974 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4975 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4977 static bool
4978 fold_real_zero_addition_p (tree type, tree addend, int negate)
4980 if (!real_zerop (addend))
4981 return false;
4983 /* Don't allow the fold with -fsignaling-nans. */
4984 if (HONOR_SNANS (TYPE_MODE (type)))
4985 return false;
4987 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4988 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4989 return true;
4991 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4992 if (TREE_CODE (addend) == REAL_CST
4993 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4994 negate = !negate;
4996 /* The mode has signed zeros, and we have to honor their sign.
4997 In this situation, there is only one case we can return true for.
4998 X - 0 is the same as X unless rounding towards -infinity is
4999 supported. */
5000 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5003 /* Subroutine of fold() that checks comparisons of built-in math
5004 functions against real constants.
5006 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5007 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5008 is the type of the result and ARG0 and ARG1 are the operands of the
5009 comparison. ARG1 must be a TREE_REAL_CST.
5011 The function returns the constant folded tree if a simplification
5012 can be made, and NULL_TREE otherwise. */
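/* Illustrative folds for a double X (all derived from the sqrt cases
   below):

     sqrt (x) > 2.0     becomes   x > 4.0
     sqrt (x) > -1.0    becomes   x >= 0.0               [NaNs honored]
     sqrt (x) < 2.0     becomes   x >= 0.0 && x < 4.0    [NaNs honored]

   where the squared constant 4.0 is computed in the operand's mode.  */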
5014 static tree
5015 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5016 tree type, tree arg0, tree arg1)
5018 REAL_VALUE_TYPE c;
5020 if (BUILTIN_SQRT_P (fcode))
5022 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5023 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5025 c = TREE_REAL_CST (arg1);
5026 if (REAL_VALUE_NEGATIVE (c))
5028 /* sqrt(x) < y is always false, if y is negative. */
5029 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5030 return omit_one_operand (type,
5031 fold_convert (type, integer_zero_node),
5032 arg);
5034 /* sqrt(x) > y is always true, if y is negative and we
5035 don't care about NaNs, i.e. negative values of x. */
5036 if (code == NE_EXPR || !HONOR_NANS (mode))
5037 return omit_one_operand (type,
5038 fold_convert (type, integer_one_node),
5039 arg);
5041 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5042 return fold (build (GE_EXPR, type, arg,
5043 build_real (TREE_TYPE (arg), dconst0)));
5045 else if (code == GT_EXPR || code == GE_EXPR)
5047 REAL_VALUE_TYPE c2;
5049 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5050 real_convert (&c2, mode, &c2);
5052 if (REAL_VALUE_ISINF (c2))
5054 /* sqrt(x) > y is x == +Inf, when y is very large. */
5055 if (HONOR_INFINITIES (mode))
5056 return fold (build (EQ_EXPR, type, arg,
5057 build_real (TREE_TYPE (arg), c2)));
5059 /* sqrt(x) > y is always false, when y is very large
5060 and we don't care about infinities. */
5061 return omit_one_operand (type,
5062 fold_convert (type, integer_zero_node),
5063 arg);
5066 /* sqrt(x) > c is the same as x > c*c. */
5067 return fold (build (code, type, arg,
5068 build_real (TREE_TYPE (arg), c2)));
5070 else if (code == LT_EXPR || code == LE_EXPR)
5072 REAL_VALUE_TYPE c2;
5074 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5075 real_convert (&c2, mode, &c2);
5077 if (REAL_VALUE_ISINF (c2))
5079 /* sqrt(x) < y is always true, when y is a very large
5080 value and we don't care about NaNs or Infinities. */
5081 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5082 return omit_one_operand (type,
5083 fold_convert (type, integer_one_node),
5084 arg);
5086 /* sqrt(x) < y is x != +Inf when y is very large and we
5087 don't care about NaNs. */
5088 if (! HONOR_NANS (mode))
5089 return fold (build (NE_EXPR, type, arg,
5090 build_real (TREE_TYPE (arg), c2)));
5092 /* sqrt(x) < y is x >= 0 when y is very large and we
5093 don't care about Infinities. */
5094 if (! HONOR_INFINITIES (mode))
5095 return fold (build (GE_EXPR, type, arg,
5096 build_real (TREE_TYPE (arg), dconst0)));
5098 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5099 if (lang_hooks.decls.global_bindings_p () != 0
5100 || CONTAINS_PLACEHOLDER_P (arg))
5101 return NULL_TREE;
5103 arg = save_expr (arg);
5104 return fold (build (TRUTH_ANDIF_EXPR, type,
5105 fold (build (GE_EXPR, type, arg,
5106 build_real (TREE_TYPE (arg),
5107 dconst0))),
5108 fold (build (NE_EXPR, type, arg,
5109 build_real (TREE_TYPE (arg),
5110 c2)))));
5113 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5114 if (! HONOR_NANS (mode))
5115 return fold (build (code, type, arg,
5116 build_real (TREE_TYPE (arg), c2)));
5118 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5119 if (lang_hooks.decls.global_bindings_p () == 0
5120 && ! CONTAINS_PLACEHOLDER_P (arg))
5122 arg = save_expr (arg);
5123 return fold (build (TRUTH_ANDIF_EXPR, type,
5124 fold (build (GE_EXPR, type, arg,
5125 build_real (TREE_TYPE (arg),
5126 dconst0))),
5127 fold (build (code, type, arg,
5128 build_real (TREE_TYPE (arg),
5129 c2)))));
5134 return NULL_TREE;
5137 /* Subroutine of fold() that optimizes comparisons against Infinities,
5138 either +Inf or -Inf.
5140 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5141 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5142 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5144 The function returns the constant folded tree if a simplification
5145 can be made, and NULL_TREE otherwise. */
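/* For example, for a double X compared against +Inf:

     x < +Inf     becomes   x <= DBL_MAX
     x >= +Inf    becomes   x > DBL_MAX
     x != +Inf    becomes   !(x > DBL_MAX)    [NaNs honored]

   Comparisons against -Inf are handled by first swapping the sense of
   the comparison.  */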
5147 static tree
5148 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5150 enum machine_mode mode;
5151 REAL_VALUE_TYPE max;
5152 tree temp;
5153 bool neg;
5155 mode = TYPE_MODE (TREE_TYPE (arg0));
5157 /* For negative infinity swap the sense of the comparison. */
5158 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5159 if (neg)
5160 code = swap_tree_comparison (code);
5162 switch (code)
5164 case GT_EXPR:
5165 /* x > +Inf is always false, if we ignore sNaNs. */
5166 if (HONOR_SNANS (mode))
5167 return NULL_TREE;
5168 return omit_one_operand (type,
5169 fold_convert (type, integer_zero_node),
5170 arg0);
5172 case LE_EXPR:
5173 /* x <= +Inf is always true, if we don't care about NaNs. */
5174 if (! HONOR_NANS (mode))
5175 return omit_one_operand (type,
5176 fold_convert (type, integer_one_node),
5177 arg0);
5179 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5180 if (lang_hooks.decls.global_bindings_p () == 0
5181 && ! CONTAINS_PLACEHOLDER_P (arg0))
5183 arg0 = save_expr (arg0);
5184 return fold (build (EQ_EXPR, type, arg0, arg0));
5186 break;
5188 case EQ_EXPR:
5189 case GE_EXPR:
5190 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5191 real_maxval (&max, neg, mode);
5192 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5193 arg0, build_real (TREE_TYPE (arg0), max)));
5195 case LT_EXPR:
5196 /* x < +Inf is always equal to x <= DBL_MAX. */
5197 real_maxval (&max, neg, mode);
5198 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5199 arg0, build_real (TREE_TYPE (arg0), max)));
5201 case NE_EXPR:
5202 /* x != +Inf is always equal to !(x > DBL_MAX). */
5203 real_maxval (&max, neg, mode);
5204 if (! HONOR_NANS (mode))
5205 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5206 arg0, build_real (TREE_TYPE (arg0), max)));
5207 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5208 arg0, build_real (TREE_TYPE (arg0), max)));
5209 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5211 default:
5212 break;
5215 return NULL_TREE;
5218 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5219 equality/inequality test, then return a simplified form of
5220 the test using shifts and logical operations. Otherwise return
5221 NULL. TYPE is the desired result type. */
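/* Illustrative results for a 32-bit int X (whether the shift is done
   in a signed or unsigned type depends on LOAD_EXTEND_OP):

     (x & 8) != 0    becomes   ((unsigned) x >> 3) & 1
     (x & 8) == 0    becomes   (((unsigned) x >> 3) ^ 1) & 1

   while a sign-bit test such as (x & 0x80000000) != 0 becomes the
   signed comparison x < 0.  */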
5223 tree
5224 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5225 tree result_type)
5227 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5228 operand 0. */
5229 if (code == TRUTH_NOT_EXPR)
5231 code = TREE_CODE (arg0);
5232 if (code != NE_EXPR && code != EQ_EXPR)
5233 return NULL_TREE;
5235 /* Extract the arguments of the EQ/NE. */
5236 arg1 = TREE_OPERAND (arg0, 1);
5237 arg0 = TREE_OPERAND (arg0, 0);
5239 /* This requires us to invert the code. */
5240 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5243 /* If this is testing a single bit, we can optimize the test. */
5244 if ((code == NE_EXPR || code == EQ_EXPR)
5245 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5246 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5248 tree inner = TREE_OPERAND (arg0, 0);
5249 tree type = TREE_TYPE (arg0);
5250 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5251 enum machine_mode operand_mode = TYPE_MODE (type);
5252 int ops_unsigned;
5253 tree signed_type, unsigned_type, intermediate_type;
5254 tree arg00;
5256 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5257 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5258 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5259 if (arg00 != NULL_TREE)
5261 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5262 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5263 fold_convert (stype, arg00),
5264 fold_convert (stype, integer_zero_node)));
5267 /* At this point, we know that arg0 is not testing the sign bit. */
5268 if (TYPE_PRECISION (type) - 1 == bitnum)
5269 abort ();
5271 /* Otherwise we have (A & C) != 0 where C is a single bit,
5272 convert that into ((A >> C2) & 1), where C2 = log2(C).
5273 Similarly for (A & C) == 0. */
5275 /* If INNER is a right shift of a constant and it plus BITNUM does
5276 not overflow, adjust BITNUM and INNER. */
5277 if (TREE_CODE (inner) == RSHIFT_EXPR
5278 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5279 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5280 && bitnum < TYPE_PRECISION (type)
5281 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5282 bitnum - TYPE_PRECISION (type)))
5284 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5285 inner = TREE_OPERAND (inner, 0);
5288 /* If we are going to be able to omit the AND below, we must do our
5289 operations as unsigned. If we must use the AND, we have a choice.
5290 Normally unsigned is faster, but for some machines signed is. */
5291 #ifdef LOAD_EXTEND_OP
5292 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5293 #else
5294 ops_unsigned = 1;
5295 #endif
5297 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5298 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5299 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5300 inner = fold_convert (intermediate_type, inner);
5302 if (bitnum != 0)
5303 inner = build (RSHIFT_EXPR, intermediate_type,
5304 inner, size_int (bitnum));
5306 if (code == EQ_EXPR)
5307 inner = build (BIT_XOR_EXPR, intermediate_type,
5308 inner, integer_one_node);
5310 /* Put the AND last so it can combine with more things. */
5311 inner = build (BIT_AND_EXPR, intermediate_type,
5312 inner, integer_one_node);
5314 /* Make sure to return the proper type. */
5315 inner = fold_convert (result_type, inner);
5317 return inner;
5319 return NULL_TREE;
5322 /* Check whether we are allowed to reorder operands arg0 and arg1,
5323 such that the evaluation of arg1 occurs before arg0. */
5325 static bool
5326 reorder_operands_p (tree arg0, tree arg1)
5328 if (! flag_evaluation_order)
5329 return true;
5330 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5331 return true;
5332 return ! TREE_SIDE_EFFECTS (arg0)
5333 && ! TREE_SIDE_EFFECTS (arg1);
5336 /* Test whether it is preferable to swap two operands, ARG0 and
5337 ARG1, for example because ARG0 is an integer constant and ARG1
5338 isn't. If REORDER is true, only recommend swapping if we can
5339 evaluate the operands in reverse order. */
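/* For example, given `1 + x', ARG0 is an INTEGER_CST and ARG1 is not,
   so this returns 1 and fold() canonicalizes the expression to
   `x + 1', putting the constant second.  */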
5341 static bool
5342 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5344 STRIP_SIGN_NOPS (arg0);
5345 STRIP_SIGN_NOPS (arg1);
5347 if (TREE_CODE (arg1) == INTEGER_CST)
5348 return 0;
5349 if (TREE_CODE (arg0) == INTEGER_CST)
5350 return 1;
5352 if (TREE_CODE (arg1) == REAL_CST)
5353 return 0;
5354 if (TREE_CODE (arg0) == REAL_CST)
5355 return 1;
5357 if (TREE_CODE (arg1) == COMPLEX_CST)
5358 return 0;
5359 if (TREE_CODE (arg0) == COMPLEX_CST)
5360 return 1;
5362 if (TREE_CONSTANT (arg1))
5363 return 0;
5364 if (TREE_CONSTANT (arg0))
5365 return 1;
5367 if (optimize_size)
5368 return 0;
5370 if (reorder && flag_evaluation_order
5371 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5372 return 0;
5374 if (DECL_P (arg1))
5375 return 0;
5376 if (DECL_P (arg0))
5377 return 1;
5379 return 0;
5382 /* Perform constant folding and related simplification of EXPR.
5383 The related simplifications include x*1 => x, x*0 => 0, etc.,
5384 and application of the associative law.
5385 NOP_EXPR conversions may be removed freely (as long as we
5386 are careful not to change the type of the overall expression).
5387 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5388 but we can constant-fold them if they have constant operands. */
5390 #ifdef ENABLE_FOLD_CHECKING
5391 # define fold(x) fold_1 (x)
5392 static tree fold_1 (tree);
5393 static
5394 #endif
5395 tree
5396 fold (tree expr)
5398 const tree t = expr;
5399 const tree type = TREE_TYPE (expr);
5400 tree t1 = NULL_TREE;
5401 tree tem;
5402 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5403 enum tree_code code = TREE_CODE (t);
5404 int kind = TREE_CODE_CLASS (code);
5405 /* WINS will be nonzero when the switch is done
5406 if all operands are constant. */
5407 int wins = 1;
5409 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5410 Likewise for a SAVE_EXPR that's already been evaluated. */
5411 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5412 return t;
5414 /* Return right away if a constant. */
5415 if (kind == 'c')
5416 return t;
5418 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5420 tree subop;
5422 /* Special case for conversion ops that can have fixed point args. */
5423 arg0 = TREE_OPERAND (t, 0);
5425 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5426 if (arg0 != 0)
5427 STRIP_SIGN_NOPS (arg0);
5429 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5430 subop = TREE_REALPART (arg0);
5431 else
5432 subop = arg0;
5434 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5435 && TREE_CODE (subop) != REAL_CST)
5436 /* Note that TREE_CONSTANT isn't enough:
5437 static var addresses are constant but we can't
5438 do arithmetic on them. */
5439 wins = 0;
5441 else if (IS_EXPR_CODE_CLASS (kind))
5443 int len = first_rtl_op (code);
5444 int i;
5445 for (i = 0; i < len; i++)
5447 tree op = TREE_OPERAND (t, i);
5448 tree subop;
5450 if (op == 0)
5451 continue; /* Valid for CALL_EXPR, at least. */
5453 /* Strip any conversions that don't change the mode. This is
5454 safe for every expression, except for a comparison expression
5455 because its signedness is derived from its operands. So, in
5456 the latter case, only strip conversions that don't change the
5457 signedness.
5459 Note that this is done as an internal manipulation within the
5460 constant folder, in order to find the simplest representation
5461 of the arguments so that their form can be studied. In any
5462 case, the appropriate type conversions should be put back in
5463 the tree that will get out of the constant folder. */
5464 if (kind == '<')
5465 STRIP_SIGN_NOPS (op);
5466 else
5467 STRIP_NOPS (op);
5469 if (TREE_CODE (op) == COMPLEX_CST)
5470 subop = TREE_REALPART (op);
5471 else
5472 subop = op;
5474 if (TREE_CODE (subop) != INTEGER_CST
5475 && TREE_CODE (subop) != REAL_CST)
5476 /* Note that TREE_CONSTANT isn't enough:
5477 static var addresses are constant but we can't
5478 do arithmetic on them. */
5479 wins = 0;
5481 if (i == 0)
5482 arg0 = op;
5483 else if (i == 1)
5484 arg1 = op;
5488 /* If this is a commutative operation, and ARG0 is a constant, move it
5489 to ARG1 to reduce the number of tests below. */
5490 if (commutative_tree_code (code)
5491 && tree_swap_operands_p (arg0, arg1, true))
5492 return fold (build (code, type, TREE_OPERAND (t, 1),
5493 TREE_OPERAND (t, 0)));
5495 /* Now WINS is set as described above,
5496 ARG0 is the first operand of EXPR,
5497 and ARG1 is the second operand (if it has more than one operand).
5499 First check for cases where an arithmetic operation is applied to a
5500 compound, conditional, or comparison operation. Push the arithmetic
5501 operation inside the compound or conditional to see if any folding
5502 can then be done. Convert comparison to conditional for this purpose.
5503 This also optimizes non-constant cases that used to be done in
5504 expand_expr.
5506 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5507 where one of the operands is a truth value and the other is either a
5508 truth value or a BIT_AND_EXPR with the constant 1. In that case, the
5509 code below would make the expression more complex. Change it to a
5510 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5511 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5513 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5514 || code == EQ_EXPR || code == NE_EXPR)
5515 && ((truth_value_p (TREE_CODE (arg0))
5516 && (truth_value_p (TREE_CODE (arg1))
5517 || (TREE_CODE (arg1) == BIT_AND_EXPR
5518 && integer_onep (TREE_OPERAND (arg1, 1)))))
5519 || (truth_value_p (TREE_CODE (arg1))
5520 && (truth_value_p (TREE_CODE (arg0))
5521 || (TREE_CODE (arg0) == BIT_AND_EXPR
5522 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5524 tem = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5525 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5526 : TRUTH_XOR_EXPR,
5527 type, arg0, arg1));
5529 if (code == EQ_EXPR)
5530 tem = invert_truthvalue (tem);
5532 return tem;
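/* Illustrative examples, with truth-valued operands:
(a < b) & (c < d) becomes TRUTH_AND_EXPR (a < b, c < d),
(a < b) != (c < d) becomes TRUTH_XOR_EXPR (a < b, c < d), and
(a < b) == (c < d) becomes the inversion of that TRUTH_XOR_EXPR.  */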
5535 if (TREE_CODE_CLASS (code) == '1')
5537 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5538 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5539 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5540 else if (TREE_CODE (arg0) == COND_EXPR)
5542 tree arg01 = TREE_OPERAND (arg0, 1);
5543 tree arg02 = TREE_OPERAND (arg0, 2);
5544 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5545 arg01 = fold (build1 (code, type, arg01));
5546 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5547 arg02 = fold (build1 (code, type, arg02));
5548 tem = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5549 arg01, arg02));
5551 /* If this was a conversion, and all we did was to move it
5552 inside the COND_EXPR, bring it back out. But leave it if
5553 it is a conversion from integer to integer and the
5554 result precision is no wider than a word since such a
5555 conversion is cheap and may be optimized away by combine,
5556 while it couldn't if it were outside the COND_EXPR. Then return
5557 so we don't get into an infinite recursion loop taking the
5558 conversion out and then back in. */
5560 if ((code == NOP_EXPR || code == CONVERT_EXPR
5561 || code == NON_LVALUE_EXPR)
5562 && TREE_CODE (tem) == COND_EXPR
5563 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
5564 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
5565 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
5566 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
5567 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
5568 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
5569 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
5570 && (INTEGRAL_TYPE_P
5571 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
5572 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
5573 tem = build1 (code, type,
5574 build (COND_EXPR,
5575 TREE_TYPE (TREE_OPERAND
5576 (TREE_OPERAND (tem, 1), 0)),
5577 TREE_OPERAND (tem, 0),
5578 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
5579 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
5580 return tem;
5582 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5583 return fold (build (COND_EXPR, type, arg0,
5584 fold (build1 (code, type, integer_one_node)),
5585 fold (build1 (code, type, integer_zero_node))));
5587 else if (TREE_CODE_CLASS (code) == '<'
5588 && TREE_CODE (arg0) == COMPOUND_EXPR)
5589 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5590 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5591 else if (TREE_CODE_CLASS (code) == '<'
5592 && TREE_CODE (arg1) == COMPOUND_EXPR)
5593 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5594 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5595 else if (TREE_CODE_CLASS (code) == '2'
5596 || TREE_CODE_CLASS (code) == '<')
5598 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5599 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5600 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5601 if (TREE_CODE (arg1) == COMPOUND_EXPR
5602 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
5603 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5604 fold (build (code, type,
5605 arg0, TREE_OPERAND (arg1, 1))));
5607 if (TREE_CODE (arg0) == COND_EXPR
5608 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5610 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5611 /*cond_first_p=*/1);
5612 if (tem != NULL_TREE)
5613 return tem;
5616 if (TREE_CODE (arg1) == COND_EXPR
5617 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
5619 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5620 /*cond_first_p=*/0);
5621 if (tem != NULL_TREE)
5622 return tem;
5626 switch (code)
5628 case CONST_DECL:
5629 return fold (DECL_INITIAL (t));
5631 case NOP_EXPR:
5632 case FLOAT_EXPR:
5633 case CONVERT_EXPR:
5634 case FIX_TRUNC_EXPR:
5635 case FIX_CEIL_EXPR:
5636 case FIX_FLOOR_EXPR:
5637 case FIX_ROUND_EXPR:
5638 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
5639 return TREE_OPERAND (t, 0);
5641 /* Handle cases of two conversions in a row. */
5642 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5643 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5645 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5646 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5647 int inside_int = INTEGRAL_TYPE_P (inside_type);
5648 int inside_ptr = POINTER_TYPE_P (inside_type);
5649 int inside_float = FLOAT_TYPE_P (inside_type);
5650 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5651 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
5652 int inter_int = INTEGRAL_TYPE_P (inter_type);
5653 int inter_ptr = POINTER_TYPE_P (inter_type);
5654 int inter_float = FLOAT_TYPE_P (inter_type);
5655 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5656 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
5657 int final_int = INTEGRAL_TYPE_P (type);
5658 int final_ptr = POINTER_TYPE_P (type);
5659 int final_float = FLOAT_TYPE_P (type);
5660 unsigned int final_prec = TYPE_PRECISION (type);
5661 int final_unsignedp = TYPE_UNSIGNED (type);
5663 /* In addition to the cases of two conversions in a row
5664 handled below, if we are converting something to its own
5665 type via an object of identical or wider precision, neither
5666 conversion is needed. */
5667 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
5668 && ((inter_int && final_int) || (inter_float && final_float))
5669 && inter_prec >= final_prec)
5670 return fold (build1 (code, type,
5671 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5673 /* Likewise, if the intermediate and final types are either both
5674 float or both integer, we don't need the middle conversion if
5675 it is wider than the final type and doesn't change the signedness
5676 (for integers). Avoid this if the final type is a pointer
5677 since then we sometimes need the inner conversion. Likewise if
5678 the outer has a precision not equal to the size of its mode. */
5679 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5680 || (inter_float && inside_float))
5681 && inter_prec >= inside_prec
5682 && (inter_float || inter_unsignedp == inside_unsignedp)
5683 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5684 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5685 && ! final_ptr)
5686 return fold (build1 (code, type,
5687 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5689 /* If we have a sign-extension of a zero-extended value, we can
5690 replace that by a single zero-extension. */
5691 if (inside_int && inter_int && final_int
5692 && inside_prec < inter_prec && inter_prec < final_prec
5693 && inside_unsignedp && !inter_unsignedp)
5694 return fold (build1 (code, type,
5695 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5697 /* Two conversions in a row are not needed unless:
5698 - some conversion is floating-point (overstrict for now), or
5699 - the intermediate type is narrower than both initial and
5700 final, or
5701 - the intermediate type and innermost type differ in signedness,
5702 and the outermost type is wider than the intermediate, or
5703 - the initial type is a pointer type and the precisions of the
5704 intermediate and final types differ, or
5705 - the final type is a pointer type and the precisions of the
5706 initial and intermediate types differ. */
5707 if (! inside_float && ! inter_float && ! final_float
5708 && (inter_prec > inside_prec || inter_prec > final_prec)
5709 && ! (inside_int && inter_int
5710 && inter_unsignedp != inside_unsignedp
5711 && inter_prec < final_prec)
5712 && ((inter_unsignedp && inter_prec > inside_prec)
5713 == (final_unsignedp && final_prec > inter_prec))
5714 && ! (inside_ptr && inter_prec != final_prec)
5715 && ! (final_ptr && inside_prec != inter_prec)
5716 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5717 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5718 && ! final_ptr)
5719 return fold (build1 (code, type,
5720 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
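/* Illustrative examples, assuming 8-bit char, 32-bit int and
64-bit long:
(long) (int) c is folded to (long) c (both conversions widen);
(char) (int) l is folded to (char) l (both conversions truncate);
(int) (float) l is left alone, since the conversion to float can
change the value.  */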
5723 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5724 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5725 /* Detect assigning a bitfield. */
5726 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5727 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5729 /* Don't leave an assignment inside a conversion
5730 unless assigning a bitfield. */
5731 tree prev = TREE_OPERAND (t, 0);
5732 tem = copy_node (t);
5733 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
5734 /* First do the assignment, then return converted constant. */
5735 tem = build (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
5736 TREE_NO_UNUSED_WARNING (tem) = 1;
5737 TREE_USED (tem) = 1;
5738 return tem;
5741 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5742 constant (if x has signed type, the sign bit cannot be set
5743 in c). This folds extension into the BIT_AND_EXPR. */
5744 if (INTEGRAL_TYPE_P (type)
5745 && TREE_CODE (type) != BOOLEAN_TYPE
5746 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5747 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5749 tree and = TREE_OPERAND (t, 0);
5750 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5751 int change = 0;
5753 if (TYPE_UNSIGNED (TREE_TYPE (and))
5754 || (TYPE_PRECISION (type)
5755 <= TYPE_PRECISION (TREE_TYPE (and))))
5756 change = 1;
5757 else if (TYPE_PRECISION (TREE_TYPE (and1))
5758 <= HOST_BITS_PER_WIDE_INT
5759 && host_integerp (and1, 1))
5761 unsigned HOST_WIDE_INT cst;
5763 cst = tree_low_cst (and1, 1);
5764 cst &= (HOST_WIDE_INT) -1
5765 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5766 change = (cst == 0);
5767 #ifdef LOAD_EXTEND_OP
5768 if (change
5769 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5770 == ZERO_EXTEND))
5772 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
5773 and0 = fold_convert (uns, and0);
5774 and1 = fold_convert (uns, and1);
5776 #endif
5778 if (change)
5779 return fold (build (BIT_AND_EXPR, type,
5780 fold_convert (type, and0),
5781 fold_convert (type, and1)));
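/* Illustrative example: if x is unsigned, (int) (x & 0x7f) becomes
(int) x & 0x7f, so the widening conversion no longer hides the mask
from further folding.  */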
5784 tem = fold_convert_const (code, type, arg0);
5785 return tem ? tem : t;
5787 case VIEW_CONVERT_EXPR:
5788 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5789 return build1 (VIEW_CONVERT_EXPR, type,
5790 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5791 return t;
5793 case COMPONENT_REF:
5794 if (TREE_CODE (arg0) == CONSTRUCTOR
5795 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5797 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5798 if (m)
5799 return TREE_VALUE (m);
5801 return t;
5803 case RANGE_EXPR:
5804 if (TREE_CONSTANT (t) != wins)
5806 tem = copy_node (t);
5807 TREE_CONSTANT (tem) = wins;
5808 return tem;
5810 return t;
5812 case NEGATE_EXPR:
5813 if (negate_expr_p (arg0))
5814 return fold_convert (type, negate_expr (arg0));
5815 return t;
5817 case ABS_EXPR:
5818 if (wins
5819 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
5820 return fold_abs_const (arg0, type);
5821 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5822 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5823 /* Convert fabs((double)float) into (double)fabsf(float). */
5824 else if (TREE_CODE (arg0) == NOP_EXPR
5825 && TREE_CODE (type) == REAL_TYPE)
5827 tree targ0 = strip_float_extensions (arg0);
5828 if (targ0 != arg0)
5829 return fold_convert (type, fold (build1 (ABS_EXPR,
5830 TREE_TYPE (targ0),
5831 targ0)));
5833 else if (tree_expr_nonnegative_p (arg0))
5834 return arg0;
5835 return t;
5837 case CONJ_EXPR:
5838 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5839 return fold_convert (type, arg0);
5840 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5841 return build (COMPLEX_EXPR, type,
5842 TREE_OPERAND (arg0, 0),
5843 negate_expr (TREE_OPERAND (arg0, 1)));
5844 else if (TREE_CODE (arg0) == COMPLEX_CST)
5845 return build_complex (type, TREE_REALPART (arg0),
5846 negate_expr (TREE_IMAGPART (arg0)));
5847 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5848 return fold (build (TREE_CODE (arg0), type,
5849 fold (build1 (CONJ_EXPR, type,
5850 TREE_OPERAND (arg0, 0))),
5851 fold (build1 (CONJ_EXPR,
5852 type, TREE_OPERAND (arg0, 1)))));
5853 else if (TREE_CODE (arg0) == CONJ_EXPR)
5854 return TREE_OPERAND (arg0, 0);
5855 return t;
5857 case BIT_NOT_EXPR:
5858 if (wins)
5860 tem = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5861 ~ TREE_INT_CST_HIGH (arg0));
5862 TREE_TYPE (tem) = type;
5863 force_fit_type (tem, 0);
5864 TREE_OVERFLOW (tem) = TREE_OVERFLOW (arg0);
5865 TREE_CONSTANT_OVERFLOW (tem) = TREE_CONSTANT_OVERFLOW (arg0);
5866 return tem;
5868 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5869 return TREE_OPERAND (arg0, 0);
5870 return t;
5872 case PLUS_EXPR:
5873 /* A + (-B) -> A - B */
5874 if (TREE_CODE (arg1) == NEGATE_EXPR)
5875 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5876 /* (-A) + B -> B - A */
5877 if (TREE_CODE (arg0) == NEGATE_EXPR
5878 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
5879 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5880 if (! FLOAT_TYPE_P (type))
5882 if (integer_zerop (arg1))
5883 return non_lvalue (fold_convert (type, arg0));
5885 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5886 with a constant, and the two constants have no bits in common,
5887 we should treat this as a BIT_IOR_EXPR since this may produce more
5888 simplifications. */
5889 if (TREE_CODE (arg0) == BIT_AND_EXPR
5890 && TREE_CODE (arg1) == BIT_AND_EXPR
5891 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5892 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5893 && integer_zerop (const_binop (BIT_AND_EXPR,
5894 TREE_OPERAND (arg0, 1),
5895 TREE_OPERAND (arg1, 1), 0)))
5897 code = BIT_IOR_EXPR;
5898 goto bit_ior;
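/* Illustrative example: (x & 0xF0) + (y & 0x0F) cannot produce
carries between the two masked parts, so it is handled as
(x & 0xF0) | (y & 0x0F).  */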
5901 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5902 (plus (plus (mult) (mult)) (foo)) so that we can
5903 take advantage of the factoring cases below. */
5904 if ((TREE_CODE (arg0) == PLUS_EXPR
5905 && TREE_CODE (arg1) == MULT_EXPR)
5906 || (TREE_CODE (arg1) == PLUS_EXPR
5907 && TREE_CODE (arg0) == MULT_EXPR))
5909 tree parg0, parg1, parg, marg;
5911 if (TREE_CODE (arg0) == PLUS_EXPR)
5912 parg = arg0, marg = arg1;
5913 else
5914 parg = arg1, marg = arg0;
5915 parg0 = TREE_OPERAND (parg, 0);
5916 parg1 = TREE_OPERAND (parg, 1);
5917 STRIP_NOPS (parg0);
5918 STRIP_NOPS (parg1);
5920 if (TREE_CODE (parg0) == MULT_EXPR
5921 && TREE_CODE (parg1) != MULT_EXPR)
5922 return fold (build (PLUS_EXPR, type,
5923 fold (build (PLUS_EXPR, type,
5924 fold_convert (type, parg0),
5925 fold_convert (type, marg))),
5926 fold_convert (type, parg1)));
5927 if (TREE_CODE (parg0) != MULT_EXPR
5928 && TREE_CODE (parg1) == MULT_EXPR)
5929 return fold (build (PLUS_EXPR, type,
5930 fold (build (PLUS_EXPR, type,
5931 fold_convert (type, parg1),
5932 fold_convert (type, marg))),
5933 fold_convert (type, parg0)));
5936 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5938 tree arg00, arg01, arg10, arg11;
5939 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5941 /* (A * C) + (B * C) -> (A+B) * C.
5942 We are most concerned about the case where C is a constant,
5943 but other combinations show up during loop reduction. Since
5944 it is not difficult, try all four possibilities. */
5946 arg00 = TREE_OPERAND (arg0, 0);
5947 arg01 = TREE_OPERAND (arg0, 1);
5948 arg10 = TREE_OPERAND (arg1, 0);
5949 arg11 = TREE_OPERAND (arg1, 1);
5950 same = NULL_TREE;
5952 if (operand_equal_p (arg01, arg11, 0))
5953 same = arg01, alt0 = arg00, alt1 = arg10;
5954 else if (operand_equal_p (arg00, arg10, 0))
5955 same = arg00, alt0 = arg01, alt1 = arg11;
5956 else if (operand_equal_p (arg00, arg11, 0))
5957 same = arg00, alt0 = arg01, alt1 = arg10;
5958 else if (operand_equal_p (arg01, arg10, 0))
5959 same = arg01, alt0 = arg00, alt1 = arg11;
5961 /* No identical multiplicands; see if we can find a common
5962 power-of-two factor in non-power-of-two multiplies. This
5963 can help in multi-dimensional array access. */
5964 else if (TREE_CODE (arg01) == INTEGER_CST
5965 && TREE_CODE (arg11) == INTEGER_CST
5966 && TREE_INT_CST_HIGH (arg01) == 0
5967 && TREE_INT_CST_HIGH (arg11) == 0)
5969 HOST_WIDE_INT int01, int11, tmp;
5970 int01 = TREE_INT_CST_LOW (arg01);
5971 int11 = TREE_INT_CST_LOW (arg11);
5973 /* Move min of absolute values to int11. */
5974 if ((int01 >= 0 ? int01 : -int01)
5975 < (int11 >= 0 ? int11 : -int11))
5977 tmp = int01, int01 = int11, int11 = tmp;
5978 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5979 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5982 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5984 alt0 = fold (build (MULT_EXPR, type, arg00,
5985 build_int_2 (int01 / int11, 0)));
5986 alt1 = arg10;
5987 same = arg11;
5991 if (same)
5992 return fold (build (MULT_EXPR, type,
5993 fold (build (PLUS_EXPR, type, alt0, alt1)),
5994 same));
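/* Illustrative example of the power-of-two case: i*12 + j*4 is
rewritten as (i*3 + j) * 4, exposing the common factor 4.  */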
5997 else
5999 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6000 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6001 return non_lvalue (fold_convert (type, arg0));
6003 /* Likewise if the operands are reversed. */
6004 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6005 return non_lvalue (fold_convert (type, arg1));
6007 /* Convert x+x into x*2.0. */
6008 if (operand_equal_p (arg0, arg1, 0)
6009 && SCALAR_FLOAT_TYPE_P (type))
6010 return fold (build (MULT_EXPR, type, arg0,
6011 build_real (type, dconst2)));
6013 /* Convert x*c+x into x*(c+1). */
6014 if (flag_unsafe_math_optimizations
6015 && TREE_CODE (arg0) == MULT_EXPR
6016 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6017 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6018 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6020 REAL_VALUE_TYPE c;
6022 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6023 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6024 return fold (build (MULT_EXPR, type, arg1,
6025 build_real (type, c)));
6028 /* Convert x+x*c into x*(c+1). */
6029 if (flag_unsafe_math_optimizations
6030 && TREE_CODE (arg1) == MULT_EXPR
6031 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6032 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6033 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6035 REAL_VALUE_TYPE c;
6037 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6038 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6039 return fold (build (MULT_EXPR, type, arg0,
6040 build_real (type, c)));
6043 /* Convert x*c1+x*c2 into x*(c1+c2). */
6044 if (flag_unsafe_math_optimizations
6045 && TREE_CODE (arg0) == MULT_EXPR
6046 && TREE_CODE (arg1) == MULT_EXPR
6047 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6048 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6049 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6050 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6051 && operand_equal_p (TREE_OPERAND (arg0, 0),
6052 TREE_OPERAND (arg1, 0), 0))
6054 REAL_VALUE_TYPE c1, c2;
6056 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6057 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6058 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6059 return fold (build (MULT_EXPR, type,
6060 TREE_OPERAND (arg0, 0),
6061 build_real (type, c1)));
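/* Illustrative example: with -funsafe-math-optimizations,
x*2.0 + x*3.0 folds to x*5.0.  */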
6063 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
6064 if (flag_unsafe_math_optimizations
6065 && TREE_CODE (arg1) == PLUS_EXPR
6066 && TREE_CODE (arg0) != MULT_EXPR)
6068 tree tree10 = TREE_OPERAND (arg1, 0);
6069 tree tree11 = TREE_OPERAND (arg1, 1);
6070 if (TREE_CODE (tree11) == MULT_EXPR
6071 && TREE_CODE (tree10) == MULT_EXPR)
6073 tree tree0;
6074 tree0 = fold (build (PLUS_EXPR, type, arg0, tree10));
6075 return fold (build (PLUS_EXPR, type, tree0, tree11));
6078 /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
6079 if (flag_unsafe_math_optimizations
6080 && TREE_CODE (arg0) == PLUS_EXPR
6081 && TREE_CODE (arg1) != MULT_EXPR)
6083 tree tree00 = TREE_OPERAND (arg0, 0);
6084 tree tree01 = TREE_OPERAND (arg0, 1);
6085 if (TREE_CODE (tree01) == MULT_EXPR
6086 && TREE_CODE (tree00) == MULT_EXPR)
6088 tree tree0;
6089 tree0 = fold (build (PLUS_EXPR, type, tree01, arg1));
6090 return fold (build (PLUS_EXPR, type, tree00, tree0));
6095 bit_rotate:
6096 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6097 is a rotate of A by C1 bits. */
6098 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6099 is a rotate of A by B bits. */
6101 enum tree_code code0, code1;
6102 code0 = TREE_CODE (arg0);
6103 code1 = TREE_CODE (arg1);
6104 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6105 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6106 && operand_equal_p (TREE_OPERAND (arg0, 0),
6107 TREE_OPERAND (arg1, 0), 0)
6108 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6110 tree tree01, tree11;
6111 enum tree_code code01, code11;
6113 tree01 = TREE_OPERAND (arg0, 1);
6114 tree11 = TREE_OPERAND (arg1, 1);
6115 STRIP_NOPS (tree01);
6116 STRIP_NOPS (tree11);
6117 code01 = TREE_CODE (tree01);
6118 code11 = TREE_CODE (tree11);
6119 if (code01 == INTEGER_CST
6120 && code11 == INTEGER_CST
6121 && TREE_INT_CST_HIGH (tree01) == 0
6122 && TREE_INT_CST_HIGH (tree11) == 0
6123 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6124 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6125 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6126 code0 == LSHIFT_EXPR ? tree01 : tree11);
6127 else if (code11 == MINUS_EXPR)
6129 tree tree110, tree111;
6130 tree110 = TREE_OPERAND (tree11, 0);
6131 tree111 = TREE_OPERAND (tree11, 1);
6132 STRIP_NOPS (tree110);
6133 STRIP_NOPS (tree111);
6134 if (TREE_CODE (tree110) == INTEGER_CST
6135 && 0 == compare_tree_int (tree110,
6136 TYPE_PRECISION
6137 (TREE_TYPE (TREE_OPERAND
6138 (arg0, 0))))
6139 && operand_equal_p (tree01, tree111, 0))
6140 return build ((code0 == LSHIFT_EXPR
6141 ? LROTATE_EXPR
6142 : RROTATE_EXPR),
6143 type, TREE_OPERAND (arg0, 0), tree01);
6145 else if (code01 == MINUS_EXPR)
6147 tree tree010, tree011;
6148 tree010 = TREE_OPERAND (tree01, 0);
6149 tree011 = TREE_OPERAND (tree01, 1);
6150 STRIP_NOPS (tree010);
6151 STRIP_NOPS (tree011);
6152 if (TREE_CODE (tree010) == INTEGER_CST
6153 && 0 == compare_tree_int (tree010,
6154 TYPE_PRECISION
6155 (TREE_TYPE (TREE_OPERAND
6156 (arg0, 0))))
6157 && operand_equal_p (tree11, tree011, 0))
6158 return build ((code0 != LSHIFT_EXPR
6159 ? LROTATE_EXPR
6160 : RROTATE_EXPR),
6161 type, TREE_OPERAND (arg0, 0), tree11);
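/* Illustrative examples, for a 32-bit unsigned A:
(A << 3) + (A >> 29) is a rotate of A left by 3, and
(A << n) + (A >> (32 - n)) is a rotate of A left by n.  */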
6166 associate:
6167 /* In most languages, can't associate operations on floats through
6168 parentheses. Rather than remember where the parentheses were, we
6169 don't associate floats at all, unless the user has specified
6170 -funsafe-math-optimizations. */
6172 if (! wins
6173 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6175 tree var0, con0, lit0, minus_lit0;
6176 tree var1, con1, lit1, minus_lit1;
6178 /* Split both trees into variables, constants, and literals. Then
6179 associate each group together, the constants with literals,
6180 then the result with variables. This increases the chances of
6181 literals being recombined later and of generating relocatable
6182 expressions for the sum of a constant and literal. */
6183 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6184 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6185 code == MINUS_EXPR);
6187 /* Only do something if we found more than two objects. Otherwise,
6188 nothing has changed and we risk infinite recursion. */
6189 if (2 < ((var0 != 0) + (var1 != 0)
6190 + (con0 != 0) + (con1 != 0)
6191 + (lit0 != 0) + (lit1 != 0)
6192 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6194 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6195 if (code == MINUS_EXPR)
6196 code = PLUS_EXPR;
6198 var0 = associate_trees (var0, var1, code, type);
6199 con0 = associate_trees (con0, con1, code, type);
6200 lit0 = associate_trees (lit0, lit1, code, type);
6201 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6203 /* Preserve the MINUS_EXPR if the negative part of the literal is
6204 greater than the positive part. Otherwise, the multiplicative
6205 folding code (i.e. extract_muldiv) may be fooled when
6206 unsigned constants are subtracted, like in the following
6207 example: ((X*2 + 4) - 8U)/2. */
6208 if (minus_lit0 && lit0)
6210 if (TREE_CODE (lit0) == INTEGER_CST
6211 && TREE_CODE (minus_lit0) == INTEGER_CST
6212 && tree_int_cst_lt (lit0, minus_lit0))
6214 minus_lit0 = associate_trees (minus_lit0, lit0,
6215 MINUS_EXPR, type);
6216 lit0 = 0;
6218 else
6220 lit0 = associate_trees (lit0, minus_lit0,
6221 MINUS_EXPR, type);
6222 minus_lit0 = 0;
6225 if (minus_lit0)
6227 if (con0 == 0)
6228 return fold_convert (type,
6229 associate_trees (var0, minus_lit0,
6230 MINUS_EXPR, type));
6231 else
6233 con0 = associate_trees (con0, minus_lit0,
6234 MINUS_EXPR, type);
6235 return fold_convert (type,
6236 associate_trees (var0, con0,
6237 PLUS_EXPR, type));
6241 con0 = associate_trees (con0, lit0, code, type);
6242 return fold_convert (type, associate_trees (var0, con0,
6243 code, type));
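/* Illustrative example: in (x + 1) + (y + 2) the variables and
literals are regrouped as (x + y) + 3, so the two literals are
combined into a single constant.  */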
6247 binary:
6248 if (wins)
6249 t1 = const_binop (code, arg0, arg1, 0);
6250 if (t1 != NULL_TREE)
6252 /* The return value should always have
6253 the same type as the original expression. */
6254 if (TREE_TYPE (t1) != type)
6255 t1 = fold_convert (type, t1);
6257 return t1;
6259 return t;
6261 case MINUS_EXPR:
6262 /* A - (-B) -> A + B */
6263 if (TREE_CODE (arg1) == NEGATE_EXPR)
6264 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6265 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6266 if (TREE_CODE (arg0) == NEGATE_EXPR
6267 && (FLOAT_TYPE_P (type)
6268 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6269 && negate_expr_p (arg1)
6270 && reorder_operands_p (arg0, arg1))
6271 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6272 TREE_OPERAND (arg0, 0)));
6274 if (! FLOAT_TYPE_P (type))
6276 if (! wins && integer_zerop (arg0))
6277 return negate_expr (fold_convert (type, arg1));
6278 if (integer_zerop (arg1))
6279 return non_lvalue (fold_convert (type, arg0));
6281 /* Fold A - (A & B) into ~B & A. */
6282 if (!TREE_SIDE_EFFECTS (arg0)
6283 && TREE_CODE (arg1) == BIT_AND_EXPR)
6285 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6286 return fold (build (BIT_AND_EXPR, type,
6287 fold (build1 (BIT_NOT_EXPR, type,
6288 TREE_OPERAND (arg1, 0))),
6289 arg0));
6290 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6291 return fold (build (BIT_AND_EXPR, type,
6292 fold (build1 (BIT_NOT_EXPR, type,
6293 TREE_OPERAND (arg1, 1))),
6294 arg0));
6297 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6298 any power of 2 minus 1. */
6299 if (TREE_CODE (arg0) == BIT_AND_EXPR
6300 && TREE_CODE (arg1) == BIT_AND_EXPR
6301 && operand_equal_p (TREE_OPERAND (arg0, 0),
6302 TREE_OPERAND (arg1, 0), 0))
6304 tree mask0 = TREE_OPERAND (arg0, 1);
6305 tree mask1 = TREE_OPERAND (arg1, 1);
6306 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6308 if (operand_equal_p (tem, mask1, 0))
6310 tem = fold (build (BIT_XOR_EXPR, type,
6311 TREE_OPERAND (arg0, 0), mask1));
6312 return fold (build (MINUS_EXPR, type, tem, mask1));
6317 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6318 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6319 return non_lvalue (fold_convert (type, arg0));
6321 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6322 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6323 (-ARG1 + ARG0) reduces to -ARG1. */
6324 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6325 return negate_expr (fold_convert (type, arg1));
6327 /* Fold &x - &x. This can happen from &x.foo - &x.
6328 This is unsafe for certain floats even in non-IEEE formats.
6329 In IEEE, it is unsafe because it gives the wrong result for NaNs.
6330 Also note that operand_equal_p is always false if an operand
6331 is volatile. */
6333 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6334 && operand_equal_p (arg0, arg1, 0))
6335 return fold_convert (type, integer_zero_node);
6337 /* A - B -> A + (-B) if B is easily negatable. */
6338 if (!wins && negate_expr_p (arg1)
6339 && (FLOAT_TYPE_P (type)
6340 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6341 return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6343 if (TREE_CODE (arg0) == MULT_EXPR
6344 && TREE_CODE (arg1) == MULT_EXPR
6345 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6347 /* (A * C) - (B * C) -> (A-B) * C. */
6348 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6349 TREE_OPERAND (arg1, 1), 0))
6350 return fold (build (MULT_EXPR, type,
6351 fold (build (MINUS_EXPR, type,
6352 TREE_OPERAND (arg0, 0),
6353 TREE_OPERAND (arg1, 0))),
6354 TREE_OPERAND (arg0, 1)));
6355 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6356 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6357 TREE_OPERAND (arg1, 0), 0))
6358 return fold (build (MULT_EXPR, type,
6359 TREE_OPERAND (arg0, 0),
6360 fold (build (MINUS_EXPR, type,
6361 TREE_OPERAND (arg0, 1),
6362 TREE_OPERAND (arg1, 1)))));
6365 goto associate;
6367 case MULT_EXPR:
6368 /* (-A) * (-B) -> A * B */
6369 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6370 return fold (build (MULT_EXPR, type,
6371 TREE_OPERAND (arg0, 0),
6372 negate_expr (arg1)));
6373 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6374 return fold (build (MULT_EXPR, type,
6375 negate_expr (arg0),
6376 TREE_OPERAND (arg1, 0)));
6378 if (! FLOAT_TYPE_P (type))
6380 if (integer_zerop (arg1))
6381 return omit_one_operand (type, arg1, arg0);
6382 if (integer_onep (arg1))
6383 return non_lvalue (fold_convert (type, arg0));
6385 /* (a * (1 << b)) is (a << b) */
6386 if (TREE_CODE (arg1) == LSHIFT_EXPR
6387 && integer_onep (TREE_OPERAND (arg1, 0)))
6388 return fold (build (LSHIFT_EXPR, type, arg0,
6389 TREE_OPERAND (arg1, 1)));
6390 if (TREE_CODE (arg0) == LSHIFT_EXPR
6391 && integer_onep (TREE_OPERAND (arg0, 0)))
6392 return fold (build (LSHIFT_EXPR, type, arg1,
6393 TREE_OPERAND (arg0, 1)));
6395 if (TREE_CODE (arg1) == INTEGER_CST
6396 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6397 fold_convert (type, arg1),
6398 code, NULL_TREE)))
6399 return fold_convert (type, tem);
6402 else
6404 /* Maybe fold x * 0 to 0. The expressions aren't the same
6405 when x is NaN, since x * 0 is also NaN. Nor are they the
6406 same in modes with signed zeros, since multiplying a
6407 negative value by 0 gives -0, not +0. */
6408 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6409 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6410 && real_zerop (arg1))
6411 return omit_one_operand (type, arg1, arg0);
6412 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6413 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6414 && real_onep (arg1))
6415 return non_lvalue (fold_convert (type, arg0));
6417 /* Transform x * -1.0 into -x. */
6418 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6419 && real_minus_onep (arg1))
6420 return fold_convert (type, negate_expr (arg0));
6422 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6423 if (flag_unsafe_math_optimizations
6424 && TREE_CODE (arg0) == RDIV_EXPR
6425 && TREE_CODE (arg1) == REAL_CST
6426 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6428 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6429 arg1, 0);
6430 if (tem)
6431 return fold (build (RDIV_EXPR, type, tem,
6432 TREE_OPERAND (arg0, 1)));
6435 if (flag_unsafe_math_optimizations)
6437 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6438 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6440 /* Optimizations of root(...)*root(...). */
6441 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
6443 tree rootfn, arg, arglist;
6444 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6445 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6447 /* Optimize sqrt(x)*sqrt(x) as x. */
6448 if (BUILTIN_SQRT_P (fcode0)
6449 && operand_equal_p (arg00, arg10, 0)
6450 && ! HONOR_SNANS (TYPE_MODE (type)))
6451 return arg00;
6453 /* Optimize root(x)*root(y) as root(x*y). */
6454 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6455 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6456 arglist = build_tree_list (NULL_TREE, arg);
6457 return build_function_call_expr (rootfn, arglist);
6460 /* Optimize expN(x)*expN(y) as expN(x+y). */
6461 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6463 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6464 tree arg = build (PLUS_EXPR, type,
6465 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6466 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6467 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6468 return build_function_call_expr (expfn, arglist);
6471 /* Optimizations of pow(...)*pow(...). */
6472 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6473 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6474 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6476 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6477 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6478 1)));
6479 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6480 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6481 1)));
6483 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6484 if (operand_equal_p (arg01, arg11, 0))
6486 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6487 tree arg = build (MULT_EXPR, type, arg00, arg10);
6488 tree arglist = tree_cons (NULL_TREE, fold (arg),
6489 build_tree_list (NULL_TREE,
6490 arg01));
6491 return build_function_call_expr (powfn, arglist);
6494 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6495 if (operand_equal_p (arg00, arg10, 0))
6497 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6498 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6499 tree arglist = tree_cons (NULL_TREE, arg00,
6500 build_tree_list (NULL_TREE,
6501 arg));
6502 return build_function_call_expr (powfn, arglist);
6506 /* Optimize tan(x)*cos(x) as sin(x). */
6507 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6508 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6509 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6510 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6511 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6512 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6513 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6514 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6516 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
6518 if (sinfn != NULL_TREE)
6519 return build_function_call_expr (sinfn,
6520 TREE_OPERAND (arg0, 1));
6523 /* Optimize x*pow(x,c) as pow(x,c+1). */
6524 if (fcode1 == BUILT_IN_POW
6525 || fcode1 == BUILT_IN_POWF
6526 || fcode1 == BUILT_IN_POWL)
6528 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6529 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6530 1)));
6531 if (TREE_CODE (arg11) == REAL_CST
6532 && ! TREE_CONSTANT_OVERFLOW (arg11)
6533 && operand_equal_p (arg0, arg10, 0))
6535 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6536 REAL_VALUE_TYPE c;
6537 tree arg, arglist;
6539 c = TREE_REAL_CST (arg11);
6540 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6541 arg = build_real (type, c);
6542 arglist = build_tree_list (NULL_TREE, arg);
6543 arglist = tree_cons (NULL_TREE, arg0, arglist);
6544 return build_function_call_expr (powfn, arglist);
6548 /* Optimize pow(x,c)*x as pow(x,c+1). */
6549 if (fcode0 == BUILT_IN_POW
6550 || fcode0 == BUILT_IN_POWF
6551 || fcode0 == BUILT_IN_POWL)
6553 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6554 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6555 1)));
6556 if (TREE_CODE (arg01) == REAL_CST
6557 && ! TREE_CONSTANT_OVERFLOW (arg01)
6558 && operand_equal_p (arg1, arg00, 0))
6560 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6561 REAL_VALUE_TYPE c;
6562 tree arg, arglist;
6564 c = TREE_REAL_CST (arg01);
6565 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6566 arg = build_real (type, c);
6567 arglist = build_tree_list (NULL_TREE, arg);
6568 arglist = tree_cons (NULL_TREE, arg1, arglist);
6569 return build_function_call_expr (powfn, arglist);
6573 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6574 if (! optimize_size
6575 && operand_equal_p (arg0, arg1, 0))
6577 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6579 if (powfn)
6581 tree arg = build_real (type, dconst2);
6582 tree arglist = build_tree_list (NULL_TREE, arg);
6583 arglist = tree_cons (NULL_TREE, arg0, arglist);
6584 return build_function_call_expr (powfn, arglist);
6589 goto associate;
6591 case BIT_IOR_EXPR:
6592 bit_ior:
6593 if (integer_all_onesp (arg1))
6594 return omit_one_operand (type, arg1, arg0);
6595 if (integer_zerop (arg1))
6596 return non_lvalue (fold_convert (type, arg0));
6597 if (operand_equal_p (arg0, arg1, 0))
6598 return non_lvalue (fold_convert (type, arg0));
6599 t1 = distribute_bit_expr (code, type, arg0, arg1);
6600 if (t1 != NULL_TREE)
6601 return t1;
6603 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6605 This results in more efficient code for machines without a NAND
6606 instruction. Combine will canonicalize to the first form
6607 which will allow use of NAND instructions provided by the
6608 backend if they exist. */
6609 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6610 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6612 return fold (build1 (BIT_NOT_EXPR, type,
6613 build (BIT_AND_EXPR, type,
6614 TREE_OPERAND (arg0, 0),
6615 TREE_OPERAND (arg1, 0))));
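/* Illustrative example, in C notation: ~a | ~b is rewritten
as ~(a & b).  */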
6618 /* See if this can be simplified into a rotate first. If that
6619 is unsuccessful continue in the association code. */
6620 goto bit_rotate;
6622 case BIT_XOR_EXPR:
6623 if (integer_zerop (arg1))
6624 return non_lvalue (fold_convert (type, arg0));
6625 if (integer_all_onesp (arg1))
6626 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6627 if (operand_equal_p (arg0, arg1, 0))
6628 return omit_one_operand (type, integer_zero_node, arg0);
6630 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6631 with a constant, and the two constants have no bits in common,
6632 we should treat this as a BIT_IOR_EXPR since this may produce more
6633 simplifications. */
6634 if (TREE_CODE (arg0) == BIT_AND_EXPR
6635 && TREE_CODE (arg1) == BIT_AND_EXPR
6636 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6637 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6638 && integer_zerop (const_binop (BIT_AND_EXPR,
6639 TREE_OPERAND (arg0, 1),
6640 TREE_OPERAND (arg1, 1), 0)))
6642 code = BIT_IOR_EXPR;
6643 goto bit_ior;
6646 /* See if this can be simplified into a rotate first. If that
6647 is unsuccessful continue in the association code. */
6648 goto bit_rotate;
6650 case BIT_AND_EXPR:
6651 if (integer_all_onesp (arg1))
6652 return non_lvalue (fold_convert (type, arg0));
6653 if (integer_zerop (arg1))
6654 return omit_one_operand (type, arg1, arg0);
6655 if (operand_equal_p (arg0, arg1, 0))
6656 return non_lvalue (fold_convert (type, arg0));
6657 t1 = distribute_bit_expr (code, type, arg0, arg1);
6658 if (t1 != NULL_TREE)
6659 return t1;
6660 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6661 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6662 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6664 unsigned int prec
6665 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6667 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6668 && (~TREE_INT_CST_LOW (arg1)
6669 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6670 return fold_convert (type, TREE_OPERAND (arg0, 0));
6673 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6675 This results in more efficient code for machines without a NOR
6676 instruction. Combine will canonicalize to the first form
6677 which will allow use of NOR instructions provided by the
6678 backend if they exist. */
6679 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6680 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6682 return fold (build1 (BIT_NOT_EXPR, type,
6683 build (BIT_IOR_EXPR, type,
6684 TREE_OPERAND (arg0, 0),
6685 TREE_OPERAND (arg1, 0))));
6688 goto associate;
6690 case RDIV_EXPR:
6691 /* Don't touch a floating-point divide by zero unless the mode
6692 of the constant can represent infinity. */
6693 if (TREE_CODE (arg1) == REAL_CST
6694 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6695 && real_zerop (arg1))
6696 return t;
6698 /* (-A) / (-B) -> A / B */
6699 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6700 return fold (build (RDIV_EXPR, type,
6701 TREE_OPERAND (arg0, 0),
6702 negate_expr (arg1)));
6703 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6704 return fold (build (RDIV_EXPR, type,
6705 negate_expr (arg0),
6706 TREE_OPERAND (arg1, 0)));
6708 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6709 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6710 && real_onep (arg1))
6711 return non_lvalue (fold_convert (type, arg0));
6713 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6714 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6715 && real_minus_onep (arg1))
6716 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6718 /* If ARG1 is a constant, we can convert this to a multiply by the
6719 reciprocal. This does not have the same rounding properties,
6720 so only do this if -funsafe-math-optimizations. We can actually
6721 always safely do it if ARG1 is a power of two, but it's hard to
6722 tell if it is or not in a portable manner. */
6723 if (TREE_CODE (arg1) == REAL_CST)
6725 if (flag_unsafe_math_optimizations
6726 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6727 arg1, 0)))
6728 return fold (build (MULT_EXPR, type, arg0, tem));
6729 /* Find the reciprocal if optimizing and the result is exact. */
6730 if (optimize)
6732 REAL_VALUE_TYPE r;
6733 r = TREE_REAL_CST (arg1);
6734 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6736 tem = build_real (type, r);
6737 return fold (build (MULT_EXPR, type, arg0, tem));
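/* Illustrative example: when optimizing, x / 4.0 becomes x * 0.25
even without -funsafe-math-optimizations, since 0.25 is an exact
reciprocal; x / 3.0 is rewritten only under
-funsafe-math-optimizations.  */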
6741 /* Convert A/B/C to A/(B*C). */
6742 if (flag_unsafe_math_optimizations
6743 && TREE_CODE (arg0) == RDIV_EXPR)
6744 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6745 fold (build (MULT_EXPR, type,
6746 TREE_OPERAND (arg0, 1), arg1))));
6748 /* Convert A/(B/C) to (A/B)*C. */
6749 if (flag_unsafe_math_optimizations
6750 && TREE_CODE (arg1) == RDIV_EXPR)
6751 return fold (build (MULT_EXPR, type,
6752 fold (build (RDIV_EXPR, type, arg0,
6753 TREE_OPERAND (arg1, 0))),
6754 TREE_OPERAND (arg1, 1)));
6756 /* Convert C1/(X*C2) into (C1/C2)/X. */
6757 if (flag_unsafe_math_optimizations
6758 && TREE_CODE (arg1) == MULT_EXPR
6759 && TREE_CODE (arg0) == REAL_CST
6760 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6762 tree tem = const_binop (RDIV_EXPR, arg0,
6763 TREE_OPERAND (arg1, 1), 0);
6764 if (tem)
6765 return fold (build (RDIV_EXPR, type, tem,
6766 TREE_OPERAND (arg1, 0)));
6769 if (flag_unsafe_math_optimizations)
6771 enum built_in_function fcode = builtin_mathfn_code (arg1);
6772 /* Optimize x/expN(y) into x*expN(-y). */
6773 if (BUILTIN_EXPONENT_P (fcode))
6775 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6776 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
6777 tree arglist = build_tree_list (NULL_TREE,
6778 fold_convert (type, arg));
6779 arg1 = build_function_call_expr (expfn, arglist);
6780 return fold (build (MULT_EXPR, type, arg0, arg1));
6783 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6784 if (fcode == BUILT_IN_POW
6785 || fcode == BUILT_IN_POWF
6786 || fcode == BUILT_IN_POWL)
6788 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6789 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6790 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6791 tree neg11 = fold_convert (type, negate_expr (arg11));
6792 tree arglist = tree_cons (NULL_TREE, arg10,
6793 build_tree_list (NULL_TREE, neg11));
6794 arg1 = build_function_call_expr (powfn, arglist);
6795 return fold (build (MULT_EXPR, type, arg0, arg1));
6799 if (flag_unsafe_math_optimizations)
6801 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6802 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6804 /* Optimize sin(x)/cos(x) as tan(x). */
6805 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6806 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6807 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6808 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6809 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6811 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
6813 if (tanfn != NULL_TREE)
6814 return build_function_call_expr (tanfn,
6815 TREE_OPERAND (arg0, 1));
6818 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6819 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6820 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6821 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6822 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6823 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6825 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
6827 if (tanfn != NULL_TREE)
6829 tree tmp = TREE_OPERAND (arg0, 1);
6830 tmp = build_function_call_expr (tanfn, tmp);
6831 return fold (build (RDIV_EXPR, type,
6832 build_real (type, dconst1),
6833 tmp));
6837 /* Optimize pow(x,c)/x as pow(x,c-1). */
6838 if (fcode0 == BUILT_IN_POW
6839 || fcode0 == BUILT_IN_POWF
6840 || fcode0 == BUILT_IN_POWL)
6842 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6843 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6844 if (TREE_CODE (arg01) == REAL_CST
6845 && ! TREE_CONSTANT_OVERFLOW (arg01)
6846 && operand_equal_p (arg1, arg00, 0))
6848 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6849 REAL_VALUE_TYPE c;
6850 tree arg, arglist;
6852 c = TREE_REAL_CST (arg01);
6853 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6854 arg = build_real (type, c);
6855 arglist = build_tree_list (NULL_TREE, arg);
6856 arglist = tree_cons (NULL_TREE, arg1, arglist);
6857 return build_function_call_expr (powfn, arglist);
6861 goto binary;
6863 case TRUNC_DIV_EXPR:
6864 case ROUND_DIV_EXPR:
6865 case FLOOR_DIV_EXPR:
6866 case CEIL_DIV_EXPR:
6867 case EXACT_DIV_EXPR:
6868 if (integer_onep (arg1))
6869 return non_lvalue (fold_convert (type, arg0));
6870 if (integer_zerop (arg1))
6871 return t;
6872 /* X / -1 is -X. */
6873 if (!TYPE_UNSIGNED (type)
6874 && TREE_CODE (arg1) == INTEGER_CST
6875 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
6876 && TREE_INT_CST_HIGH (arg1) == -1)
6877 return fold_convert (type, negate_expr (arg0));
6879 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6880 operation, EXACT_DIV_EXPR.
6882 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6883 At one time others generated faster code; it's not clear if they do
6884 after the last round of changes to the DIV code in expmed.c.  */
6885 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6886 && multiple_of_p (type, arg0, arg1))
6887 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6889 if (TREE_CODE (arg1) == INTEGER_CST
6890 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6891 code, NULL_TREE)))
6892 return fold_convert (type, tem);
6894 goto binary;
6896 case CEIL_MOD_EXPR:
6897 case FLOOR_MOD_EXPR:
6898 case ROUND_MOD_EXPR:
6899 case TRUNC_MOD_EXPR:
6900 if (integer_onep (arg1))
6901 return omit_one_operand (type, integer_zero_node, arg0);
6902 if (integer_zerop (arg1))
6903 return t;
6904 /* X % -1 is zero. */
6905 if (!TYPE_UNSIGNED (type)
6906 && TREE_CODE (arg1) == INTEGER_CST
6907 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
6908 && TREE_INT_CST_HIGH (arg1) == -1)
6909 return omit_one_operand (type, integer_zero_node, arg0);
6911 if (TREE_CODE (arg1) == INTEGER_CST
6912 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6913 code, NULL_TREE)))
6914 return fold_convert (type, tem);
6916 goto binary;
6918 case LROTATE_EXPR:
6919 case RROTATE_EXPR:
6920 if (integer_all_onesp (arg0))
6921 return omit_one_operand (type, arg0, arg1);
6922 goto shift;
6924 case RSHIFT_EXPR:
6925 /* Optimize -1 >> x for arithmetic right shifts. */
6926 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
6927 return omit_one_operand (type, arg0, arg1);
6928 /* ... fall through ... */
6930 case LSHIFT_EXPR:
6931 shift:
6932 if (integer_zerop (arg1))
6933 return non_lvalue (fold_convert (type, arg0));
6934 if (integer_zerop (arg0))
6935 return omit_one_operand (type, arg0, arg1);
6937 /* Since a negative shift count is not well-defined,
6938 don't try to compute it in the compiler. */
6939 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6940 return t;
6941 /* Rewrite an LROTATE_EXPR by a constant into an
6942 RROTATE_EXPR by a new constant. */
6943 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6945 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6946 tem = fold_convert (TREE_TYPE (arg1), tem);
6947 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6948 return fold (build (RROTATE_EXPR, type, arg0, tem));
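/* Illustrative example: for a 32-bit type, a left rotate by 5 is
canonicalized to a right rotate by 27.  */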
6951 /* If we have a rotate of a bit operation with the rotate count and
6952 the second operand of the bit operation both constant,
6953 permute the two operations. */
6954 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6955 && (TREE_CODE (arg0) == BIT_AND_EXPR
6956 || TREE_CODE (arg0) == BIT_IOR_EXPR
6957 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6958 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6959 return fold (build (TREE_CODE (arg0), type,
6960 fold (build (code, type,
6961 TREE_OPERAND (arg0, 0), arg1)),
6962 fold (build (code, type,
6963 TREE_OPERAND (arg0, 1), arg1))));
6965 /* Two consecutive rotates adding up to the width of the mode can
6966 be ignored. */
6967 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6968 && TREE_CODE (arg0) == RROTATE_EXPR
6969 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6970 && TREE_INT_CST_HIGH (arg1) == 0
6971 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6972 && ((TREE_INT_CST_LOW (arg1)
6973 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6974 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6975 return TREE_OPERAND (arg0, 0);
6977 goto binary;
6979 case MIN_EXPR:
6980 if (operand_equal_p (arg0, arg1, 0))
6981 return omit_one_operand (type, arg0, arg1);
6982 if (INTEGRAL_TYPE_P (type)
6983 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6984 return omit_one_operand (type, arg1, arg0);
6985 goto associate;
6987 case MAX_EXPR:
6988 if (operand_equal_p (arg0, arg1, 0))
6989 return omit_one_operand (type, arg0, arg1);
6990 if (INTEGRAL_TYPE_P (type)
6991 && TYPE_MAX_VALUE (type)
6992 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6993 return omit_one_operand (type, arg1, arg0);
6994 goto associate;
6996 case TRUTH_NOT_EXPR:
6997 /* Note that the operand of this must be an int
6998 and its values must be 0 or 1.
6999 ("true" is a fixed value perhaps depending on the language,
7000 but we don't handle values other than 1 correctly yet.) */
7001 tem = invert_truthvalue (arg0);
7002 /* Avoid infinite recursion. */
7003 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7005 tem = fold_single_bit_test (code, arg0, arg1, type);
7006 if (tem)
7007 return tem;
7008 return t;
7010 return fold_convert (type, tem);
7012 case TRUTH_ANDIF_EXPR:
7013 /* Note that the operands of this must be ints
7014 and their values must be 0 or 1.
7015 ("true" is a fixed value perhaps depending on the language.) */
7016 /* If first arg is constant zero, return it. */
7017 if (integer_zerop (arg0))
7018 return fold_convert (type, arg0);
7019 case TRUTH_AND_EXPR:
7020 /* If either arg is constant true, drop it. */
7021 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7022 return non_lvalue (fold_convert (type, arg1));
7023 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7024 /* Preserve sequence points. */
7025 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7026 return non_lvalue (fold_convert (type, arg0));
7027 /* If second arg is constant zero, result is zero, but first arg
7028 must be evaluated. */
7029 if (integer_zerop (arg1))
7030 return omit_one_operand (type, arg1, arg0);
7031 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7032 case will be handled here. */
7033 if (integer_zerop (arg0))
7034 return omit_one_operand (type, arg0, arg1);
7036 truth_andor:
7037 /* We only do these simplifications if we are optimizing. */
7038 if (!optimize)
7039 return t;
7041 /* Check for things like (A || B) && (A || C). We can convert this
7042 to A || (B && C). Note that either operator can be any of the four
7043 truth and/or operations and the transformation will still be
7044 valid. Also note that we only care about order for the
7045 ANDIF and ORIF operators. If B contains side effects, this
7046 might change the truth-value of A. */
7047 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7048 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7049 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7050 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7051 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7052 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7054 tree a00 = TREE_OPERAND (arg0, 0);
7055 tree a01 = TREE_OPERAND (arg0, 1);
7056 tree a10 = TREE_OPERAND (arg1, 0);
7057 tree a11 = TREE_OPERAND (arg1, 1);
7058 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7059 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7060 && (code == TRUTH_AND_EXPR
7061 || code == TRUTH_OR_EXPR));
7063 if (operand_equal_p (a00, a10, 0))
7064 return fold (build (TREE_CODE (arg0), type, a00,
7065 fold (build (code, type, a01, a11))));
7066 else if (commutative && operand_equal_p (a00, a11, 0))
7067 return fold (build (TREE_CODE (arg0), type, a00,
7068 fold (build (code, type, a01, a10))));
7069 else if (commutative && operand_equal_p (a01, a10, 0))
7070 return fold (build (TREE_CODE (arg0), type, a01,
7071 fold (build (code, type, a00, a11))));
7073 /* This case is tricky because we must either have commutative
7074 operators or else A10 must not have side-effects. */
7076 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7077 && operand_equal_p (a01, a11, 0))
7078 return fold (build (TREE_CODE (arg0), type,
7079 fold (build (code, type, a00, a10)),
7080 a01));
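/* E.g. (a || b) && (a || c) becomes a || (b && c); A is still
   evaluated first and exactly once, so the short-circuit behavior
   is preserved as long as B, checked above, has no side effects.  */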
7083 /* See if we can build a range comparison. */
7084 if (0 != (tem = fold_range_test (t)))
7085 return tem;
7087 /* Check for the possibility of merging component references. If our
7088 lhs is another similar operation, try to merge its rhs with our
7089 rhs. Then try to merge our lhs and rhs. */
7090 if (TREE_CODE (arg0) == code
7091 && 0 != (tem = fold_truthop (code, type,
7092 TREE_OPERAND (arg0, 1), arg1)))
7093 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7095 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7096 return tem;
7098 return t;
7100 case TRUTH_ORIF_EXPR:
7101 /* Note that the operands of this must be ints
7102 and their values must be 0 or 1.
7103 ("true" is a fixed value perhaps depending on the language.) */
7104 /* If first arg is constant true, return it. */
7105 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7106 return fold_convert (type, arg0);
7107 case TRUTH_OR_EXPR:
7108 /* If either arg is constant zero, drop it. */
7109 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7110 return non_lvalue (fold_convert (type, arg1));
7111 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7112 /* Preserve sequence points. */
7113 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7114 return non_lvalue (fold_convert (type, arg0));
7115 /* If second arg is constant true, result is true, but we must
7116 evaluate first arg. */
7117 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7118 return omit_one_operand (type, arg1, arg0);
7119 /* Likewise for first arg, but note this only occurs here for
7120 TRUTH_OR_EXPR. */
7121 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7122 return omit_one_operand (type, arg0, arg1);
7123 goto truth_andor;
7125 case TRUTH_XOR_EXPR:
7126 /* If either arg is constant zero, drop it. */
7127 if (integer_zerop (arg0))
7128 return non_lvalue (fold_convert (type, arg1));
7129 if (integer_zerop (arg1))
7130 return non_lvalue (fold_convert (type, arg0));
7131 /* If either arg is constant true, this is a logical inversion. */
7132 if (integer_onep (arg0))
7133 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7134 if (integer_onep (arg1))
7135 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7136 return t;
7138 case EQ_EXPR:
7139 case NE_EXPR:
7140 case LT_EXPR:
7141 case GT_EXPR:
7142 case LE_EXPR:
7143 case GE_EXPR:
7144 /* If one arg is a real or integer constant, put it last. */
7145 if (tree_swap_operands_p (arg0, arg1, true))
7146 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7148 /* If this is an equality comparison of the address of a non-weak
7149 object against zero, then we know the result. */
7150 if ((code == EQ_EXPR || code == NE_EXPR)
7151 && TREE_CODE (arg0) == ADDR_EXPR
7152 && DECL_P (TREE_OPERAND (arg0, 0))
7153 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7154 && integer_zerop (arg1))
7156 if (code == EQ_EXPR)
7157 return fold_convert (type, integer_zero_node);
7158 else
7159 return fold_convert (type, integer_one_node);
7162 /* If this is an equality comparison of the address of two non-weak,
7163 unaliased symbols neither of which are extern (since we do not
7164 have access to attributes for externs), then we know the result. */
7165 if ((code == EQ_EXPR || code == NE_EXPR)
7166 && TREE_CODE (arg0) == ADDR_EXPR
7167 && DECL_P (TREE_OPERAND (arg0, 0))
7168 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7169 && ! lookup_attribute ("alias",
7170 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7171 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7172 && TREE_CODE (arg1) == ADDR_EXPR
7173 && DECL_P (TREE_OPERAND (arg1, 0))
7174 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7175 && ! lookup_attribute ("alias",
7176 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7177 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7179 if (code == EQ_EXPR)
7180 return fold_convert (type, (operand_equal_p (arg0, arg1, 0)
7181 ? integer_one_node : integer_zero_node));
7182 else
7183 return fold_convert (type, (operand_equal_p (arg0, arg1, 0)
7184 ? integer_zero_node : integer_one_node));
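/* E.g. given "static int a, b;", the folds above turn &a == 0 into 0,
   &a != 0 into 1, &a == &b into 0 and &a != &b into 1, since neither
   decl is weak, aliased or external.  */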
7187 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7189 tree targ0 = strip_float_extensions (arg0);
7190 tree targ1 = strip_float_extensions (arg1);
7191 tree newtype = TREE_TYPE (targ0);
7193 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7194 newtype = TREE_TYPE (targ1);
7196 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7197 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7198 return fold (build (code, type, fold_convert (newtype, targ0),
7199 fold_convert (newtype, targ1)));
7201 /* (-a) CMP (-b) -> b CMP a */
7202 if (TREE_CODE (arg0) == NEGATE_EXPR
7203 && TREE_CODE (arg1) == NEGATE_EXPR)
7204 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7205 TREE_OPERAND (arg0, 0)));
7207 if (TREE_CODE (arg1) == REAL_CST)
7209 REAL_VALUE_TYPE cst;
7210 cst = TREE_REAL_CST (arg1);
7212 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7213 if (TREE_CODE (arg0) == NEGATE_EXPR)
7214 return
7215 fold (build (swap_tree_comparison (code), type,
7216 TREE_OPERAND (arg0, 0),
7217 build_real (TREE_TYPE (arg1),
7218 REAL_VALUE_NEGATE (cst))));
7220 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7221 /* a CMP (-0) -> a CMP 0 */
7222 if (REAL_VALUE_MINUS_ZERO (cst))
7223 return fold (build (code, type, arg0,
7224 build_real (TREE_TYPE (arg1), dconst0)));
7226 /* x != NaN is always true, other ops are always false. */
7227 if (REAL_VALUE_ISNAN (cst)
7228 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7230 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7231 return omit_one_operand (type, fold_convert (type, tem), arg0);
7234 /* Fold comparisons against infinity. */
7235 if (REAL_VALUE_ISINF (cst))
7237 tem = fold_inf_compare (code, type, arg0, arg1);
7238 if (tem != NULL_TREE)
7239 return tem;
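/* Taken together, for the REAL_CST cases above: x < -0.0 becomes
   x < 0.0; x != NAN becomes 1 and x < NAN becomes 0 (x is still
   evaluated, and signaling NaNs must not need honoring); and
   comparisons against +-Inf are delegated to fold_inf_compare.  */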
7243 /* If this is a comparison of a real constant with a PLUS_EXPR
7244 or a MINUS_EXPR of a real constant, we can convert it into a
7245 comparison with a revised real constant as long as no overflow
7246 occurs when unsafe_math_optimizations are enabled. */
7247 if (flag_unsafe_math_optimizations
7248 && TREE_CODE (arg1) == REAL_CST
7249 && (TREE_CODE (arg0) == PLUS_EXPR
7250 || TREE_CODE (arg0) == MINUS_EXPR)
7251 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7252 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7253 ? MINUS_EXPR : PLUS_EXPR,
7254 arg1, TREE_OPERAND (arg0, 1), 0))
7255 && ! TREE_CONSTANT_OVERFLOW (tem))
7256 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7258 /* Likewise, we can simplify a comparison of a real constant with
7259 a MINUS_EXPR whose first operand is also a real constant, i.e.
7260 (c1 - x) < c2 becomes x > c1-c2. */
7261 if (flag_unsafe_math_optimizations
7262 && TREE_CODE (arg1) == REAL_CST
7263 && TREE_CODE (arg0) == MINUS_EXPR
7264 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7265 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7266 arg1, 0))
7267 && ! TREE_CONSTANT_OVERFLOW (tem))
7268 return fold (build (swap_tree_comparison (code), type,
7269 TREE_OPERAND (arg0, 1), tem));
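/* E.g. under -funsafe-math-optimizations, x + 1.5 < 3.0 becomes
   x < 1.5, and 4.0 - x < 1.0 becomes x > 3.0, provided the
   adjusted constant is computed without overflow.  */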
7271 /* Fold comparisons against built-in math functions. */
7272 if (TREE_CODE (arg1) == REAL_CST
7273 && flag_unsafe_math_optimizations
7274 && ! flag_errno_math)
7276 enum built_in_function fcode = builtin_mathfn_code (arg0);
7278 if (fcode != END_BUILTINS)
7280 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7281 if (tem != NULL_TREE)
7282 return tem;
7287 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7288 if (TREE_CONSTANT (arg1)
7289 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7290 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7291 /* This optimization is invalid for ordered comparisons
7292 if CONST+INCR overflows or if foo+incr might overflow.
7293 This optimization is invalid for floating point due to rounding.
7294 For pointer types we assume overflow doesn't happen. */
7295 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7296 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7297 && (code == EQ_EXPR || code == NE_EXPR))))
7299 tree varop, newconst;
7301 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7303 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7304 arg1, TREE_OPERAND (arg0, 1)));
7305 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7306 TREE_OPERAND (arg0, 0),
7307 TREE_OPERAND (arg0, 1));
7309 else
7311 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7312 arg1, TREE_OPERAND (arg0, 1)));
7313 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7314 TREE_OPERAND (arg0, 0),
7315 TREE_OPERAND (arg0, 1));
7319 /* If VAROP is a reference to a bitfield, we must mask
7320 the constant by the width of the field. */
7321 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7322 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7324 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7325 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7326 tree folded_compare, shift;
7328 /* First check whether the comparison would come out
7329 always the same. If we don't do that we would
7330 change the meaning with the masking. */
7331 folded_compare = fold (build2 (code, type,
7332 TREE_OPERAND (varop, 0),
7333 arg1));
7334 if (integer_zerop (folded_compare)
7335 || integer_onep (folded_compare))
7336 return omit_one_operand (type, folded_compare, varop);
7338 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size, 0);
7340 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7341 newconst, shift));
7342 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7343 newconst, shift));
7346 return fold (build2 (code, type, varop, newconst));
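/* E.g. x++ == 5 becomes ++x == 6 and x-- == 5 becomes --x == 4;
   when x is a bitfield, the shift pair above first truncates the
   adjusted constant to the field's width.  */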
7349 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7350 This transformation affects the cases which are handled in later
7351 optimizations involving comparisons with non-negative constants. */
7352 if (TREE_CODE (arg1) == INTEGER_CST
7353 && TREE_CODE (arg0) != INTEGER_CST
7354 && tree_int_cst_sgn (arg1) > 0)
7356 switch (code)
7358 case GE_EXPR:
7359 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7360 return fold (build (GT_EXPR, type, arg0, arg1));
7362 case LT_EXPR:
7363 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7364 return fold (build (LE_EXPR, type, arg0, arg1));
7366 default:
7367 break;
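/* E.g. x >= 1 becomes x > 0 and x < 1 becomes x <= 0; requiring
   C > 0 guarantees that C - 1 cannot wrap around.  */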
7371 /* Comparisons with the highest or lowest possible integer of
7372 the specified size will have known values. */
7374 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7376 if (TREE_CODE (arg1) == INTEGER_CST
7377 && ! TREE_CONSTANT_OVERFLOW (arg1)
7378 && width <= HOST_BITS_PER_WIDE_INT
7379 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7380 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7382 unsigned HOST_WIDE_INT signed_max;
7383 unsigned HOST_WIDE_INT max, min;
7385 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7387 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7389 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7390 min = 0;
7392 else
7394 max = signed_max;
7395 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7398 if (TREE_INT_CST_HIGH (arg1) == 0
7399 && TREE_INT_CST_LOW (arg1) == max)
7400 switch (code)
7402 case GT_EXPR:
7403 return omit_one_operand (type,
7404 fold_convert (type,
7405 integer_zero_node),
7406 arg0);
7407 case GE_EXPR:
7408 return fold (build (EQ_EXPR, type, arg0, arg1));
7410 case LE_EXPR:
7411 return omit_one_operand (type,
7412 fold_convert (type,
7413 integer_one_node),
7414 arg0);
7415 case LT_EXPR:
7416 return fold (build (NE_EXPR, type, arg0, arg1));
7418 /* The GE_EXPR and LT_EXPR cases above are not normally
7419 reached because of previous transformations. */
7421 default:
7422 break;
7424 else if (TREE_INT_CST_HIGH (arg1) == 0
7425 && TREE_INT_CST_LOW (arg1) == max - 1)
7426 switch (code)
7428 case GT_EXPR:
7429 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7430 return fold (build (EQ_EXPR, type, arg0, arg1));
7431 case LE_EXPR:
7432 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7433 return fold (build (NE_EXPR, type, arg0, arg1));
7434 default:
7435 break;
7437 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7438 && TREE_INT_CST_LOW (arg1) == min)
7439 switch (code)
7441 case LT_EXPR:
7442 return omit_one_operand (type,
7443 fold_convert (type,
7444 integer_zero_node),
7445 arg0);
7446 case LE_EXPR:
7447 return fold (build (EQ_EXPR, type, arg0, arg1));
7449 case GE_EXPR:
7450 return omit_one_operand (type,
7451 fold_convert (type,
7452 integer_one_node),
7453 arg0);
7454 case GT_EXPR:
7455 return fold (build (NE_EXPR, type, arg0, arg1));
7457 default:
7458 break;
7460 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7461 && TREE_INT_CST_LOW (arg1) == min + 1)
7462 switch (code)
7464 case GE_EXPR:
7465 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7466 return fold (build (NE_EXPR, type, arg0, arg1));
7467 case LT_EXPR:
7468 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7469 return fold (build (EQ_EXPR, type, arg0, arg1));
7470 default:
7471 break;
7474 else if (TREE_INT_CST_HIGH (arg1) == 0
7475 && TREE_INT_CST_LOW (arg1) == signed_max
7476 && TYPE_UNSIGNED (TREE_TYPE (arg1))
7477 /* signed_type does not work on pointer types. */
7478 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7480 /* The following case also applies to X < signed_max+1
7481 and X >= signed_max+1 because of previous transformations. */
7482 if (code == LE_EXPR || code == GT_EXPR)
7484 tree st0, st1;
7485 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7486 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7487 return fold
7488 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7489 type, fold_convert (st0, arg0),
7490 fold_convert (st1, integer_zero_node)));
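/* E.g. for an 8-bit unsigned char x: x > 255 folds to 0 and
   x <= 255 to 1, x >= 255 becomes x == 255, x > 254 becomes
   x == 255, and x <= 127 becomes (signed char) x >= 0.  */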
7496 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7497 a MINUS_EXPR of a constant, we can convert it into a comparison with
7498 a revised constant as long as no overflow occurs. */
7499 if ((code == EQ_EXPR || code == NE_EXPR)
7500 && TREE_CODE (arg1) == INTEGER_CST
7501 && (TREE_CODE (arg0) == PLUS_EXPR
7502 || TREE_CODE (arg0) == MINUS_EXPR)
7503 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7504 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7505 ? MINUS_EXPR : PLUS_EXPR,
7506 arg1, TREE_OPERAND (arg0, 1), 0))
7507 && ! TREE_CONSTANT_OVERFLOW (tem))
7508 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7510 /* Similarly for a NEGATE_EXPR. */
7511 else if ((code == EQ_EXPR || code == NE_EXPR)
7512 && TREE_CODE (arg0) == NEGATE_EXPR
7513 && TREE_CODE (arg1) == INTEGER_CST
7514 && 0 != (tem = negate_expr (arg1))
7515 && TREE_CODE (tem) == INTEGER_CST
7516 && ! TREE_CONSTANT_OVERFLOW (tem))
7517 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7519 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7520 for !=. Don't do this for ordered comparisons due to overflow. */
7521 else if ((code == NE_EXPR || code == EQ_EXPR)
7522 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7523 return fold (build (code, type,
7524 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
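/* E.g. x + 9 == 12 becomes x == 3, -x != 4 becomes x != -4, and
   x - y == 0 becomes x == y; none of these are done for ordered
   comparisons, where overflow could change the result.  */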
7526 /* If we are widening one operand of an integer comparison,
7527 see if the other operand is similarly being widened. Perhaps we
7528 can do the comparison in the narrower type. */
7529 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7530 && TREE_CODE (arg0) == NOP_EXPR
7531 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7532 && (code == EQ_EXPR || code == NE_EXPR
7533 || TYPE_UNSIGNED (TREE_TYPE (arg0))
7534 == TYPE_UNSIGNED (TREE_TYPE (tem)))
7535 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7536 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7537 || (TREE_CODE (t1) == INTEGER_CST
7538 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7539 return fold (build (code, type, tem,
7540 fold_convert (TREE_TYPE (tem), t1)));
7542 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7543 constant, we can simplify it. */
7544 else if (TREE_CODE (arg1) == INTEGER_CST
7545 && (TREE_CODE (arg0) == MIN_EXPR
7546 || TREE_CODE (arg0) == MAX_EXPR)
7547 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7548 return optimize_minmax_comparison (t);
7550 /* If we are comparing an ABS_EXPR with a constant, we can
7551 convert all the cases into explicit comparisons, but they may
7552 well not be faster than doing the ABS and one comparison.
7553 But ABS (X) <= C is a range comparison, which becomes a subtraction
7554 and a comparison, and is probably faster. */
7555 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7556 && TREE_CODE (arg0) == ABS_EXPR
7557 && ! TREE_SIDE_EFFECTS (arg0)
7558 && (0 != (tem = negate_expr (arg1)))
7559 && TREE_CODE (tem) == INTEGER_CST
7560 && ! TREE_CONSTANT_OVERFLOW (tem))
7561 return fold (build (TRUTH_ANDIF_EXPR, type,
7562 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7563 build (LE_EXPR, type,
7564 TREE_OPERAND (arg0, 0), arg1)));
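/* I.e. abs (x) <= 4 becomes x >= -4 && x <= 4, a cheap range check,
   whereas expanding other ABS comparisons into explicit cases is
   not obviously faster than computing the ABS itself.  */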
7566 /* If this is an EQ or NE comparison with zero and ARG0 is
7567 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7568 two operations, but the latter can be done in one less insn
7569 on machines that have only two-operand insns or on which a
7570 constant cannot be the first operand. */
7571 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7572 && TREE_CODE (arg0) == BIT_AND_EXPR)
7574 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7575 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7576 return
7577 fold (build (code, type,
7578 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7579 build (RSHIFT_EXPR,
7580 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7581 TREE_OPERAND (arg0, 1),
7582 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7583 fold_convert (TREE_TYPE (arg0),
7584 integer_one_node)),
7585 arg1));
7586 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7587 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7588 return
7589 fold (build (code, type,
7590 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7591 build (RSHIFT_EXPR,
7592 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7593 TREE_OPERAND (arg0, 0),
7594 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7595 fold_convert (TREE_TYPE (arg0),
7596 integer_one_node)),
7597 arg1));
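/* E.g. ((1 << n) & x) == 0 becomes ((x >> n) & 1) == 0; both forms
   need a shift and an AND, but the second starts from the variable,
   which suits targets where a constant cannot be the first operand
   of a shift.  */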
7600 /* If this is an NE or EQ comparison of zero against the result of a
7601 signed MOD operation whose second operand is a power of 2, make
7602 the MOD operation unsigned since it is simpler and equivalent. */
7603 if ((code == NE_EXPR || code == EQ_EXPR)
7604 && integer_zerop (arg1)
7605 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7606 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7607 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7608 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7609 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7610 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7612 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7613 tree newmod = build (TREE_CODE (arg0), newtype,
7614 fold_convert (newtype,
7615 TREE_OPERAND (arg0, 0)),
7616 fold_convert (newtype,
7617 TREE_OPERAND (arg0, 1)));
7619 return build (code, type, newmod, fold_convert (newtype, arg1));
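/* E.g. for signed int x, x % 4 == 0 becomes (unsigned) x % 4 == 0;
   for a power-of-2 divisor only the low-order bits determine
   whether the remainder is zero, so the signedness is irrelevant
   to the comparison.  */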
7622 /* If this is an NE comparison of zero with an AND of one, remove the
7623 comparison since the AND will give the correct value. */
7624 if (code == NE_EXPR && integer_zerop (arg1)
7625 && TREE_CODE (arg0) == BIT_AND_EXPR
7626 && integer_onep (TREE_OPERAND (arg0, 1)))
7627 return fold_convert (type, arg0);
7629 /* If we have (A & C) == C where C is a power of 2, convert this into
7630 (A & C) != 0. Similarly for NE_EXPR. */
7631 if ((code == EQ_EXPR || code == NE_EXPR)
7632 && TREE_CODE (arg0) == BIT_AND_EXPR
7633 && integer_pow2p (TREE_OPERAND (arg0, 1))
7634 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7635 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7636 arg0, integer_zero_node));
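/* E.g. (x & 8) == 8 becomes (x & 8) != 0: with a single-bit mask
   the two tests are equivalent, and the != 0 form is what the
   single-bit-test fold below expects.  */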
7638 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7639 2, then fold the expression into shifts and logical operations. */
7640 tem = fold_single_bit_test (code, arg0, arg1, type);
7641 if (tem)
7642 return tem;
7644 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7645 Similarly for NE_EXPR. */
7646 if ((code == EQ_EXPR || code == NE_EXPR)
7647 && TREE_CODE (arg0) == BIT_AND_EXPR
7648 && TREE_CODE (arg1) == INTEGER_CST
7649 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7651 tree dandnotc
7652 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7653 arg1, build1 (BIT_NOT_EXPR,
7654 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7655 TREE_OPERAND (arg0, 1))));
7656 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7657 if (integer_nonzerop (dandnotc))
7658 return omit_one_operand (type, rslt, arg0);
7661 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7662 Similarly for NE_EXPR. */
7663 if ((code == EQ_EXPR || code == NE_EXPR)
7664 && TREE_CODE (arg0) == BIT_IOR_EXPR
7665 && TREE_CODE (arg1) == INTEGER_CST
7666 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7668 tree candnotd
7669 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7670 TREE_OPERAND (arg0, 1),
7671 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7672 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7673 if (integer_nonzerop (candnotd))
7674 return omit_one_operand (type, rslt, arg0);
7677 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7678 and similarly for >= into !=. */
7679 if ((code == LT_EXPR || code == GE_EXPR)
7680 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7681 && TREE_CODE (arg1) == LSHIFT_EXPR
7682 && integer_onep (TREE_OPERAND (arg1, 0)))
7683 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7684 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7685 TREE_OPERAND (arg1, 1)),
7686 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7688 else if ((code == LT_EXPR || code == GE_EXPR)
7689 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7690 && (TREE_CODE (arg1) == NOP_EXPR
7691 || TREE_CODE (arg1) == CONVERT_EXPR)
7692 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7693 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7694 return
7695 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7696 fold_convert (TREE_TYPE (arg0),
7697 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7698 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7699 1))),
7700 fold_convert (TREE_TYPE (arg0), integer_zero_node));
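/* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0 and
   x >= (1 << y) becomes (x >> y) != 0: shifting x right by y
   discards exactly the bits that are allowed to be set when
   x < 1 << y.  */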
7702 /* Simplify comparison of something with itself. (For IEEE
7703 floating-point, we can only do some of these simplifications.) */
7704 if (operand_equal_p (arg0, arg1, 0))
7706 switch (code)
7708 case EQ_EXPR:
7709 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7710 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7711 return constant_boolean_node (1, type);
7712 break;
7714 case GE_EXPR:
7715 case LE_EXPR:
7716 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7717 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7718 return constant_boolean_node (1, type);
7719 return fold (build (EQ_EXPR, type, arg0, arg1));
7721 case NE_EXPR:
7722 /* For NE, we can only do this simplification if the type is
7723 integral or we don't honor IEEE floating point NaNs. */
7724 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7725 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7726 break;
7727 /* ... fall through ... */
7728 case GT_EXPR:
7729 case LT_EXPR:
7730 return constant_boolean_node (0, type);
7731 default:
7732 abort ();
7736 /* If we are comparing an expression that just has comparisons
7737 of two integer values, arithmetic expressions of those comparisons,
7738 and constants, we can simplify it. There are only three cases
7739 to check: the two values can either be equal, the first can be
7740 greater, or the second can be greater. Fold the expression for
7741 those three values. Since each value must be 0 or 1, we have
7742 eight possibilities, each of which corresponds to the constant 0
7743 or 1 or one of the six possible comparisons.
7745 This handles common cases like (a > b) == 0 but also handles
7746 expressions like ((x > y) - (y > x)) > 0, which supposedly
7747 occur in macroized code. */
7749 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7751 tree cval1 = 0, cval2 = 0;
7752 int save_p = 0;
7754 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7755 /* Don't handle degenerate cases here; they should already
7756 have been handled anyway. */
7757 && cval1 != 0 && cval2 != 0
7758 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7759 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7760 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7761 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7762 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7763 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7764 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7766 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7767 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7769 /* We can't just pass T to eval_subst in case cval1 or cval2
7770 was the same as ARG1. */
7772 tree high_result
7773 = fold (build (code, type,
7774 eval_subst (arg0, cval1, maxval, cval2, minval),
7775 arg1));
7776 tree equal_result
7777 = fold (build (code, type,
7778 eval_subst (arg0, cval1, maxval, cval2, maxval),
7779 arg1));
7780 tree low_result
7781 = fold (build (code, type,
7782 eval_subst (arg0, cval1, minval, cval2, maxval),
7783 arg1));
7785 /* All three of these results should be 0 or 1. Confirm they
7786 are. Then use those values to select the proper code
7787 to use. */
7789 if ((integer_zerop (high_result)
7790 || integer_onep (high_result))
7791 && (integer_zerop (equal_result)
7792 || integer_onep (equal_result))
7793 && (integer_zerop (low_result)
7794 || integer_onep (low_result)))
7796 /* Make a 3-bit mask with the high-order bit being the
7797 value for `>', the next for '=', and the low for '<'. */
7798 switch ((integer_onep (high_result) * 4)
7799 + (integer_onep (equal_result) * 2)
7800 + integer_onep (low_result))
7802 case 0:
7803 /* Always false. */
7804 return omit_one_operand (type, integer_zero_node, arg0);
7805 case 1:
7806 code = LT_EXPR;
7807 break;
7808 case 2:
7809 code = EQ_EXPR;
7810 break;
7811 case 3:
7812 code = LE_EXPR;
7813 break;
7814 case 4:
7815 code = GT_EXPR;
7816 break;
7817 case 5:
7818 code = NE_EXPR;
7819 break;
7820 case 6:
7821 code = GE_EXPR;
7822 break;
7823 case 7:
7824 /* Always true. */
7825 return omit_one_operand (type, integer_one_node, arg0);
7828 tem = build (code, type, cval1, cval2);
7829 if (save_p)
7830 return save_expr (tem);
7831 else
7832 return fold (tem);
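/* Worked example: for (a > b) == 0, substituting the three orderings
   of a and b gives high_result = 0, equal_result = 1, low_result = 1,
   i.e. mask 3, which selects LE_EXPR, so the whole expression folds
   to a <= b.  */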
7837 /* If this is a comparison of a field, we may be able to simplify it. */
7838 if (((TREE_CODE (arg0) == COMPONENT_REF
7839 && lang_hooks.can_use_bit_fields_p ())
7840 || TREE_CODE (arg0) == BIT_FIELD_REF)
7841 && (code == EQ_EXPR || code == NE_EXPR)
7842 /* Handle the constant case even without -O
7843 to make sure the warnings are given. */
7844 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7846 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7847 if (t1)
7848 return t1;
7851 /* If this is a comparison of complex values and either or both sides
7852 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7853 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7854 This may prevent needless evaluations. */
7855 if ((code == EQ_EXPR || code == NE_EXPR)
7856 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7857 && (TREE_CODE (arg0) == COMPLEX_EXPR
7858 || TREE_CODE (arg1) == COMPLEX_EXPR
7859 || TREE_CODE (arg0) == COMPLEX_CST
7860 || TREE_CODE (arg1) == COMPLEX_CST))
7862 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7863 tree real0, imag0, real1, imag1;
7865 arg0 = save_expr (arg0);
7866 arg1 = save_expr (arg1);
7867 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7868 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7869 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7870 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7872 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7873 : TRUTH_ORIF_EXPR),
7874 type,
7875 fold (build (code, type, real0, real1)),
7876 fold (build (code, type, imag0, imag1))));
7879 /* Optimize comparisons of strlen vs zero to a compare of the
7880 first character of the string vs zero. To wit,
7881 strlen(ptr) == 0 => *ptr == 0
7882 strlen(ptr) != 0 => *ptr != 0
7883 Other cases should reduce to one of these two (or a constant)
7884 due to the return value of strlen being unsigned. */
7885 if ((code == EQ_EXPR || code == NE_EXPR)
7886 && integer_zerop (arg1)
7887 && TREE_CODE (arg0) == CALL_EXPR)
7889 tree fndecl = get_callee_fndecl (arg0);
7890 tree arglist;
7892 if (fndecl
7893 && DECL_BUILT_IN (fndecl)
7894 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7895 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7896 && (arglist = TREE_OPERAND (arg0, 1))
7897 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7898 && ! TREE_CHAIN (arglist))
7899 return fold (build (code, type,
7900 build1 (INDIRECT_REF, char_type_node,
7901 TREE_VALUE (arglist)),
7902 integer_zero_node));
7905 /* Both ARG0 and ARG1 are known to be constants at this point. */
7906 t1 = fold_relational_const (code, type, arg0, arg1);
7907 return (t1 == NULL_TREE ? t : t1);
7909 case COND_EXPR:
7910 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7911 so all simple results must be passed through pedantic_non_lvalue. */
7912 if (TREE_CODE (arg0) == INTEGER_CST)
7914 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
7915 /* Only optimize constant conditions when the selected branch
7916 has the same type as the COND_EXPR. This avoids optimizing
7917 away "c ? x : throw", where the throw has a void type. */
7918 if (! VOID_TYPE_P (TREE_TYPE (tem))
7919 || VOID_TYPE_P (type))
7920 return pedantic_non_lvalue (tem);
7921 return t;
7923 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
7924 return pedantic_omit_one_operand (type, arg1, arg0);
7926 /* If we have A op B ? A : C, we may be able to convert this to a
7927 simpler expression, depending on the operation and the values
7928 of B and C. Signed zeros prevent all of these transformations,
7929 for reasons given above each one. */
7931 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7932 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7933 arg1, TREE_OPERAND (arg0, 1))
7934 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7936 tree arg2 = TREE_OPERAND (t, 2);
7937 enum tree_code comp_code = TREE_CODE (arg0);
7939 STRIP_NOPS (arg2);
7941 /* If we have A op 0 ? A : -A, consider applying the following
7942 transformations:
7944 A == 0? A : -A same as -A
7945 A != 0? A : -A same as A
7946 A >= 0? A : -A same as abs (A)
7947 A > 0? A : -A same as abs (A)
7948 A <= 0? A : -A same as -abs (A)
7949 A < 0? A : -A same as -abs (A)
7951 None of these transformations work for modes with signed
7952 zeros. If A is +/-0, the first two transformations will
7953 change the sign of the result (from +0 to -0, or vice
7954 versa). The last four will fix the sign of the result,
7955 even though the original expressions could be positive or
7956 negative, depending on the sign of A.
7958 Note that all these transformations are correct if A is
7959 NaN, since the two alternatives (A and -A) are also NaNs. */
7960 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7961 ? real_zerop (TREE_OPERAND (arg0, 1))
7962 : integer_zerop (TREE_OPERAND (arg0, 1)))
7963 && TREE_CODE (arg2) == NEGATE_EXPR
7964 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7965 switch (comp_code)
7967 case EQ_EXPR:
7968 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
7969 tem = fold_convert (type, negate_expr (tem));
7970 return pedantic_non_lvalue (tem);
7971 case NE_EXPR:
7972 return pedantic_non_lvalue (fold_convert (type, arg1));
7973 case GE_EXPR:
7974 case GT_EXPR:
7975 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7976 arg1 = fold_convert (lang_hooks.types.signed_type
7977 (TREE_TYPE (arg1)), arg1);
7978 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
7979 return pedantic_non_lvalue (fold_convert (type, arg1));
7980 case LE_EXPR:
7981 case LT_EXPR:
7982 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7983 arg1 = fold_convert (lang_hooks.types.signed_type
7984 (TREE_TYPE (arg1)), arg1);
7985 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
7986 arg1 = negate_expr (fold_convert (type, arg1));
7987 return pedantic_non_lvalue (arg1);
7988 default:
7989 abort ();
7992 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7993 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7994 both transformations are correct when A is NaN: A != 0
7995 is then true, and A == 0 is false. */
7997 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7999 if (comp_code == NE_EXPR)
8000 return pedantic_non_lvalue (fold_convert (type, arg1));
8001 else if (comp_code == EQ_EXPR)
8002 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8005 /* Try some transformations of A op B ? A : B.
8007 A == B? A : B same as B
8008 A != B? A : B same as A
8009 A >= B? A : B same as max (A, B)
8010 A > B? A : B same as max (B, A)
8011 A <= B? A : B same as min (A, B)
8012 A < B? A : B same as min (B, A)
8014 As above, these transformations don't work in the presence
8015 of signed zeros. For example, if A and B are zeros of
8016 opposite sign, the first two transformations will change
8017 the sign of the result. In the last four, the original
8018 expressions give different results for (A=+0, B=-0) and
8019 (A=-0, B=+0), but the transformed expressions do not.
8021 The first two transformations are correct if either A or B
8022 is a NaN. In the first transformation, the condition will
8023 be false, and B will indeed be chosen. In the case of the
8024 second transformation, the condition A != B will be true,
8025 and A will be chosen.
8027 The conversions to max() and min() are not correct if B is
8028 a number and A is not. The conditions in the original
8029 expressions will be false, so all four give B. The min()
8030 and max() versions would give a NaN instead. */
8031 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8032 arg2, TREE_OPERAND (arg0, 0)))
8034 tree comp_op0 = TREE_OPERAND (arg0, 0);
8035 tree comp_op1 = TREE_OPERAND (arg0, 1);
8036 tree comp_type = TREE_TYPE (comp_op0);
8038 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8039 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8041 comp_type = type;
8042 comp_op0 = arg1;
8043 comp_op1 = arg2;
8046 switch (comp_code)
8048 case EQ_EXPR:
8049 return pedantic_non_lvalue (fold_convert (type, arg2));
8050 case NE_EXPR:
8051 return pedantic_non_lvalue (fold_convert (type, arg1));
8052 case LE_EXPR:
8053 case LT_EXPR:
8054 /* In C++ a ?: expression can be an lvalue, so put the
8055 operand which will be used if they are equal first
8056 so that we can convert this back to the
8057 corresponding COND_EXPR. */
8058 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8059 return pedantic_non_lvalue (fold_convert
8060 (type, fold (build (MIN_EXPR, comp_type,
8061 (comp_code == LE_EXPR
8062 ? comp_op0 : comp_op1),
8063 (comp_code == LE_EXPR
8064 ? comp_op1 : comp_op0)))));
8065 break;
8066 case GE_EXPR:
8067 case GT_EXPR:
8068 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8069 return pedantic_non_lvalue (fold_convert
8070 (type, fold (build (MAX_EXPR, comp_type,
8071 (comp_code == GE_EXPR
8072 ? comp_op0 : comp_op1),
8073 (comp_code == GE_EXPR
8074 ? comp_op1 : comp_op0)))));
8075 break;
8076 default:
8077 abort ();
8081 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8082 we might still be able to simplify this. For example,
8083 if C1 is one less or one more than C2, this might have started
8084 out as a MIN or MAX and been transformed by this function.
8085 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8087 if (INTEGRAL_TYPE_P (type)
8088 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8089 && TREE_CODE (arg2) == INTEGER_CST)
8090 switch (comp_code)
8092 case EQ_EXPR:
8093 /* We can replace A with C1 in this case. */
8094 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8095 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8096 TREE_OPERAND (t, 2)));
8098 case LT_EXPR:
8099 /* If C1 is C2 + 1, this is min(A, C2). */
8100 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8101 && operand_equal_p (TREE_OPERAND (arg0, 1),
8102 const_binop (PLUS_EXPR, arg2,
8103 integer_one_node, 0), 1))
8104 return pedantic_non_lvalue
8105 (fold (build (MIN_EXPR, type, arg1, arg2)));
8106 break;
8108 case LE_EXPR:
8109 /* If C1 is C2 - 1, this is min(A, C2). */
8110 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8111 && operand_equal_p (TREE_OPERAND (arg0, 1),
8112 const_binop (MINUS_EXPR, arg2,
8113 integer_one_node, 0), 1))
8114 return pedantic_non_lvalue
8115 (fold (build (MIN_EXPR, type, arg1, arg2)));
8116 break;
8118 case GT_EXPR:
8119 /* If C1 is C2 - 1, this is max(A, C2). */
8120 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8121 && operand_equal_p (TREE_OPERAND (arg0, 1),
8122 const_binop (MINUS_EXPR, arg2,
8123 integer_one_node, 0), 1))
8124 return pedantic_non_lvalue
8125 (fold (build (MAX_EXPR, type, arg1, arg2)));
8126 break;
8128 case GE_EXPR:
8129 /* If C1 is C2 + 1, this is max(A, C2). */
8130 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8131 && operand_equal_p (TREE_OPERAND (arg0, 1),
8132 const_binop (PLUS_EXPR, arg2,
8133 integer_one_node, 0), 1))
8134 return pedantic_non_lvalue
8135 (fold (build (MAX_EXPR, type, arg1, arg2)));
8136 break;
8137 case NE_EXPR:
8138 break;
8139 default:
8140 abort ();
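/* E.g. x < 5 ? x : 4 becomes min (x, 4), and x > 5 ? x : 6 becomes
   max (x, 6), recovering MIN/MAX trees that earlier folding may
   have rewritten into this comparison form.  */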
8144 /* If the second operand is simpler than the third, swap them
8145 since that produces better jump optimization results. */
8146 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8147 TREE_OPERAND (t, 2), false))
8149 /* See if this can be inverted. If it can't, possibly because
8150 it was a floating-point inequality comparison, don't do
8151 anything. */
8152 tem = invert_truthvalue (arg0);
8154 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8155 return fold (build (code, type, tem,
8156 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8159 /* Convert A ? 1 : 0 to simply A. */
8160 if (integer_onep (TREE_OPERAND (t, 1))
8161 && integer_zerop (TREE_OPERAND (t, 2))
8162 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8163 call to fold will try to move the conversion inside
8164 a COND, which will recurse. In that case, the COND_EXPR
8165 is probably the best choice, so leave it alone. */
8166 && type == TREE_TYPE (arg0))
8167 return pedantic_non_lvalue (arg0);
8169 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8170 over COND_EXPR in cases such as floating point comparisons. */
8171 if (integer_zerop (TREE_OPERAND (t, 1))
8172 && integer_onep (TREE_OPERAND (t, 2))
8173 && truth_value_p (TREE_CODE (arg0)))
8174 return pedantic_non_lvalue (fold_convert (type,
8175 invert_truthvalue (arg0)));
8177 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8178 operation is simply A & 2. */
8180 if (integer_zerop (TREE_OPERAND (t, 2))
8181 && TREE_CODE (arg0) == NE_EXPR
8182 && integer_zerop (TREE_OPERAND (arg0, 1))
8183 && integer_pow2p (arg1)
8184 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8185 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8186 arg1, 1))
8187 return pedantic_non_lvalue (fold_convert (type,
8188 TREE_OPERAND (arg0, 0)));
8190 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8191 if (integer_zerop (TREE_OPERAND (t, 2))
8192 && truth_value_p (TREE_CODE (arg0))
8193 && truth_value_p (TREE_CODE (arg1)))
8194 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8195 arg0, arg1)));
8197 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8198 if (integer_onep (TREE_OPERAND (t, 2))
8199 && truth_value_p (TREE_CODE (arg0))
8200 && truth_value_p (TREE_CODE (arg1)))
8202 /* Only perform transformation if ARG0 is easily inverted. */
8203 tem = invert_truthvalue (arg0);
8204 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8205 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8206 tem, arg1)));
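/* E.g. a ? b : 0 becomes a && b, and a ? b : 1 becomes !a || b
   provided a inverts cleanly (a comparison, say); both assume a
   and b are 0/1 truth values.  */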
8209 return t;
8211 case COMPOUND_EXPR:
8212 /* When pedantic, a compound expression can be neither an lvalue
8213 nor an integer constant expression. */
8214 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8215 return t;
8216 /* Don't let (0, 0) be a null pointer constant. */
8217 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8218 : fold_convert (type, arg1);
8219 return pedantic_non_lvalue (tem);
8221 case COMPLEX_EXPR:
8222 if (wins)
8223 return build_complex (type, arg0, arg1);
8224 return t;
8226 case REALPART_EXPR:
8227 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8228 return t;
8229 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8230 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8231 TREE_OPERAND (arg0, 1));
8232 else if (TREE_CODE (arg0) == COMPLEX_CST)
8233 return TREE_REALPART (arg0);
8234 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8235 return fold (build (TREE_CODE (arg0), type,
8236 fold (build1 (REALPART_EXPR, type,
8237 TREE_OPERAND (arg0, 0))),
8238 fold (build1 (REALPART_EXPR,
8239 type, TREE_OPERAND (arg0, 1)))));
8240 return t;
8242 case IMAGPART_EXPR:
8243 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8244 return fold_convert (type, integer_zero_node);
8245 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8246 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8247 TREE_OPERAND (arg0, 0));
8248 else if (TREE_CODE (arg0) == COMPLEX_CST)
8249 return TREE_IMAGPART (arg0);
8250 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8251 return fold (build (TREE_CODE (arg0), type,
8252 fold (build1 (IMAGPART_EXPR, type,
8253 TREE_OPERAND (arg0, 0))),
8254 fold (build1 (IMAGPART_EXPR, type,
8255 TREE_OPERAND (arg0, 1)))));
8256 return t;
8258 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8259 appropriate. */
8260 case CLEANUP_POINT_EXPR:
8261 if (! has_cleanups (arg0))
8262 return TREE_OPERAND (t, 0);
8265 enum tree_code code0 = TREE_CODE (arg0);
8266 int kind0 = TREE_CODE_CLASS (code0);
8267 tree arg00 = TREE_OPERAND (arg0, 0);
8268 tree arg01;
8270 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8271 return fold (build1 (code0, type,
8272 fold (build1 (CLEANUP_POINT_EXPR,
8273 TREE_TYPE (arg00), arg00))));
8275 if (kind0 == '<' || kind0 == '2'
8276 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8277 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8278 || code0 == TRUTH_XOR_EXPR)
8280 arg01 = TREE_OPERAND (arg0, 1);
8282 if (TREE_CONSTANT (arg00)
8283 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8284 && ! has_cleanups (arg00)))
8285 return fold (build (code0, type, arg00,
8286 fold (build1 (CLEANUP_POINT_EXPR,
8287 TREE_TYPE (arg01), arg01))));
8289 if (TREE_CONSTANT (arg01))
8290 return fold (build (code0, type,
8291 fold (build1 (CLEANUP_POINT_EXPR,
8292 TREE_TYPE (arg00), arg00)),
8293 arg01));
8296 return t;
8299 case CALL_EXPR:
8300 /* Check for a built-in function. */
8301 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8302 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8303 == FUNCTION_DECL)
8304 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8306 tree tmp = fold_builtin (t);
8307 if (tmp)
8308 return tmp;
8310 return t;
8312 default:
8313 return t;
8314 } /* switch (code) */
8317 #ifdef ENABLE_FOLD_CHECKING
8318 #undef fold
8320 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8321 static void fold_check_failed (tree, tree);
8322 void print_fold_checksum (tree);
8324 /* When --enable-checking=fold, compute a digest of expr before
8325 and after the actual fold call to verify that fold did not
8326 accidentally change the original expr. */
8328 tree
8329 fold (tree expr)
8331 tree ret;
8332 struct md5_ctx ctx;
8333 unsigned char checksum_before[16], checksum_after[16];
8334 htab_t ht;
8336 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8337 md5_init_ctx (&ctx);
8338 fold_checksum_tree (expr, &ctx, ht);
8339 md5_finish_ctx (&ctx, checksum_before);
8340 htab_empty (ht);
8342 ret = fold_1 (expr);
8344 md5_init_ctx (&ctx);
8345 fold_checksum_tree (expr, &ctx, ht);
8346 md5_finish_ctx (&ctx, checksum_after);
8347 htab_delete (ht);
8349 if (memcmp (checksum_before, checksum_after, 16))
8350 fold_check_failed (expr, ret);
8352 return ret;
8355 void
8356 print_fold_checksum (tree expr)
8358 struct md5_ctx ctx;
8359 unsigned char checksum[16], cnt;
8360 htab_t ht;
8362 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8363 md5_init_ctx (&ctx);
8364 fold_checksum_tree (expr, &ctx, ht);
8365 md5_finish_ctx (&ctx, checksum);
8366 htab_delete (ht);
8367 for (cnt = 0; cnt < 16; ++cnt)
8368 fprintf (stderr, "%02x", checksum[cnt]);
8369 putc ('\n', stderr);
8372 static void
8373 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8375 internal_error ("fold check: original tree changed by fold");
8378 static void
8379 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8381 void **slot;
8382 enum tree_code code;
8383 char buf[sizeof (struct tree_decl)];
8384 int i, len;
8386 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8387 > sizeof (struct tree_decl)
8388 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8389 abort ();
8390 if (expr == NULL)
8391 return;
8392 slot = htab_find_slot (ht, expr, INSERT);
8393 if (*slot != NULL)
8394 return;
8395 *slot = expr;
8396 code = TREE_CODE (expr);
8397 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8399 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8400 memcpy (buf, expr, tree_size (expr));
8401 expr = (tree) buf;
8402 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8404 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8406 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8407 memcpy (buf, expr, tree_size (expr));
8408 expr = (tree) buf;
8409 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8411 else if (TREE_CODE_CLASS (code) == 't'
8412 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8414 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8415 memcpy (buf, expr, tree_size (expr));
8416 expr = (tree) buf;
8417 TYPE_POINTER_TO (expr) = NULL;
8418 TYPE_REFERENCE_TO (expr) = NULL;
8420 md5_process_bytes (expr, tree_size (expr), ctx);
8421 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8422 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8423 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8424 len = TREE_CODE_LENGTH (code);
8425 switch (TREE_CODE_CLASS (code))
8427 case 'c':
8428 switch (code)
8430 case STRING_CST:
8431 md5_process_bytes (TREE_STRING_POINTER (expr),
8432 TREE_STRING_LENGTH (expr), ctx);
8433 break;
8434 case COMPLEX_CST:
8435 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8436 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8437 break;
8438 case VECTOR_CST:
8439 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8440 break;
8441 default:
8442 break;
8444 break;
8445 case 'x':
8446 switch (code)
8448 case TREE_LIST:
8449 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8450 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8451 break;
8452 case TREE_VEC:
8453 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8454 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8455 break;
8456 default:
8457 break;
8459 break;
8460 case 'e':
8461 switch (code)
8463 case SAVE_EXPR: len = 2; break;
8464 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8465 case RTL_EXPR: len = 0; break;
8466 case WITH_CLEANUP_EXPR: len = 2; break;
8467 default: break;
8469 /* Fall through. */
8470 case 'r':
8471 case '<':
8472 case '1':
8473 case '2':
8474 case 's':
8475 for (i = 0; i < len; ++i)
8476 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8477 break;
8478 case 'd':
8479 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8480 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8481 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8482 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8483 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8484 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8485 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8486 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8487 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8488 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8489 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8490 break;
8491 case 't':
8492 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8493 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8494 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8495 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8496 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8497 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8498 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8499 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8500 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8501 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8502 break;
8503 default:
8504 break;
8508 #endif
8510 /* Perform constant folding and related simplification of initializer
8511 expression EXPR. This behaves identically to "fold" but ignores
8512 potential run-time traps and exceptions that fold must preserve. */
8514 tree
8515 fold_initializer (tree expr)
8517 int saved_signaling_nans = flag_signaling_nans;
8518 int saved_trapping_math = flag_trapping_math;
8519 int saved_trapv = flag_trapv;
8520 tree result;
8522 flag_signaling_nans = 0;
8523 flag_trapping_math = 0;
8524 flag_trapv = 0;
8526 result = fold (expr);
8528 flag_signaling_nans = saved_signaling_nans;
8529 flag_trapping_math = saved_trapping_math;
8530 flag_trapv = saved_trapv;
8532 return result;
8535 /* Determine if first argument is a multiple of second argument. Return 0 if
8536 it is not, or we cannot easily determine it to be.
8538 An example of the sort of thing we care about (at this point; this routine
8539 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8540 fold cases do now) is discovering that
8542 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8544 is a multiple of
8546 SAVE_EXPR (J * 8)
8548 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8550 This code also handles discovering that
8552 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8554 is a multiple of 8 so we don't have to worry about dealing with a
8555 possible remainder.
8557 Note that we *look* inside a SAVE_EXPR only to determine how it was
8558 calculated; it is not safe for fold to do much of anything else with the
8559 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8560 at run time. For example, the latter example above *cannot* be implemented
8561 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8562 evaluation time of the original SAVE_EXPR is not necessarily the same at
8563 the time the new expression is evaluated. The only optimization of this
8564 sort that would be valid is changing
8566 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8568 divided by 8 to
8570 SAVE_EXPR (I) * SAVE_EXPR (J)
8572 (where the same SAVE_EXPR (J) is used in the original and the
8573 transformed version). */
8575 static int
8576 multiple_of_p (tree type, tree top, tree bottom)
8578 if (operand_equal_p (top, bottom, 0))
8579 return 1;
8581 if (TREE_CODE (type) != INTEGER_TYPE)
8582 return 0;
8584 switch (TREE_CODE (top))
8586 case MULT_EXPR:
8587 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8588 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8590 case PLUS_EXPR:
8591 case MINUS_EXPR:
8592 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8593 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8595 case LSHIFT_EXPR:
8596 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8598 tree op1, t1;
8600 op1 = TREE_OPERAND (top, 1);
8601 /* const_binop may not detect overflow correctly,
8602 so check for it explicitly here. */
8603 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8604 > TREE_INT_CST_LOW (op1)
8605 && TREE_INT_CST_HIGH (op1) == 0
8606 && 0 != (t1 = fold_convert (type,
8607 const_binop (LSHIFT_EXPR,
8608 size_one_node,
8609 op1, 0)))
8610 && ! TREE_OVERFLOW (t1))
8611 return multiple_of_p (type, t1, bottom);
8613 return 0;
8615 case NOP_EXPR:
8616 /* Can't handle conversions from non-integral or wider integral type. */
8617 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8618 || (TYPE_PRECISION (type)
8619 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8620 return 0;
8622 /* ... fall through ... */
8624 case SAVE_EXPR:
8625 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8627 case INTEGER_CST:
8628 if (TREE_CODE (bottom) != INTEGER_CST
8629 || (TYPE_UNSIGNED (type)
8630 && (tree_int_cst_sgn (top) < 0
8631 || tree_int_cst_sgn (bottom) < 0)))
8632 return 0;
8633 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8634 top, bottom, 0));
8636 default:
8637 return 0;
8641 /* Return true if `t' is known to be non-negative. */
8643 int
8644 tree_expr_nonnegative_p (tree t)
8646 switch (TREE_CODE (t))
8648 case ABS_EXPR:
8649 return 1;
8651 case INTEGER_CST:
8652 return tree_int_cst_sgn (t) >= 0;
8654 case REAL_CST:
8655 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8657 case PLUS_EXPR:
8658 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8659 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8660 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8662 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8663 both unsigned and at least 2 bits shorter than the result. */
8664 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8665 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8666 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8668 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8669 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8670 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8671 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8673 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8674 TYPE_PRECISION (inner2)) + 1;
8675 return prec < TYPE_PRECISION (TREE_TYPE (t));
8678 break;
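/* E.g. two unsigned chars zero-extended into a 32-bit int: the sum
   is at most 255 + 255 = 510, which needs 9 bits, so the sign bit
   of the 32-bit result can never be set.  */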
8680 case MULT_EXPR:
8681 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8683 /* x * x for floating point x is always non-negative. */
8684 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8685 return 1;
8686 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8687 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8690 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8691 both unsigned and their combined width is less than that of the result. */
8692 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8693 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8694 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8696 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8697 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8698 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8699 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8700 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8701 < TYPE_PRECISION (TREE_TYPE (t));
8703 return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
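
      /* Illustrative note (not from the original source): for
         truncating modulus this matches C semantics, where the result
         takes the sign of the dividend, e.g. 7 % -3 == 1, so a
         non-negative first operand suffices.  */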

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;
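
      /* Illustrative example (not from the original source): assuming
         32-bit "int" and 16-bit "unsigned short", the widening
         conversion (int) us is always non-negative, whereas
         (int) (unsigned int) x proves nothing because the precisions
         are equal.  */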

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
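
            /* The macros above paste the suffixed variants together;
               e.g. CASE_BUILTIN_F (BUILT_IN_SQRT) expands to
               "case BUILT_IN_SQRT: case BUILT_IN_SQRTF:
               case BUILT_IN_SQRTL:", covering the double, float and
               long double forms of the builtin.  */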

            CASE_BUILTIN_F (BUILT_IN_ACOS)
            CASE_BUILTIN_F (BUILT_IN_ACOSH)
            CASE_BUILTIN_F (BUILT_IN_CABS)
            CASE_BUILTIN_F (BUILT_IN_COSH)
            CASE_BUILTIN_F (BUILT_IN_ERFC)
            CASE_BUILTIN_F (BUILT_IN_EXP)
            CASE_BUILTIN_F (BUILT_IN_EXP10)
            CASE_BUILTIN_F (BUILT_IN_EXP2)
            CASE_BUILTIN_F (BUILT_IN_FABS)
            CASE_BUILTIN_F (BUILT_IN_FDIM)
            CASE_BUILTIN_F (BUILT_IN_FREXP)
            CASE_BUILTIN_F (BUILT_IN_HYPOT)
            CASE_BUILTIN_F (BUILT_IN_POW10)
            CASE_BUILTIN_F (BUILT_IN_SQRT)
            CASE_BUILTIN_I (BUILT_IN_FFS)
            CASE_BUILTIN_I (BUILT_IN_PARITY)
            CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
              /* Always true.  */
              return 1;

            CASE_BUILTIN_F (BUILT_IN_ASINH)
            CASE_BUILTIN_F (BUILT_IN_ATAN)
            CASE_BUILTIN_F (BUILT_IN_ATANH)
            CASE_BUILTIN_F (BUILT_IN_CBRT)
            CASE_BUILTIN_F (BUILT_IN_CEIL)
            CASE_BUILTIN_F (BUILT_IN_ERF)
            CASE_BUILTIN_F (BUILT_IN_EXPM1)
            CASE_BUILTIN_F (BUILT_IN_FLOOR)
            CASE_BUILTIN_F (BUILT_IN_FMOD)
            CASE_BUILTIN_F (BUILT_IN_LDEXP)
            CASE_BUILTIN_F (BUILT_IN_LLRINT)
            CASE_BUILTIN_F (BUILT_IN_LLROUND)
            CASE_BUILTIN_F (BUILT_IN_LRINT)
            CASE_BUILTIN_F (BUILT_IN_LROUND)
            CASE_BUILTIN_F (BUILT_IN_MODF)
            CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
            CASE_BUILTIN_F (BUILT_IN_POW)
            CASE_BUILTIN_F (BUILT_IN_RINT)
            CASE_BUILTIN_F (BUILT_IN_ROUND)
            CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
            CASE_BUILTIN_F (BUILT_IN_SINH)
            CASE_BUILTIN_F (BUILT_IN_TANH)
            CASE_BUILTIN_F (BUILT_IN_TRUNC)
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_BUILTIN_F (BUILT_IN_FMAX)
              /* True if the 1st or 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_FMIN)
              /* True if both the 1st and 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            default:
              break;
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return 0;
}
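
/* Illustrative example (not from the original source): for a tree
   representing "x * x" with "double" x, the MULT_EXPR case answers 1
   regardless of x; for "(int) c + (int) d" with "unsigned char"
   operands, the PLUS_EXPR zero-extension rule applies.  */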

/* Return true when T is an expression known to be nonzero.  Handles
   only integral and pointer types; for floating point we would also
   have to ensure that T is not denormal.  Similar logic is present in
   nonzero_address in rtlanal.c.  */

static bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      return !integer_zerop (t);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          /* In the presence of negative values it is hard to say
             anything definite.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of the operands must be positive and the other non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }
      break;

    case ADDR_EXPR:
      /* Weak declarations may link to NULL.  */
      if (DECL_P (TREE_OPERAND (t, 0)))
        return !DECL_WEAK (TREE_OPERAND (t, 0));
      /* Constants and all other cases are never weak.  */
      return true;
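
      /* Illustrative example (not from the original source): given
         "extern int x __attribute__ ((weak));", the address &x may
         resolve to NULL if no definition is linked in, so it cannot be
         assumed nonzero; the address of an ordinary declaration can.  */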

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
             || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
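
      /* Illustrative example (not from the original source): "x | 1"
         is known nonzero because the constant operand already is,
         whatever the value of x.  */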

    default:
      break;
    }
  return false;
}

/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
        return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;
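
      /* Illustrative note (not from the original source): a
         CONST_DOUBLE in VOIDmode carries a two-word integer, so its
         sign is the sign of the high word; floating-point
         CONST_DOUBLEs are conservatively rejected.  */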

    case CONST_VECTOR:
      {
        int units, i;
        rtx elt;

        units = CONST_VECTOR_NUNITS (r);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (r, i);
            if (!rtl_expr_nonnegative_p (elt))
              return 0;
          }

        return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                 TREE_INT_CST_HIGH (arg0),
                                 &low, &high);
      t = build_int_2 (low, high);
      TREE_TYPE (t) = type;
      TREE_OVERFLOW (t)
        = (TREE_OVERFLOW (arg0)
           | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
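
/* Illustrative example (not from the original source, assuming 32-bit
   "int"): negating the INTEGER_CST 5 yields -5 with no flags set;
   negating INT_MIN produces a value outside the 32-bit signed range,
   so the force_fit_type call above flags TREE_OVERFLOW.  */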

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        return arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        return arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = (TREE_OVERFLOW (arg0)
               | force_fit_type (t, overflow));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
          return t;
        }
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    {
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        return arg0;
    }
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
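
/* Illustrative example (not from the original source, assuming 32-bit
   "int"): fold_abs_const on the INTEGER_CST -4 negates it to 4; on 7,
   or on any unsigned constant, the argument is returned unchanged;
   abs of INT_MIN overflows and TREE_OVERFLOW is set.  */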

/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem;
  int invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */
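
  /* Illustrative example (not from the original source): for
     fold_relational_const (GE_EXPR, type, 3, 5) the code computes
     LT (3, 5) == 1 and then inverts it, giving 0, i.e. "3 >= 5"
     folds to false.  */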

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     will check below in the one case that it matters.  */

  tem = NULL_TREE;
  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise leave TEM as NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
      else
        tem = build_int_2 ((TYPE_UNSIGNED (TREE_TYPE (op0))
                            ? INT_CST_LT_UNSIGNED (op0, op1)
                            : INT_CST_LT (op0, op1)),
                           0);
    }
  else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
           && integer_zerop (op1) && tree_expr_nonzero_p (op0))
    tem = build_int_2 (0, 0);

  /* Two real constants can be compared explicitly.  */
  else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      /* If either operand is a NaN, the result is false with two
         exceptions: First, an NE_EXPR is true on NaNs, but that case
         is already handled correctly since we will be inverting the
         result for NE_EXPR.  Second, if we had inverted a LE_EXPR
         or a GE_EXPR into a LT_EXPR, we must return true so that it
         will be inverted into false.  */

      if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
          || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
        tem = build_int_2 (invert && code == LT_EXPR, 0);

      else if (code == EQ_EXPR)
        tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
                                              TREE_REAL_CST (op1)),
                           0);
      else
        tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
                                             TREE_REAL_CST (op1)),
                           0);
    }

  if (tem == NULL_TREE)
    return NULL_TREE;
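
  /* A computed result is the INTEGER_CST 0 or 1, so inverting it is
     just a matter of flipping the low-order bit.  */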
  if (invert)
    TREE_INT_CST_LOW (tem) ^= 1;

  TREE_TYPE (tem) = type;
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return (*lang_hooks.truthvalue_conversion) (tem);
  return tem;
}

#include "gt-fold-const.h"