* system.h: Poison NO_RECURSIVE_FUNCTION_CSE.
[official-gcc.git] / gcc / fold-const.c
blobc4a53e0d0a3d8f51d89774a09e491bfcd88d7d2f
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_mathfn_p (enum built_in_function);
64 static bool negate_expr_p (tree);
65 static tree negate_expr (tree);
66 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
67 static tree associate_trees (tree, tree, enum tree_code, tree);
68 static tree const_binop (enum tree_code, tree, tree, int);
69 static hashval_t size_htab_hash (const void *);
70 static int size_htab_eq (const void *, const void *);
71 static tree fold_convert_const (enum tree_code, tree, tree);
72 static enum tree_code invert_tree_comparison (enum tree_code);
73 static enum tree_code swap_tree_comparison (enum tree_code);
74 static int comparison_to_compcode (enum tree_code);
75 static enum tree_code compcode_to_comparison (int);
76 static int truth_value_p (enum tree_code);
77 static int operand_equal_for_comparison_p (tree, tree, tree);
78 static int twoval_comparison_p (tree, tree *, tree *, int *);
79 static tree eval_subst (tree, tree, tree, tree, tree);
80 static tree pedantic_omit_one_operand (tree, tree, tree);
81 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
82 static tree make_bit_field_ref (tree, tree, int, int, int);
83 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
84 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
85 enum machine_mode *, int *, int *,
86 tree *, tree *);
87 static int all_ones_mask_p (tree, int);
88 static tree sign_bit_p (tree, tree);
89 static int simple_operand_p (tree);
90 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
91 static tree make_range (tree, int *, tree *, tree *);
92 static tree build_range_check (tree, tree, int, tree, tree);
93 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
94 tree);
95 static tree fold_range_test (tree);
96 static tree unextend (tree, int, int, tree);
97 static tree fold_truthop (enum tree_code, tree, tree, tree);
98 static tree optimize_minmax_comparison (tree);
99 static tree extract_muldiv (tree, tree, enum tree_code, tree);
100 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
101 static int multiple_of_p (tree, tree, tree);
102 static tree constant_boolean_node (int, tree);
103 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
104 tree, int);
105 static bool fold_real_zero_addition_p (tree, tree, int);
106 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
107 tree, tree, tree);
108 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
109 static tree fold_div_compare (enum tree_code, tree, tree, tree);
110 static bool reorder_operands_p (tree, tree);
111 static bool tree_swap_operands_p (tree, tree, bool);
113 static tree fold_negate_const (tree, tree);
114 static tree fold_abs_const (tree, tree);
115 static tree fold_relational_const (enum tree_code, tree, tree, tree);
116 static tree fold_relational_hi_lo (enum tree_code *, const tree, tree *, tree *);
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.
   LT, EQ and GT are independent bits, so e.g. LE == LT | EQ.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
151 /* Unpack a two-word integer into 4 words.
152 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
153 WORDS points to the array of HOST_WIDE_INTs. */
155 static void
156 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
158 words[0] = LOWPART (low);
159 words[1] = HIGHPART (low);
160 words[2] = LOWPART (hi);
161 words[3] = HIGHPART (hi);
164 /* Pack an array of 4 words into a two-word integer.
165 WORDS points to the array of words.
166 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
168 static void
169 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
170 HOST_WIDE_INT *hi)
172 *low = words[0] + words[1] * BASE;
173 *hi = words[2] + words[3] * BASE;
176 /* Make the integer constant T valid for its type by setting to 0 or 1 all
177 the bits in the constant that don't belong in the type.
179 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
180 nonzero, a signed overflow has already occurred in calculating T, so
181 propagate it. */
184 force_fit_type (tree t, int overflow)
186 unsigned HOST_WIDE_INT low;
187 HOST_WIDE_INT high;
188 unsigned int prec;
190 if (TREE_CODE (t) == REAL_CST)
192 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
193 Consider doing it via real_convert now. */
194 return overflow;
197 else if (TREE_CODE (t) != INTEGER_CST)
198 return overflow;
200 low = TREE_INT_CST_LOW (t);
201 high = TREE_INT_CST_HIGH (t);
203 if (POINTER_TYPE_P (TREE_TYPE (t))
204 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
205 prec = POINTER_SIZE;
206 else
207 prec = TYPE_PRECISION (TREE_TYPE (t));
209 /* First clear all bits that are beyond the type's precision. */
211 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
213 else if (prec > HOST_BITS_PER_WIDE_INT)
214 TREE_INT_CST_HIGH (t)
215 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
216 else
218 TREE_INT_CST_HIGH (t) = 0;
219 if (prec < HOST_BITS_PER_WIDE_INT)
220 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
223 /* Unsigned types do not suffer sign extension or overflow unless they
224 are a sizetype. */
225 if (TYPE_UNSIGNED (TREE_TYPE (t))
226 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
227 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
228 return overflow;
230 /* If the value's sign bit is set, extend the sign. */
231 if (prec != 2 * HOST_BITS_PER_WIDE_INT
232 && (prec > HOST_BITS_PER_WIDE_INT
233 ? 0 != (TREE_INT_CST_HIGH (t)
234 & ((HOST_WIDE_INT) 1
235 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
236 : 0 != (TREE_INT_CST_LOW (t)
237 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
239 /* Value is negative:
240 set to 1 all the bits that are outside this type's precision. */
241 if (prec > HOST_BITS_PER_WIDE_INT)
242 TREE_INT_CST_HIGH (t)
243 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
244 else
246 TREE_INT_CST_HIGH (t) = -1;
247 if (prec < HOST_BITS_PER_WIDE_INT)
248 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
252 /* Return nonzero if signed overflow occurred. */
253 return
254 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
255 != 0);
258 /* Add two doubleword integers with doubleword result.
259 Each argument is given as two `HOST_WIDE_INT' pieces.
260 One argument is L1 and H1; the other, L2 and H2.
261 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
264 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
265 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
266 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
268 unsigned HOST_WIDE_INT l;
269 HOST_WIDE_INT h;
271 l = l1 + l2;
272 h = h1 + h2 + (l < l1);
274 *lv = l;
275 *hv = h;
276 return OVERFLOW_SUM_SIGN (h1, h2, h);
279 /* Negate a doubleword integer with doubleword result.
280 Return nonzero if the operation overflows, assuming it's signed.
281 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
282 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
285 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
286 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
288 if (l1 == 0)
290 *lv = 0;
291 *hv = - h1;
292 return (*hv & h1) < 0;
294 else
296 *lv = -l1;
297 *hv = ~h1;
298 return 0;
302 /* Multiply two doubleword integers with doubleword result.
303 Return nonzero if the operation overflows, assuming it's signed.
304 Each argument is given as two `HOST_WIDE_INT' pieces.
305 One argument is L1 and H1; the other, L2 and H2.
306 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
309 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
310 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
311 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
313 HOST_WIDE_INT arg1[4];
314 HOST_WIDE_INT arg2[4];
315 HOST_WIDE_INT prod[4 * 2];
316 unsigned HOST_WIDE_INT carry;
317 int i, j, k;
318 unsigned HOST_WIDE_INT toplow, neglow;
319 HOST_WIDE_INT tophigh, neghigh;
321 encode (arg1, l1, h1);
322 encode (arg2, l2, h2);
324 memset (prod, 0, sizeof prod);
326 for (i = 0; i < 4; i++)
328 carry = 0;
329 for (j = 0; j < 4; j++)
331 k = i + j;
332 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
333 carry += arg1[i] * arg2[j];
334 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
335 carry += prod[k];
336 prod[k] = LOWPART (carry);
337 carry = HIGHPART (carry);
339 prod[i + 4] = carry;
342 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
344 /* Check for overflow by calculating the top half of the answer in full;
345 it should agree with the low half's sign bit. */
346 decode (prod + 4, &toplow, &tophigh);
347 if (h1 < 0)
349 neg_double (l2, h2, &neglow, &neghigh);
350 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
352 if (h2 < 0)
354 neg_double (l1, h1, &neglow, &neghigh);
355 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
357 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
360 /* Shift the doubleword integer in L1, H1 left by COUNT places
361 keeping only PREC bits of result.
362 Shift right if COUNT is negative.
363 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
364 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
366 void
367 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
368 HOST_WIDE_INT count, unsigned int prec,
369 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
371 unsigned HOST_WIDE_INT signmask;
373 if (count < 0)
375 rshift_double (l1, h1, -count, prec, lv, hv, arith);
376 return;
379 if (SHIFT_COUNT_TRUNCATED)
380 count %= prec;
382 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
384 /* Shifting by the host word size is undefined according to the
385 ANSI standard, so we must handle this as a special case. */
386 *hv = 0;
387 *lv = 0;
389 else if (count >= HOST_BITS_PER_WIDE_INT)
391 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
392 *lv = 0;
394 else
396 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
397 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
398 *lv = l1 << count;
401 /* Sign extend all bits that are beyond the precision. */
403 signmask = -((prec > HOST_BITS_PER_WIDE_INT
404 ? ((unsigned HOST_WIDE_INT) *hv
405 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
406 : (*lv >> (prec - 1))) & 1);
408 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
410 else if (prec >= HOST_BITS_PER_WIDE_INT)
412 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
413 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
415 else
417 *hv = signmask;
418 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
419 *lv |= signmask << prec;
423 /* Shift the doubleword integer in L1, H1 right by COUNT places
424 keeping only PREC bits of result. COUNT must be positive.
425 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
426 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
428 void
429 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
430 HOST_WIDE_INT count, unsigned int prec,
431 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
432 int arith)
434 unsigned HOST_WIDE_INT signmask;
436 signmask = (arith
437 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
438 : 0);
440 if (SHIFT_COUNT_TRUNCATED)
441 count %= prec;
443 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
445 /* Shifting by the host word size is undefined according to the
446 ANSI standard, so we must handle this as a special case. */
447 *hv = 0;
448 *lv = 0;
450 else if (count >= HOST_BITS_PER_WIDE_INT)
452 *hv = 0;
453 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
455 else
457 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
458 *lv = ((l1 >> count)
459 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
462 /* Zero / sign extend all bits that are beyond the precision. */
464 if (count >= (HOST_WIDE_INT)prec)
466 *hv = signmask;
467 *lv = signmask;
469 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
471 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
473 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
474 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
476 else
478 *hv = signmask;
479 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
480 *lv |= signmask << (prec - count);
484 /* Rotate the doubleword integer in L1, H1 left by COUNT places
485 keeping only PREC bits of result.
486 Rotate right if COUNT is negative.
487 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
489 void
490 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
491 HOST_WIDE_INT count, unsigned int prec,
492 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
494 unsigned HOST_WIDE_INT s1l, s2l;
495 HOST_WIDE_INT s1h, s2h;
497 count %= prec;
498 if (count < 0)
499 count += prec;
501 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
502 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
503 *lv = s1l | s2l;
504 *hv = s1h | s2h;
507 /* Rotate the doubleword integer in L1, H1 left by COUNT places
508 keeping only PREC bits of result. COUNT must be positive.
509 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
511 void
512 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
513 HOST_WIDE_INT count, unsigned int prec,
514 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
516 unsigned HOST_WIDE_INT s1l, s2l;
517 HOST_WIDE_INT s1h, s2h;
519 count %= prec;
520 if (count < 0)
521 count += prec;
523 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
524 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
525 *lv = s1l | s2l;
526 *hv = s1h | s2h;
529 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
530 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
531 CODE is a tree code for a kind of division, one of
532 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
533 or EXACT_DIV_EXPR
534 It controls how the quotient is rounded to an integer.
535 Return nonzero if the operation overflows.
536 UNS nonzero says do unsigned division. */
539 div_and_round_double (enum tree_code code, int uns,
540 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
541 HOST_WIDE_INT hnum_orig,
542 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
543 HOST_WIDE_INT hden_orig,
544 unsigned HOST_WIDE_INT *lquo,
545 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
546 HOST_WIDE_INT *hrem)
548 int quo_neg = 0;
549 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
550 HOST_WIDE_INT den[4], quo[4];
551 int i, j;
552 unsigned HOST_WIDE_INT work;
553 unsigned HOST_WIDE_INT carry = 0;
554 unsigned HOST_WIDE_INT lnum = lnum_orig;
555 HOST_WIDE_INT hnum = hnum_orig;
556 unsigned HOST_WIDE_INT lden = lden_orig;
557 HOST_WIDE_INT hden = hden_orig;
558 int overflow = 0;
560 if (hden == 0 && lden == 0)
561 overflow = 1, lden = 1;
563 /* Calculate quotient sign and convert operands to unsigned. */
564 if (!uns)
566 if (hnum < 0)
568 quo_neg = ~ quo_neg;
569 /* (minimum integer) / (-1) is the only overflow case. */
570 if (neg_double (lnum, hnum, &lnum, &hnum)
571 && ((HOST_WIDE_INT) lden & hden) == -1)
572 overflow = 1;
574 if (hden < 0)
576 quo_neg = ~ quo_neg;
577 neg_double (lden, hden, &lden, &hden);
581 if (hnum == 0 && hden == 0)
582 { /* single precision */
583 *hquo = *hrem = 0;
584 /* This unsigned division rounds toward zero. */
585 *lquo = lnum / lden;
586 goto finish_up;
589 if (hnum == 0)
590 { /* trivial case: dividend < divisor */
591 /* hden != 0 already checked. */
592 *hquo = *lquo = 0;
593 *hrem = hnum;
594 *lrem = lnum;
595 goto finish_up;
598 memset (quo, 0, sizeof quo);
600 memset (num, 0, sizeof num); /* to zero 9th element */
601 memset (den, 0, sizeof den);
603 encode (num, lnum, hnum);
604 encode (den, lden, hden);
606 /* Special code for when the divisor < BASE. */
607 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
609 /* hnum != 0 already checked. */
610 for (i = 4 - 1; i >= 0; i--)
612 work = num[i] + carry * BASE;
613 quo[i] = work / lden;
614 carry = work % lden;
617 else
619 /* Full double precision division,
620 with thanks to Don Knuth's "Seminumerical Algorithms". */
621 int num_hi_sig, den_hi_sig;
622 unsigned HOST_WIDE_INT quo_est, scale;
624 /* Find the highest nonzero divisor digit. */
625 for (i = 4 - 1;; i--)
626 if (den[i] != 0)
628 den_hi_sig = i;
629 break;
632 /* Insure that the first digit of the divisor is at least BASE/2.
633 This is required by the quotient digit estimation algorithm. */
635 scale = BASE / (den[den_hi_sig] + 1);
636 if (scale > 1)
637 { /* scale divisor and dividend */
638 carry = 0;
639 for (i = 0; i <= 4 - 1; i++)
641 work = (num[i] * scale) + carry;
642 num[i] = LOWPART (work);
643 carry = HIGHPART (work);
646 num[4] = carry;
647 carry = 0;
648 for (i = 0; i <= 4 - 1; i++)
650 work = (den[i] * scale) + carry;
651 den[i] = LOWPART (work);
652 carry = HIGHPART (work);
653 if (den[i] != 0) den_hi_sig = i;
657 num_hi_sig = 4;
659 /* Main loop */
660 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
662 /* Guess the next quotient digit, quo_est, by dividing the first
663 two remaining dividend digits by the high order quotient digit.
664 quo_est is never low and is at most 2 high. */
665 unsigned HOST_WIDE_INT tmp;
667 num_hi_sig = i + den_hi_sig + 1;
668 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
669 if (num[num_hi_sig] != den[den_hi_sig])
670 quo_est = work / den[den_hi_sig];
671 else
672 quo_est = BASE - 1;
674 /* Refine quo_est so it's usually correct, and at most one high. */
675 tmp = work - quo_est * den[den_hi_sig];
676 if (tmp < BASE
677 && (den[den_hi_sig - 1] * quo_est
678 > (tmp * BASE + num[num_hi_sig - 2])))
679 quo_est--;
681 /* Try QUO_EST as the quotient digit, by multiplying the
682 divisor by QUO_EST and subtracting from the remaining dividend.
683 Keep in mind that QUO_EST is the I - 1st digit. */
685 carry = 0;
686 for (j = 0; j <= den_hi_sig; j++)
688 work = quo_est * den[j] + carry;
689 carry = HIGHPART (work);
690 work = num[i + j] - LOWPART (work);
691 num[i + j] = LOWPART (work);
692 carry += HIGHPART (work) != 0;
695 /* If quo_est was high by one, then num[i] went negative and
696 we need to correct things. */
697 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
699 quo_est--;
700 carry = 0; /* add divisor back in */
701 for (j = 0; j <= den_hi_sig; j++)
703 work = num[i + j] + den[j] + carry;
704 carry = HIGHPART (work);
705 num[i + j] = LOWPART (work);
708 num [num_hi_sig] += carry;
711 /* Store the quotient digit. */
712 quo[i] = quo_est;
716 decode (quo, lquo, hquo);
718 finish_up:
719 /* If result is negative, make it so. */
720 if (quo_neg)
721 neg_double (*lquo, *hquo, lquo, hquo);
723 /* Compute trial remainder: rem = num - (quo * den) */
724 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
725 neg_double (*lrem, *hrem, lrem, hrem);
726 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
728 switch (code)
730 case TRUNC_DIV_EXPR:
731 case TRUNC_MOD_EXPR: /* round toward zero */
732 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
733 return overflow;
735 case FLOOR_DIV_EXPR:
736 case FLOOR_MOD_EXPR: /* round toward negative infinity */
737 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
739 /* quo = quo - 1; */
740 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
741 lquo, hquo);
743 else
744 return overflow;
745 break;
747 case CEIL_DIV_EXPR:
748 case CEIL_MOD_EXPR: /* round toward positive infinity */
749 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
751 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
752 lquo, hquo);
754 else
755 return overflow;
756 break;
758 case ROUND_DIV_EXPR:
759 case ROUND_MOD_EXPR: /* round to closest integer */
761 unsigned HOST_WIDE_INT labs_rem = *lrem;
762 HOST_WIDE_INT habs_rem = *hrem;
763 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
764 HOST_WIDE_INT habs_den = hden, htwice;
766 /* Get absolute values. */
767 if (*hrem < 0)
768 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
769 if (hden < 0)
770 neg_double (lden, hden, &labs_den, &habs_den);
772 /* If (2 * abs (lrem) >= abs (lden)) */
773 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
774 labs_rem, habs_rem, &ltwice, &htwice);
776 if (((unsigned HOST_WIDE_INT) habs_den
777 < (unsigned HOST_WIDE_INT) htwice)
778 || (((unsigned HOST_WIDE_INT) habs_den
779 == (unsigned HOST_WIDE_INT) htwice)
780 && (labs_den < ltwice)))
782 if (*hquo < 0)
783 /* quo = quo - 1; */
784 add_double (*lquo, *hquo,
785 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
786 else
787 /* quo = quo + 1; */
788 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
789 lquo, hquo);
791 else
792 return overflow;
794 break;
796 default:
797 abort ();
800 /* Compute true remainder: rem = num - (quo * den) */
801 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
802 neg_double (*lrem, *hrem, lrem, hrem);
803 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
804 return overflow;
807 /* Return true if built-in mathematical function specified by CODE
808 preserves the sign of it argument, i.e. -f(x) == f(-x). */
810 static bool
811 negate_mathfn_p (enum built_in_function code)
813 switch (code)
815 case BUILT_IN_ASIN:
816 case BUILT_IN_ASINF:
817 case BUILT_IN_ASINL:
818 case BUILT_IN_ATAN:
819 case BUILT_IN_ATANF:
820 case BUILT_IN_ATANL:
821 case BUILT_IN_SIN:
822 case BUILT_IN_SINF:
823 case BUILT_IN_SINL:
824 case BUILT_IN_TAN:
825 case BUILT_IN_TANF:
826 case BUILT_IN_TANL:
827 return true;
829 default:
830 break;
832 return false;
835 /* Determine whether an expression T can be cheaply negated using
836 the function negate_expr. */
838 static bool
839 negate_expr_p (tree t)
841 unsigned HOST_WIDE_INT val;
842 unsigned int prec;
843 tree type;
845 if (t == 0)
846 return false;
848 type = TREE_TYPE (t);
850 STRIP_SIGN_NOPS (t);
851 switch (TREE_CODE (t))
853 case INTEGER_CST:
854 if (TYPE_UNSIGNED (type) || ! flag_trapv)
855 return true;
857 /* Check that -CST will not overflow type. */
858 prec = TYPE_PRECISION (type);
859 if (prec > HOST_BITS_PER_WIDE_INT)
861 if (TREE_INT_CST_LOW (t) != 0)
862 return true;
863 prec -= HOST_BITS_PER_WIDE_INT;
864 val = TREE_INT_CST_HIGH (t);
866 else
867 val = TREE_INT_CST_LOW (t);
868 if (prec < HOST_BITS_PER_WIDE_INT)
869 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
870 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
872 case REAL_CST:
873 case NEGATE_EXPR:
874 return true;
876 case COMPLEX_CST:
877 return negate_expr_p (TREE_REALPART (t))
878 && negate_expr_p (TREE_IMAGPART (t));
880 case PLUS_EXPR:
881 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
882 return false;
883 /* -(A + B) -> (-B) - A. */
884 if (negate_expr_p (TREE_OPERAND (t, 1))
885 && reorder_operands_p (TREE_OPERAND (t, 0),
886 TREE_OPERAND (t, 1)))
887 return true;
888 /* -(A + B) -> (-A) - B. */
889 return negate_expr_p (TREE_OPERAND (t, 0));
891 case MINUS_EXPR:
892 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
893 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
894 && reorder_operands_p (TREE_OPERAND (t, 0),
895 TREE_OPERAND (t, 1));
897 case MULT_EXPR:
898 if (TYPE_UNSIGNED (TREE_TYPE (t)))
899 break;
901 /* Fall through. */
903 case RDIV_EXPR:
904 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
905 return negate_expr_p (TREE_OPERAND (t, 1))
906 || negate_expr_p (TREE_OPERAND (t, 0));
907 break;
909 case NOP_EXPR:
910 /* Negate -((double)float) as (double)(-float). */
911 if (TREE_CODE (type) == REAL_TYPE)
913 tree tem = strip_float_extensions (t);
914 if (tem != t)
915 return negate_expr_p (tem);
917 break;
919 case CALL_EXPR:
920 /* Negate -f(x) as f(-x). */
921 if (negate_mathfn_p (builtin_mathfn_code (t)))
922 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
923 break;
925 case RSHIFT_EXPR:
926 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
927 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
929 tree op1 = TREE_OPERAND (t, 1);
930 if (TREE_INT_CST_HIGH (op1) == 0
931 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
932 == TREE_INT_CST_LOW (op1))
933 return true;
935 break;
937 default:
938 break;
940 return false;
943 /* Given T, an expression, return the negation of T. Allow for T to be
944 null, in which case return null. */
946 static tree
947 negate_expr (tree t)
949 tree type;
950 tree tem;
952 if (t == 0)
953 return 0;
955 type = TREE_TYPE (t);
956 STRIP_SIGN_NOPS (t);
958 switch (TREE_CODE (t))
960 case INTEGER_CST:
961 tem = fold_negate_const (t, type);
962 if (! TREE_OVERFLOW (tem)
963 || TYPE_UNSIGNED (type)
964 || ! flag_trapv)
965 return tem;
966 break;
968 case REAL_CST:
969 tem = fold_negate_const (t, type);
970 /* Two's complement FP formats, such as c4x, may overflow. */
971 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
972 return fold_convert (type, tem);
973 break;
975 case COMPLEX_CST:
977 tree rpart = negate_expr (TREE_REALPART (t));
978 tree ipart = negate_expr (TREE_IMAGPART (t));
980 if ((TREE_CODE (rpart) == REAL_CST
981 && TREE_CODE (ipart) == REAL_CST)
982 || (TREE_CODE (rpart) == INTEGER_CST
983 && TREE_CODE (ipart) == INTEGER_CST))
984 return build_complex (type, rpart, ipart);
986 break;
988 case NEGATE_EXPR:
989 return fold_convert (type, TREE_OPERAND (t, 0));
991 case PLUS_EXPR:
992 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
994 /* -(A + B) -> (-B) - A. */
995 if (negate_expr_p (TREE_OPERAND (t, 1))
996 && reorder_operands_p (TREE_OPERAND (t, 0),
997 TREE_OPERAND (t, 1)))
999 tem = negate_expr (TREE_OPERAND (t, 1));
1000 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1001 tem, TREE_OPERAND (t, 0)));
1002 return fold_convert (type, tem);
1005 /* -(A + B) -> (-A) - B. */
1006 if (negate_expr_p (TREE_OPERAND (t, 0)))
1008 tem = negate_expr (TREE_OPERAND (t, 0));
1009 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1010 tem, TREE_OPERAND (t, 1)));
1011 return fold_convert (type, tem);
1014 break;
1016 case MINUS_EXPR:
1017 /* - (A - B) -> B - A */
1018 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1019 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1020 return fold_convert (type,
1021 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1022 TREE_OPERAND (t, 1),
1023 TREE_OPERAND (t, 0))));
1024 break;
1026 case MULT_EXPR:
1027 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1028 break;
1030 /* Fall through. */
1032 case RDIV_EXPR:
1033 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1035 tem = TREE_OPERAND (t, 1);
1036 if (negate_expr_p (tem))
1037 return fold_convert (type,
1038 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1039 TREE_OPERAND (t, 0),
1040 negate_expr (tem))));
1041 tem = TREE_OPERAND (t, 0);
1042 if (negate_expr_p (tem))
1043 return fold_convert (type,
1044 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1045 negate_expr (tem),
1046 TREE_OPERAND (t, 1))));
1048 break;
1050 case NOP_EXPR:
1051 /* Convert -((double)float) into (double)(-float). */
1052 if (TREE_CODE (type) == REAL_TYPE)
1054 tem = strip_float_extensions (t);
1055 if (tem != t && negate_expr_p (tem))
1056 return fold_convert (type, negate_expr (tem));
1058 break;
1060 case CALL_EXPR:
1061 /* Negate -f(x) as f(-x). */
1062 if (negate_mathfn_p (builtin_mathfn_code (t))
1063 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1065 tree fndecl, arg, arglist;
1067 fndecl = get_callee_fndecl (t);
1068 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1069 arglist = build_tree_list (NULL_TREE, arg);
1070 return build_function_call_expr (fndecl, arglist);
1072 break;
1074 case RSHIFT_EXPR:
1075 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1076 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1078 tree op1 = TREE_OPERAND (t, 1);
1079 if (TREE_INT_CST_HIGH (op1) == 0
1080 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1081 == TREE_INT_CST_LOW (op1))
1083 tree ntype = TYPE_UNSIGNED (type)
1084 ? lang_hooks.types.signed_type (type)
1085 : lang_hooks.types.unsigned_type (type);
1086 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1087 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1088 return fold_convert (type, temp);
1091 break;
1093 default:
1094 break;
1097 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1098 return fold_convert (type, tem);
1101 /* Split a tree IN into a constant, literal and variable parts that could be
1102 combined with CODE to make IN. "constant" means an expression with
1103 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1104 commutative arithmetic operation. Store the constant part into *CONP,
1105 the literal in *LITP and return the variable part. If a part isn't
1106 present, set it to null. If the tree does not decompose in this way,
1107 return the entire tree as the variable part and the other parts as null.
1109 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1110 case, we negate an operand that was subtracted. Except if it is a
1111 literal for which we use *MINUS_LITP instead.
1113 If NEGATE_P is true, we are negating all of IN, again except a literal
1114 for which we use *MINUS_LITP instead.
1116 If IN is itself a literal or constant, return it as appropriate.
1118 Note that we do not guarantee that any of the three values will be the
1119 same type as IN, but they will have the same signedness and mode. */
1121 static tree
1122 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1123 tree *minus_litp, int negate_p)
1125 tree var = 0;
1127 *conp = 0;
1128 *litp = 0;
1129 *minus_litp = 0;
1131 /* Strip any conversions that don't change the machine mode or signedness. */
1132 STRIP_SIGN_NOPS (in);
1134 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1135 *litp = in;
1136 else if (TREE_CODE (in) == code
1137 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1138 /* We can associate addition and subtraction together (even
1139 though the C standard doesn't say so) for integers because
1140 the value is not affected. For reals, the value might be
1141 affected, so we can't. */
1142 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1143 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1145 tree op0 = TREE_OPERAND (in, 0);
1146 tree op1 = TREE_OPERAND (in, 1);
1147 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1148 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1150 /* First see if either of the operands is a literal, then a constant. */
1151 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1152 *litp = op0, op0 = 0;
1153 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1154 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1156 if (op0 != 0 && TREE_CONSTANT (op0))
1157 *conp = op0, op0 = 0;
1158 else if (op1 != 0 && TREE_CONSTANT (op1))
1159 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1161 /* If we haven't dealt with either operand, this is not a case we can
1162 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1163 if (op0 != 0 && op1 != 0)
1164 var = in;
1165 else if (op0 != 0)
1166 var = op0;
1167 else
1168 var = op1, neg_var_p = neg1_p;
1170 /* Now do any needed negations. */
1171 if (neg_litp_p)
1172 *minus_litp = *litp, *litp = 0;
1173 if (neg_conp_p)
1174 *conp = negate_expr (*conp);
1175 if (neg_var_p)
1176 var = negate_expr (var);
1178 else if (TREE_CONSTANT (in))
1179 *conp = in;
1180 else
1181 var = in;
1183 if (negate_p)
1185 if (*litp)
1186 *minus_litp = *litp, *litp = 0;
1187 else if (*minus_litp)
1188 *litp = *minus_litp, *minus_litp = 0;
1189 *conp = negate_expr (*conp);
1190 var = negate_expr (var);
1193 return var;
1196 /* Re-associate trees split by the above function. T1 and T2 are either
1197 expressions to associate or null. Return the new expression, if any. If
1198 we build an operation, do it in TYPE and with CODE. */
1200 static tree
1201 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1203 if (t1 == 0)
1204 return t2;
1205 else if (t2 == 0)
1206 return t1;
1208 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1209 try to fold this since we will have infinite recursion. But do
1210 deal with any NEGATE_EXPRs. */
1211 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1212 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1214 if (code == PLUS_EXPR)
1216 if (TREE_CODE (t1) == NEGATE_EXPR)
1217 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1218 fold_convert (type, TREE_OPERAND (t1, 0)));
1219 else if (TREE_CODE (t2) == NEGATE_EXPR)
1220 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1221 fold_convert (type, TREE_OPERAND (t2, 0)));
1223 return build2 (code, type, fold_convert (type, t1),
1224 fold_convert (type, t2));
1227 return fold (build2 (code, type, fold_convert (type, t1),
1228 fold_convert (type, t2)));
1231 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1232 to produce a new constant.
1234 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1236 tree
1237 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1239 unsigned HOST_WIDE_INT int1l, int2l;
1240 HOST_WIDE_INT int1h, int2h;
1241 unsigned HOST_WIDE_INT low;
1242 HOST_WIDE_INT hi;
1243 unsigned HOST_WIDE_INT garbagel;
1244 HOST_WIDE_INT garbageh;
1245 tree t;
1246 tree type = TREE_TYPE (arg1);
1247 int uns = TYPE_UNSIGNED (type);
1248 int is_sizetype
1249 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1250 int overflow = 0;
1251 int no_overflow = 0;
1253 int1l = TREE_INT_CST_LOW (arg1);
1254 int1h = TREE_INT_CST_HIGH (arg1);
1255 int2l = TREE_INT_CST_LOW (arg2);
1256 int2h = TREE_INT_CST_HIGH (arg2);
1258 switch (code)
1260 case BIT_IOR_EXPR:
1261 low = int1l | int2l, hi = int1h | int2h;
1262 break;
1264 case BIT_XOR_EXPR:
1265 low = int1l ^ int2l, hi = int1h ^ int2h;
1266 break;
1268 case BIT_AND_EXPR:
1269 low = int1l & int2l, hi = int1h & int2h;
1270 break;
1272 case RSHIFT_EXPR:
1273 int2l = -int2l;
1274 case LSHIFT_EXPR:
1275 /* It's unclear from the C standard whether shifts can overflow.
1276 The following code ignores overflow; perhaps a C standard
1277 interpretation ruling is needed. */
1278 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1279 &low, &hi, !uns);
1280 no_overflow = 1;
1281 break;
1283 case RROTATE_EXPR:
1284 int2l = - int2l;
1285 case LROTATE_EXPR:
1286 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1287 &low, &hi);
1288 break;
1290 case PLUS_EXPR:
1291 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1292 break;
1294 case MINUS_EXPR:
1295 neg_double (int2l, int2h, &low, &hi);
1296 add_double (int1l, int1h, low, hi, &low, &hi);
1297 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1298 break;
1300 case MULT_EXPR:
1301 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1302 break;
1304 case TRUNC_DIV_EXPR:
1305 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1306 case EXACT_DIV_EXPR:
1307 /* This is a shortcut for a common special case. */
1308 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1309 && ! TREE_CONSTANT_OVERFLOW (arg1)
1310 && ! TREE_CONSTANT_OVERFLOW (arg2)
1311 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1313 if (code == CEIL_DIV_EXPR)
1314 int1l += int2l - 1;
1316 low = int1l / int2l, hi = 0;
1317 break;
1320 /* ... fall through ... */
1322 case ROUND_DIV_EXPR:
1323 if (int2h == 0 && int2l == 1)
1325 low = int1l, hi = int1h;
1326 break;
1328 if (int1l == int2l && int1h == int2h
1329 && ! (int1l == 0 && int1h == 0))
1331 low = 1, hi = 0;
1332 break;
1334 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1335 &low, &hi, &garbagel, &garbageh);
1336 break;
1338 case TRUNC_MOD_EXPR:
1339 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1340 /* This is a shortcut for a common special case. */
1341 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1342 && ! TREE_CONSTANT_OVERFLOW (arg1)
1343 && ! TREE_CONSTANT_OVERFLOW (arg2)
1344 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1346 if (code == CEIL_MOD_EXPR)
1347 int1l += int2l - 1;
1348 low = int1l % int2l, hi = 0;
1349 break;
1352 /* ... fall through ... */
1354 case ROUND_MOD_EXPR:
1355 overflow = div_and_round_double (code, uns,
1356 int1l, int1h, int2l, int2h,
1357 &garbagel, &garbageh, &low, &hi);
1358 break;
1360 case MIN_EXPR:
1361 case MAX_EXPR:
1362 if (uns)
1363 low = (((unsigned HOST_WIDE_INT) int1h
1364 < (unsigned HOST_WIDE_INT) int2h)
1365 || (((unsigned HOST_WIDE_INT) int1h
1366 == (unsigned HOST_WIDE_INT) int2h)
1367 && int1l < int2l));
1368 else
1369 low = (int1h < int2h
1370 || (int1h == int2h && int1l < int2l));
1372 if (low == (code == MIN_EXPR))
1373 low = int1l, hi = int1h;
1374 else
1375 low = int2l, hi = int2h;
1376 break;
1378 default:
1379 abort ();
1382 /* If this is for a sizetype, can be represented as one (signed)
1383 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1384 constants. */
1385 if (is_sizetype
1386 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1387 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1388 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1389 return size_int_type_wide (low, type);
1390 else
1392 t = build_int_2 (low, hi);
1393 TREE_TYPE (t) = TREE_TYPE (arg1);
1396 TREE_OVERFLOW (t)
1397 = ((notrunc
1398 ? (!uns || is_sizetype) && overflow
1399 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1400 && ! no_overflow))
1401 | TREE_OVERFLOW (arg1)
1402 | TREE_OVERFLOW (arg2));
1404 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1405 So check if force_fit_type truncated the value. */
1406 if (is_sizetype
1407 && ! TREE_OVERFLOW (t)
1408 && (TREE_INT_CST_HIGH (t) != hi
1409 || TREE_INT_CST_LOW (t) != low))
1410 TREE_OVERFLOW (t) = 1;
1412 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1413 | TREE_CONSTANT_OVERFLOW (arg1)
1414 | TREE_CONSTANT_OVERFLOW (arg2));
1415 return t;
1418 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1419 constant. We assume ARG1 and ARG2 have the same data type, or at least
1420 are the same kind of constant and the same machine mode.
1422 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1424 static tree
1425 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1427 STRIP_NOPS (arg1);
1428 STRIP_NOPS (arg2);
1430 if (TREE_CODE (arg1) == INTEGER_CST)
1431 return int_const_binop (code, arg1, arg2, notrunc);
1433 if (TREE_CODE (arg1) == REAL_CST)
1435 enum machine_mode mode;
1436 REAL_VALUE_TYPE d1;
1437 REAL_VALUE_TYPE d2;
1438 REAL_VALUE_TYPE value;
1439 tree t, type;
1441 d1 = TREE_REAL_CST (arg1);
1442 d2 = TREE_REAL_CST (arg2);
1444 type = TREE_TYPE (arg1);
1445 mode = TYPE_MODE (type);
1447 /* Don't perform operation if we honor signaling NaNs and
1448 either operand is a NaN. */
1449 if (HONOR_SNANS (mode)
1450 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1451 return NULL_TREE;
1453 /* Don't perform operation if it would raise a division
1454 by zero exception. */
1455 if (code == RDIV_EXPR
1456 && REAL_VALUES_EQUAL (d2, dconst0)
1457 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1458 return NULL_TREE;
1460 /* If either operand is a NaN, just return it. Otherwise, set up
1461 for floating-point trap; we return an overflow. */
1462 if (REAL_VALUE_ISNAN (d1))
1463 return arg1;
1464 else if (REAL_VALUE_ISNAN (d2))
1465 return arg2;
1467 REAL_ARITHMETIC (value, code, d1, d2);
1469 t = build_real (type, real_value_truncate (mode, value));
1471 TREE_OVERFLOW (t)
1472 = (force_fit_type (t, 0)
1473 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1474 TREE_CONSTANT_OVERFLOW (t)
1475 = TREE_OVERFLOW (t)
1476 | TREE_CONSTANT_OVERFLOW (arg1)
1477 | TREE_CONSTANT_OVERFLOW (arg2);
1478 return t;
1480 if (TREE_CODE (arg1) == COMPLEX_CST)
1482 tree type = TREE_TYPE (arg1);
1483 tree r1 = TREE_REALPART (arg1);
1484 tree i1 = TREE_IMAGPART (arg1);
1485 tree r2 = TREE_REALPART (arg2);
1486 tree i2 = TREE_IMAGPART (arg2);
1487 tree t;
1489 switch (code)
1491 case PLUS_EXPR:
1492 t = build_complex (type,
1493 const_binop (PLUS_EXPR, r1, r2, notrunc),
1494 const_binop (PLUS_EXPR, i1, i2, notrunc));
1495 break;
1497 case MINUS_EXPR:
1498 t = build_complex (type,
1499 const_binop (MINUS_EXPR, r1, r2, notrunc),
1500 const_binop (MINUS_EXPR, i1, i2, notrunc));
1501 break;
1503 case MULT_EXPR:
1504 t = build_complex (type,
1505 const_binop (MINUS_EXPR,
1506 const_binop (MULT_EXPR,
1507 r1, r2, notrunc),
1508 const_binop (MULT_EXPR,
1509 i1, i2, notrunc),
1510 notrunc),
1511 const_binop (PLUS_EXPR,
1512 const_binop (MULT_EXPR,
1513 r1, i2, notrunc),
1514 const_binop (MULT_EXPR,
1515 i1, r2, notrunc),
1516 notrunc));
1517 break;
1519 case RDIV_EXPR:
1521 tree magsquared
1522 = const_binop (PLUS_EXPR,
1523 const_binop (MULT_EXPR, r2, r2, notrunc),
1524 const_binop (MULT_EXPR, i2, i2, notrunc),
1525 notrunc);
1527 t = build_complex (type,
1528 const_binop
1529 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1530 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1531 const_binop (PLUS_EXPR,
1532 const_binop (MULT_EXPR, r1, r2,
1533 notrunc),
1534 const_binop (MULT_EXPR, i1, i2,
1535 notrunc),
1536 notrunc),
1537 magsquared, notrunc),
1538 const_binop
1539 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1540 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1541 const_binop (MINUS_EXPR,
1542 const_binop (MULT_EXPR, i1, r2,
1543 notrunc),
1544 const_binop (MULT_EXPR, r1, i2,
1545 notrunc),
1546 notrunc),
1547 magsquared, notrunc));
1549 break;
1551 default:
1552 abort ();
1554 return t;
1556 return 0;
1559 /* These are the hash table functions for the hash table of INTEGER_CST
1560 nodes of a sizetype. */
1562 /* Return the hash code code X, an INTEGER_CST. */
1564 static hashval_t
1565 size_htab_hash (const void *x)
1567 tree t = (tree) x;
1569 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1570 ^ htab_hash_pointer (TREE_TYPE (t))
1571 ^ (TREE_OVERFLOW (t) << 20));
1574 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1575 is the same as that given by *Y, which is the same. */
1577 static int
1578 size_htab_eq (const void *x, const void *y)
1580 tree xt = (tree) x;
1581 tree yt = (tree) y;
1583 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1584 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1585 && TREE_TYPE (xt) == TREE_TYPE (yt)
1586 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1589 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1590 bits are given by NUMBER and of the sizetype represented by KIND. */
1592 tree
1593 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1595 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1598 /* Likewise, but the desired type is specified explicitly. */
1600 static GTY (()) tree new_const;
1601 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1602 htab_t size_htab;
1604 tree
1605 size_int_type_wide (HOST_WIDE_INT number, tree type)
1607 void **slot;
1609 if (size_htab == 0)
1611 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1612 new_const = make_node (INTEGER_CST);
1615 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1616 hash table, we return the value from the hash table. Otherwise, we
1617 place that in the hash table and make a new node for the next time. */
1618 TREE_INT_CST_LOW (new_const) = number;
1619 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1620 TREE_TYPE (new_const) = type;
1621 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1622 = force_fit_type (new_const, 0);
1624 slot = htab_find_slot (size_htab, new_const, INSERT);
1625 if (*slot == 0)
1627 tree t = new_const;
1629 *slot = new_const;
1630 new_const = make_node (INTEGER_CST);
1631 return t;
1633 else
1634 return (tree) *slot;
1637 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1638 is a tree code. The type of the result is taken from the operands.
1639 Both must be the same type integer type and it must be a size type.
1640 If the operands are constant, so is the result. */
1642 tree
1643 size_binop (enum tree_code code, tree arg0, tree arg1)
1645 tree type = TREE_TYPE (arg0);
1647 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1648 || type != TREE_TYPE (arg1))
1649 abort ();
1651 /* Handle the special case of two integer constants faster. */
1652 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1654 /* And some specific cases even faster than that. */
1655 if (code == PLUS_EXPR && integer_zerop (arg0))
1656 return arg1;
1657 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1658 && integer_zerop (arg1))
1659 return arg0;
1660 else if (code == MULT_EXPR && integer_onep (arg0))
1661 return arg1;
1663 /* Handle general case of two integer constants. */
1664 return int_const_binop (code, arg0, arg1, 0);
1667 if (arg0 == error_mark_node || arg1 == error_mark_node)
1668 return error_mark_node;
1670 return fold (build2 (code, type, arg0, arg1));
1673 /* Given two values, either both of sizetype or both of bitsizetype,
1674 compute the difference between the two values. Return the value
1675 in signed type corresponding to the type of the operands. */
1677 tree
1678 size_diffop (tree arg0, tree arg1)
1680 tree type = TREE_TYPE (arg0);
1681 tree ctype;
1683 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1684 || type != TREE_TYPE (arg1))
1685 abort ();
1687 /* If the type is already signed, just do the simple thing. */
1688 if (!TYPE_UNSIGNED (type))
1689 return size_binop (MINUS_EXPR, arg0, arg1);
1691 ctype = (type == bitsizetype || type == ubitsizetype
1692 ? sbitsizetype : ssizetype);
1694 /* If either operand is not a constant, do the conversions to the signed
1695 type and subtract. The hardware will do the right thing with any
1696 overflow in the subtraction. */
1697 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1698 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1699 fold_convert (ctype, arg1));
1701 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1702 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1703 overflow) and negate (which can't either). Special-case a result
1704 of zero while we're here. */
1705 if (tree_int_cst_equal (arg0, arg1))
1706 return fold_convert (ctype, integer_zero_node);
1707 else if (tree_int_cst_lt (arg1, arg0))
1708 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1709 else
1710 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1711 fold_convert (ctype, size_binop (MINUS_EXPR,
1712 arg1, arg0)));
1716 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1717 type TYPE. If no simplification can be done return NULL_TREE. */
1719 static tree
1720 fold_convert_const (enum tree_code code, tree type, tree arg1)
1722 int overflow = 0;
1723 tree t;
1725 if (TREE_TYPE (arg1) == type)
1726 return arg1;
1728 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1730 if (TREE_CODE (arg1) == INTEGER_CST)
1732 /* If we would build a constant wider than GCC supports,
1733 leave the conversion unfolded. */
1734 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1735 return NULL_TREE;
1737 /* If we are trying to make a sizetype for a small integer, use
1738 size_int to pick up cached types to reduce duplicate nodes. */
1739 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1740 && !TREE_CONSTANT_OVERFLOW (arg1)
1741 && compare_tree_int (arg1, 10000) < 0)
1742 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1744 /* Given an integer constant, make new constant with new type,
1745 appropriately sign-extended or truncated. */
1746 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1747 TREE_INT_CST_HIGH (arg1));
1748 TREE_TYPE (t) = type;
1749 /* Indicate an overflow if (1) ARG1 already overflowed,
1750 or (2) force_fit_type indicates an overflow.
1751 Tell force_fit_type that an overflow has already occurred
1752 if ARG1 is a too-large unsigned value and T is signed.
1753 But don't indicate an overflow if converting a pointer. */
1754 TREE_OVERFLOW (t)
1755 = ((force_fit_type (t,
1756 (TREE_INT_CST_HIGH (arg1) < 0
1757 && (TYPE_UNSIGNED (type)
1758 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1759 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1760 || TREE_OVERFLOW (arg1));
1761 TREE_CONSTANT_OVERFLOW (t)
1762 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1763 return t;
1765 else if (TREE_CODE (arg1) == REAL_CST)
1767 /* The following code implements the floating point to integer
1768 conversion rules required by the Java Language Specification,
1769 that IEEE NaNs are mapped to zero and values that overflow
1770 the target precision saturate, i.e. values greater than
1771 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1772 are mapped to INT_MIN. These semantics are allowed by the
1773 C and C++ standards that simply state that the behavior of
1774 FP-to-integer conversion is unspecified upon overflow. */
1776 HOST_WIDE_INT high, low;
1778 REAL_VALUE_TYPE r;
1779 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1781 switch (code)
1783 case FIX_TRUNC_EXPR:
1784 real_trunc (&r, VOIDmode, &x);
1785 break;
1787 case FIX_CEIL_EXPR:
1788 real_ceil (&r, VOIDmode, &x);
1789 break;
1791 case FIX_FLOOR_EXPR:
1792 real_floor (&r, VOIDmode, &x);
1793 break;
1795 case FIX_ROUND_EXPR:
1796 real_round (&r, VOIDmode, &x);
1797 break;
1799 default:
1800 abort ();
1803 /* If R is NaN, return zero and show we have an overflow. */
1804 if (REAL_VALUE_ISNAN (r))
1806 overflow = 1;
1807 high = 0;
1808 low = 0;
1811 /* See if R is less than the lower bound or greater than the
1812 upper bound. */
1814 if (! overflow)
1816 tree lt = TYPE_MIN_VALUE (type);
1817 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1818 if (REAL_VALUES_LESS (r, l))
1820 overflow = 1;
1821 high = TREE_INT_CST_HIGH (lt);
1822 low = TREE_INT_CST_LOW (lt);
1826 if (! overflow)
1828 tree ut = TYPE_MAX_VALUE (type);
1829 if (ut)
1831 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1832 if (REAL_VALUES_LESS (u, r))
1834 overflow = 1;
1835 high = TREE_INT_CST_HIGH (ut);
1836 low = TREE_INT_CST_LOW (ut);
1841 if (! overflow)
1842 REAL_VALUE_TO_INT (&low, &high, r);
1844 t = build_int_2 (low, high);
1845 TREE_TYPE (t) = type;
1846 TREE_OVERFLOW (t)
1847 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1848 TREE_CONSTANT_OVERFLOW (t)
1849 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1850 return t;
1853 else if (TREE_CODE (type) == REAL_TYPE)
1855 if (TREE_CODE (arg1) == INTEGER_CST)
1856 return build_real_from_int_cst (type, arg1);
1857 if (TREE_CODE (arg1) == REAL_CST)
1859 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1861 /* We make a copy of ARG1 so that we don't modify an
1862 existing constant tree. */
1863 t = copy_node (arg1);
1864 TREE_TYPE (t) = type;
1865 return t;
1868 t = build_real (type,
1869 real_value_truncate (TYPE_MODE (type),
1870 TREE_REAL_CST (arg1)));
1872 TREE_OVERFLOW (t)
1873 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1874 TREE_CONSTANT_OVERFLOW (t)
1875 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1876 return t;
1879 return NULL_TREE;
1882 /* Convert expression ARG to type TYPE. Used by the middle-end for
1883 simple conversions in preference to calling the front-end's convert. */
1885 tree
1886 fold_convert (tree type, tree arg)
1888 tree orig = TREE_TYPE (arg);
1889 tree tem;
1891 if (type == orig)
1892 return arg;
1894 if (TREE_CODE (arg) == ERROR_MARK
1895 || TREE_CODE (type) == ERROR_MARK
1896 || TREE_CODE (orig) == ERROR_MARK)
1897 return error_mark_node;
1899 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1900 return fold (build1 (NOP_EXPR, type, arg));
1902 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1904 if (TREE_CODE (arg) == INTEGER_CST)
1906 tem = fold_convert_const (NOP_EXPR, type, arg);
1907 if (tem != NULL_TREE)
1908 return tem;
1910 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1911 return fold (build1 (NOP_EXPR, type, arg));
1912 if (TREE_CODE (orig) == COMPLEX_TYPE)
1914 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1915 return fold_convert (type, tem);
1917 if (TREE_CODE (orig) == VECTOR_TYPE
1918 && GET_MODE_SIZE (TYPE_MODE (type))
1919 == GET_MODE_SIZE (TYPE_MODE (orig)))
1920 return fold (build1 (NOP_EXPR, type, arg));
1922 else if (TREE_CODE (type) == REAL_TYPE)
1924 if (TREE_CODE (arg) == INTEGER_CST)
1926 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1927 if (tem != NULL_TREE)
1928 return tem;
1930 else if (TREE_CODE (arg) == REAL_CST)
1932 tem = fold_convert_const (NOP_EXPR, type, arg);
1933 if (tem != NULL_TREE)
1934 return tem;
1937 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1938 return fold (build1 (FLOAT_EXPR, type, arg));
1939 if (TREE_CODE (orig) == REAL_TYPE)
1940 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1941 type, arg));
1942 if (TREE_CODE (orig) == COMPLEX_TYPE)
1944 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1945 return fold_convert (type, tem);
1948 else if (TREE_CODE (type) == COMPLEX_TYPE)
1950 if (INTEGRAL_TYPE_P (orig)
1951 || POINTER_TYPE_P (orig)
1952 || TREE_CODE (orig) == REAL_TYPE)
1953 return build2 (COMPLEX_EXPR, type,
1954 fold_convert (TREE_TYPE (type), arg),
1955 fold_convert (TREE_TYPE (type), integer_zero_node));
1956 if (TREE_CODE (orig) == COMPLEX_TYPE)
1958 tree rpart, ipart;
1960 if (TREE_CODE (arg) == COMPLEX_EXPR)
1962 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1963 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1964 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1967 arg = save_expr (arg);
1968 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1969 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1970 rpart = fold_convert (TREE_TYPE (type), rpart);
1971 ipart = fold_convert (TREE_TYPE (type), ipart);
1972 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1975 else if (TREE_CODE (type) == VECTOR_TYPE)
1977 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1978 && GET_MODE_SIZE (TYPE_MODE (type))
1979 == GET_MODE_SIZE (TYPE_MODE (orig)))
1980 return fold (build1 (NOP_EXPR, type, arg));
1981 if (TREE_CODE (orig) == VECTOR_TYPE
1982 && GET_MODE_SIZE (TYPE_MODE (type))
1983 == GET_MODE_SIZE (TYPE_MODE (orig)))
1984 return fold (build1 (NOP_EXPR, type, arg));
1986 else if (VOID_TYPE_P (type))
1987 return fold (build1 (CONVERT_EXPR, type, arg));
1988 abort ();
1991 /* Return an expr equal to X but certainly not valid as an lvalue. */
1993 tree
1994 non_lvalue (tree x)
1996 /* These things are certainly not lvalues. */
1997 if (TREE_CODE (x) == NON_LVALUE_EXPR
1998 || TREE_CODE (x) == INTEGER_CST
1999 || TREE_CODE (x) == REAL_CST
2000 || TREE_CODE (x) == STRING_CST
2001 || TREE_CODE (x) == ADDR_EXPR)
2002 return x;
2004 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2007 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2008 Zero means allow extended lvalues. */
2010 int pedantic_lvalues;
2012 /* When pedantic, return an expr equal to X but certainly not valid as a
2013 pedantic lvalue. Otherwise, return X. */
2015 tree
2016 pedantic_non_lvalue (tree x)
2018 if (pedantic_lvalues)
2019 return non_lvalue (x);
2020 else
2021 return x;
2024 /* Given a tree comparison code, return the code that is the logical inverse
2025 of the given code. It is not safe to do this for floating-point
2026 comparisons, except for NE_EXPR and EQ_EXPR. */
2028 static enum tree_code
2029 invert_tree_comparison (enum tree_code code)
2031 switch (code)
2033 case EQ_EXPR:
2034 return NE_EXPR;
2035 case NE_EXPR:
2036 return EQ_EXPR;
2037 case GT_EXPR:
2038 return LE_EXPR;
2039 case GE_EXPR:
2040 return LT_EXPR;
2041 case LT_EXPR:
2042 return GE_EXPR;
2043 case LE_EXPR:
2044 return GT_EXPR;
2045 default:
2046 abort ();
2050 /* Similar, but return the comparison that results if the operands are
2051 swapped. This is safe for floating-point. */
2053 static enum tree_code
2054 swap_tree_comparison (enum tree_code code)
2056 switch (code)
2058 case EQ_EXPR:
2059 case NE_EXPR:
2060 return code;
2061 case GT_EXPR:
2062 return LT_EXPR;
2063 case GE_EXPR:
2064 return LE_EXPR;
2065 case LT_EXPR:
2066 return GT_EXPR;
2067 case LE_EXPR:
2068 return GE_EXPR;
2069 default:
2070 abort ();
2075 /* Convert a comparison tree code from an enum tree_code representation
2076 into a compcode bit-based encoding. This function is the inverse of
2077 compcode_to_comparison. */
2079 static int
2080 comparison_to_compcode (enum tree_code code)
2082 switch (code)
2084 case LT_EXPR:
2085 return COMPCODE_LT;
2086 case EQ_EXPR:
2087 return COMPCODE_EQ;
2088 case LE_EXPR:
2089 return COMPCODE_LE;
2090 case GT_EXPR:
2091 return COMPCODE_GT;
2092 case NE_EXPR:
2093 return COMPCODE_NE;
2094 case GE_EXPR:
2095 return COMPCODE_GE;
2096 default:
2097 abort ();
2101 /* Convert a compcode bit-based encoding of a comparison operator back
2102 to GCC's enum tree_code representation. This function is the
2103 inverse of comparison_to_compcode. */
2105 static enum tree_code
2106 compcode_to_comparison (int code)
2108 switch (code)
2110 case COMPCODE_LT:
2111 return LT_EXPR;
2112 case COMPCODE_EQ:
2113 return EQ_EXPR;
2114 case COMPCODE_LE:
2115 return LE_EXPR;
2116 case COMPCODE_GT:
2117 return GT_EXPR;
2118 case COMPCODE_NE:
2119 return NE_EXPR;
2120 case COMPCODE_GE:
2121 return GE_EXPR;
2122 default:
2123 abort ();
2127 /* Return nonzero if CODE is a tree code that represents a truth value. */
2129 static int
2130 truth_value_p (enum tree_code code)
2132 return (TREE_CODE_CLASS (code) == '<'
2133 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2134 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2135 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2138 /* Return nonzero if two operands (typically of the same tree node)
2139 are necessarily equal. If either argument has side-effects this
2140 function returns zero. FLAGS modifies behaviour as follows:
2142 If OEP_ONLY_CONST is set, only return nonzero for constants.
2143 This function tests whether the operands are indistinguishable;
2144 it does not test whether they are equal using C's == operation.
2145 The distinction is important for IEEE floating point, because
2146 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2147 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2149 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2150 even though it may hold multiple values during a function.
2151 This is because a GCC tree node guarantees that nothing else is
2152 executed between the evaluation of its "operands" (which may often
2153 be evaluated in arbitrary order). Hence if the operands themselves
2154 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2155 same value in each operand/subexpression. Hence a zero value for
2156 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2157 If comparing arbitrary expression trees, such as from different
2158 statements, ONLY_CONST must usually be nonzero.
2160 If OEP_PURE_SAME is set, then pure functions with identical arguments
2161 are considered the same. It is used when the caller has other ways
2162 to ensure that global memory is unchanged in between. */
/* Return nonzero (1) if operands ARG0 and ARG1 are structurally
   indistinguishable under the OEP_* FLAGS described in the block
   comment above; return 0 when equality cannot be proven.  */
2165 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2167 /* If either is ERROR_MARK, they aren't equal. */
2168 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2169 return 0;
2171 /* If both types don't have the same signedness, then we can't consider
2172 them equal. We must check this before the STRIP_NOPS calls
2173 because they may change the signedness of the arguments. */
2174 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2175 return 0;
2177 STRIP_NOPS (arg0);
2178 STRIP_NOPS (arg1);
2180 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2181 /* This is needed for conversions and for COMPONENT_REF.
2182 Might as well play it safe and always test this. */
2183 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2184 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2185 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2186 return 0;
2188 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2189 We don't care about side effects in that case because the SAVE_EXPR
2190 takes care of that for us. In all other cases, two expressions are
2191 equal if they have no side effects. If we have two identical
2192 expressions with side effects that should be treated the same due
2193 to the only side effects being identical SAVE_EXPR's, that will
2194 be detected in the recursive calls below. */
2195 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2196 && (TREE_CODE (arg0) == SAVE_EXPR
2197 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2198 return 1;
2200 /* Next handle constant cases, those for which we can return 1 even
2201 if ONLY_CONST is set. */
2202 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2203 switch (TREE_CODE (arg0))
2205 case INTEGER_CST:
2206 return (! TREE_CONSTANT_OVERFLOW (arg0)
2207 && ! TREE_CONSTANT_OVERFLOW (arg1)
2208 && tree_int_cst_equal (arg0, arg1));
2210 case REAL_CST:
2211 return (! TREE_CONSTANT_OVERFLOW (arg0)
2212 && ! TREE_CONSTANT_OVERFLOW (arg1)
2213 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2214 TREE_REAL_CST (arg1)));
2216 case VECTOR_CST:
2218 tree v1, v2;
2220 if (TREE_CONSTANT_OVERFLOW (arg0)
2221 || TREE_CONSTANT_OVERFLOW (arg1))
2222 return 0;
/* Compare the two element chains pairwise; any mismatch fails.  */
2224 v1 = TREE_VECTOR_CST_ELTS (arg0);
2225 v2 = TREE_VECTOR_CST_ELTS (arg1);
2226 while (v1 && v2)
2228 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2229 flags))
2230 return 0;
2231 v1 = TREE_CHAIN (v1);
2232 v2 = TREE_CHAIN (v2);
2235 return 1;
2238 case COMPLEX_CST:
2239 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2240 flags)
2241 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2242 flags));
2244 case STRING_CST:
2245 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2246 && ! memcmp (TREE_STRING_POINTER (arg0),
2247 TREE_STRING_POINTER (arg1),
2248 TREE_STRING_LENGTH (arg0)));
2250 case ADDR_EXPR:
2251 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2253 default:
2254 break;
2257 if (flags & OEP_ONLY_CONST)
2258 return 0;
/* Non-constant cases: dispatch on the tree-code class of ARG0
   (ARG0 and ARG1 are known to share the same code by now).  */
2260 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2262 case '1':
2263 /* Two conversions are equal only if signedness and modes match. */
2264 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2265 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2266 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2267 return 0;
2269 return operand_equal_p (TREE_OPERAND (arg0, 0),
2270 TREE_OPERAND (arg1, 0), flags);
2272 case '<':
2273 case '2':
2274 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2275 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2277 return 1;
2279 /* For commutative ops, allow the other order. */
2280 return (commutative_tree_code (TREE_CODE (arg0))
2281 && operand_equal_p (TREE_OPERAND (arg0, 0),
2282 TREE_OPERAND (arg1, 1), flags)
2283 && operand_equal_p (TREE_OPERAND (arg0, 1),
2284 TREE_OPERAND (arg1, 0), flags));
2286 case 'r':
2287 /* If either of the pointer (or reference) expressions we are
2288 dereferencing contain a side effect, these cannot be equal. */
2289 if (TREE_SIDE_EFFECTS (arg0)
2290 || TREE_SIDE_EFFECTS (arg1))
2291 return 0;
2293 switch (TREE_CODE (arg0))
2295 case INDIRECT_REF:
2296 return operand_equal_p (TREE_OPERAND (arg0, 0),
2297 TREE_OPERAND (arg1, 0), flags);
2299 case COMPONENT_REF:
2300 case ARRAY_REF:
2301 case ARRAY_RANGE_REF:
2302 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2303 TREE_OPERAND (arg1, 0), flags)
2304 && operand_equal_p (TREE_OPERAND (arg0, 1),
2305 TREE_OPERAND (arg1, 1), flags));
2307 case BIT_FIELD_REF:
2308 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2309 TREE_OPERAND (arg1, 0), flags)
2310 && operand_equal_p (TREE_OPERAND (arg0, 1),
2311 TREE_OPERAND (arg1, 1), flags)
2312 && operand_equal_p (TREE_OPERAND (arg0, 2),
2313 TREE_OPERAND (arg1, 2), flags));
2314 default:
2315 return 0;
2318 case 'e':
2319 switch (TREE_CODE (arg0))
2321 case ADDR_EXPR:
2322 case TRUTH_NOT_EXPR:
2323 return operand_equal_p (TREE_OPERAND (arg0, 0),
2324 TREE_OPERAND (arg1, 0), flags);
2326 case RTL_EXPR:
2327 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2329 case CALL_EXPR:
2330 /* If the CALL_EXPRs call different functions, then they
2331 clearly can not be equal. */
2332 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2333 TREE_OPERAND (arg1, 0), flags))
2334 return 0;
/* Only calls to const functions (or, when OEP_PURE_SAME is set,
   pure functions as well) may compare equal.  */
2337 unsigned int cef = call_expr_flags (arg0);
2338 if (flags & OEP_PURE_SAME)
2339 cef &= ECF_CONST | ECF_PURE;
2340 else
2341 cef &= ECF_CONST;
2342 if (!cef)
2343 return 0;
2346 /* Now see if all the arguments are the same. operand_equal_p
2347 does not handle TREE_LIST, so we walk the operands here
2348 feeding them to operand_equal_p. */
2349 arg0 = TREE_OPERAND (arg0, 1);
2350 arg1 = TREE_OPERAND (arg1, 1);
2351 while (arg0 && arg1)
2353 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2354 flags))
2355 return 0;
2357 arg0 = TREE_CHAIN (arg0);
2358 arg1 = TREE_CHAIN (arg1);
2361 /* If we get here and both argument lists are exhausted
2362 then the CALL_EXPRs are equal. */
2363 return ! (arg0 || arg1);
2365 default:
2366 return 0;
2369 case 'd':
2370 /* Consider __builtin_sqrt equal to sqrt. */
2371 return (TREE_CODE (arg0) == FUNCTION_DECL
2372 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2373 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2374 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2376 default:
2377 return 0;
2381 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2382 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2384 When in doubt, return 0. */
2386 static int
2387 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2389 int unsignedp1, unsignedpo;
2390 tree primarg0, primarg1, primother;
2391 unsigned int correct_width;
/* Trivially equal operands need no further analysis.  */
2393 if (operand_equal_p (arg0, arg1, 0))
2394 return 1;
2396 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2397 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2398 return 0;
2400 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2401 and see if the inner values are the same. This removes any
2402 signedness comparison, which doesn't matter here. */
2403 primarg0 = arg0, primarg1 = arg1;
2404 STRIP_NOPS (primarg0);
2405 STRIP_NOPS (primarg1);
2406 if (operand_equal_p (primarg0, primarg1, 0))
2407 return 1;
2409 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2410 actual comparison operand, ARG0.
2412 First throw away any conversions to wider types
2413 already present in the operands. */
2415 primarg1 = get_narrower (arg1, &unsignedp1);
2416 primother = get_narrower (other, &unsignedpo);
/* Only proceed if both operands were extended from narrower types
   with the same signedness.  */
2418 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2419 if (unsignedp1 == unsignedpo
2420 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2421 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2423 tree type = TREE_TYPE (arg0);
2425 /* Make sure shorter operand is extended the right way
2426 to match the longer operand. */
2427 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2428 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2430 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2431 return 1;
2434 return 0;
2437 /* See if ARG is an expression that is either a comparison or is performing
2438 arithmetic on comparisons. The comparisons must only be comparing
2439 two different values, which will be stored in *CVAL1 and *CVAL2; if
2440 they are nonzero it means that some operands have already been found.
2441 No variables may be used anywhere else in the expression except in the
2442 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2443 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2445 If this is true, return 1. Otherwise, return zero. */
2447 static int
2448 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2450 enum tree_code code = TREE_CODE (arg);
2451 char class = TREE_CODE_CLASS (code);
2453 /* We can handle some of the 'e' cases here. */
2454 if (class == 'e' && code == TRUTH_NOT_EXPR)
2455 class = '1';
2456 else if (class == 'e'
2457 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2458 || code == COMPOUND_EXPR))
2459 class = '2';
2461 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2462 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2464 /* If we've already found a CVAL1 or CVAL2, this expression is
2465 too complex to handle. */
2466 if (*cval1 || *cval2)
2467 return 0;
2469 class = '1';
2470 *save_p = 1;
2473 switch (class)
2475 case '1':
2476 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2478 case '2':
2479 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2480 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2481 cval1, cval2, save_p));
2483 case 'c':
2484 return 1;
2486 case 'e':
2487 if (code == COND_EXPR)
2488 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2489 cval1, cval2, save_p)
2490 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2491 cval1, cval2, save_p)
2492 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2493 cval1, cval2, save_p));
2494 return 0;
2496 case '<':
2497 /* First see if we can handle the first operand, then the second. For
2498 the second operand, we know *CVAL1 can't be zero. It must be that
2499 one side of the comparison is each of the values; test for the
2500 case where this isn't true by failing if the two operands
2501 are the same. */
2503 if (operand_equal_p (TREE_OPERAND (arg, 0),
2504 TREE_OPERAND (arg, 1), 0))
2505 return 0;
2507 if (*cval1 == 0)
2508 *cval1 = TREE_OPERAND (arg, 0);
2509 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
/* Matches the value already recorded in *CVAL1; nothing to do.  */
2511 else if (*cval2 == 0)
2512 *cval2 = TREE_OPERAND (arg, 0);
2513 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Matches the value already recorded in *CVAL2; nothing to do.  */
2515 else
2516 return 0;
2518 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
/* Second operand matches *CVAL1; nothing to record.  */
2520 else if (*cval2 == 0)
2521 *cval2 = TREE_OPERAND (arg, 1);
2522 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
/* Second operand matches *CVAL2; nothing to record.  */
2524 else
2525 return 0;
2527 return 1;
2529 default:
2530 return 0;
2534 /* ARG is a tree that is known to contain just arithmetic operations and
2535 comparisons. Evaluate the operations in the tree substituting NEW0 for
2536 any occurrence of OLD0 as an operand of a comparison and likewise for
2537 NEW1 and OLD1. */
2539 static tree
2540 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2542 tree type = TREE_TYPE (arg);
2543 enum tree_code code = TREE_CODE (arg);
2544 char class = TREE_CODE_CLASS (code);
2546 /* We can handle some of the 'e' cases here. */
2547 if (class == 'e' && code == TRUTH_NOT_EXPR)
2548 class = '1';
2549 else if (class == 'e'
2550 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2551 class = '2';
2553 switch (class)
2555 case '1':
2556 return fold (build1 (code, type,
2557 eval_subst (TREE_OPERAND (arg, 0),
2558 old0, new0, old1, new1)));
2560 case '2':
2561 return fold (build2 (code, type,
2562 eval_subst (TREE_OPERAND (arg, 0),
2563 old0, new0, old1, new1),
2564 eval_subst (TREE_OPERAND (arg, 1),
2565 old0, new0, old1, new1)));
2567 case 'e':
2568 switch (code)
2570 case SAVE_EXPR:
/* Substitute inside the saved expression.  */
2571 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2573 case COMPOUND_EXPR:
/* Only the second operand (the value) is relevant here.  */
2574 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2576 case COND_EXPR:
2577 return fold (build3 (code, type,
2578 eval_subst (TREE_OPERAND (arg, 0),
2579 old0, new0, old1, new1),
2580 eval_subst (TREE_OPERAND (arg, 1),
2581 old0, new0, old1, new1),
2582 eval_subst (TREE_OPERAND (arg, 2),
2583 old0, new0, old1, new1)));
2584 default:
2585 break;
2587 /* Fall through - ??? */
2589 case '<':
2591 tree arg0 = TREE_OPERAND (arg, 0);
2592 tree arg1 = TREE_OPERAND (arg, 1);
2594 /* We need to check both for exact equality and tree equality. The
2595 former will be true if the operand has a side-effect. In that
2596 case, we know the operand occurred exactly once. */
2598 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2599 arg0 = new0;
2600 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2601 arg0 = new1;
2603 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2604 arg1 = new0;
2605 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2606 arg1 = new1;
2608 return fold (build2 (code, type, arg0, arg1));
2611 default:
2612 return arg;
2616 /* Return a tree for the case when the result of an expression is RESULT
2617 converted to TYPE and OMITTED was previously an operand of the expression
2618 but is now not needed (e.g., we folded OMITTED * 0).
2620 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2621 the conversion of RESULT to TYPE. */
2623 tree
2624 omit_one_operand (tree type, tree result, tree omitted)
2626 tree t = fold_convert (type, result);
/* Preserve OMITTED's side effects by sequencing it before T.  */
2628 if (TREE_SIDE_EFFECTS (omitted))
2629 return build2 (COMPOUND_EXPR, type, omitted, t);
/* No side effects to keep; return the converted result, marked
   as not being an lvalue.  */
2631 return non_lvalue (t);
2634 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2636 static tree
2637 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2639 tree t = fold_convert (type, result);
/* As in omit_one_operand: keep OMITTED's side effects if any.  */
2641 if (TREE_SIDE_EFFECTS (omitted))
2642 return build2 (COMPOUND_EXPR, type, omitted, t);
2644 return pedantic_non_lvalue (t);
2647 /* Return a simplified tree node for the truth-negation of ARG. This
2648 never alters ARG itself. We assume that ARG is an operation that
2649 returns a truth value (0 or 1). */
2651 tree
2652 invert_truthvalue (tree arg)
2654 tree type = TREE_TYPE (arg);
2655 enum tree_code code = TREE_CODE (arg);
2657 if (code == ERROR_MARK)
2658 return arg;
2660 /* If this is a comparison, we can simply invert it, except for
2661 floating-point non-equality comparisons, in which case we just
2662 enclose a TRUTH_NOT_EXPR around what we have. */
2664 if (TREE_CODE_CLASS (code) == '<')
2666 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2667 && !flag_unsafe_math_optimizations
2668 && code != NE_EXPR
2669 && code != EQ_EXPR)
2670 return build1 (TRUTH_NOT_EXPR, type, arg)
2671 else if (code == UNORDERED_EXPR
2672 || code == ORDERED_EXPR
2673 || code == UNEQ_EXPR
2674 || code == UNLT_EXPR
2675 || code == UNLE_EXPR
2676 || code == UNGT_EXPR
2677 || code == UNGE_EXPR)
2678 return build1 (TRUTH_NOT_EXPR, type, arg);
2679 else
2680 return build2 (invert_tree_comparison (code), type,
2681 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2684 switch (code)
2686 case INTEGER_CST:
2687 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2689 case TRUTH_AND_EXPR:
/* De Morgan: !(a && b) == !a || !b.  */
2690 return build2 (TRUTH_OR_EXPR, type,
2691 invert_truthvalue (TREE_OPERAND (arg, 0)),
2692 invert_truthvalue (TREE_OPERAND (arg, 1)));
2694 case TRUTH_OR_EXPR:
/* De Morgan: !(a || b) == !a && !b.  */
2695 return build2 (TRUTH_AND_EXPR, type,
2696 invert_truthvalue (TREE_OPERAND (arg, 0)),
2697 invert_truthvalue (TREE_OPERAND (arg, 1)));
2699 case TRUTH_XOR_EXPR:
2700 /* Here we can invert either operand. We invert the first operand
2701 unless the second operand is a TRUTH_NOT_EXPR in which case our
2702 result is the XOR of the first operand with the inside of the
2703 negation of the second operand. */
2705 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2706 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2707 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2708 else
2709 return build2 (TRUTH_XOR_EXPR, type,
2710 invert_truthvalue (TREE_OPERAND (arg, 0)),
2711 TREE_OPERAND (arg, 1));
2713 case TRUTH_ANDIF_EXPR:
2714 return build2 (TRUTH_ORIF_EXPR, type,
2715 invert_truthvalue (TREE_OPERAND (arg, 0)),
2716 invert_truthvalue (TREE_OPERAND (arg, 1)));
2718 case TRUTH_ORIF_EXPR:
2719 return build2 (TRUTH_ANDIF_EXPR, type,
2720 invert_truthvalue (TREE_OPERAND (arg, 0)),
2721 invert_truthvalue (TREE_OPERAND (arg, 1)));
2723 case TRUTH_NOT_EXPR:
/* Double negation cancels.  */
2724 return TREE_OPERAND (arg, 0);
2726 case COND_EXPR:
2727 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2728 invert_truthvalue (TREE_OPERAND (arg, 1)),
2729 invert_truthvalue (TREE_OPERAND (arg, 2)));
2731 case COMPOUND_EXPR:
2732 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2733 invert_truthvalue (TREE_OPERAND (arg, 1)));
2735 case NON_LVALUE_EXPR:
2736 return invert_truthvalue (TREE_OPERAND (arg, 0));
2738 case NOP_EXPR:
2739 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2740 break;
/* Non-boolean NOP_EXPR: fall through and invert inside the
   conversion.  */
2742 case CONVERT_EXPR:
2743 case FLOAT_EXPR:
2744 return build1 (TREE_CODE (arg), type,
2745 invert_truthvalue (TREE_OPERAND (arg, 0)));
2747 case BIT_AND_EXPR:
2748 if (!integer_onep (TREE_OPERAND (arg, 1)))
2749 break;
2750 return build2 (EQ_EXPR, type, arg,
2751 fold_convert (type, integer_zero_node));
2753 case SAVE_EXPR:
/* Never rewrite inside a SAVE_EXPR; just wrap the negation.  */
2754 return build1 (TRUTH_NOT_EXPR, type, arg);
2756 case CLEANUP_POINT_EXPR:
2757 return build1 (CLEANUP_POINT_EXPR, type,
2758 invert_truthvalue (TREE_OPERAND (arg, 0)));
2760 default:
2761 break;
2763 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2764 abort ();
2765 return build1 (TRUTH_NOT_EXPR, type, arg);
2768 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2769 operands are another bit-wise operation with a common input. If so,
2770 distribute the bit operations to save an operation and possibly two if
2771 constants are involved. For example, convert
2772 (A | B) & (A | C) into A | (B & C)
2773 Further simplification will occur if B and C are constants.
2775 If this optimization cannot be done, 0 will be returned. */
2777 static tree
2778 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2780 tree common;
2781 tree left, right;
/* Both operands must use the same (distributable) inner operation,
   which must differ from CODE itself.  */
2783 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2784 || TREE_CODE (arg0) == code
2785 || (TREE_CODE (arg0) != BIT_AND_EXPR
2786 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2787 return 0;
/* Common operand appears first in both.  */
2789 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2791 common = TREE_OPERAND (arg0, 0);
2792 left = TREE_OPERAND (arg0, 1);
2793 right = TREE_OPERAND (arg1, 1);
/* Common operand is first in ARG0, second in ARG1.  */
2795 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2797 common = TREE_OPERAND (arg0, 0);
2798 left = TREE_OPERAND (arg0, 1);
2799 right = TREE_OPERAND (arg1, 0);
/* Common operand is second in ARG0, first in ARG1.  */
2801 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2803 common = TREE_OPERAND (arg0, 1);
2804 left = TREE_OPERAND (arg0, 0);
2805 right = TREE_OPERAND (arg1, 1);
/* Common operand appears second in both.  */
2807 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2809 common = TREE_OPERAND (arg0, 1);
2810 left = TREE_OPERAND (arg0, 0);
2811 right = TREE_OPERAND (arg1, 0);
2813 else
2814 return 0;
2816 return fold (build2 (TREE_CODE (arg0), type, common,
2817 fold (build2 (code, type, left, right))));
2820 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2821 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2823 static tree
2824 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2825 int unsignedp)
/* Operands 1 and 2 of a BIT_FIELD_REF are the size and position
   of the field, as integer constants.  */
2827 tree result = build3 (BIT_FIELD_REF, type, inner,
2828 size_int (bitsize), bitsize_int (bitpos));
2830 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
2832 return result;
2835 /* Optimize a bit-field compare.
2837 There are two cases: First is a compare against a constant and the
2838 second is a comparison of two items where the fields are at the same
2839 bit position relative to the start of a chunk (byte, halfword, word)
2840 large enough to contain it. In these cases we can avoid the shift
2841 implicit in bitfield extractions.
2843 For constants, we emit a compare of the shifted constant with the
2844 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2845 compared. For two fields at the same position, we do the ANDs with the
2846 similar mask and compare the result of the ANDs.
2848 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2849 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2850 are the left and right operands of the comparison, respectively.
2852 If the optimization described above can be done, we return the resulting
2853 tree. Otherwise we return zero. */
2855 static tree
2856 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2857 tree lhs, tree rhs)
2859 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2860 tree type = TREE_TYPE (lhs);
2861 tree signed_type, unsigned_type;
2862 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2863 enum machine_mode lmode, rmode, nmode;
2864 int lunsignedp, runsignedp;
2865 int lvolatilep = 0, rvolatilep = 0;
2866 tree linner, rinner = NULL_TREE;
2867 tree mask;
2868 tree offset;
2870 /* Get all the information about the extractions being done. If the bit size
2871 is the same as the size of the underlying object, we aren't doing an
2872 extraction at all and so can do nothing. We also don't want to
2873 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2874 then will no longer be able to replace it. */
2875 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2876 &lunsignedp, &lvolatilep);
2877 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2878 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2879 return 0;
2881 if (!const_p)
2883 /* If this is not a constant, we can only do something if bit positions,
2884 sizes, and signedness are the same. */
2885 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2886 &runsignedp, &rvolatilep);
2888 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2889 || lunsignedp != runsignedp || offset != 0
2890 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2891 return 0;
2894 /* See if we can find a mode to refer to this field. We should be able to,
2895 but fail if we can't. */
2896 nmode = get_best_mode (lbitsize, lbitpos,
2897 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2898 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2899 TYPE_ALIGN (TREE_TYPE (rinner))),
2900 word_mode, lvolatilep || rvolatilep);
2901 if (nmode == VOIDmode)
2902 return 0;
2904 /* Set signed and unsigned types of the precision of this mode for the
2905 shifts below. */
2906 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
2907 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
2909 /* Compute the bit position and size for the new reference and our offset
2910 within it. If the new reference is the same size as the original, we
2911 won't optimize anything, so return zero. */
2912 nbitsize = GET_MODE_BITSIZE (nmode);
2913 nbitpos = lbitpos & ~ (nbitsize - 1);
2914 lbitpos -= nbitpos;
2915 if (nbitsize == lbitsize)
2916 return 0;
2918 if (BYTES_BIG_ENDIAN)
2919 lbitpos = nbitsize - lbitsize - lbitpos;
2921 /* Make the mask to be used against the extracted field. */
2922 mask = build_int_2 (~0, ~0);
2923 TREE_TYPE (mask) = unsigned_type;
2924 force_fit_type (mask, 0);
2925 mask = fold_convert (unsigned_type, mask);
2926 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2927 mask = const_binop (RSHIFT_EXPR, mask,
2928 size_int (nbitsize - lbitsize - lbitpos), 0);
2930 if (! const_p)
2931 /* If not comparing with constant, just rework the comparison
2932 and return. */
2933 return build2 (code, compare_type,
2934 build2 (BIT_AND_EXPR, unsigned_type,
2935 make_bit_field_ref (linner, unsigned_type,
2936 nbitsize, nbitpos, 1),
2937 mask),
2938 build2 (BIT_AND_EXPR, unsigned_type,
2939 make_bit_field_ref (rinner, unsigned_type,
2940 nbitsize, nbitpos, 1),
2941 mask));
2943 /* Otherwise, we are handling the constant case. See if the constant is too
2944 big for the field. Warn and return a tree for 0 (false) if so. We do
2945 this not only for its own sake, but to avoid having to test for this
2946 error case below. If we didn't, we might generate wrong code.
2948 For unsigned fields, the constant shifted right by the field length should
2949 be all zero. For signed fields, the high-order bits should agree with
2950 the sign bit. */
2952 if (lunsignedp)
2954 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2955 fold_convert (unsigned_type, rhs),
2956 size_int (lbitsize), 0)))
2958 warning ("comparison is always %d due to width of bit-field",
2959 code == NE_EXPR);
2960 return fold_convert (compare_type,
2961 (code == NE_EXPR
2962 ? integer_one_node : integer_zero_node));
2965 else
2967 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2968 size_int (lbitsize - 1), 0);
2969 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2971 warning ("comparison is always %d due to width of bit-field",
2972 code == NE_EXPR);
2973 return fold_convert (compare_type,
2974 (code == NE_EXPR
2975 ? integer_one_node : integer_zero_node));
2979 /* Single-bit compares should always be against zero. */
2980 if (lbitsize == 1 && ! integer_zerop (rhs))
2982 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2983 rhs = fold_convert (type, integer_zero_node);
2986 /* Make a new bitfield reference, shift the constant over the
2987 appropriate number of bits and mask it with the computed mask
2988 (in case this was a signed field). If we changed it, make a new one. */
2989 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2990 if (lvolatilep)
2992 TREE_SIDE_EFFECTS (lhs) = 1;
2993 TREE_THIS_VOLATILE (lhs) = 1;
2996 rhs = fold (const_binop (BIT_AND_EXPR,
2997 const_binop (LSHIFT_EXPR,
2998 fold_convert (unsigned_type, rhs),
2999 size_int (lbitpos), 0),
3000 mask, 0));
3002 return build2 (code, compare_type,
3003 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3004 rhs);
3007 /* Subroutine for fold_truthop: decode a field reference.
3009 If EXP is a comparison reference, we return the innermost reference.
3011 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3012 set to the starting bit number.
3014 If the innermost field can be completely contained in a mode-sized
3015 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3017 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3018 otherwise it is not changed.
3020 *PUNSIGNEDP is set to the signedness of the field.
3022 *PMASK is set to the mask used. This is either contained in a
3023 BIT_AND_EXPR or derived from the width of the field.
3025 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3027 Return 0 if this is not a component reference or is one that we can't
3028 do anything with. */
3030 static tree
3031 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3032 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3033 int *punsignedp, int *pvolatilep,
3034 tree *pmask, tree *pand_mask)
3036 tree outer_type = 0;
3037 tree and_mask = 0;
3038 tree mask, inner, offset;
3039 tree unsigned_type;
3040 unsigned int precision;
3042 /* All the optimizations using this function assume integer fields.
3043 There are problems with FP fields since the type_for_size call
3044 below can fail for, e.g., XFmode. */
3045 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3046 return 0;
3048 /* We are interested in the bare arrangement of bits, so strip everything
3049 that doesn't affect the machine mode. However, record the type of the
3050 outermost expression if it may matter below. */
3051 if (TREE_CODE (exp) == NOP_EXPR
3052 || TREE_CODE (exp) == CONVERT_EXPR
3053 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3054 outer_type = TREE_TYPE (exp);
3055 STRIP_NOPS (exp);
/* Peel off an explicit BIT_AND_EXPR mask, if present; it is merged
   with the field-width mask below.  */
3057 if (TREE_CODE (exp) == BIT_AND_EXPR)
3059 and_mask = TREE_OPERAND (exp, 1);
3060 exp = TREE_OPERAND (exp, 0);
3061 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3062 if (TREE_CODE (and_mask) != INTEGER_CST)
3063 return 0;
3066 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3067 punsignedp, pvolatilep);
3068 if ((inner == exp && and_mask == 0)
3069 || *pbitsize < 0 || offset != 0
3070 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3071 return 0;
3073 /* If the number of bits in the reference is the same as the bitsize of
3074 the outer type, then the outer type gives the signedness. Otherwise
3075 (in case of a small bitfield) the signedness is unchanged. */
3076 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3077 *punsignedp = TYPE_UNSIGNED (outer_type);
3079 /* Compute the mask to access the bitfield. */
3080 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3081 precision = TYPE_PRECISION (unsigned_type);
/* Shift an all-ones value left then right to produce a mask of
   *PBITSIZE low-order one bits.  */
3083 mask = build_int_2 (~0, ~0);
3084 TREE_TYPE (mask) = unsigned_type;
3085 force_fit_type (mask, 0);
3086 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3087 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3089 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3090 if (and_mask != 0)
3091 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3092 fold_convert (unsigned_type, and_mask), mask));
3094 *pmask = mask;
3095 *pand_mask = and_mask;
3096 return inner;
3099 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3100 bit positions. */
3102 static int
3103 all_ones_mask_p (tree mask, int size)
3105 tree type = TREE_TYPE (mask);
3106 unsigned int precision = TYPE_PRECISION (type);
3107 tree tmask;
3109 tmask = build_int_2 (~0, ~0);
3110 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3111 force_fit_type (tmask, 0);
/* Shift the all-ones value left then right by PRECISION - SIZE to
   build the reference mask of SIZE low-order ones, and compare.  */
3112 return
3113 tree_int_cst_equal (mask,
3114 const_binop (RSHIFT_EXPR,
3115 const_binop (LSHIFT_EXPR, tmask,
3116 size_int (precision - size),
3118 size_int (precision - size), 0));
3121 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3122 represents the sign bit of EXP's type. If EXP represents a sign
3123 or zero extension, also test VAL against the unextended type.
3124 The return value is the (sub)expression whose sign bit is VAL,
3125 or NULL_TREE otherwise. */
3127 static tree
3128 sign_bit_p (tree exp, tree val)
3130 unsigned HOST_WIDE_INT mask_lo, lo;
3131 HOST_WIDE_INT mask_hi, hi;
3132 int width;
3133 tree t;
3135 /* Tree EXP must have an integral type. */
3136 t = TREE_TYPE (exp);
3137 if (! INTEGRAL_TYPE_P (t))
3138 return NULL_TREE;
3140 /* Tree VAL must be an integer constant. */
3141 if (TREE_CODE (val) != INTEGER_CST
3142 || TREE_CONSTANT_OVERFLOW (val))
3143 return NULL_TREE;
3145 width = TYPE_PRECISION (t);
3146 if (width > HOST_BITS_PER_WIDE_INT)
/* The sign bit lives in the high word of the two-word constant.  */
3148 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3149 lo = 0;
3151 mask_hi = ((unsigned HOST_WIDE_INT) -1
3152 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3153 mask_lo = -1;
3155 else
/* The sign bit lives in the low word.  */
3157 hi = 0;
3158 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3160 mask_hi = 0;
3161 mask_lo = ((unsigned HOST_WIDE_INT) -1
3162 >> (HOST_BITS_PER_WIDE_INT - width));
3165 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3166 treat VAL as if it were unsigned. */
3167 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3168 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3169 return exp;
3171 /* Handle extension from a narrower type. */
3172 if (TREE_CODE (exp) == NOP_EXPR
3173 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3174 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3176 return NULL_TREE;
3179 /* Subroutine for fold_truthop: determine if an operand is simple enough
3180 to be evaluated unconditionally. */
3182 static int
3183 simple_operand_p (tree exp)
3185 /* Strip any conversions that don't change the machine mode. */
3186 while ((TREE_CODE (exp) == NOP_EXPR
3187 || TREE_CODE (exp) == CONVERT_EXPR)
3188 && (TYPE_MODE (TREE_TYPE (exp))
3189 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3190 exp = TREE_OPERAND (exp, 0);
/* Accept constants, and non-volatile, non-addressable local decls.  */
3192 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3193 || (DECL_P (exp)
3194 && ! TREE_ADDRESSABLE (exp)
3195 && ! TREE_THIS_VOLATILE (exp)
3196 && ! DECL_NONLOCAL (exp)
3197 /* Don't regard global variables as simple. They may be
3198 allocated in ways unknown to the compiler (shared memory,
3199 #pragma weak, etc). */
3200 && ! TREE_PUBLIC (exp)
3201 && ! DECL_EXTERNAL (exp)
3202 /* Loading a static variable is unduly expensive, but global
3203 registers aren't expensive. */
3204 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3207 /* The following functions are subroutines to fold_range_test and allow it to
3208 try to change a logical combination of comparisons into a range test.
3210 For example, both
X == 2 || X == 3 || X == 4 || X == 5
and
X >= 2 && X <= 5
3214 are converted to
3215 (unsigned) (X - 2) <= 3
3217 We describe each set of comparisons as being either inside or outside
3218 a range, using a variable named like IN_P, and then describe the
3219 range with a lower and upper bound. If one of the bounds is omitted,
3220 it represents either the highest or lowest value of the type.
3222 In the comments below, we represent a range by two numbers in brackets
3223 preceded by a "+" to designate being inside that range, or a "-" to
3224 designate being outside that range, so the condition can be inverted by
3225 flipping the prefix. An omitted bound is represented by a "-". For
3226 example, "- [-, 10]" means being outside the range starting at the lowest
3227 possible value and ending at 10, in other words, being greater than 10.
3228 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3229 always false.
3231 We set up things so that the missing bounds are handled in a consistent
3232 manner so neither a missing bound nor "true" and "false" need to be
3233 handled using a special case. */
3235 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3236 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3237 and UPPER1_P are nonzero if the respective argument is an upper bound
3238 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3239 must be specified for a comparison. ARG1 will be converted to ARG0's
3240 type if both are specified. */
3242 static tree
3243 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3244 tree arg1, int upper1_p)
3246 tree tem;
3247 int result;
3248 int sgn0, sgn1;
3250 /* If neither arg represents infinity, do the normal operation.
3251 Else, if not a comparison, return infinity. Else handle the special
3252 comparison rules. Note that most of the cases below won't occur, but
3253 are handled for consistency. */
3255 if (arg0 != 0 && arg1 != 0)
3257 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3258 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3259 STRIP_NOPS (tem);
3260 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3263 if (TREE_CODE_CLASS (code) != '<')
3264 return 0;
3266 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3267 for neither. In real maths, we cannot assume open ended ranges are
3268 the same. But, this is computer arithmetic, where numbers are finite.
3269 We can therefore make the transformation of any unbounded range with
3270 the value Z, Z being greater than any representable number. This permits
3271 us to treat unbounded ranges as equal. */
3272 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3273 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3274 switch (code)
3276 case EQ_EXPR:
3277 result = sgn0 == sgn1;
3278 break;
3279 case NE_EXPR:
3280 result = sgn0 != sgn1;
3281 break;
3282 case LT_EXPR:
3283 result = sgn0 < sgn1;
3284 break;
3285 case LE_EXPR:
3286 result = sgn0 <= sgn1;
3287 break;
3288 case GT_EXPR:
3289 result = sgn0 > sgn1;
3290 break;
3291 case GE_EXPR:
3292 result = sgn0 >= sgn1;
3293 break;
3294 default:
3295 abort ();
3298 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3301 /* Given EXP, a logical expression, set the range it is testing into
3302 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3303 actually being tested. *PLOW and *PHIGH will be made of the same type
3304 as the returned expression. If EXP is not a comparison, we will most
3305 likely not be returning a useful value and range. */
3307 static tree
3308 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3310 enum tree_code code;
3311 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3312 tree orig_type = NULL_TREE;
3313 int in_p, n_in_p;
3314 tree low, high, n_low, n_high;
3316 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3317 and see if we can refine the range. Some of the cases below may not
3318 happen, but it doesn't seem worth worrying about this. We "continue"
3319 the outer loop when we've changed something; otherwise we "break"
3320 the switch, which will "break" the while. */
3322 in_p = 0;
3323 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3325 while (1)
3327 code = TREE_CODE (exp);
3329 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3331 if (first_rtl_op (code) > 0)
3332 arg0 = TREE_OPERAND (exp, 0);
3333 if (TREE_CODE_CLASS (code) == '<'
3334 || TREE_CODE_CLASS (code) == '1'
3335 || TREE_CODE_CLASS (code) == '2')
3336 type = TREE_TYPE (arg0);
3337 if (TREE_CODE_CLASS (code) == '2'
3338 || TREE_CODE_CLASS (code) == '<'
3339 || (TREE_CODE_CLASS (code) == 'e'
3340 && TREE_CODE_LENGTH (code) > 1))
3341 arg1 = TREE_OPERAND (exp, 1);
3344 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3345 lose a cast by accident. */
3346 if (type != NULL_TREE && orig_type == NULL_TREE)
3347 orig_type = type;
3349 switch (code)
3351 case TRUTH_NOT_EXPR:
3352 in_p = ! in_p, exp = arg0;
3353 continue;
3355 case EQ_EXPR: case NE_EXPR:
3356 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3357 /* We can only do something if the range is testing for zero
3358 and if the second operand is an integer constant. Note that
3359 saying something is "in" the range we make is done by
3360 complementing IN_P since it will set in the initial case of
3361 being not equal to zero; "out" is leaving it alone. */
3362 if (low == 0 || high == 0
3363 || ! integer_zerop (low) || ! integer_zerop (high)
3364 || TREE_CODE (arg1) != INTEGER_CST)
3365 break;
3367 switch (code)
3369 case NE_EXPR: /* - [c, c] */
3370 low = high = arg1;
3371 break;
3372 case EQ_EXPR: /* + [c, c] */
3373 in_p = ! in_p, low = high = arg1;
3374 break;
3375 case GT_EXPR: /* - [-, c] */
3376 low = 0, high = arg1;
3377 break;
3378 case GE_EXPR: /* + [c, -] */
3379 in_p = ! in_p, low = arg1, high = 0;
3380 break;
3381 case LT_EXPR: /* - [c, -] */
3382 low = arg1, high = 0;
3383 break;
3384 case LE_EXPR: /* + [-, c] */
3385 in_p = ! in_p, low = 0, high = arg1;
3386 break;
3387 default:
3388 abort ();
3391 exp = arg0;
3393 /* If this is an unsigned comparison, we also know that EXP is
3394 greater than or equal to zero. We base the range tests we make
3395 on that fact, so we record it here so we can parse existing
3396 range tests. */
3397 if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
3399 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3400 1, fold_convert (type, integer_zero_node),
3401 NULL_TREE))
3402 break;
3404 in_p = n_in_p, low = n_low, high = n_high;
3406 /* If the high bound is missing, but we have a nonzero low
3407 bound, reverse the range so it goes from zero to the low bound
3408 minus 1. */
3409 if (high == 0 && low && ! integer_zerop (low))
3411 in_p = ! in_p;
3412 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3413 integer_one_node, 0);
3414 low = fold_convert (type, integer_zero_node);
3417 continue;
3419 case NEGATE_EXPR:
3420 /* (-x) IN [a,b] -> x in [-b, -a] */
3421 n_low = range_binop (MINUS_EXPR, type,
3422 fold_convert (type, integer_zero_node),
3423 0, high, 1);
3424 n_high = range_binop (MINUS_EXPR, type,
3425 fold_convert (type, integer_zero_node),
3426 0, low, 0);
3427 low = n_low, high = n_high;
3428 exp = arg0;
3429 continue;
3431 case BIT_NOT_EXPR:
3432 /* ~ X -> -X - 1 */
3433 exp = build2 (MINUS_EXPR, type, negate_expr (arg0),
3434 fold_convert (type, integer_one_node));
3435 continue;
3437 case PLUS_EXPR: case MINUS_EXPR:
3438 if (TREE_CODE (arg1) != INTEGER_CST)
3439 break;
3441 /* If EXP is signed, any overflow in the computation is undefined,
3442 so we don't worry about it so long as our computations on
3443 the bounds don't overflow. For unsigned, overflow is defined
3444 and this is exactly the right thing. */
3445 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3446 type, low, 0, arg1, 0);
3447 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3448 type, high, 1, arg1, 0);
3449 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3450 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3451 break;
3453 /* Check for an unsigned range which has wrapped around the maximum
3454 value thus making n_high < n_low, and normalize it. */
3455 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3457 low = range_binop (PLUS_EXPR, type, n_high, 0,
3458 integer_one_node, 0);
3459 high = range_binop (MINUS_EXPR, type, n_low, 0,
3460 integer_one_node, 0);
3462 /* If the range is of the form +/- [ x+1, x ], we won't
3463 be able to normalize it. But then, it represents the
3464 whole range or the empty set, so make it
3465 +/- [ -, - ]. */
3466 if (tree_int_cst_equal (n_low, low)
3467 && tree_int_cst_equal (n_high, high))
3468 low = high = 0;
3469 else
3470 in_p = ! in_p;
3472 else
3473 low = n_low, high = n_high;
3475 exp = arg0;
3476 continue;
3478 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3479 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3480 break;
3482 if (! INTEGRAL_TYPE_P (type)
3483 || (low != 0 && ! int_fits_type_p (low, type))
3484 || (high != 0 && ! int_fits_type_p (high, type)))
3485 break;
3487 n_low = low, n_high = high;
3489 if (n_low != 0)
3490 n_low = fold_convert (type, n_low);
3492 if (n_high != 0)
3493 n_high = fold_convert (type, n_high);
3495 /* If we're converting from an unsigned to a signed type,
3496 we will be doing the comparison as unsigned. The tests above
3497 have already verified that LOW and HIGH are both positive.
3499 So we have to make sure that the original unsigned value will
3500 be interpreted as positive. */
3501 if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
3503 tree equiv_type = lang_hooks.types.type_for_mode
3504 (TYPE_MODE (type), 1);
3505 tree high_positive;
3507 /* A range without an upper bound is, naturally, unbounded.
3508 Since convert would have cropped a very large value, use
3509 the max value for the destination type. */
3510 high_positive
3511 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3512 : TYPE_MAX_VALUE (type);
3514 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3515 high_positive = fold (build2 (RSHIFT_EXPR, type,
3516 fold_convert (type,
3517 high_positive),
3518 fold_convert (type,
3519 integer_one_node)));
3521 /* If the low bound is specified, "and" the range with the
3522 range for which the original unsigned value will be
3523 positive. */
3524 if (low != 0)
3526 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3527 1, n_low, n_high, 1,
3528 fold_convert (type, integer_zero_node),
3529 high_positive))
3530 break;
3532 in_p = (n_in_p == in_p);
3534 else
3536 /* Otherwise, "or" the range with the range of the input
3537 that will be interpreted as negative. */
3538 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3539 0, n_low, n_high, 1,
3540 fold_convert (type, integer_zero_node),
3541 high_positive))
3542 break;
3544 in_p = (in_p != n_in_p);
3548 exp = arg0;
3549 low = n_low, high = n_high;
3550 continue;
3552 default:
3553 break;
3556 break;
3559 /* If EXP is a constant, we can evaluate whether this is true or false. */
3560 if (TREE_CODE (exp) == INTEGER_CST)
3562 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3563 exp, 0, low, 0))
3564 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3565 exp, 1, high, 1)));
3566 low = high = 0;
3567 exp = 0;
3570 *pin_p = in_p, *plow = low, *phigh = high;
3571 return exp;
3574 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3575 type, TYPE, return an expression to test if EXP is in (or out of, depending
3576 on IN_P) the range. */
3578 static tree
3579 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3581 tree etype = TREE_TYPE (exp);
3582 tree value;
3584 if (! in_p
3585 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3586 return invert_truthvalue (value);
3588 if (low == 0 && high == 0)
3589 return fold_convert (type, integer_one_node);
3591 if (low == 0)
3592 return fold (build2 (LE_EXPR, type, exp, high));
3594 if (high == 0)
3595 return fold (build2 (GE_EXPR, type, exp, low));
3597 if (operand_equal_p (low, high, 0))
3598 return fold (build2 (EQ_EXPR, type, exp, low));
3600 if (integer_zerop (low))
3602 if (! TYPE_UNSIGNED (etype))
3604 etype = lang_hooks.types.unsigned_type (etype);
3605 high = fold_convert (etype, high);
3606 exp = fold_convert (etype, exp);
3608 return build_range_check (type, exp, 1, 0, high);
3611 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3612 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3614 unsigned HOST_WIDE_INT lo;
3615 HOST_WIDE_INT hi;
3616 int prec;
3618 prec = TYPE_PRECISION (etype);
3619 if (prec <= HOST_BITS_PER_WIDE_INT)
3621 hi = 0;
3622 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3624 else
3626 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3627 lo = (unsigned HOST_WIDE_INT) -1;
3630 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3632 if (TYPE_UNSIGNED (etype))
3634 etype = lang_hooks.types.signed_type (etype);
3635 exp = fold_convert (etype, exp);
3637 return fold (build2 (GT_EXPR, type, exp,
3638 fold_convert (etype, integer_zero_node)));
3642 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3643 && ! TREE_OVERFLOW (value))
3644 return build_range_check (type,
3645 fold (build2 (MINUS_EXPR, etype, exp, low)),
3646 1, fold_convert (etype, integer_zero_node),
3647 value);
3649 return 0;
3652 /* Given two ranges, see if we can merge them into one. Return 1 if we
3653 can, 0 if we can't. Set the output range into the specified parameters. */
3655 static int
3656 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3657 tree high0, int in1_p, tree low1, tree high1)
3659 int no_overlap;
3660 int subset;
3661 int temp;
3662 tree tem;
3663 int in_p;
3664 tree low, high;
3665 int lowequal = ((low0 == 0 && low1 == 0)
3666 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3667 low0, 0, low1, 0)));
3668 int highequal = ((high0 == 0 && high1 == 0)
3669 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3670 high0, 1, high1, 1)));
3672 /* Make range 0 be the range that starts first, or ends last if they
3673 start at the same value. Swap them if it isn't. */
3674 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3675 low0, 0, low1, 0))
3676 || (lowequal
3677 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3678 high1, 1, high0, 1))))
3680 temp = in0_p, in0_p = in1_p, in1_p = temp;
3681 tem = low0, low0 = low1, low1 = tem;
3682 tem = high0, high0 = high1, high1 = tem;
3685 /* Now flag two cases, whether the ranges are disjoint or whether the
3686 second range is totally subsumed in the first. Note that the tests
3687 below are simplified by the ones above. */
3688 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3689 high0, 1, low1, 0));
3690 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3691 high1, 1, high0, 1));
3693 /* We now have four cases, depending on whether we are including or
3694 excluding the two ranges. */
3695 if (in0_p && in1_p)
3697 /* If they don't overlap, the result is false. If the second range
3698 is a subset it is the result. Otherwise, the range is from the start
3699 of the second to the end of the first. */
3700 if (no_overlap)
3701 in_p = 0, low = high = 0;
3702 else if (subset)
3703 in_p = 1, low = low1, high = high1;
3704 else
3705 in_p = 1, low = low1, high = high0;
3708 else if (in0_p && ! in1_p)
3710 /* If they don't overlap, the result is the first range. If they are
3711 equal, the result is false. If the second range is a subset of the
3712 first, and the ranges begin at the same place, we go from just after
3713 the end of the first range to the end of the second. If the second
3714 range is not a subset of the first, or if it is a subset and both
3715 ranges end at the same place, the range starts at the start of the
3716 first range and ends just before the second range.
3717 Otherwise, we can't describe this as a single range. */
3718 if (no_overlap)
3719 in_p = 1, low = low0, high = high0;
3720 else if (lowequal && highequal)
3721 in_p = 0, low = high = 0;
3722 else if (subset && lowequal)
3724 in_p = 1, high = high0;
3725 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3726 integer_one_node, 0);
3728 else if (! subset || highequal)
3730 in_p = 1, low = low0;
3731 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3732 integer_one_node, 0);
3734 else
3735 return 0;
3738 else if (! in0_p && in1_p)
3740 /* If they don't overlap, the result is the second range. If the second
3741 is a subset of the first, the result is false. Otherwise,
3742 the range starts just after the first range and ends at the
3743 end of the second. */
3744 if (no_overlap)
3745 in_p = 1, low = low1, high = high1;
3746 else if (subset || highequal)
3747 in_p = 0, low = high = 0;
3748 else
3750 in_p = 1, high = high1;
3751 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3752 integer_one_node, 0);
3756 else
3758 /* The case where we are excluding both ranges. Here the complex case
3759 is if they don't overlap. In that case, the only time we have a
3760 range is if they are adjacent. If the second is a subset of the
3761 first, the result is the first. Otherwise, the range to exclude
3762 starts at the beginning of the first range and ends at the end of the
3763 second. */
3764 if (no_overlap)
3766 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3767 range_binop (PLUS_EXPR, NULL_TREE,
3768 high0, 1,
3769 integer_one_node, 1),
3770 1, low1, 0)))
3771 in_p = 0, low = low0, high = high1;
3772 else
3773 return 0;
3775 else if (subset)
3776 in_p = 0, low = low0, high = high0;
3777 else
3778 in_p = 0, low = low0, high = high1;
3781 *pin_p = in_p, *plow = low, *phigh = high;
3782 return 1;
3785 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3786 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3787 #endif
3789 /* EXP is some logical combination of boolean tests. See if we can
3790 merge it into some range test. Return the new tree if so. */
3792 static tree
3793 fold_range_test (tree exp)
3795 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3796 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3797 int in0_p, in1_p, in_p;
3798 tree low0, low1, low, high0, high1, high;
3799 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3800 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3801 tree tem;
3803 /* If this is an OR operation, invert both sides; we will invert
3804 again at the end. */
3805 if (or_op)
3806 in0_p = ! in0_p, in1_p = ! in1_p;
3808 /* If both expressions are the same, if we can merge the ranges, and we
3809 can build the range test, return it or it inverted. If one of the
3810 ranges is always true or always false, consider it to be the same
3811 expression as the other. */
3812 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3813 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3814 in1_p, low1, high1)
3815 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3816 lhs != 0 ? lhs
3817 : rhs != 0 ? rhs : integer_zero_node,
3818 in_p, low, high))))
3819 return or_op ? invert_truthvalue (tem) : tem;
3821 /* On machines where the branch cost is expensive, if this is a
3822 short-circuited branch and the underlying object on both sides
3823 is the same, make a non-short-circuit operation. */
3824 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3825 && lhs != 0 && rhs != 0
3826 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3827 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3828 && operand_equal_p (lhs, rhs, 0))
3830 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3831 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3832 which cases we can't do this. */
3833 if (simple_operand_p (lhs))
3834 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3835 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3836 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3837 TREE_OPERAND (exp, 1));
3839 else if (lang_hooks.decls.global_bindings_p () == 0
3840 && ! CONTAINS_PLACEHOLDER_P (lhs))
3842 tree common = save_expr (lhs);
3844 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3845 or_op ? ! in0_p : in0_p,
3846 low0, high0))
3847 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3848 or_op ? ! in1_p : in1_p,
3849 low1, high1))))
3850 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3851 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3852 TREE_TYPE (exp), lhs, rhs);
3856 return 0;
3859 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3860 bit value. Arrange things so the extra bits will be set to zero if and
3861 only if C is signed-extended to its full width. If MASK is nonzero,
3862 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3864 static tree
3865 unextend (tree c, int p, int unsignedp, tree mask)
3867 tree type = TREE_TYPE (c);
3868 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3869 tree temp;
3871 if (p == modesize || unsignedp)
3872 return c;
3874 /* We work by getting just the sign bit into the low-order bit, then
3875 into the high-order bit, then sign-extend. We then XOR that value
3876 with C. */
3877 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3878 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3880 /* We must use a signed type in order to get an arithmetic right shift.
3881 However, we must also avoid introducing accidental overflows, so that
3882 a subsequent call to integer_zerop will work. Hence we must
3883 do the type conversion here. At this point, the constant is either
3884 zero or one, and the conversion to a signed type can never overflow.
3885 We could get an overflow if this conversion is done anywhere else. */
3886 if (TYPE_UNSIGNED (type))
3887 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
3889 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3890 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3891 if (mask != 0)
3892 temp = const_binop (BIT_AND_EXPR, temp,
3893 fold_convert (TREE_TYPE (c), mask), 0);
3894 /* If necessary, convert the type back to match the type of C. */
3895 if (TYPE_UNSIGNED (type))
3896 temp = fold_convert (type, temp);
3898 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3901 /* Find ways of folding logical expressions of LHS and RHS:
3902 Try to merge two comparisons to the same innermost item.
3903 Look for range tests like "ch >= '0' && ch <= '9'".
3904 Look for combinations of simple terms on machines with expensive branches
3905 and evaluate the RHS unconditionally.
3907 For example, if we have p->a == 2 && p->b == 4 and we can make an
3908 object large enough to span both A and B, we can do this with a comparison
3909 against the object ANDed with the a mask.
3911 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3912 operations to do this with one comparison.
3914 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3915 function and the one above.
3917 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3918 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3920 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3921 two operands.
3923 We return the simplified tree or 0 if no optimization is possible. */
3925 static tree
3926 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3928 /* If this is the "or" of two comparisons, we can do something if
3929 the comparisons are NE_EXPR. If this is the "and", we can do something
3930 if the comparisons are EQ_EXPR. I.e.,
3931 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3933 WANTED_CODE is this operation code. For single bit fields, we can
3934 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3935 comparison for one-bit fields. */
3937 enum tree_code wanted_code;
3938 enum tree_code lcode, rcode;
3939 tree ll_arg, lr_arg, rl_arg, rr_arg;
3940 tree ll_inner, lr_inner, rl_inner, rr_inner;
3941 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3942 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3943 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3944 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3945 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3946 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3947 enum machine_mode lnmode, rnmode;
3948 tree ll_mask, lr_mask, rl_mask, rr_mask;
3949 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3950 tree l_const, r_const;
3951 tree lntype, rntype, result;
3952 int first_bit, end_bit;
3953 int volatilep;
3955 /* Start by getting the comparison codes. Fail if anything is volatile.
3956 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3957 it were surrounded with a NE_EXPR. */
3959 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3960 return 0;
3962 lcode = TREE_CODE (lhs);
3963 rcode = TREE_CODE (rhs);
3965 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3967 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
3968 lcode = NE_EXPR;
3971 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3973 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
3974 rcode = NE_EXPR;
3977 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3978 return 0;
3980 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3981 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3983 ll_arg = TREE_OPERAND (lhs, 0);
3984 lr_arg = TREE_OPERAND (lhs, 1);
3985 rl_arg = TREE_OPERAND (rhs, 0);
3986 rr_arg = TREE_OPERAND (rhs, 1);
3988 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3989 if (simple_operand_p (ll_arg)
3990 && simple_operand_p (lr_arg)
3991 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3993 int compcode;
3995 if (operand_equal_p (ll_arg, rl_arg, 0)
3996 && operand_equal_p (lr_arg, rr_arg, 0))
3998 int lcompcode, rcompcode;
4000 lcompcode = comparison_to_compcode (lcode);
4001 rcompcode = comparison_to_compcode (rcode);
4002 compcode = (code == TRUTH_AND_EXPR)
4003 ? lcompcode & rcompcode
4004 : lcompcode | rcompcode;
4006 else if (operand_equal_p (ll_arg, rr_arg, 0)
4007 && operand_equal_p (lr_arg, rl_arg, 0))
4009 int lcompcode, rcompcode;
4011 rcode = swap_tree_comparison (rcode);
4012 lcompcode = comparison_to_compcode (lcode);
4013 rcompcode = comparison_to_compcode (rcode);
4014 compcode = (code == TRUTH_AND_EXPR)
4015 ? lcompcode & rcompcode
4016 : lcompcode | rcompcode;
4018 else
4019 compcode = -1;
4021 if (compcode == COMPCODE_TRUE)
4022 return fold_convert (truth_type, integer_one_node);
4023 else if (compcode == COMPCODE_FALSE)
4024 return fold_convert (truth_type, integer_zero_node);
4025 else if (compcode != -1)
4026 return build2 (compcode_to_comparison (compcode),
4027 truth_type, ll_arg, lr_arg);
4030 /* If the RHS can be evaluated unconditionally and its operands are
4031 simple, it wins to evaluate the RHS unconditionally on machines
4032 with expensive branches. In this case, this isn't a comparison
4033 that can be merged. Avoid doing this if the RHS is a floating-point
4034 comparison since those can trap. */
4036 if (BRANCH_COST >= 2
4037 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4038 && simple_operand_p (rl_arg)
4039 && simple_operand_p (rr_arg))
4041 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4042 if (code == TRUTH_OR_EXPR
4043 && lcode == NE_EXPR && integer_zerop (lr_arg)
4044 && rcode == NE_EXPR && integer_zerop (rr_arg)
4045 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4046 return build2 (NE_EXPR, truth_type,
4047 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4048 ll_arg, rl_arg),
4049 integer_zero_node);
4051 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4052 if (code == TRUTH_AND_EXPR
4053 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4054 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4055 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4056 return build2 (EQ_EXPR, truth_type,
4057 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4058 ll_arg, rl_arg),
4059 integer_zero_node);
4061 return build2 (code, truth_type, lhs, rhs);
4064 /* See if the comparisons can be merged. Then get all the parameters for
4065 each side. */
4067 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4068 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4069 return 0;
4071 volatilep = 0;
4072 ll_inner = decode_field_reference (ll_arg,
4073 &ll_bitsize, &ll_bitpos, &ll_mode,
4074 &ll_unsignedp, &volatilep, &ll_mask,
4075 &ll_and_mask);
4076 lr_inner = decode_field_reference (lr_arg,
4077 &lr_bitsize, &lr_bitpos, &lr_mode,
4078 &lr_unsignedp, &volatilep, &lr_mask,
4079 &lr_and_mask);
4080 rl_inner = decode_field_reference (rl_arg,
4081 &rl_bitsize, &rl_bitpos, &rl_mode,
4082 &rl_unsignedp, &volatilep, &rl_mask,
4083 &rl_and_mask);
4084 rr_inner = decode_field_reference (rr_arg,
4085 &rr_bitsize, &rr_bitpos, &rr_mode,
4086 &rr_unsignedp, &volatilep, &rr_mask,
4087 &rr_and_mask);
4089 /* It must be true that the inner operation on the lhs of each
4090 comparison must be the same if we are to be able to do anything.
4091 Then see if we have constants. If not, the same must be true for
4092 the rhs's. */
4093 if (volatilep || ll_inner == 0 || rl_inner == 0
4094 || ! operand_equal_p (ll_inner, rl_inner, 0))
4095 return 0;
4097 if (TREE_CODE (lr_arg) == INTEGER_CST
4098 && TREE_CODE (rr_arg) == INTEGER_CST)
4099 l_const = lr_arg, r_const = rr_arg;
4100 else if (lr_inner == 0 || rr_inner == 0
4101 || ! operand_equal_p (lr_inner, rr_inner, 0))
4102 return 0;
4103 else
4104 l_const = r_const = 0;
4106 /* If either comparison code is not correct for our logical operation,
4107 fail. However, we can convert a one-bit comparison against zero into
4108 the opposite comparison against that bit being set in the field. */
4110 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4111 if (lcode != wanted_code)
4113 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4115 /* Make the left operand unsigned, since we are only interested
4116 in the value of one bit. Otherwise we are doing the wrong
4117 thing below. */
4118 ll_unsignedp = 1;
4119 l_const = ll_mask;
4121 else
4122 return 0;
4125 /* This is analogous to the code for l_const above. */
4126 if (rcode != wanted_code)
4128 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4130 rl_unsignedp = 1;
4131 r_const = rl_mask;
4133 else
4134 return 0;
4137 /* After this point all optimizations will generate bit-field
4138 references, which we might not want. */
4139 if (! lang_hooks.can_use_bit_fields_p ())
4140 return 0;
4142 /* See if we can find a mode that contains both fields being compared on
4143 the left. If we can't, fail. Otherwise, update all constants and masks
4144 to be relative to a field of that size. */
4145 first_bit = MIN (ll_bitpos, rl_bitpos);
4146 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4147 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4148 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4149 volatilep);
4150 if (lnmode == VOIDmode)
4151 return 0;
4153 lnbitsize = GET_MODE_BITSIZE (lnmode);
4154 lnbitpos = first_bit & ~ (lnbitsize - 1);
4155 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4156 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4158 if (BYTES_BIG_ENDIAN)
4160 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4161 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4164 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4165 size_int (xll_bitpos), 0);
4166 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4167 size_int (xrl_bitpos), 0);
4169 if (l_const)
4171 l_const = fold_convert (lntype, l_const);
4172 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4173 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4174 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4175 fold (build1 (BIT_NOT_EXPR,
4176 lntype, ll_mask)),
4177 0)))
4179 warning ("comparison is always %d", wanted_code == NE_EXPR);
4181 return fold_convert (truth_type,
4182 wanted_code == NE_EXPR
4183 ? integer_one_node : integer_zero_node);
4186 if (r_const)
4188 r_const = fold_convert (lntype, r_const);
4189 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4190 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4191 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4192 fold (build1 (BIT_NOT_EXPR,
4193 lntype, rl_mask)),
4194 0)))
4196 warning ("comparison is always %d", wanted_code == NE_EXPR);
4198 return fold_convert (truth_type,
4199 wanted_code == NE_EXPR
4200 ? integer_one_node : integer_zero_node);
4204 /* If the right sides are not constant, do the same for it. Also,
4205 disallow this optimization if a size or signedness mismatch occurs
4206 between the left and right sides. */
4207 if (l_const == 0)
4209 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4210 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4211 /* Make sure the two fields on the right
4212 correspond to the left without being swapped. */
4213 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4214 return 0;
4216 first_bit = MIN (lr_bitpos, rr_bitpos);
4217 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4218 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4219 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4220 volatilep);
4221 if (rnmode == VOIDmode)
4222 return 0;
4224 rnbitsize = GET_MODE_BITSIZE (rnmode);
4225 rnbitpos = first_bit & ~ (rnbitsize - 1);
4226 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4227 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4229 if (BYTES_BIG_ENDIAN)
4231 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4232 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4235 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4236 size_int (xlr_bitpos), 0);
4237 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4238 size_int (xrr_bitpos), 0);
4240 /* Make a mask that corresponds to both fields being compared.
4241 Do this for both items being compared. If the operands are the
4242 same size and the bits being compared are in the same position
4243 then we can do this by masking both and comparing the masked
4244 results. */
4245 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4246 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4247 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4249 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4250 ll_unsignedp || rl_unsignedp);
4251 if (! all_ones_mask_p (ll_mask, lnbitsize))
4252 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4254 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4255 lr_unsignedp || rr_unsignedp);
4256 if (! all_ones_mask_p (lr_mask, rnbitsize))
4257 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4259 return build2 (wanted_code, truth_type, lhs, rhs);
4262 /* There is still another way we can do something: If both pairs of
4263 fields being compared are adjacent, we may be able to make a wider
4264 field containing them both.
4266 Note that we still must mask the lhs/rhs expressions. Furthermore,
4267 the mask must be shifted to account for the shift done by
4268 make_bit_field_ref. */
4269 if ((ll_bitsize + ll_bitpos == rl_bitpos
4270 && lr_bitsize + lr_bitpos == rr_bitpos)
4271 || (ll_bitpos == rl_bitpos + rl_bitsize
4272 && lr_bitpos == rr_bitpos + rr_bitsize))
4274 tree type;
4276 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4277 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4278 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4279 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4281 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4282 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4283 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4284 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4286 /* Convert to the smaller type before masking out unwanted bits. */
4287 type = lntype;
4288 if (lntype != rntype)
4290 if (lnbitsize > rnbitsize)
4292 lhs = fold_convert (rntype, lhs);
4293 ll_mask = fold_convert (rntype, ll_mask);
4294 type = rntype;
4296 else if (lnbitsize < rnbitsize)
4298 rhs = fold_convert (lntype, rhs);
4299 lr_mask = fold_convert (lntype, lr_mask);
4300 type = lntype;
4304 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4305 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4307 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4308 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4310 return build2 (wanted_code, truth_type, lhs, rhs);
4313 return 0;
4316 /* Handle the case of comparisons with constants. If there is something in
4317 common between the masks, those bits of the constants must be the same.
4318 If not, the condition is always false. Test for this to avoid generating
4319 incorrect code below. */
4320 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4321 if (! integer_zerop (result)
4322 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4323 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4325 if (wanted_code == NE_EXPR)
4327 warning ("`or' of unmatched not-equal tests is always 1");
4328 return fold_convert (truth_type, integer_one_node);
4330 else
4332 warning ("`and' of mutually exclusive equal-tests is always 0");
4333 return fold_convert (truth_type, integer_zero_node);
4337 /* Construct the expression we will return. First get the component
4338 reference we will make. Unless the mask is all ones the width of
4339 that field, perform the mask operation. Then compare with the
4340 merged constant. */
4341 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4342 ll_unsignedp || rl_unsignedp);
4344 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4345 if (! all_ones_mask_p (ll_mask, lnbitsize))
4346 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4348 return build2 (wanted_code, truth_type, result,
4349 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.

   Returns a simplified comparison tree, or T itself unchanged when the
   optimization does not apply: the first operand (after stripping
   sign-preserving conversions) must be a MIN_EXPR or MAX_EXPR, and both
   the comparison constant and the MIN/MAX constant must be INTEGER_CSTs
   without overflow.  */
4355 static tree
4356 optimize_minmax_comparison (tree t)
4358 tree type = TREE_TYPE (t);
4359 tree arg0 = TREE_OPERAND (t, 0);
4360 enum tree_code op_code;
4361 tree comp_const = TREE_OPERAND (t, 1);
4362 tree minmax_const;
4363 int consts_equal, consts_lt;
4364 tree inner;
/* Look through conversions that do not change the sign, so the
   MIN_EXPR/MAX_EXPR itself is visible.  */
4366 STRIP_SIGN_NOPS (arg0);
4368 op_code = TREE_CODE (arg0);
4369 minmax_const = TREE_OPERAND (arg0, 1);
4370 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4371 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4372 inner = TREE_OPERAND (arg0, 0);
4374 /* If something does not permit us to optimize, return the original tree. */
4375 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4376 || TREE_CODE (comp_const) != INTEGER_CST
4377 || TREE_CONSTANT_OVERFLOW (comp_const)
4378 || TREE_CODE (minmax_const) != INTEGER_CST
4379 || TREE_CONSTANT_OVERFLOW (minmax_const))
4380 return t;
4382 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4383 and GT_EXPR, doing the rest with recursive calls using logical
4384 simplifications. */
4385 switch (TREE_CODE (t))
/* A != B, A < B and A <= B are the inverses of A == B, A >= B and
   A > B respectively, so fold the inverted comparison and invert
   the result.  */
4387 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4388 return
4389 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
/* A >= B decomposes into (A == B) || (A > B), both of which the
   cases below can handle.  */
4391 case GE_EXPR:
4392 return
4393 fold (build2 (TRUTH_ORIF_EXPR, type,
4394 optimize_minmax_comparison
4395 (build2 (EQ_EXPR, type, arg0, comp_const)),
4396 optimize_minmax_comparison
4397 (build2 (GT_EXPR, type, arg0, comp_const))));
4399 case EQ_EXPR:
4400 if (op_code == MAX_EXPR && consts_equal)
4401 /* MAX (X, 0) == 0 -> X <= 0 */
4402 return fold (build2 (LE_EXPR, type, inner, comp_const));
4404 else if (op_code == MAX_EXPR && consts_lt)
4405 /* MAX (X, 0) == 5 -> X == 5 */
4406 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4408 else if (op_code == MAX_EXPR)
4409 /* MAX (X, 0) == -1 -> false */
4410 return omit_one_operand (type, integer_zero_node, inner);
4412 else if (consts_equal)
4413 /* MIN (X, 0) == 0 -> X >= 0 */
4414 return fold (build2 (GE_EXPR, type, inner, comp_const));
4416 else if (consts_lt)
4417 /* MIN (X, 0) == 5 -> false */
4418 return omit_one_operand (type, integer_zero_node, inner);
4420 else
4421 /* MIN (X, 0) == -1 -> X == -1 */
4422 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4424 case GT_EXPR:
4425 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4426 /* MAX (X, 0) > 0 -> X > 0
4427 MAX (X, 0) > 5 -> X > 5 */
4428 return fold (build2 (GT_EXPR, type, inner, comp_const));
4430 else if (op_code == MAX_EXPR)
4431 /* MAX (X, 0) > -1 -> true */
4432 return omit_one_operand (type, integer_one_node, inner);
4434 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4435 /* MIN (X, 0) > 0 -> false
4436 MIN (X, 0) > 5 -> false */
4437 return omit_one_operand (type, integer_zero_node, inner);
4439 else
4440 /* MIN (X, 0) > -1 -> X > -1 */
4441 return fold (build2 (GT_EXPR, type, inner, comp_const));
4443 default:
4444 return t;
4448 /* T is an integer expression that is being multiplied, divided, or taken a
4449 modulus (CODE says which and what kind of divide or modulus) by a
4450 constant C. See if we can eliminate that operation by folding it with
4451 other operations already in T. WIDE_TYPE, if non-null, is a type that
4452 should be used for the computation if wider than our type.
4454 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4455 (X * 2) + (Y * 4). We must, however, be assured that either the original
4456 expression would not overflow or that overflow is undefined for the type
4457 in the language in question.
4459 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4460 the machine has a multiply-accumulate insn or that this is part of an
4461 addressing calculation.
4463 If we return a non-null expression, it is an equivalent form of the
4464 original computation, but need not be in the original type. */
4466 static tree
4467 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4469 /* To avoid exponential search depth, refuse to allow recursion past
4470 three levels. Beyond that (1) it's highly unlikely that we'll find
4471 something interesting and (2) we've probably processed it before
4472 when we built the inner expression. */
4474 static int depth;
4475 tree ret;
4477 if (depth > 3)
4478 return NULL;
4480 depth++;
4481 ret = extract_muldiv_1 (t, c, code, wide_type);
4482 depth--;
4484 return ret;
/* Worker for extract_muldiv: performs one level of the search.  See
   extract_muldiv's comment for the full contract.  T is the expression
   being multiplied/divided, C the constant, CODE the operation, and
   WIDE_TYPE an optional wider type to compute in.  Returns an
   equivalent expression, or 0 when no simplification is found.  */
4487 static tree
4488 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4490 tree type = TREE_TYPE (t);
4491 enum tree_code tcode = TREE_CODE (t);
/* Compute in WIDE_TYPE only when it is strictly wider than T's type.  */
4492 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4493 > GET_MODE_SIZE (TYPE_MODE (type)))
4494 ? wide_type : type);
4495 tree t1, t2;
4496 int same_p = tcode == code;
4497 tree op0 = NULL_TREE, op1 = NULL_TREE;
4499 /* Don't deal with constants of zero here; they confuse the code below. */
4500 if (integer_zerop (c))
4501 return NULL_TREE;
/* Pick up the operands: class '1' is unary, class '2' is binary.  */
4503 if (TREE_CODE_CLASS (tcode) == '1')
4504 op0 = TREE_OPERAND (t, 0);
4506 if (TREE_CODE_CLASS (tcode) == '2')
4507 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4509 /* Note that we need not handle conditional operations here since fold
4510 already handles those cases. So just do arithmetic here. */
4511 switch (tcode)
4513 case INTEGER_CST:
4514 /* For a constant, we can always simplify if we are a multiply
4515 or (for divide and modulus) if it is a multiple of our constant. */
4516 if (code == MULT_EXPR
4517 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4518 return const_binop (code, fold_convert (ctype, t),
4519 fold_convert (ctype, c), 0);
4520 break;
4522 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4523 /* If op0 is an expression ... */
4524 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4525 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4526 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4527 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4528 /* ... and is unsigned, and its type is smaller than ctype,
4529 then we cannot pass through as widening. */
4530 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
4531 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4532 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4533 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4534 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4535 /* ... or its type is larger than ctype,
4536 then we cannot pass through this truncation. */
4537 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4538 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4539 /* ... or signedness changes for division or modulus,
4540 then we cannot pass through this conversion. */
4541 || (code != MULT_EXPR
4542 && (TYPE_UNSIGNED (ctype)
4543 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
4544 break;
4546 /* Pass the constant down and see if we can make a simplification. If
4547 we can, replace this expression with the inner simplification for
4548 possible later conversion to our or some other type. */
4549 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4550 && TREE_CODE (t2) == INTEGER_CST
4551 && ! TREE_CONSTANT_OVERFLOW (t2)
4552 && (0 != (t1 = extract_muldiv (op0, t2, code,
4553 code == MULT_EXPR
4554 ? ctype : NULL_TREE))))
4555 return t1;
4556 break;
4558 case NEGATE_EXPR: case ABS_EXPR:
/* -X * C -> -(X * C) and |X| * C -> |X * C| when the inner
   simplification succeeds.  */
4559 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4560 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4561 break;
4563 case MIN_EXPR: case MAX_EXPR:
4564 /* If widening the type changes the signedness, then we can't perform
4565 this optimization as that changes the result. */
4566 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
4567 break;
4569 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4570 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4571 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Multiplying or dividing by a negative constant flips the
   ordering, so MIN and MAX trade places.  */
4573 if (tree_int_cst_sgn (c) < 0)
4574 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4576 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4577 fold_convert (ctype, t2)));
4579 break;
4581 case LSHIFT_EXPR: case RSHIFT_EXPR:
4582 /* If the second operand is constant, this is a multiplication
4583 or floor division, by a power of two, so we can treat it that
4584 way unless the multiplier or divisor overflows. */
4585 if (TREE_CODE (op1) == INTEGER_CST
4586 /* const_binop may not detect overflow correctly,
4587 so check for it explicitly here. */
4588 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4589 && TREE_INT_CST_HIGH (op1) == 0
4590 && 0 != (t1 = fold_convert (ctype,
4591 const_binop (LSHIFT_EXPR,
4592 size_one_node,
4593 op1, 0)))
4594 && ! TREE_OVERFLOW (t1))
4595 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
4596 ? MULT_EXPR : FLOOR_DIV_EXPR,
4597 ctype, fold_convert (ctype, op0), t1),
4598 c, code, wide_type);
4599 break;
4601 case PLUS_EXPR: case MINUS_EXPR:
4602 /* See if we can eliminate the operation on both sides. If we can, we
4603 can return a new PLUS or MINUS. If we can't, the only remaining
4604 cases where we can do anything are if the second operand is a
4605 constant. */
4606 t1 = extract_muldiv (op0, c, code, wide_type)
4607 t2 = extract_muldiv (op1, c, code, wide_type);
4608 if (t1 != 0 && t2 != 0
4609 && (code == MULT_EXPR
4610 /* If not multiplication, we can only do this if both operands
4611 are divisible by c. */
4612 || (multiple_of_p (ctype, op0, c)
4613 && multiple_of_p (ctype, op1, c))))
4614 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4615 fold_convert (ctype, t2)));
4617 /* If this was a subtraction, negate OP1 and set it to be an addition.
4618 This simplifies the logic below. */
4619 if (tcode == MINUS_EXPR)
4620 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4622 if (TREE_CODE (op1) != INTEGER_CST)
4623 break;
4625 /* If either OP1 or C are negative, this optimization is not safe for
4626 some of the division and remainder types while for others we need
4627 to change the code. */
4628 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4630 if (code == CEIL_DIV_EXPR)
4631 code = FLOOR_DIV_EXPR;
4632 else if (code == FLOOR_DIV_EXPR)
4633 code = CEIL_DIV_EXPR;
4634 else if (code != MULT_EXPR
4635 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4636 break;
4639 /* If it's a multiply or a division/modulus operation of a multiple
4640 of our constant, do the operation and verify it doesn't overflow. */
4641 if (code == MULT_EXPR
4642 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4644 op1 = const_binop (code, fold_convert (ctype, op1),
4645 fold_convert (ctype, c), 0);
4646 /* We allow the constant to overflow with wrapping semantics. */
4647 if (op1 == 0
4648 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4649 break;
4651 else
4652 break;
4654 /* If we have an unsigned type that is not a sizetype, we cannot widen
4655 the operation since it will change the result if the original
4656 computation overflowed. */
4657 if (TYPE_UNSIGNED (ctype)
4658 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4659 && ctype != type)
4660 break;
4662 /* If we were able to eliminate our operation from the first side,
4663 apply our operation to the second side and reform the PLUS. */
4664 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4665 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
4667 /* The last case is if we are a multiply. In that case, we can
4668 apply the distributive law to commute the multiply and addition
4669 if the multiplication of the constants doesn't overflow. */
4670 if (code == MULT_EXPR)
4671 return fold (build2 (tcode, ctype,
4672 fold (build2 (code, ctype,
4673 fold_convert (ctype, op0),
4674 fold_convert (ctype, c))),
4675 op1));
4677 break;
4679 case MULT_EXPR:
4680 /* We have a special case here if we are doing something like
4681 (C * 8) % 4 since we know that's zero. */
4682 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4683 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4684 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4685 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4686 return omit_one_operand (type, integer_zero_node, op0);
4688 /* ... fall through ... */
4690 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4691 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4692 /* If we can extract our operation from the LHS, do so and return a
4693 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4694 do something only if the second operand is a constant. */
4695 if (same_p
4696 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4697 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4698 fold_convert (ctype, op1)));
4699 else if (tcode == MULT_EXPR && code == MULT_EXPR
4700 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4701 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
4702 fold_convert (ctype, t1)));
4703 else if (TREE_CODE (op1) != INTEGER_CST)
4704 return 0;
4706 /* If these are the same operation types, we can associate them
4707 assuming no overflow. */
4708 if (tcode == code
4709 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4710 fold_convert (ctype, c), 0))
4711 && ! TREE_OVERFLOW (t1))
4712 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
4714 /* If these operations "cancel" each other, we have the main
4715 optimizations of this pass, which occur when either constant is a
4716 multiple of the other, in which case we replace this with either an
4717 operation of CODE or TCODE.
4719 If we have an unsigned type that is not a sizetype, we cannot do
4720 this since it will change the result if the original computation
4721 overflowed. */
4722 if ((! TYPE_UNSIGNED (ctype)
4723 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4724 && ! flag_wrapv
4725 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4726 || (tcode == MULT_EXPR
4727 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4728 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4730 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4731 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
4732 fold_convert (ctype,
4733 const_binop (TRUNC_DIV_EXPR,
4734 op1, c, 0))));
4735 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4736 return fold (build2 (code, ctype, fold_convert (ctype, op0),
4737 fold_convert (ctype,
4738 const_binop (TRUNC_DIV_EXPR,
4739 c, op1, 0))));
4741 break;
4743 default:
4744 break;
4747 return 0;
4750 /* Return a node which has the indicated constant VALUE (either 0 or
4751 1), and is of the indicated TYPE. */
4753 static tree
4754 constant_boolean_node (int value, tree type)
4756 if (type == integer_type_node)
4757 return value ? integer_one_node : integer_zero_node;
4758 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4759 return lang_hooks.truthvalue_conversion (value ? integer_one_node
4760 : integer_zero_node);
4761 else
4763 tree t = build_int_2 (value, 0);
4765 TREE_TYPE (t) = type;
4766 return t;
4770 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4771 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4772 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4773 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4774 COND is the first argument to CODE; otherwise (as in the example
4775 given here), it is the second argument. TYPE is the type of the
4776 original expression. Return NULL_TREE if no simplification is
4777 possible. */
4779 static tree
4780 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4781 tree cond, tree arg, int cond_first_p)
4783 tree test, true_value, false_value;
4784 tree lhs = NULL_TREE;
4785 tree rhs = NULL_TREE;
4787 /* This transformation is only worthwhile if we don't have to wrap
4788 arg in a SAVE_EXPR, and the operation can be simplified on atleast
4789 one of the branches once its pushed inside the COND_EXPR. */
4790 if (!TREE_CONSTANT (arg))
4791 return NULL_TREE;
4793 if (TREE_CODE (cond) == COND_EXPR)
4795 test = TREE_OPERAND (cond, 0);
4796 true_value = TREE_OPERAND (cond, 1);
4797 false_value = TREE_OPERAND (cond, 2);
4798 /* If this operand throws an expression, then it does not make
4799 sense to try to perform a logical or arithmetic operation
4800 involving it. */
4801 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4802 lhs = true_value;
4803 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4804 rhs = false_value;
4806 else
4808 tree testtype = TREE_TYPE (cond);
4809 test = cond;
4810 true_value = fold_convert (testtype, integer_one_node);
4811 false_value = fold_convert (testtype, integer_zero_node);
4814 if (lhs == 0)
4815 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
4816 : build2 (code, type, arg, true_value));
4817 if (rhs == 0)
4818 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
4819 : build2 (code, type, arg, false_value));
4821 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
4822 return fold_convert (type, test);
4826 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4828 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4829 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4830 ADDEND is the same as X.
4832 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4833 and finite. The problematic cases are when X is zero, and its mode
4834 has signed zeros. In the case of rounding towards -infinity,
4835 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4836 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4838 static bool
4839 fold_real_zero_addition_p (tree type, tree addend, int negate)
4841 if (!real_zerop (addend))
4842 return false;
4844 /* Don't allow the fold with -fsignaling-nans. */
4845 if (HONOR_SNANS (TYPE_MODE (type)))
4846 return false;
4848 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4849 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4850 return true;
4852 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4853 if (TREE_CODE (addend) == REAL_CST
4854 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4855 negate = !negate;
4857 /* The mode has signed zeros, and we have to honor their sign.
4858 In this situation, there is only one case we can return true for.
4859 X - 0 is the same as X unless rounding towards -infinity is
4860 supported. */
4861 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4864 /* Subroutine of fold() that checks comparisons of built-in math
4865 functions against real constants.
4867 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4868 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4869 is the type of the result and ARG0 and ARG1 are the operands of the
4870 comparison. ARG1 must be a TREE_REAL_CST.
4872 The function returns the constant folded tree if a simplification
4873 can be made, and NULL_TREE otherwise. */
4875 static tree
4876 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4877 tree type, tree arg0, tree arg1)
4879 REAL_VALUE_TYPE c;
/* Only calls to sqrt are currently simplified; any other built-in
   falls through to the NULL_TREE return at the end.  */
4881 if (BUILTIN_SQRT_P (fcode))
4883 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4884 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4886 c = TREE_REAL_CST (arg1);
4887 if (REAL_VALUE_NEGATIVE (c))
4889 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
4890 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4891 return omit_one_operand (type,
4892 fold_convert (type, integer_zero_node),
4893 arg);
4895 /* sqrt(x) > y is always true, if y is negative and we
4896 don't care about NaNs, i.e. negative values of x. */
4897 if (code == NE_EXPR || !HONOR_NANS (mode))
4898 return omit_one_operand (type,
4899 fold_convert (type, integer_one_node),
4900 arg);
4902 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4903 return fold (build2 (GE_EXPR, type, arg,
4904 build_real (TREE_TYPE (arg), dconst0)));
4906 else if (code == GT_EXPR || code == GE_EXPR)
4908 REAL_VALUE_TYPE c2;
/* Square the bound and round it into ARG0's mode, so the comparison
   can be rewritten against x instead of sqrt(x).  */
4910 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4911 real_convert (&c2, mode, &c2);
4913 if (REAL_VALUE_ISINF (c2))
4915 /* sqrt(x) > y is x == +Inf, when y is very large. */
4916 if (HONOR_INFINITIES (mode))
4917 return fold (build2 (EQ_EXPR, type, arg,
4918 build_real (TREE_TYPE (arg), c2)));
4920 /* sqrt(x) > y is always false, when y is very large
4921 and we don't care about infinities. */
4922 return omit_one_operand (type,
4923 fold_convert (type, integer_zero_node),
4924 arg);
4927 /* sqrt(x) > c is the same as x > c*c. */
4928 return fold (build2 (code, type, arg,
4929 build_real (TREE_TYPE (arg), c2)));
4931 else if (code == LT_EXPR || code == LE_EXPR)
4933 REAL_VALUE_TYPE c2;
4935 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4936 real_convert (&c2, mode, &c2);
4938 if (REAL_VALUE_ISINF (c2))
4940 /* sqrt(x) < y is always true, when y is a very large
4941 value and we don't care about NaNs or Infinities. */
4942 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4943 return omit_one_operand (type,
4944 fold_convert (type, integer_one_node),
4945 arg);
4947 /* sqrt(x) < y is x != +Inf when y is very large and we
4948 don't care about NaNs. */
4949 if (! HONOR_NANS (mode))
4950 return fold (build2 (NE_EXPR, type, arg,
4951 build_real (TREE_TYPE (arg), c2)));
4953 /* sqrt(x) < y is x >= 0 when y is very large and we
4954 don't care about Infinities. */
4955 if (! HONOR_INFINITIES (mode))
4956 return fold (build2 (GE_EXPR, type, arg,
4957 build_real (TREE_TYPE (arg), dconst0)));
4959 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* Both rewrites below duplicate ARG, so a SAVE_EXPR is needed;
   that is invalid at global scope or with placeholders.  */
4960 if (lang_hooks.decls.global_bindings_p () != 0
4961 || CONTAINS_PLACEHOLDER_P (arg))
4962 return NULL_TREE;
4964 arg = save_expr (arg);
4965 return fold (build2 (TRUTH_ANDIF_EXPR, type,
4966 fold (build2 (GE_EXPR, type, arg,
4967 build_real (TREE_TYPE (arg),
4968 dconst0))),
4969 fold (build2 (NE_EXPR, type, arg,
4970 build_real (TREE_TYPE (arg),
4971 c2)))));
4974 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4975 if (! HONOR_NANS (mode))
4976 return fold (build2 (code, type, arg,
4977 build_real (TREE_TYPE (arg), c2)));
4979 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4980 if (lang_hooks.decls.global_bindings_p () == 0
4981 && ! CONTAINS_PLACEHOLDER_P (arg))
4983 arg = save_expr (arg);
4984 return fold (build2 (TRUTH_ANDIF_EXPR, type,
4985 fold (build2 (GE_EXPR, type, arg,
4986 build_real (TREE_TYPE (arg),
4987 dconst0))),
4988 fold (build2 (code, type, arg,
4989 build_real (TREE_TYPE (arg),
4990 c2)))));
4995 return NULL_TREE;
4998 /* Subroutine of fold() that optimizes comparisons against Infinities,
4999 either +Inf or -Inf.
5001 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5002 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5003 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5005 The function returns the constant folded tree if a simplification
5006 can be made, and NULL_TREE otherwise. */
5008 static tree
5009 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5011 enum machine_mode mode;
5012 REAL_VALUE_TYPE max;
5013 tree temp;
5014 bool neg;
5016 mode = TYPE_MODE (TREE_TYPE (arg0));
5018 /* For negative infinity swap the sense of the comparison. */
5019 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5020 if (neg)
5021 code = swap_tree_comparison (code);
/* From here on, the comparison can be reasoned about as if against
   +Inf; NEG still records the original sign for real_maxval and for
   picking the direction of the rewritten comparison.  */
5023 switch (code)
5025 case GT_EXPR:
5026 /* x > +Inf is always false, if we ignore sNaNs. */
5027 if (HONOR_SNANS (mode))
5028 return NULL_TREE;
5029 return omit_one_operand (type,
5030 fold_convert (type, integer_zero_node),
5031 arg0);
5033 case LE_EXPR:
5034 /* x <= +Inf is always true, if we don't care about NaNs. */
5035 if (! HONOR_NANS (mode))
5036 return omit_one_operand (type,
5037 fold_convert (type, integer_one_node),
5038 arg0);
5040 /* x <= +Inf is the same as x == x, i.e. ! isnan (x). */
5041 if (lang_hooks.decls.global_bindings_p () == 0
5042 && ! CONTAINS_PLACEHOLDER_P (arg0))
5044 arg0 = save_expr (arg0);
5045 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5047 break;
5049 case EQ_EXPR:
5050 case GE_EXPR:
5051 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5052 real_maxval (&max, neg, mode);
5053 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5054 arg0, build_real (TREE_TYPE (arg0), max)));
5056 case LT_EXPR:
5057 /* x < +Inf is always equal to x <= DBL_MAX. */
5058 real_maxval (&max, neg, mode);
5059 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5060 arg0, build_real (TREE_TYPE (arg0), max)));
5062 case NE_EXPR:
5063 /* x != +Inf is always equal to !(x > DBL_MAX). */
5064 real_maxval (&max, neg, mode);
5065 if (! HONOR_NANS (mode))
5066 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5067 arg0, build_real (TREE_TYPE (arg0), max)));
/* With NaNs, keep the explicit negation so a NaN operand still
   yields the correct result.  */
5068 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5069 arg0, build_real (TREE_TYPE (arg0), max)));
5070 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5072 default:
5073 break;
5076 return NULL_TREE;
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_INT_CST
   (the code below reads TREE_INT_CST_LOW/HIGH from it), and ARG0 is
   the division whose operands are extracted as ARG00 / ARG01.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
			 TREE_INT_CST_HIGH (arg01),
			 TREE_INT_CST_LOW (arg1),
			 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_2 (lpart, hpart);
  TREE_TYPE (prod) = TREE_TYPE (arg00);
  /* Overflow is recorded both when force_fit_type reports it and when
     fitting the value to the type changed either word of the product.  */
  TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
			 || TREE_INT_CST_HIGH (prod) != hpart
			 || TREE_INT_CST_LOW (prod) != lpart;
  TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);

  /* Compute the range [LO, HI] of values of ARG00 for which
     ARG00 / ARG01 == ARG1 holds, tracking overflow in LO/HI.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
			     TREE_INT_CST_HIGH (prod),
			     TREE_INT_CST_LOW (tmp),
			     TREE_INT_CST_HIGH (tmp),
			     &lpart, &hpart);
      hi = build_int_2 (lpart, hpart);
      TREE_TYPE (hi) = TREE_TYPE (arg00);
      TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
			   || TREE_INT_CST_HIGH (hi) != hpart
			   || TREE_INT_CST_LOW (hi) != lpart
			   || TREE_OVERFLOW (prod);
      TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      /* Signed division by a positive constant: the range direction
	 depends on the sign of ARG1.  */
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  abort ();
	}
    }
  else
    {
      /* Signed division by a negative constant: mirror image of the
	 positive-divisor case above.  */
      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  abort ();
	}
    }

  /* Rewrite the comparison against the range [LO, HI].  An overflowed
     bound means that side of the range is unbounded, so the comparison
     degenerates to a single bound test or a constant.  */
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold (build2 (GE_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
	return fold (build2 (LE_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold (build2 (LT_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
	return fold (build2 (GT_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (LT_EXPR, type, arg00, lo));

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (LE_EXPR, type, arg00, hi));

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (GT_EXPR, type, arg00, hi));

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (GE_EXPR, type, arg00, lo));

    default:
      break;
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
     operand 0.  */
  if (code == TRUTH_NOT_EXPR)
    {
      code = TREE_CODE (arg0);
      if (code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      /* Extract the arguments of the EQ/NE.  */
      arg1 = TREE_OPERAND (arg0, 1);
      arg0 = TREE_OPERAND (arg0, 0);

      /* This requires us to invert the code.  */
      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
    }

  /* If this is testing a single bit, we can optimize the test.
     The shape matched here is (A & C) ==/!= 0 with C a power of two.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE)
	{
	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
	  return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
			       result_type, fold_convert (stype, arg00),
			       fold_convert (stype, integer_zero_node)));
	}

      /* At this point, we know that arg0 is not testing the sign bit.  */
      if (TYPE_PRECISION (type) - 1 == bitnum)
	abort ();

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  /* NOTE(review): BITNUM < TYPE_PRECISION here, so the second
	     operand of compare_tree_int above is negative; confirm the
	     intended bound is TYPE_PRECISION (type) - bitnum.  */
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      /* For EQ, the tested bit must be inverted: (A>>C2)&1 == 0
	 becomes ((A>>C2)^1)&1.  */
      if (code == EQ_EXPR)
	inner = build2 (BIT_XOR_EXPR, intermediate_type,
			inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
		      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
5333 /* Check whether we are allowed to reorder operands arg0 and arg1,
5334 such that the evaluation of arg1 occurs before arg0. */
5336 static bool
5337 reorder_operands_p (tree arg0, tree arg1)
5339 if (! flag_evaluation_order)
5340 return true;
5341 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5342 return true;
5343 return ! TREE_SIDE_EFFECTS (arg0)
5344 && ! TREE_SIDE_EFFECTS (arg1);
5347 /* Test whether it is preferable two swap two operands, ARG0 and
5348 ARG1, for example because ARG0 is an integer constant and ARG1
5349 isn't. If REORDER is true, only recommend swapping if we can
5350 evaluate the operands in reverse order. */
5352 static bool
5353 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5355 STRIP_SIGN_NOPS (arg0);
5356 STRIP_SIGN_NOPS (arg1);
5358 if (TREE_CODE (arg1) == INTEGER_CST)
5359 return 0;
5360 if (TREE_CODE (arg0) == INTEGER_CST)
5361 return 1;
5363 if (TREE_CODE (arg1) == REAL_CST)
5364 return 0;
5365 if (TREE_CODE (arg0) == REAL_CST)
5366 return 1;
5368 if (TREE_CODE (arg1) == COMPLEX_CST)
5369 return 0;
5370 if (TREE_CODE (arg0) == COMPLEX_CST)
5371 return 1;
5373 if (TREE_CONSTANT (arg1))
5374 return 0;
5375 if (TREE_CONSTANT (arg0))
5376 return 1;
5378 if (optimize_size)
5379 return 0;
5381 if (reorder && flag_evaluation_order
5382 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5383 return 0;
5385 if (DECL_P (arg1))
5386 return 0;
5387 if (DECL_P (arg0))
5388 return 1;
5390 if (reorder && flag_evaluation_order
5391 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5392 return 0;
5394 if (DECL_P (arg1))
5395 return 0;
5396 if (DECL_P (arg0))
5397 return 1;
5399 return 0;
5402 /* Perform constant folding and related simplification of EXPR.
5403 The related simplifications include x*1 => x, x*0 => 0, etc.,
5404 and application of the associative law.
5405 NOP_EXPR conversions may be removed freely (as long as we
5406 are careful not to change the type of the overall expression).
5407 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5408 but we can constant-fold them if they have constant operands. */
5410 #ifdef ENABLE_FOLD_CHECKING
5411 # define fold(x) fold_1 (x)
5412 static tree fold_1 (tree);
5413 static
5414 #endif
5415 tree
5416 fold (tree expr)
5418 const tree t = expr;
5419 const tree type = TREE_TYPE (expr);
5420 tree t1 = NULL_TREE;
5421 tree tem;
5422 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5423 enum tree_code code = TREE_CODE (t);
5424 int kind = TREE_CODE_CLASS (code);
5426 /* WINS will be nonzero when the switch is done
5427 if all operands are constant. */
5428 int wins = 1;
5430 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5431 Likewise for a SAVE_EXPR that's already been evaluated. */
5432 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5433 return t;
5435 /* Return right away if a constant. */
5436 if (kind == 'c')
5437 return t;
5439 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5441 tree subop;
5443 /* Special case for conversion ops that can have fixed point args. */
5444 arg0 = TREE_OPERAND (t, 0);
5446 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5447 if (arg0 != 0)
5448 STRIP_SIGN_NOPS (arg0);
5450 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5451 subop = TREE_REALPART (arg0);
5452 else
5453 subop = arg0;
5455 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5456 && TREE_CODE (subop) != REAL_CST)
5457 /* Note that TREE_CONSTANT isn't enough:
5458 static var addresses are constant but we can't
5459 do arithmetic on them. */
5460 wins = 0;
5462 else if (IS_EXPR_CODE_CLASS (kind))
5464 int len = first_rtl_op (code);
5465 int i;
5466 for (i = 0; i < len; i++)
5468 tree op = TREE_OPERAND (t, i);
5469 tree subop;
5471 if (op == 0)
5472 continue; /* Valid for CALL_EXPR, at least. */
5474 /* Strip any conversions that don't change the mode. This is
5475 safe for every expression, except for a comparison expression
5476 because its signedness is derived from its operands. So, in
5477 the latter case, only strip conversions that don't change the
5478 signedness.
5480 Note that this is done as an internal manipulation within the
5481 constant folder, in order to find the simplest representation
5482 of the arguments so that their form can be studied. In any
5483 cases, the appropriate type conversions should be put back in
5484 the tree that will get out of the constant folder. */
5485 if (kind == '<')
5486 STRIP_SIGN_NOPS (op);
5487 else
5488 STRIP_NOPS (op);
5490 if (TREE_CODE (op) == COMPLEX_CST)
5491 subop = TREE_REALPART (op);
5492 else
5493 subop = op;
5495 if (TREE_CODE (subop) != INTEGER_CST
5496 && TREE_CODE (subop) != REAL_CST)
5497 /* Note that TREE_CONSTANT isn't enough:
5498 static var addresses are constant but we can't
5499 do arithmetic on them. */
5500 wins = 0;
5502 if (i == 0)
5503 arg0 = op;
5504 else if (i == 1)
5505 arg1 = op;
5509 /* If this is a commutative operation, and ARG0 is a constant, move it
5510 to ARG1 to reduce the number of tests below. */
5511 if (commutative_tree_code (code)
5512 && tree_swap_operands_p (arg0, arg1, true))
5513 return fold (build2 (code, type, TREE_OPERAND (t, 1),
5514 TREE_OPERAND (t, 0)));
5516 /* Now WINS is set as described above,
5517 ARG0 is the first operand of EXPR,
5518 and ARG1 is the second operand (if it has more than one operand).
5520 First check for cases where an arithmetic operation is applied to a
5521 compound, conditional, or comparison operation. Push the arithmetic
5522 operation inside the compound or conditional to see if any folding
5523 can then be done. Convert comparison to conditional for this purpose.
5524 The also optimizes non-constant cases that used to be done in
5525 expand_expr.
5527 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5528 one of the operands is a comparison and the other is a comparison, a
5529 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5530 code below would make the expression more complex. Change it to a
5531 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5532 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5534 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5535 || code == EQ_EXPR || code == NE_EXPR)
5536 && ((truth_value_p (TREE_CODE (arg0))
5537 && (truth_value_p (TREE_CODE (arg1))
5538 || (TREE_CODE (arg1) == BIT_AND_EXPR
5539 && integer_onep (TREE_OPERAND (arg1, 1)))))
5540 || (truth_value_p (TREE_CODE (arg1))
5541 && (truth_value_p (TREE_CODE (arg0))
5542 || (TREE_CODE (arg0) == BIT_AND_EXPR
5543 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5545 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5546 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5547 : TRUTH_XOR_EXPR,
5548 type, fold_convert (boolean_type_node, arg0),
5549 fold_convert (boolean_type_node, arg1)));
5551 if (code == EQ_EXPR)
5552 tem = invert_truthvalue (tem);
5554 return tem;
5557 if (TREE_CODE_CLASS (code) == '1')
5559 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5560 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5561 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5562 else if (TREE_CODE (arg0) == COND_EXPR)
5564 tree arg01 = TREE_OPERAND (arg0, 1);
5565 tree arg02 = TREE_OPERAND (arg0, 2);
5566 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5567 arg01 = fold (build1 (code, type, arg01));
5568 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5569 arg02 = fold (build1 (code, type, arg02));
5570 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5571 arg01, arg02));
5573 /* If this was a conversion, and all we did was to move into
5574 inside the COND_EXPR, bring it back out. But leave it if
5575 it is a conversion from integer to integer and the
5576 result precision is no wider than a word since such a
5577 conversion is cheap and may be optimized away by combine,
5578 while it couldn't if it were outside the COND_EXPR. Then return
5579 so we don't get into an infinite recursion loop taking the
5580 conversion out and then back in. */
5582 if ((code == NOP_EXPR || code == CONVERT_EXPR
5583 || code == NON_LVALUE_EXPR)
5584 && TREE_CODE (tem) == COND_EXPR
5585 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
5586 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
5587 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
5588 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
5589 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
5590 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
5591 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
5592 && (INTEGRAL_TYPE_P
5593 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
5594 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
5595 tem = build1 (code, type,
5596 build3 (COND_EXPR,
5597 TREE_TYPE (TREE_OPERAND
5598 (TREE_OPERAND (tem, 1), 0)),
5599 TREE_OPERAND (tem, 0),
5600 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
5601 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
5602 return tem;
5604 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5606 if (TREE_CODE (type) == BOOLEAN_TYPE)
5608 arg0 = copy_node (arg0);
5609 TREE_TYPE (arg0) = type;
5610 return arg0;
5612 else if (TREE_CODE (type) != INTEGER_TYPE)
5613 return fold (build3 (COND_EXPR, type, arg0,
5614 fold (build1 (code, type,
5615 integer_one_node)),
5616 fold (build1 (code, type,
5617 integer_zero_node))));
5620 else if (TREE_CODE_CLASS (code) == '<'
5621 && TREE_CODE (arg0) == COMPOUND_EXPR)
5622 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5623 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
5624 else if (TREE_CODE_CLASS (code) == '<'
5625 && TREE_CODE (arg1) == COMPOUND_EXPR)
5626 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5627 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
5628 else if (TREE_CODE_CLASS (code) == '2'
5629 || TREE_CODE_CLASS (code) == '<')
5631 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5632 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5633 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
5634 arg1)));
5635 if (TREE_CODE (arg1) == COMPOUND_EXPR
5636 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
5637 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5638 fold (build2 (code, type,
5639 arg0, TREE_OPERAND (arg1, 1))));
5641 if (TREE_CODE (arg0) == COND_EXPR
5642 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5644 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5645 /*cond_first_p=*/1);
5646 if (tem != NULL_TREE)
5647 return tem;
5650 if (TREE_CODE (arg1) == COND_EXPR
5651 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
5653 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5654 /*cond_first_p=*/0);
5655 if (tem != NULL_TREE)
5656 return tem;
5660 switch (code)
5662 case CONST_DECL:
5663 return fold (DECL_INITIAL (t));
5665 case NOP_EXPR:
5666 case FLOAT_EXPR:
5667 case CONVERT_EXPR:
5668 case FIX_TRUNC_EXPR:
5669 case FIX_CEIL_EXPR:
5670 case FIX_FLOOR_EXPR:
5671 case FIX_ROUND_EXPR:
5672 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
5673 return TREE_OPERAND (t, 0);
5675 /* Handle cases of two conversions in a row. */
5676 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5677 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5679 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5680 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5681 int inside_int = INTEGRAL_TYPE_P (inside_type);
5682 int inside_ptr = POINTER_TYPE_P (inside_type);
5683 int inside_float = FLOAT_TYPE_P (inside_type);
5684 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5685 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
5686 int inter_int = INTEGRAL_TYPE_P (inter_type);
5687 int inter_ptr = POINTER_TYPE_P (inter_type);
5688 int inter_float = FLOAT_TYPE_P (inter_type);
5689 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5690 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
5691 int final_int = INTEGRAL_TYPE_P (type);
5692 int final_ptr = POINTER_TYPE_P (type);
5693 int final_float = FLOAT_TYPE_P (type);
5694 unsigned int final_prec = TYPE_PRECISION (type);
5695 int final_unsignedp = TYPE_UNSIGNED (type);
5697 /* In addition to the cases of two conversions in a row
5698 handled below, if we are converting something to its own
5699 type via an object of identical or wider precision, neither
5700 conversion is needed. */
5701 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
5702 && ((inter_int && final_int) || (inter_float && final_float))
5703 && inter_prec >= final_prec)
5704 return fold (build1 (code, type,
5705 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5707 /* Likewise, if the intermediate and final types are either both
5708 float or both integer, we don't need the middle conversion if
5709 it is wider than the final type and doesn't change the signedness
5710 (for integers). Avoid this if the final type is a pointer
5711 since then we sometimes need the inner conversion. Likewise if
5712 the outer has a precision not equal to the size of its mode. */
5713 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5714 || (inter_float && inside_float))
5715 && inter_prec >= inside_prec
5716 && (inter_float || inter_unsignedp == inside_unsignedp)
5717 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5718 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5719 && ! final_ptr)
5720 return fold (build1 (code, type,
5721 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5723 /* If we have a sign-extension of a zero-extended value, we can
5724 replace that by a single zero-extension. */
5725 if (inside_int && inter_int && final_int
5726 && inside_prec < inter_prec && inter_prec < final_prec
5727 && inside_unsignedp && !inter_unsignedp)
5728 return fold (build1 (code, type,
5729 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5731 /* Two conversions in a row are not needed unless:
5732 - some conversion is floating-point (overstrict for now), or
5733 - the intermediate type is narrower than both initial and
5734 final, or
5735 - the intermediate type and innermost type differ in signedness,
5736 and the outermost type is wider than the intermediate, or
5737 - the initial type is a pointer type and the precisions of the
5738 intermediate and final types differ, or
5739 - the final type is a pointer type and the precisions of the
5740 initial and intermediate types differ. */
5741 if (! inside_float && ! inter_float && ! final_float
5742 && (inter_prec > inside_prec || inter_prec > final_prec)
5743 && ! (inside_int && inter_int
5744 && inter_unsignedp != inside_unsignedp
5745 && inter_prec < final_prec)
5746 && ((inter_unsignedp && inter_prec > inside_prec)
5747 == (final_unsignedp && final_prec > inter_prec))
5748 && ! (inside_ptr && inter_prec != final_prec)
5749 && ! (final_ptr && inside_prec != inter_prec)
5750 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5751 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5752 && ! final_ptr)
5753 return fold (build1 (code, type,
5754 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5757 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5758 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5759 /* Detect assigning a bitfield. */
5760 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5761 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5763 /* Don't leave an assignment inside a conversion
5764 unless assigning a bitfield. */
5765 tree prev = TREE_OPERAND (t, 0);
5766 tem = copy_node (t);
5767 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
5768 /* First do the assignment, then return converted constant. */
5769 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
5770 TREE_NO_WARNING (tem) = 1;
5771 TREE_USED (tem) = 1;
5772 return tem;
5775 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5776 constants (if x has signed type, the sign bit cannot be set
5777 in c). This folds extension into the BIT_AND_EXPR. */
5778 if (INTEGRAL_TYPE_P (type)
5779 && TREE_CODE (type) != BOOLEAN_TYPE
5780 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5781 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5783 tree and = TREE_OPERAND (t, 0);
5784 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5785 int change = 0;
5787 if (TYPE_UNSIGNED (TREE_TYPE (and))
5788 || (TYPE_PRECISION (type)
5789 <= TYPE_PRECISION (TREE_TYPE (and))))
5790 change = 1;
5791 else if (TYPE_PRECISION (TREE_TYPE (and1))
5792 <= HOST_BITS_PER_WIDE_INT
5793 && host_integerp (and1, 1))
5795 unsigned HOST_WIDE_INT cst;
5797 cst = tree_low_cst (and1, 1);
5798 cst &= (HOST_WIDE_INT) -1
5799 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5800 change = (cst == 0);
5801 #ifdef LOAD_EXTEND_OP
5802 if (change
5803 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5804 == ZERO_EXTEND))
5806 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
5807 and0 = fold_convert (uns, and0);
5808 and1 = fold_convert (uns, and1);
5810 #endif
5812 if (change)
5813 return fold (build2 (BIT_AND_EXPR, type,
5814 fold_convert (type, and0),
5815 fold_convert (type, and1)));
5818 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
5819 T2 being pointers to types of the same size. */
5820 if (POINTER_TYPE_P (TREE_TYPE (t))
5821 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
5822 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
5823 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5825 tree arg00 = TREE_OPERAND (arg0, 0);
5826 tree t0 = TREE_TYPE (t);
5827 tree t1 = TREE_TYPE (arg00);
5828 tree tt0 = TREE_TYPE (t0);
5829 tree tt1 = TREE_TYPE (t1);
5830 tree s0 = TYPE_SIZE (tt0);
5831 tree s1 = TYPE_SIZE (tt1);
5833 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
5834 return build2 (TREE_CODE (arg0), t0, convert (t0, arg00),
5835 TREE_OPERAND (arg0, 1));
5838 tem = fold_convert_const (code, type, arg0);
5839 return tem ? tem : t;
5841 case VIEW_CONVERT_EXPR:
5842 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5843 return build1 (VIEW_CONVERT_EXPR, type,
5844 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5845 return t;
5847 case COMPONENT_REF:
5848 if (TREE_CODE (arg0) == CONSTRUCTOR
5849 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5851 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5852 if (m)
5853 return TREE_VALUE (m);
5855 return t;
5857 case RANGE_EXPR:
5858 if (TREE_CONSTANT (t) != wins)
5860 tem = copy_node (t);
5861 TREE_CONSTANT (tem) = wins;
5862 TREE_INVARIANT (tem) = wins;
5863 return tem;
5865 return t;
5867 case NEGATE_EXPR:
5868 if (negate_expr_p (arg0))
5869 return fold_convert (type, negate_expr (arg0));
5870 return t;
5872 case ABS_EXPR:
5873 if (wins
5874 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
5875 return fold_abs_const (arg0, type);
5876 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5877 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5878 /* Convert fabs((double)float) into (double)fabsf(float). */
5879 else if (TREE_CODE (arg0) == NOP_EXPR
5880 && TREE_CODE (type) == REAL_TYPE)
5882 tree targ0 = strip_float_extensions (arg0);
5883 if (targ0 != arg0)
5884 return fold_convert (type, fold (build1 (ABS_EXPR,
5885 TREE_TYPE (targ0),
5886 targ0)));
5888 else if (tree_expr_nonnegative_p (arg0))
5889 return arg0;
5890 return t;
5892 case CONJ_EXPR:
5893 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5894 return fold_convert (type, arg0);
5895 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5896 return build2 (COMPLEX_EXPR, type,
5897 TREE_OPERAND (arg0, 0),
5898 negate_expr (TREE_OPERAND (arg0, 1)));
5899 else if (TREE_CODE (arg0) == COMPLEX_CST)
5900 return build_complex (type, TREE_REALPART (arg0),
5901 negate_expr (TREE_IMAGPART (arg0)));
5902 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5903 return fold (build2 (TREE_CODE (arg0), type,
5904 fold (build1 (CONJ_EXPR, type,
5905 TREE_OPERAND (arg0, 0))),
5906 fold (build1 (CONJ_EXPR, type,
5907 TREE_OPERAND (arg0, 1)))));
5908 else if (TREE_CODE (arg0) == CONJ_EXPR)
5909 return TREE_OPERAND (arg0, 0);
5910 return t;
5912 case BIT_NOT_EXPR:
5913 if (wins)
5915 tem = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5916 ~ TREE_INT_CST_HIGH (arg0));
5917 TREE_TYPE (tem) = type;
5918 force_fit_type (tem, 0);
5919 TREE_OVERFLOW (tem) = TREE_OVERFLOW (arg0);
5920 TREE_CONSTANT_OVERFLOW (tem) = TREE_CONSTANT_OVERFLOW (arg0);
5921 return tem;
5923 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5924 return TREE_OPERAND (arg0, 0);
5925 return t;
5927 case PLUS_EXPR:
5928 /* A + (-B) -> A - B */
5929 if (TREE_CODE (arg1) == NEGATE_EXPR)
5930 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5931 /* (-A) + B -> B - A */
5932 if (TREE_CODE (arg0) == NEGATE_EXPR
5933 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
5934 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5935 if (! FLOAT_TYPE_P (type))
5937 if (integer_zerop (arg1))
5938 return non_lvalue (fold_convert (type, arg0));
5940 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5941 with a constant, and the two constants have no bits in common,
5942 we should treat this as a BIT_IOR_EXPR since this may produce more
5943 simplifications. */
5944 if (TREE_CODE (arg0) == BIT_AND_EXPR
5945 && TREE_CODE (arg1) == BIT_AND_EXPR
5946 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5947 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5948 && integer_zerop (const_binop (BIT_AND_EXPR,
5949 TREE_OPERAND (arg0, 1),
5950 TREE_OPERAND (arg1, 1), 0)))
5952 code = BIT_IOR_EXPR;
5953 goto bit_ior;
5956 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5957 (plus (plus (mult) (mult)) (foo)) so that we can
5958 take advantage of the factoring cases below. */
5959 if ((TREE_CODE (arg0) == PLUS_EXPR
5960 && TREE_CODE (arg1) == MULT_EXPR)
5961 || (TREE_CODE (arg1) == PLUS_EXPR
5962 && TREE_CODE (arg0) == MULT_EXPR))
5964 tree parg0, parg1, parg, marg;
5966 if (TREE_CODE (arg0) == PLUS_EXPR)
5967 parg = arg0, marg = arg1;
5968 else
5969 parg = arg1, marg = arg0;
5970 parg0 = TREE_OPERAND (parg, 0);
5971 parg1 = TREE_OPERAND (parg, 1);
5972 STRIP_NOPS (parg0);
5973 STRIP_NOPS (parg1);
5975 if (TREE_CODE (parg0) == MULT_EXPR
5976 && TREE_CODE (parg1) != MULT_EXPR)
5977 return fold (build2 (PLUS_EXPR, type,
5978 fold (build2 (PLUS_EXPR, type,
5979 fold_convert (type, parg0),
5980 fold_convert (type, marg))),
5981 fold_convert (type, parg1)));
5982 if (TREE_CODE (parg0) != MULT_EXPR
5983 && TREE_CODE (parg1) == MULT_EXPR)
5984 return fold (build2 (PLUS_EXPR, type,
5985 fold (build2 (PLUS_EXPR, type,
5986 fold_convert (type, parg1),
5987 fold_convert (type, marg))),
5988 fold_convert (type, parg0)));
5991 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5993 tree arg00, arg01, arg10, arg11;
5994 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5996 /* (A * C) + (B * C) -> (A+B) * C.
5997 We are most concerned about the case where C is a constant,
5998 but other combinations show up during loop reduction. Since
5999 it is not difficult, try all four possibilities. */
6001 arg00 = TREE_OPERAND (arg0, 0);
6002 arg01 = TREE_OPERAND (arg0, 1);
6003 arg10 = TREE_OPERAND (arg1, 0);
6004 arg11 = TREE_OPERAND (arg1, 1);
6005 same = NULL_TREE;
6007 if (operand_equal_p (arg01, arg11, 0))
6008 same = arg01, alt0 = arg00, alt1 = arg10;
6009 else if (operand_equal_p (arg00, arg10, 0))
6010 same = arg00, alt0 = arg01, alt1 = arg11;
6011 else if (operand_equal_p (arg00, arg11, 0))
6012 same = arg00, alt0 = arg01, alt1 = arg10;
6013 else if (operand_equal_p (arg01, arg10, 0))
6014 same = arg01, alt0 = arg00, alt1 = arg11;
6016 /* No identical multiplicands; see if we can find a common
6017 power-of-two factor in non-power-of-two multiplies. This
6018 can help in multi-dimensional array access. */
6019 else if (TREE_CODE (arg01) == INTEGER_CST
6020 && TREE_CODE (arg11) == INTEGER_CST
6021 && TREE_INT_CST_HIGH (arg01) == 0
6022 && TREE_INT_CST_HIGH (arg11) == 0)
6024 HOST_WIDE_INT int01, int11, tmp;
6025 int01 = TREE_INT_CST_LOW (arg01);
6026 int11 = TREE_INT_CST_LOW (arg11);
6028 /* Move min of absolute values to int11. */
6029 if ((int01 >= 0 ? int01 : -int01)
6030 < (int11 >= 0 ? int11 : -int11))
6032 tmp = int01, int01 = int11, int11 = tmp;
6033 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6034 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6037 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6039 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6040 build_int_2 (int01 / int11, 0)));
6041 alt1 = arg10;
6042 same = arg11;
6046 if (same)
6047 return fold (build2 (MULT_EXPR, type,
6048 fold (build2 (PLUS_EXPR, type,
6049 alt0, alt1)),
6050 same));
6053 else
6055 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6056 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6057 return non_lvalue (fold_convert (type, arg0));
6059 /* Likewise if the operands are reversed. */
6060 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6061 return non_lvalue (fold_convert (type, arg1));
6063 /* Convert x+x into x*2.0. */
6064 if (operand_equal_p (arg0, arg1, 0)
6065 && SCALAR_FLOAT_TYPE_P (type))
6066 return fold (build2 (MULT_EXPR, type, arg0,
6067 build_real (type, dconst2)));
6069 /* Convert x*c+x into x*(c+1). */
6070 if (flag_unsafe_math_optimizations
6071 && TREE_CODE (arg0) == MULT_EXPR
6072 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6073 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6074 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6076 REAL_VALUE_TYPE c;
6078 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6079 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6080 return fold (build2 (MULT_EXPR, type, arg1,
6081 build_real (type, c)));
6084 /* Convert x+x*c into x*(c+1). */
6085 if (flag_unsafe_math_optimizations
6086 && TREE_CODE (arg1) == MULT_EXPR
6087 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6088 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6089 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6091 REAL_VALUE_TYPE c;
6093 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6094 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6095 return fold (build2 (MULT_EXPR, type, arg0,
6096 build_real (type, c)));
6099 /* Convert x*c1+x*c2 into x*(c1+c2). */
6100 if (flag_unsafe_math_optimizations
6101 && TREE_CODE (arg0) == MULT_EXPR
6102 && TREE_CODE (arg1) == MULT_EXPR
6103 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6104 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6105 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6106 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6107 && operand_equal_p (TREE_OPERAND (arg0, 0),
6108 TREE_OPERAND (arg1, 0), 0))
6110 REAL_VALUE_TYPE c1, c2;
6112 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6113 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6114 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6115 return fold (build2 (MULT_EXPR, type,
6116 TREE_OPERAND (arg0, 0),
6117 build_real (type, c1)));
6119 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6120 if (flag_unsafe_math_optimizations
6121 && TREE_CODE (arg1) == PLUS_EXPR
6122 && TREE_CODE (arg0) != MULT_EXPR)
6124 tree tree10 = TREE_OPERAND (arg1, 0);
6125 tree tree11 = TREE_OPERAND (arg1, 1);
6126 if (TREE_CODE (tree11) == MULT_EXPR
6127 && TREE_CODE (tree10) == MULT_EXPR)
6129 tree tree0;
6130 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6131 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6134 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
6135 if (flag_unsafe_math_optimizations
6136 && TREE_CODE (arg0) == PLUS_EXPR
6137 && TREE_CODE (arg1) != MULT_EXPR)
6139 tree tree00 = TREE_OPERAND (arg0, 0);
6140 tree tree01 = TREE_OPERAND (arg0, 1);
6141 if (TREE_CODE (tree01) == MULT_EXPR
6142 && TREE_CODE (tree00) == MULT_EXPR)
6144 tree tree0;
6145 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6146 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6151 bit_rotate:
6152 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6153 is a rotate of A by C1 bits. */
6154 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6155 is a rotate of A by B bits. */
6157 enum tree_code code0, code1;
6158 code0 = TREE_CODE (arg0);
6159 code1 = TREE_CODE (arg1);
6160 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6161 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6162 && operand_equal_p (TREE_OPERAND (arg0, 0),
6163 TREE_OPERAND (arg1, 0), 0)
6164 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6166 tree tree01, tree11;
6167 enum tree_code code01, code11;
6169 tree01 = TREE_OPERAND (arg0, 1);
6170 tree11 = TREE_OPERAND (arg1, 1);
6171 STRIP_NOPS (tree01);
6172 STRIP_NOPS (tree11);
6173 code01 = TREE_CODE (tree01);
6174 code11 = TREE_CODE (tree11);
6175 if (code01 == INTEGER_CST
6176 && code11 == INTEGER_CST
6177 && TREE_INT_CST_HIGH (tree01) == 0
6178 && TREE_INT_CST_HIGH (tree11) == 0
6179 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6180 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6181 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6182 code0 == LSHIFT_EXPR ? tree01 : tree11);
6183 else if (code11 == MINUS_EXPR)
6185 tree tree110, tree111;
6186 tree110 = TREE_OPERAND (tree11, 0);
6187 tree111 = TREE_OPERAND (tree11, 1);
6188 STRIP_NOPS (tree110);
6189 STRIP_NOPS (tree111);
6190 if (TREE_CODE (tree110) == INTEGER_CST
6191 && 0 == compare_tree_int (tree110,
6192 TYPE_PRECISION
6193 (TREE_TYPE (TREE_OPERAND
6194 (arg0, 0))))
6195 && operand_equal_p (tree01, tree111, 0))
6196 return build2 ((code0 == LSHIFT_EXPR
6197 ? LROTATE_EXPR
6198 : RROTATE_EXPR),
6199 type, TREE_OPERAND (arg0, 0), tree01);
6201 else if (code01 == MINUS_EXPR)
6203 tree tree010, tree011;
6204 tree010 = TREE_OPERAND (tree01, 0);
6205 tree011 = TREE_OPERAND (tree01, 1);
6206 STRIP_NOPS (tree010);
6207 STRIP_NOPS (tree011);
6208 if (TREE_CODE (tree010) == INTEGER_CST
6209 && 0 == compare_tree_int (tree010,
6210 TYPE_PRECISION
6211 (TREE_TYPE (TREE_OPERAND
6212 (arg0, 0))))
6213 && operand_equal_p (tree11, tree011, 0))
6214 return build2 ((code0 != LSHIFT_EXPR
6215 ? LROTATE_EXPR
6216 : RROTATE_EXPR),
6217 type, TREE_OPERAND (arg0, 0), tree11);
6222 associate:
6223 /* In most languages, can't associate operations on floats through
6224 parentheses. Rather than remember where the parentheses were, we
6225 don't associate floats at all, unless the user has specified
6226 -funsafe-math-optimizations. */
6228 if (! wins
6229 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6231 tree var0, con0, lit0, minus_lit0;
6232 tree var1, con1, lit1, minus_lit1;
6234 /* Split both trees into variables, constants, and literals. Then
6235 associate each group together, the constants with literals,
6236 then the result with variables. This increases the chances of
6237 literals being recombined later and of generating relocatable
6238 expressions for the sum of a constant and literal. */
6239 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6240 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6241 code == MINUS_EXPR);
6243 /* Only do something if we found more than two objects. Otherwise,
6244 nothing has changed and we risk infinite recursion. */
6245 if (2 < ((var0 != 0) + (var1 != 0)
6246 + (con0 != 0) + (con1 != 0)
6247 + (lit0 != 0) + (lit1 != 0)
6248 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6250 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6251 if (code == MINUS_EXPR)
6252 code = PLUS_EXPR;
6254 var0 = associate_trees (var0, var1, code, type);
6255 con0 = associate_trees (con0, con1, code, type);
6256 lit0 = associate_trees (lit0, lit1, code, type);
6257 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6259 /* Preserve the MINUS_EXPR if the negative part of the literal is
6260 greater than the positive part. Otherwise, the multiplicative
6261 folding code (i.e extract_muldiv) may be fooled in case
6262 unsigned constants are subtracted, like in the following
6263 example: ((X*2 + 4) - 8U)/2. */
6264 if (minus_lit0 && lit0)
6266 if (TREE_CODE (lit0) == INTEGER_CST
6267 && TREE_CODE (minus_lit0) == INTEGER_CST
6268 && tree_int_cst_lt (lit0, minus_lit0))
6270 minus_lit0 = associate_trees (minus_lit0, lit0,
6271 MINUS_EXPR, type);
6272 lit0 = 0;
6274 else
6276 lit0 = associate_trees (lit0, minus_lit0,
6277 MINUS_EXPR, type);
6278 minus_lit0 = 0;
6281 if (minus_lit0)
6283 if (con0 == 0)
6284 return fold_convert (type,
6285 associate_trees (var0, minus_lit0,
6286 MINUS_EXPR, type));
6287 else
6289 con0 = associate_trees (con0, minus_lit0,
6290 MINUS_EXPR, type);
6291 return fold_convert (type,
6292 associate_trees (var0, con0,
6293 PLUS_EXPR, type));
6297 con0 = associate_trees (con0, lit0, code, type);
6298 return fold_convert (type, associate_trees (var0, con0,
6299 code, type));
6303 binary:
6304 if (wins)
6305 t1 = const_binop (code, arg0, arg1, 0);
6306 if (t1 != NULL_TREE)
6308 /* The return value should always have
6309 the same type as the original expression. */
6310 if (TREE_TYPE (t1) != type)
6311 t1 = fold_convert (type, t1);
6313 return t1;
6315 return t;
6317 case MINUS_EXPR:
6318 /* A - (-B) -> A + B */
6319 if (TREE_CODE (arg1) == NEGATE_EXPR)
6320 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6321 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6322 if (TREE_CODE (arg0) == NEGATE_EXPR
6323 && (FLOAT_TYPE_P (type)
6324 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6325 && negate_expr_p (arg1)
6326 && reorder_operands_p (arg0, arg1))
6327 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6328 TREE_OPERAND (arg0, 0)));
6330 if (! FLOAT_TYPE_P (type))
6332 if (! wins && integer_zerop (arg0))
6333 return negate_expr (fold_convert (type, arg1));
6334 if (integer_zerop (arg1))
6335 return non_lvalue (fold_convert (type, arg0));
6337 /* Fold A - (A & B) into ~B & A. */
6338 if (!TREE_SIDE_EFFECTS (arg0)
6339 && TREE_CODE (arg1) == BIT_AND_EXPR)
6341 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6342 return fold (build2 (BIT_AND_EXPR, type,
6343 fold (build1 (BIT_NOT_EXPR, type,
6344 TREE_OPERAND (arg1, 0))),
6345 arg0));
6346 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6347 return fold (build2 (BIT_AND_EXPR, type,
6348 fold (build1 (BIT_NOT_EXPR, type,
6349 TREE_OPERAND (arg1, 1))),
6350 arg0));
6353 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6354 any power of 2 minus 1. */
6355 if (TREE_CODE (arg0) == BIT_AND_EXPR
6356 && TREE_CODE (arg1) == BIT_AND_EXPR
6357 && operand_equal_p (TREE_OPERAND (arg0, 0),
6358 TREE_OPERAND (arg1, 0), 0))
6360 tree mask0 = TREE_OPERAND (arg0, 1);
6361 tree mask1 = TREE_OPERAND (arg1, 1);
6362 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6364 if (operand_equal_p (tem, mask1, 0))
6366 tem = fold (build2 (BIT_XOR_EXPR, type,
6367 TREE_OPERAND (arg0, 0), mask1));
6368 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6373 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6374 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6375 return non_lvalue (fold_convert (type, arg0));
6377 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6378 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6379 (-ARG1 + ARG0) reduces to -ARG1. */
6380 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6381 return negate_expr (fold_convert (type, arg1));
6383 /* Fold &x - &x. This can happen from &x.foo - &x.
6384 This is unsafe for certain floats even in non-IEEE formats.
6385 In IEEE, it is unsafe because it does wrong for NaNs.
6386 Also note that operand_equal_p is always false if an operand
6387 is volatile. */
6389 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6390 && operand_equal_p (arg0, arg1, 0))
6391 return fold_convert (type, integer_zero_node);
6393 /* A - B -> A + (-B) if B is easily negatable. */
6394 if (!wins && negate_expr_p (arg1)
6395 && (FLOAT_TYPE_P (type)
6396 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6397 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6399 if (TREE_CODE (arg0) == MULT_EXPR
6400 && TREE_CODE (arg1) == MULT_EXPR
6401 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6403 /* (A * C) - (B * C) -> (A-B) * C. */
6404 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6405 TREE_OPERAND (arg1, 1), 0))
6406 return fold (build2 (MULT_EXPR, type,
6407 fold (build2 (MINUS_EXPR, type,
6408 TREE_OPERAND (arg0, 0),
6409 TREE_OPERAND (arg1, 0))),
6410 TREE_OPERAND (arg0, 1)));
6411 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6412 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6413 TREE_OPERAND (arg1, 0), 0))
6414 return fold (build2 (MULT_EXPR, type,
6415 TREE_OPERAND (arg0, 0),
6416 fold (build2 (MINUS_EXPR, type,
6417 TREE_OPERAND (arg0, 1),
6418 TREE_OPERAND (arg1, 1)))));
6421 goto associate;
6423 case MULT_EXPR:
6424 /* (-A) * (-B) -> A * B */
6425 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6426 return fold (build2 (MULT_EXPR, type,
6427 TREE_OPERAND (arg0, 0),
6428 negate_expr (arg1)));
6429 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6430 return fold (build2 (MULT_EXPR, type,
6431 negate_expr (arg0),
6432 TREE_OPERAND (arg1, 0)));
6434 if (! FLOAT_TYPE_P (type))
6436 if (integer_zerop (arg1))
6437 return omit_one_operand (type, arg1, arg0);
6438 if (integer_onep (arg1))
6439 return non_lvalue (fold_convert (type, arg0));
6441 /* (a * (1 << b)) is (a << b) */
6442 if (TREE_CODE (arg1) == LSHIFT_EXPR
6443 && integer_onep (TREE_OPERAND (arg1, 0)))
6444 return fold (build2 (LSHIFT_EXPR, type, arg0,
6445 TREE_OPERAND (arg1, 1)));
6446 if (TREE_CODE (arg0) == LSHIFT_EXPR
6447 && integer_onep (TREE_OPERAND (arg0, 0)))
6448 return fold (build2 (LSHIFT_EXPR, type, arg1,
6449 TREE_OPERAND (arg0, 1)));
6451 if (TREE_CODE (arg1) == INTEGER_CST
6452 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6453 fold_convert (type, arg1),
6454 code, NULL_TREE)))
6455 return fold_convert (type, tem);
6458 else
6460 /* Maybe fold x * 0 to 0. The expressions aren't the same
6461 when x is NaN, since x * 0 is also NaN. Nor are they the
6462 same in modes with signed zeros, since multiplying a
6463 negative value by 0 gives -0, not +0. */
6464 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6465 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6466 && real_zerop (arg1))
6467 return omit_one_operand (type, arg1, arg0);
6468 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6469 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6470 && real_onep (arg1))
6471 return non_lvalue (fold_convert (type, arg0));
6473 /* Transform x * -1.0 into -x. */
6474 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6475 && real_minus_onep (arg1))
6476 return fold_convert (type, negate_expr (arg0));
6478 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6479 if (flag_unsafe_math_optimizations
6480 && TREE_CODE (arg0) == RDIV_EXPR
6481 && TREE_CODE (arg1) == REAL_CST
6482 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6484 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6485 arg1, 0);
6486 if (tem)
6487 return fold (build2 (RDIV_EXPR, type, tem,
6488 TREE_OPERAND (arg0, 1)));
6491 if (flag_unsafe_math_optimizations)
6493 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6494 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6496 /* Optimizations of root(...)*root(...). */
6497 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
6499 tree rootfn, arg, arglist;
6500 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6501 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6503 /* Optimize sqrt(x)*sqrt(x) as x. */
6504 if (BUILTIN_SQRT_P (fcode0)
6505 && operand_equal_p (arg00, arg10, 0)
6506 && ! HONOR_SNANS (TYPE_MODE (type)))
6507 return arg00;
6509 /* Optimize root(x)*root(y) as root(x*y). */
6510 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6511 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
6512 arglist = build_tree_list (NULL_TREE, arg);
6513 return build_function_call_expr (rootfn, arglist);
6516 /* Optimize expN(x)*expN(y) as expN(x+y). */
6517 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6519 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6520 tree arg = build2 (PLUS_EXPR, type,
6521 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6522 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6523 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6524 return build_function_call_expr (expfn, arglist);
6527 /* Optimizations of pow(...)*pow(...). */
6528 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6529 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6530 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6532 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6533 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6534 1)));
6535 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6536 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6537 1)));
6539 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6540 if (operand_equal_p (arg01, arg11, 0))
6542 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6543 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
6544 tree arglist = tree_cons (NULL_TREE, fold (arg),
6545 build_tree_list (NULL_TREE,
6546 arg01));
6547 return build_function_call_expr (powfn, arglist);
6550 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6551 if (operand_equal_p (arg00, arg10, 0))
6553 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6554 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
6555 tree arglist = tree_cons (NULL_TREE, arg00,
6556 build_tree_list (NULL_TREE,
6557 arg));
6558 return build_function_call_expr (powfn, arglist);
6562 /* Optimize tan(x)*cos(x) as sin(x). */
6563 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6564 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6565 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6566 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6567 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6568 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6569 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6570 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6572 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
6574 if (sinfn != NULL_TREE)
6575 return build_function_call_expr (sinfn,
6576 TREE_OPERAND (arg0, 1));
6579 /* Optimize x*pow(x,c) as pow(x,c+1). */
6580 if (fcode1 == BUILT_IN_POW
6581 || fcode1 == BUILT_IN_POWF
6582 || fcode1 == BUILT_IN_POWL)
6584 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6585 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6586 1)));
6587 if (TREE_CODE (arg11) == REAL_CST
6588 && ! TREE_CONSTANT_OVERFLOW (arg11)
6589 && operand_equal_p (arg0, arg10, 0))
6591 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6592 REAL_VALUE_TYPE c;
6593 tree arg, arglist;
6595 c = TREE_REAL_CST (arg11);
6596 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6597 arg = build_real (type, c);
6598 arglist = build_tree_list (NULL_TREE, arg);
6599 arglist = tree_cons (NULL_TREE, arg0, arglist);
6600 return build_function_call_expr (powfn, arglist);
6604 /* Optimize pow(x,c)*x as pow(x,c+1). */
6605 if (fcode0 == BUILT_IN_POW
6606 || fcode0 == BUILT_IN_POWF
6607 || fcode0 == BUILT_IN_POWL)
6609 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6610 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6611 1)));
6612 if (TREE_CODE (arg01) == REAL_CST
6613 && ! TREE_CONSTANT_OVERFLOW (arg01)
6614 && operand_equal_p (arg1, arg00, 0))
6616 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6617 REAL_VALUE_TYPE c;
6618 tree arg, arglist;
6620 c = TREE_REAL_CST (arg01);
6621 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6622 arg = build_real (type, c);
6623 arglist = build_tree_list (NULL_TREE, arg);
6624 arglist = tree_cons (NULL_TREE, arg1, arglist);
6625 return build_function_call_expr (powfn, arglist);
6629 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6630 if (! optimize_size
6631 && operand_equal_p (arg0, arg1, 0))
6633 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6635 if (powfn)
6637 tree arg = build_real (type, dconst2);
6638 tree arglist = build_tree_list (NULL_TREE, arg);
6639 arglist = tree_cons (NULL_TREE, arg0, arglist);
6640 return build_function_call_expr (powfn, arglist);
6645 goto associate;
6647 case BIT_IOR_EXPR:
6648 bit_ior:
6649 if (integer_all_onesp (arg1))
6650 return omit_one_operand (type, arg1, arg0);
6651 if (integer_zerop (arg1))
6652 return non_lvalue (fold_convert (type, arg0));
6653 if (operand_equal_p (arg0, arg1, 0))
6654 return non_lvalue (fold_convert (type, arg0));
6655 t1 = distribute_bit_expr (code, type, arg0, arg1);
6656 if (t1 != NULL_TREE)
6657 return t1;
6659 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6661 This results in more efficient code for machines without a NAND
6662 instruction. Combine will canonicalize to the first form
6663 which will allow use of NAND instructions provided by the
6664 backend if they exist. */
6665 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6666 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6668 return fold (build1 (BIT_NOT_EXPR, type,
6669 build2 (BIT_AND_EXPR, type,
6670 TREE_OPERAND (arg0, 0),
6671 TREE_OPERAND (arg1, 0))));
6674 /* See if this can be simplified into a rotate first. If that
6675 is unsuccessful continue in the association code. */
6676 goto bit_rotate;
6678 case BIT_XOR_EXPR:
6679 if (integer_zerop (arg1))
6680 return non_lvalue (fold_convert (type, arg0));
6681 if (integer_all_onesp (arg1))
6682 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6683 if (operand_equal_p (arg0, arg1, 0))
6684 return omit_one_operand (type, integer_zero_node, arg0);
6686 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6687 with a constant, and the two constants have no bits in common,
6688 we should treat this as a BIT_IOR_EXPR since this may produce more
6689 simplifications. */
6690 if (TREE_CODE (arg0) == BIT_AND_EXPR
6691 && TREE_CODE (arg1) == BIT_AND_EXPR
6692 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6693 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6694 && integer_zerop (const_binop (BIT_AND_EXPR,
6695 TREE_OPERAND (arg0, 1),
6696 TREE_OPERAND (arg1, 1), 0)))
6698 code = BIT_IOR_EXPR;
6699 goto bit_ior;
6702 /* See if this can be simplified into a rotate first. If that
6703 is unsuccessful continue in the association code. */
6704 goto bit_rotate;
6706 case BIT_AND_EXPR:
6707 if (integer_all_onesp (arg1))
6708 return non_lvalue (fold_convert (type, arg0));
6709 if (integer_zerop (arg1))
6710 return omit_one_operand (type, arg1, arg0);
6711 if (operand_equal_p (arg0, arg1, 0))
6712 return non_lvalue (fold_convert (type, arg0));
6713 t1 = distribute_bit_expr (code, type, arg0, arg1);
6714 if (t1 != NULL_TREE)
6715 return t1;
6716 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6717 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6718 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6720 unsigned int prec
6721 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6723 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6724 && (~TREE_INT_CST_LOW (arg1)
6725 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6726 return fold_convert (type, TREE_OPERAND (arg0, 0));
6729 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6731 This results in more efficient code for machines without a NOR
6732 instruction. Combine will canonicalize to the first form
6733 which will allow use of NOR instructions provided by the
6734 backend if they exist. */
6735 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6736 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6738 return fold (build1 (BIT_NOT_EXPR, type,
6739 build2 (BIT_IOR_EXPR, type,
6740 TREE_OPERAND (arg0, 0),
6741 TREE_OPERAND (arg1, 0))));
6744 goto associate;
6746 case RDIV_EXPR:
6747 /* Don't touch a floating-point divide by zero unless the mode
6748 of the constant can represent infinity. */
6749 if (TREE_CODE (arg1) == REAL_CST
6750 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6751 && real_zerop (arg1))
6752 return t;
6754 /* (-A) / (-B) -> A / B */
6755 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6756 return fold (build2 (RDIV_EXPR, type,
6757 TREE_OPERAND (arg0, 0),
6758 negate_expr (arg1)));
6759 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6760 return fold (build2 (RDIV_EXPR, type,
6761 negate_expr (arg0),
6762 TREE_OPERAND (arg1, 0)));
6764 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6765 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6766 && real_onep (arg1))
6767 return non_lvalue (fold_convert (type, arg0));
6769 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6770 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6771 && real_minus_onep (arg1))
6772 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6774 /* If ARG1 is a constant, we can convert this to a multiply by the
6775 reciprocal. This does not have the same rounding properties,
6776 so only do this if -funsafe-math-optimizations. We can actually
6777 always safely do it if ARG1 is a power of two, but it's hard to
6778 tell if it is or not in a portable manner. */
6779 if (TREE_CODE (arg1) == REAL_CST)
6781 if (flag_unsafe_math_optimizations
6782 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6783 arg1, 0)))
6784 return fold (build2 (MULT_EXPR, type, arg0, tem));
6785 /* Find the reciprocal if optimizing and the result is exact. */
6786 if (optimize)
6788 REAL_VALUE_TYPE r;
6789 r = TREE_REAL_CST (arg1);
6790 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6792 tem = build_real (type, r);
6793 return fold (build2 (MULT_EXPR, type, arg0, tem));
6797 /* Convert A/B/C to A/(B*C). */
6798 if (flag_unsafe_math_optimizations
6799 && TREE_CODE (arg0) == RDIV_EXPR)
6800 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6801 fold (build2 (MULT_EXPR, type,
6802 TREE_OPERAND (arg0, 1), arg1))));
6804 /* Convert A/(B/C) to (A/B)*C. */
6805 if (flag_unsafe_math_optimizations
6806 && TREE_CODE (arg1) == RDIV_EXPR)
6807 return fold (build2 (MULT_EXPR, type,
6808 fold (build2 (RDIV_EXPR, type, arg0,
6809 TREE_OPERAND (arg1, 0))),
6810 TREE_OPERAND (arg1, 1)));
6812 /* Convert C1/(X*C2) into (C1/C2)/X. */
6813 if (flag_unsafe_math_optimizations
6814 && TREE_CODE (arg1) == MULT_EXPR
6815 && TREE_CODE (arg0) == REAL_CST
6816 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6818 tree tem = const_binop (RDIV_EXPR, arg0,
6819 TREE_OPERAND (arg1, 1), 0);
6820 if (tem)
6821 return fold (build2 (RDIV_EXPR, type, tem,
6822 TREE_OPERAND (arg1, 0)));
6825 if (flag_unsafe_math_optimizations)
6827 enum built_in_function fcode = builtin_mathfn_code (arg1);
6828 /* Optimize x/expN(y) into x*expN(-y). */
6829 if (BUILTIN_EXPONENT_P (fcode))
6831 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6832 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
6833 tree arglist = build_tree_list (NULL_TREE,
6834 fold_convert (type, arg));
6835 arg1 = build_function_call_expr (expfn, arglist);
6836 return fold (build2 (MULT_EXPR, type, arg0, arg1));
6839 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6840 if (fcode == BUILT_IN_POW
6841 || fcode == BUILT_IN_POWF
6842 || fcode == BUILT_IN_POWL)
6844 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6845 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6846 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6847 tree neg11 = fold_convert (type, negate_expr (arg11));
6848 tree arglist = tree_cons(NULL_TREE, arg10,
6849 build_tree_list (NULL_TREE, neg11));
6850 arg1 = build_function_call_expr (powfn, arglist);
6851 return fold (build2 (MULT_EXPR, type, arg0, arg1));
6855 if (flag_unsafe_math_optimizations)
6857 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6858 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6860 /* Optimize sin(x)/cos(x) as tan(x). */
6861 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6862 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6863 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6864 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6865 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6867 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
6869 if (tanfn != NULL_TREE)
6870 return build_function_call_expr (tanfn,
6871 TREE_OPERAND (arg0, 1));
6874 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6875 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6876 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6877 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6878 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6879 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6881 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
6883 if (tanfn != NULL_TREE)
6885 tree tmp = TREE_OPERAND (arg0, 1);
6886 tmp = build_function_call_expr (tanfn, tmp);
6887 return fold (build2 (RDIV_EXPR, type,
6888 build_real (type, dconst1), tmp));
6892 /* Optimize pow(x,c)/x as pow(x,c-1). */
6893 if (fcode0 == BUILT_IN_POW
6894 || fcode0 == BUILT_IN_POWF
6895 || fcode0 == BUILT_IN_POWL)
6897 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6898 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6899 if (TREE_CODE (arg01) == REAL_CST
6900 && ! TREE_CONSTANT_OVERFLOW (arg01)
6901 && operand_equal_p (arg1, arg00, 0))
6903 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6904 REAL_VALUE_TYPE c;
6905 tree arg, arglist;
6907 c = TREE_REAL_CST (arg01);
6908 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6909 arg = build_real (type, c);
6910 arglist = build_tree_list (NULL_TREE, arg);
6911 arglist = tree_cons (NULL_TREE, arg1, arglist);
6912 return build_function_call_expr (powfn, arglist);
6916 goto binary;
6918 case TRUNC_DIV_EXPR:
6919 case ROUND_DIV_EXPR:
6920 case FLOOR_DIV_EXPR:
6921 case CEIL_DIV_EXPR:
6922 case EXACT_DIV_EXPR:
6923 if (integer_onep (arg1))
6924 return non_lvalue (fold_convert (type, arg0));
6925 if (integer_zerop (arg1))
6926 return t;
6927 /* X / -1 is -X. */
6928 if (!TYPE_UNSIGNED (type)
6929 && TREE_CODE (arg1) == INTEGER_CST
6930 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
6931 && TREE_INT_CST_HIGH (arg1) == -1)
6932 return fold_convert (type, negate_expr (arg0));
6934 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6935 operation, EXACT_DIV_EXPR.
6937 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6938 At one time others generated faster code, it's not clear if they do
6939 after the last round to changes to the DIV code in expmed.c. */
6940 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6941 && multiple_of_p (type, arg0, arg1))
6942 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
6944 if (TREE_CODE (arg1) == INTEGER_CST
6945 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6946 code, NULL_TREE)))
6947 return fold_convert (type, tem);
6949 goto binary;
6951 case CEIL_MOD_EXPR:
6952 case FLOOR_MOD_EXPR:
6953 case ROUND_MOD_EXPR:
6954 case TRUNC_MOD_EXPR:
6955 if (integer_onep (arg1))
6956 return omit_one_operand (type, integer_zero_node, arg0);
6957 if (integer_zerop (arg1))
6958 return t;
6959 /* X % -1 is zero. */
6960 if (!TYPE_UNSIGNED (type)
6961 && TREE_CODE (arg1) == INTEGER_CST
6962 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
6963 && TREE_INT_CST_HIGH (arg1) == -1)
6964 return omit_one_operand (type, integer_zero_node, arg0);
6966 if (TREE_CODE (arg1) == INTEGER_CST
6967 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6968 code, NULL_TREE)))
6969 return fold_convert (type, tem);
6971 goto binary;
6973 case LROTATE_EXPR:
6974 case RROTATE_EXPR:
6975 if (integer_all_onesp (arg0))
6976 return omit_one_operand (type, arg0, arg1);
6977 goto shift;
6979 case RSHIFT_EXPR:
6980 /* Optimize -1 >> x for arithmetic right shifts. */
6981 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
6982 return omit_one_operand (type, arg0, arg1);
6983 /* ... fall through ... */
6985 case LSHIFT_EXPR:
6986 shift:
6987 if (integer_zerop (arg1))
6988 return non_lvalue (fold_convert (type, arg0));
6989 if (integer_zerop (arg0))
6990 return omit_one_operand (type, arg0, arg1);
6992 /* Since negative shift count is not well-defined,
6993 don't try to compute it in the compiler. */
6994 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6995 return t;
6996 /* Rewrite an LROTATE_EXPR by a constant into an
6997 RROTATE_EXPR by a new constant. */
6998 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7000 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7001 tem = fold_convert (TREE_TYPE (arg1), tem);
7002 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7003 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7006 /* If we have a rotate of a bit operation with the rotate count and
7007 the second operand of the bit operation both constant,
7008 permute the two operations. */
7009 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7010 && (TREE_CODE (arg0) == BIT_AND_EXPR
7011 || TREE_CODE (arg0) == BIT_IOR_EXPR
7012 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7013 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7014 return fold (build2 (TREE_CODE (arg0), type,
7015 fold (build2 (code, type,
7016 TREE_OPERAND (arg0, 0), arg1)),
7017 fold (build2 (code, type,
7018 TREE_OPERAND (arg0, 1), arg1))));
7020 /* Two consecutive rotates adding up to the width of the mode can
7021 be ignored. */
7022 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7023 && TREE_CODE (arg0) == RROTATE_EXPR
7024 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7025 && TREE_INT_CST_HIGH (arg1) == 0
7026 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7027 && ((TREE_INT_CST_LOW (arg1)
7028 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7029 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7030 return TREE_OPERAND (arg0, 0);
7032 goto binary;
7034 case MIN_EXPR:
7035 if (operand_equal_p (arg0, arg1, 0))
7036 return omit_one_operand (type, arg0, arg1);
7037 if (INTEGRAL_TYPE_P (type)
7038 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7039 return omit_one_operand (type, arg1, arg0);
7040 goto associate;
7042 case MAX_EXPR:
7043 if (operand_equal_p (arg0, arg1, 0))
7044 return omit_one_operand (type, arg0, arg1);
7045 if (INTEGRAL_TYPE_P (type)
7046 && TYPE_MAX_VALUE (type)
7047 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7048 return omit_one_operand (type, arg1, arg0);
7049 goto associate;
7051 case TRUTH_NOT_EXPR:
7052 /* Note that the operand of this must be an int
7053 and its values must be 0 or 1.
7054 ("true" is a fixed value perhaps depending on the language,
7055 but we don't handle values other than 1 correctly yet.) */
7056 tem = invert_truthvalue (arg0);
7057 /* Avoid infinite recursion. */
7058 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7060 tem = fold_single_bit_test (code, arg0, arg1, type);
7061 if (tem)
7062 return tem;
7063 return t;
7065 return fold_convert (type, tem);
7067 case TRUTH_ANDIF_EXPR:
7068 /* Note that the operands of this must be ints
7069 and their values must be 0 or 1.
7070 ("true" is a fixed value perhaps depending on the language.) */
7071 /* If first arg is constant zero, return it. */
7072 if (integer_zerop (arg0))
7073 return fold_convert (type, arg0);
7074 case TRUTH_AND_EXPR:
7075 /* If either arg is constant true, drop it. */
7076 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7077 return non_lvalue (fold_convert (type, arg1));
7078 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7079 /* Preserve sequence points. */
7080 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7081 return non_lvalue (fold_convert (type, arg0));
7082 /* If second arg is constant zero, result is zero, but first arg
7083 must be evaluated. */
7084 if (integer_zerop (arg1))
7085 return omit_one_operand (type, arg1, arg0);
7086 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7087 case will be handled here. */
7088 if (integer_zerop (arg0))
7089 return omit_one_operand (type, arg0, arg1);
7091 truth_andor:
7092 /* We only do these simplifications if we are optimizing. */
7093 if (!optimize)
7094 return t;
7096 /* Check for things like (A || B) && (A || C). We can convert this
7097 to A || (B && C). Note that either operator can be any of the four
7098 truth and/or operations and the transformation will still be
7099 valid. Also note that we only care about order for the
7100 ANDIF and ORIF operators. If B contains side effects, this
7101 might change the truth-value of A. */
7102 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7103 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7104 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7105 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7106 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7107 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7109 tree a00 = TREE_OPERAND (arg0, 0);
7110 tree a01 = TREE_OPERAND (arg0, 1);
7111 tree a10 = TREE_OPERAND (arg1, 0);
7112 tree a11 = TREE_OPERAND (arg1, 1);
7113 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7114 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7115 && (code == TRUTH_AND_EXPR
7116 || code == TRUTH_OR_EXPR));
7118 if (operand_equal_p (a00, a10, 0))
7119 return fold (build2 (TREE_CODE (arg0), type, a00,
7120 fold (build2 (code, type, a01, a11))));
7121 else if (commutative && operand_equal_p (a00, a11, 0))
7122 return fold (build2 (TREE_CODE (arg0), type, a00,
7123 fold (build2 (code, type, a01, a10))));
7124 else if (commutative && operand_equal_p (a01, a10, 0))
7125 return fold (build2 (TREE_CODE (arg0), type, a01,
7126 fold (build2 (code, type, a00, a11))));
7128 /* This case if tricky because we must either have commutative
7129 operators or else A10 must not have side-effects. */
7131 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7132 && operand_equal_p (a01, a11, 0))
7133 return fold (build2 (TREE_CODE (arg0), type,
7134 fold (build2 (code, type, a00, a10)),
7135 a01));
7138 /* See if we can build a range comparison. */
7139 if (0 != (tem = fold_range_test (t)))
7140 return tem;
7142 /* Check for the possibility of merging component references. If our
7143 lhs is another similar operation, try to merge its rhs with our
7144 rhs. Then try to merge our lhs and rhs. */
7145 if (TREE_CODE (arg0) == code
7146 && 0 != (tem = fold_truthop (code, type,
7147 TREE_OPERAND (arg0, 1), arg1)))
7148 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7150 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7151 return tem;
7153 return t;
7155 case TRUTH_ORIF_EXPR:
7156 /* Note that the operands of this must be ints
7157 and their values must be 0 or true.
7158 ("true" is a fixed value perhaps depending on the language.) */
7159 /* If first arg is constant true, return it. */
7160 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7161 return fold_convert (type, arg0);
7162 case TRUTH_OR_EXPR:
7163 /* If either arg is constant zero, drop it. */
7164 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7165 return non_lvalue (fold_convert (type, arg1));
7166 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7167 /* Preserve sequence points. */
7168 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7169 return non_lvalue (fold_convert (type, arg0));
7170 /* If second arg is constant true, result is true, but we must
7171 evaluate first arg. */
7172 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7173 return omit_one_operand (type, arg1, arg0);
7174 /* Likewise for first arg, but note this only occurs here for
7175 TRUTH_OR_EXPR. */
7176 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7177 return omit_one_operand (type, arg0, arg1);
7178 goto truth_andor;
7180 case TRUTH_XOR_EXPR:
7181 /* If either arg is constant zero, drop it. */
7182 if (integer_zerop (arg0))
7183 return non_lvalue (fold_convert (type, arg1));
7184 if (integer_zerop (arg1))
7185 return non_lvalue (fold_convert (type, arg0));
7186 /* If either arg is constant true, this is a logical inversion. */
7187 if (integer_onep (arg0))
7188 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7189 if (integer_onep (arg1))
7190 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7191 /* Identical arguments cancel to zero. */
7192 if (operand_equal_p (arg0, arg1, 0))
7193 return omit_one_operand (type, integer_zero_node, arg0);
7194 return t;
7196 case EQ_EXPR:
7197 case NE_EXPR:
7198 case LT_EXPR:
7199 case GT_EXPR:
7200 case LE_EXPR:
7201 case GE_EXPR:
7202 /* If one arg is a real or integer constant, put it last. */
7203 if (tree_swap_operands_p (arg0, arg1, true))
7204 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7206 /* If this is an equality comparison of the address of a non-weak
7207 object against zero, then we know the result. */
7208 if ((code == EQ_EXPR || code == NE_EXPR)
7209 && TREE_CODE (arg0) == ADDR_EXPR
7210 && DECL_P (TREE_OPERAND (arg0, 0))
7211 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7212 && integer_zerop (arg1))
7214 if (code == EQ_EXPR)
7215 return fold_convert (type, integer_zero_node);
7216 else
7217 return fold_convert (type, integer_one_node);
7220 /* If this is an equality comparison of the address of two non-weak,
7221 unaliased symbols neither of which are extern (since we do not
7222 have access to attributes for externs), then we know the result. */
7223 if ((code == EQ_EXPR || code == NE_EXPR)
7224 && TREE_CODE (arg0) == ADDR_EXPR
7225 && DECL_P (TREE_OPERAND (arg0, 0))
7226 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7227 && ! lookup_attribute ("alias",
7228 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7229 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7230 && TREE_CODE (arg1) == ADDR_EXPR
7231 && DECL_P (TREE_OPERAND (arg1, 0))
7232 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7233 && ! lookup_attribute ("alias",
7234 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7235 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7237 if (code == EQ_EXPR)
7238 return fold_convert (type, (operand_equal_p (arg0, arg1, 0)
7239 ? integer_one_node : integer_zero_node));
7240 else
7241 return fold_convert (type, (operand_equal_p (arg0, arg1, 0)
7242 ? integer_zero_node : integer_one_node));
7245 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7247 tree targ0 = strip_float_extensions (arg0);
7248 tree targ1 = strip_float_extensions (arg1);
7249 tree newtype = TREE_TYPE (targ0);
7251 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7252 newtype = TREE_TYPE (targ1);
7254 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7255 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7256 return fold (build2 (code, type, fold_convert (newtype, targ0),
7257 fold_convert (newtype, targ1)));
7259 /* (-a) CMP (-b) -> b CMP a */
7260 if (TREE_CODE (arg0) == NEGATE_EXPR
7261 && TREE_CODE (arg1) == NEGATE_EXPR)
7262 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7263 TREE_OPERAND (arg0, 0)));
7265 if (TREE_CODE (arg1) == REAL_CST)
7267 REAL_VALUE_TYPE cst;
7268 cst = TREE_REAL_CST (arg1);
7270 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7271 if (TREE_CODE (arg0) == NEGATE_EXPR)
7272 return
7273 fold (build2 (swap_tree_comparison (code), type,
7274 TREE_OPERAND (arg0, 0),
7275 build_real (TREE_TYPE (arg1),
7276 REAL_VALUE_NEGATE (cst))));
7278 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7279 /* a CMP (-0) -> a CMP 0 */
7280 if (REAL_VALUE_MINUS_ZERO (cst))
7281 return fold (build2 (code, type, arg0,
7282 build_real (TREE_TYPE (arg1), dconst0)));
7284 /* x != NaN is always true, other ops are always false. */
7285 if (REAL_VALUE_ISNAN (cst)
7286 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7288 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7289 return omit_one_operand (type, fold_convert (type, tem), arg0);
7292 /* Fold comparisons against infinity. */
7293 if (REAL_VALUE_ISINF (cst))
7295 tem = fold_inf_compare (code, type, arg0, arg1);
7296 if (tem != NULL_TREE)
7297 return tem;
7301 /* If this is a comparison of a real constant with a PLUS_EXPR
7302 or a MINUS_EXPR of a real constant, we can convert it into a
7303 comparison with a revised real constant as long as no overflow
7304 occurs when unsafe_math_optimizations are enabled. */
7305 if (flag_unsafe_math_optimizations
7306 && TREE_CODE (arg1) == REAL_CST
7307 && (TREE_CODE (arg0) == PLUS_EXPR
7308 || TREE_CODE (arg0) == MINUS_EXPR)
7309 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7310 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7311 ? MINUS_EXPR : PLUS_EXPR,
7312 arg1, TREE_OPERAND (arg0, 1), 0))
7313 && ! TREE_CONSTANT_OVERFLOW (tem))
7314 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7316 /* Likewise, we can simplify a comparison of a real constant with
7317 a MINUS_EXPR whose first operand is also a real constant, i.e.
7318 (c1 - x) < c2 becomes x > c1-c2. */
7319 if (flag_unsafe_math_optimizations
7320 && TREE_CODE (arg1) == REAL_CST
7321 && TREE_CODE (arg0) == MINUS_EXPR
7322 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7323 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7324 arg1, 0))
7325 && ! TREE_CONSTANT_OVERFLOW (tem))
7326 return fold (build2 (swap_tree_comparison (code), type,
7327 TREE_OPERAND (arg0, 1), tem));
7329 /* Fold comparisons against built-in math functions. */
7330 if (TREE_CODE (arg1) == REAL_CST
7331 && flag_unsafe_math_optimizations
7332 && ! flag_errno_math)
7334 enum built_in_function fcode = builtin_mathfn_code (arg0);
7336 if (fcode != END_BUILTINS)
7338 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7339 if (tem != NULL_TREE)
7340 return tem;
7345 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7346 if (TREE_CONSTANT (arg1)
7347 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7348 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7349 /* This optimization is invalid for ordered comparisons
7350 if CONST+INCR overflows or if foo+incr might overflow.
7351 This optimization is invalid for floating point due to rounding.
7352 For pointer types we assume overflow doesn't happen. */
7353 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7354 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7355 && (code == EQ_EXPR || code == NE_EXPR))))
7357 tree varop, newconst;
7359 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7361 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7362 arg1, TREE_OPERAND (arg0, 1)));
7363 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7364 TREE_OPERAND (arg0, 0),
7365 TREE_OPERAND (arg0, 1));
7367 else
7369 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7370 arg1, TREE_OPERAND (arg0, 1)));
7371 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7372 TREE_OPERAND (arg0, 0),
7373 TREE_OPERAND (arg0, 1));
7377 /* If VAROP is a reference to a bitfield, we must mask
7378 the constant by the width of the field. */
7379 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7380 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7382 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7383 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7384 tree folded_compare, shift;
7386 /* First check whether the comparison would come out
7387 always the same. If we don't do that we would
7388 change the meaning with the masking. */
7389 folded_compare = fold (build2 (code, type,
7390 TREE_OPERAND (varop, 0),
7391 arg1));
7392 if (integer_zerop (folded_compare)
7393 || integer_onep (folded_compare))
7394 return omit_one_operand (type, folded_compare, varop);
7396 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7398 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7399 newconst, shift));
7400 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7401 newconst, shift));
7404 return fold (build2 (code, type, varop, newconst));
7407 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7408 This transformation affects the cases which are handled in later
7409 optimizations involving comparisons with non-negative constants. */
7410 if (TREE_CODE (arg1) == INTEGER_CST
7411 && TREE_CODE (arg0) != INTEGER_CST
7412 && tree_int_cst_sgn (arg1) > 0)
7414 switch (code)
7416 case GE_EXPR:
7417 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7418 return fold (build2 (GT_EXPR, type, arg0, arg1));
7420 case LT_EXPR:
7421 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7422 return fold (build2 (LE_EXPR, type, arg0, arg1));
7424 default:
7425 break;
7429 /* Comparisons with the highest or lowest possible integer of
7430 the specified size will have known values.
7432 This is quite similar to fold_relational_hi_lo; however, my
7433 attempts to share the code have been nothing but trouble.
7434 I give up for now. */
7436 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7438 if (TREE_CODE (arg1) == INTEGER_CST
7439 && ! TREE_CONSTANT_OVERFLOW (arg1)
7440 && width <= HOST_BITS_PER_WIDE_INT
7441 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7442 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7444 unsigned HOST_WIDE_INT signed_max;
7445 unsigned HOST_WIDE_INT max, min;
7447 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7449 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7451 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7452 min = 0;
7454 else
7456 max = signed_max;
7457 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7460 if (TREE_INT_CST_HIGH (arg1) == 0
7461 && TREE_INT_CST_LOW (arg1) == max)
7462 switch (code)
7464 case GT_EXPR:
7465 return omit_one_operand (type,
7466 fold_convert (type,
7467 integer_zero_node),
7468 arg0);
7469 case GE_EXPR:
7470 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7472 case LE_EXPR:
7473 return omit_one_operand (type,
7474 fold_convert (type,
7475 integer_one_node),
7476 arg0);
7477 case LT_EXPR:
7478 return fold (build2 (NE_EXPR, type, arg0, arg1));
7480 /* The GE_EXPR and LT_EXPR cases above are not normally
7481 reached because of previous transformations. */
7483 default:
7484 break;
7486 else if (TREE_INT_CST_HIGH (arg1) == 0
7487 && TREE_INT_CST_LOW (arg1) == max - 1)
7488 switch (code)
7490 case GT_EXPR:
7491 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7492 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7493 case LE_EXPR:
7494 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7495 return fold (build2 (NE_EXPR, type, arg0, arg1));
7496 default:
7497 break;
7499 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7500 && TREE_INT_CST_LOW (arg1) == min)
7501 switch (code)
7503 case LT_EXPR:
7504 return omit_one_operand (type,
7505 fold_convert (type,
7506 integer_zero_node),
7507 arg0);
7508 case LE_EXPR:
7509 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7511 case GE_EXPR:
7512 return omit_one_operand (type,
7513 fold_convert (type,
7514 integer_one_node),
7515 arg0);
7516 case GT_EXPR:
7517 return fold (build2 (NE_EXPR, type, arg0, arg1));
7519 default:
7520 break;
7522 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7523 && TREE_INT_CST_LOW (arg1) == min + 1)
7524 switch (code)
7526 case GE_EXPR:
7527 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7528 return fold (build2 (NE_EXPR, type, arg0, arg1));
7529 case LT_EXPR:
7530 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7531 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7532 default:
7533 break;
7536 else if (!in_gimple_form
7537 && TREE_INT_CST_HIGH (arg1) == 0
7538 && TREE_INT_CST_LOW (arg1) == signed_max
7539 && TYPE_UNSIGNED (TREE_TYPE (arg1))
7540 /* signed_type does not work on pointer types. */
7541 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7543 /* The following case also applies to X < signed_max+1
7544 and X >= signed_max+1 because previous transformations. */
7545 if (code == LE_EXPR || code == GT_EXPR)
7547 tree st0, st1;
7548 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7549 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7550 return fold
7551 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7552 type, fold_convert (st0, arg0),
7553 fold_convert (st1, integer_zero_node)));
7559 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7560 a MINUS_EXPR of a constant, we can convert it into a comparison with
7561 a revised constant as long as no overflow occurs. */
7562 if ((code == EQ_EXPR || code == NE_EXPR)
7563 && TREE_CODE (arg1) == INTEGER_CST
7564 && (TREE_CODE (arg0) == PLUS_EXPR
7565 || TREE_CODE (arg0) == MINUS_EXPR)
7566 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7567 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7568 ? MINUS_EXPR : PLUS_EXPR,
7569 arg1, TREE_OPERAND (arg0, 1), 0))
7570 && ! TREE_CONSTANT_OVERFLOW (tem))
7571 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7573 /* Similarly for a NEGATE_EXPR. */
7574 else if ((code == EQ_EXPR || code == NE_EXPR)
7575 && TREE_CODE (arg0) == NEGATE_EXPR
7576 && TREE_CODE (arg1) == INTEGER_CST
7577 && 0 != (tem = negate_expr (arg1))
7578 && TREE_CODE (tem) == INTEGER_CST
7579 && ! TREE_CONSTANT_OVERFLOW (tem))
7580 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7582 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7583 for !=. Don't do this for ordered comparisons due to overflow. */
7584 else if ((code == NE_EXPR || code == EQ_EXPR)
7585 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7586 return fold (build2 (code, type,
7587 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7589 /* If we are widening one operand of an integer comparison,
7590 see if the other operand is similarly being widened. Perhaps we
7591 can do the comparison in the narrower type. */
7592 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7593 && TREE_CODE (arg0) == NOP_EXPR
7594 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7595 && (code == EQ_EXPR || code == NE_EXPR
7596 || TYPE_UNSIGNED (TREE_TYPE (arg0))
7597 == TYPE_UNSIGNED (TREE_TYPE (tem)))
7598 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7599 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7600 || (TREE_CODE (t1) == INTEGER_CST
7601 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7602 return fold (build2 (code, type, tem,
7603 fold_convert (TREE_TYPE (tem), t1)));
7605 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7606 constant, we can simplify it. */
7607 else if (TREE_CODE (arg1) == INTEGER_CST
7608 && (TREE_CODE (arg0) == MIN_EXPR
7609 || TREE_CODE (arg0) == MAX_EXPR)
7610 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7611 return optimize_minmax_comparison (t);
7613 /* If we are comparing an ABS_EXPR with a constant, we can
7614 convert all the cases into explicit comparisons, but they may
7615 well not be faster than doing the ABS and one comparison.
7616 But ABS (X) <= C is a range comparison, which becomes a subtraction
7617 and a comparison, and is probably faster. */
7618 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7619 && TREE_CODE (arg0) == ABS_EXPR
7620 && ! TREE_SIDE_EFFECTS (arg0)
7621 && (0 != (tem = negate_expr (arg1)))
7622 && TREE_CODE (tem) == INTEGER_CST
7623 && ! TREE_CONSTANT_OVERFLOW (tem))
7624 return fold (build2 (TRUTH_ANDIF_EXPR, type,
7625 build2 (GE_EXPR, type,
7626 TREE_OPERAND (arg0, 0), tem),
7627 build2 (LE_EXPR, type,
7628 TREE_OPERAND (arg0, 0), arg1)));
7630 /* If this is an EQ or NE comparison with zero and ARG0 is
7631 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7632 two operations, but the latter can be done in one less insn
7633 on machines that have only two-operand insns or on which a
7634 constant cannot be the first operand. */
7635 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7636 && TREE_CODE (arg0) == BIT_AND_EXPR)
7638 tree arg00 = TREE_OPERAND (arg0, 0);
7639 tree arg01 = TREE_OPERAND (arg0, 1);
7640 if (TREE_CODE (arg00) == LSHIFT_EXPR
7641 && integer_onep (TREE_OPERAND (arg00, 0)))
7642 return
7643 fold (build2 (code, type,
7644 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7645 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
7646 arg01, TREE_OPERAND (arg00, 1)),
7647 fold_convert (TREE_TYPE (arg0),
7648 integer_one_node)),
7649 arg1));
7650 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7651 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7652 return
7653 fold (build2 (code, type,
7654 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7655 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
7656 arg00, TREE_OPERAND (arg01, 1)),
7657 fold_convert (TREE_TYPE (arg0),
7658 integer_one_node)),
7659 arg1));
7662 /* If this is an NE or EQ comparison of zero against the result of a
7663 signed MOD operation whose second operand is a power of 2, make
7664 the MOD operation unsigned since it is simpler and equivalent. */
7665 if ((code == NE_EXPR || code == EQ_EXPR)
7666 && integer_zerop (arg1)
7667 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7668 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7669 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7670 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7671 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7672 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7674 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7675 tree newmod = build2 (TREE_CODE (arg0), newtype,
7676 fold_convert (newtype,
7677 TREE_OPERAND (arg0, 0)),
7678 fold_convert (newtype,
7679 TREE_OPERAND (arg0, 1)));
7681 return build2 (code, type, newmod, fold_convert (newtype, arg1));
7684 /* If this is an NE comparison of zero with an AND of one, remove the
7685 comparison since the AND will give the correct value. */
7686 if (code == NE_EXPR && integer_zerop (arg1)
7687 && TREE_CODE (arg0) == BIT_AND_EXPR
7688 && integer_onep (TREE_OPERAND (arg0, 1)))
7689 return fold_convert (type, arg0);
7691 /* If we have (A & C) == C where C is a power of 2, convert this into
7692 (A & C) != 0. Similarly for NE_EXPR. */
7693 if ((code == EQ_EXPR || code == NE_EXPR)
7694 && TREE_CODE (arg0) == BIT_AND_EXPR
7695 && integer_pow2p (TREE_OPERAND (arg0, 1))
7696 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7697 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7698 arg0, integer_zero_node));
7700 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7701 2, then fold the expression into shifts and logical operations. */
7702 tem = fold_single_bit_test (code, arg0, arg1, type);
7703 if (tem)
7704 return tem;
7706 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7707 Similarly for NE_EXPR. */
7708 if ((code == EQ_EXPR || code == NE_EXPR)
7709 && TREE_CODE (arg0) == BIT_AND_EXPR
7710 && TREE_CODE (arg1) == INTEGER_CST
7711 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7713 tree dandnotc
7714 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7715 arg1, build1 (BIT_NOT_EXPR,
7716 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7717 TREE_OPERAND (arg0, 1))));
7718 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7719 if (integer_nonzerop (dandnotc))
7720 return omit_one_operand (type, rslt, arg0);
7723 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7724 Similarly for NE_EXPR. */
7725 if ((code == EQ_EXPR || code == NE_EXPR)
7726 && TREE_CODE (arg0) == BIT_IOR_EXPR
7727 && TREE_CODE (arg1) == INTEGER_CST
7728 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7730 tree candnotd
7731 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7732 TREE_OPERAND (arg0, 1),
7733 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7734 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7735 if (integer_nonzerop (candnotd))
7736 return omit_one_operand (type, rslt, arg0);
7739 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7740 and similarly for >= into !=. */
7741 if ((code == LT_EXPR || code == GE_EXPR)
7742 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7743 && TREE_CODE (arg1) == LSHIFT_EXPR
7744 && integer_onep (TREE_OPERAND (arg1, 0)))
7745 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7746 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7747 TREE_OPERAND (arg1, 1)),
7748 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7750 else if ((code == LT_EXPR || code == GE_EXPR)
7751 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7752 && (TREE_CODE (arg1) == NOP_EXPR
7753 || TREE_CODE (arg1) == CONVERT_EXPR)
7754 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7755 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7756 return
7757 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7758 fold_convert (TREE_TYPE (arg0),
7759 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7760 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7761 1))),
7762 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7764 /* Simplify comparison of something with itself. (For IEEE
7765 floating-point, we can only do some of these simplifications.) */
7766 if (operand_equal_p (arg0, arg1, 0))
7768 switch (code)
7770 case EQ_EXPR:
7771 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7772 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7773 return constant_boolean_node (1, type);
7774 break;
7776 case GE_EXPR:
7777 case LE_EXPR:
7778 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7779 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7780 return constant_boolean_node (1, type);
7781 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7783 case NE_EXPR:
7784 /* For NE, we can only do this simplification if integer
7785 or we don't honor IEEE floating point NaNs. */
7786 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7787 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7788 break;
7789 /* ... fall through ... */
7790 case GT_EXPR:
7791 case LT_EXPR:
7792 return constant_boolean_node (0, type);
7793 default:
7794 abort ();
7798 /* If we are comparing an expression that just has comparisons
7799 of two integer values, arithmetic expressions of those comparisons,
7800 and constants, we can simplify it. There are only three cases
7801 to check: the two values can either be equal, the first can be
7802 greater, or the second can be greater. Fold the expression for
7803 those three values. Since each value must be 0 or 1, we have
7804 eight possibilities, each of which corresponds to the constant 0
7805 or 1 or one of the six possible comparisons.
7807 This handles common cases like (a > b) == 0 but also handles
7808 expressions like ((x > y) - (y > x)) > 0, which supposedly
7809 occur in macroized code. */
7811 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7813 tree cval1 = 0, cval2 = 0;
7814 int save_p = 0;
7816 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7817 /* Don't handle degenerate cases here; they should already
7818 have been handled anyway. */
7819 && cval1 != 0 && cval2 != 0
7820 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7821 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7822 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7823 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7824 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7825 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7826 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7828 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7829 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7831 /* We can't just pass T to eval_subst in case cval1 or cval2
7832 was the same as ARG1. */
7834 tree high_result
7835 = fold (build2 (code, type,
7836 eval_subst (arg0, cval1, maxval,
7837 cval2, minval),
7838 arg1));
7839 tree equal_result
7840 = fold (build2 (code, type,
7841 eval_subst (arg0, cval1, maxval,
7842 cval2, maxval),
7843 arg1));
7844 tree low_result
7845 = fold (build2 (code, type,
7846 eval_subst (arg0, cval1, minval,
7847 cval2, maxval),
7848 arg1));
7850 /* All three of these results should be 0 or 1. Confirm they
7851 are. Then use those values to select the proper code
7852 to use. */
7854 if ((integer_zerop (high_result)
7855 || integer_onep (high_result))
7856 && (integer_zerop (equal_result)
7857 || integer_onep (equal_result))
7858 && (integer_zerop (low_result)
7859 || integer_onep (low_result)))
7861 /* Make a 3-bit mask with the high-order bit being the
7862 value for `>', the next for '=', and the low for '<'. */
7863 switch ((integer_onep (high_result) * 4)
7864 + (integer_onep (equal_result) * 2)
7865 + integer_onep (low_result))
7867 case 0:
7868 /* Always false. */
7869 return omit_one_operand (type, integer_zero_node, arg0);
7870 case 1:
7871 code = LT_EXPR;
7872 break;
7873 case 2:
7874 code = EQ_EXPR;
7875 break;
7876 case 3:
7877 code = LE_EXPR;
7878 break;
7879 case 4:
7880 code = GT_EXPR;
7881 break;
7882 case 5:
7883 code = NE_EXPR;
7884 break;
7885 case 6:
7886 code = GE_EXPR;
7887 break;
7888 case 7:
7889 /* Always true. */
7890 return omit_one_operand (type, integer_one_node, arg0);
7893 tem = build2 (code, type, cval1, cval2);
7894 if (save_p)
7895 return save_expr (tem);
7896 else
7897 return fold (tem);
7902 /* If this is a comparison of a field, we may be able to simplify it. */
7903 if (((TREE_CODE (arg0) == COMPONENT_REF
7904 && lang_hooks.can_use_bit_fields_p ())
7905 || TREE_CODE (arg0) == BIT_FIELD_REF)
7906 && (code == EQ_EXPR || code == NE_EXPR)
7907 /* Handle the constant case even without -O
7908 to make sure the warnings are given. */
7909 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7911 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7912 if (t1)
7913 return t1;
7916 /* If this is a comparison of complex values and either or both sides
7917 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7918 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7919 This may prevent needless evaluations. */
7920 if ((code == EQ_EXPR || code == NE_EXPR)
7921 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7922 && (TREE_CODE (arg0) == COMPLEX_EXPR
7923 || TREE_CODE (arg1) == COMPLEX_EXPR
7924 || TREE_CODE (arg0) == COMPLEX_CST
7925 || TREE_CODE (arg1) == COMPLEX_CST))
7927 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7928 tree real0, imag0, real1, imag1;
7930 arg0 = save_expr (arg0);
7931 arg1 = save_expr (arg1);
7932 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7933 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7934 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7935 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7937 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7938 : TRUTH_ORIF_EXPR),
7939 type,
7940 fold (build2 (code, type, real0, real1)),
7941 fold (build2 (code, type, imag0, imag1))));
7944 /* Optimize comparisons of strlen vs zero to a compare of the
7945 first character of the string vs zero. To wit,
7946 strlen(ptr) == 0 => *ptr == 0
7947 strlen(ptr) != 0 => *ptr != 0
7948 Other cases should reduce to one of these two (or a constant)
7949 due to the return value of strlen being unsigned. */
7950 if ((code == EQ_EXPR || code == NE_EXPR)
7951 && integer_zerop (arg1)
7952 && TREE_CODE (arg0) == CALL_EXPR)
7954 tree fndecl = get_callee_fndecl (arg0);
7955 tree arglist;
7957 if (fndecl
7958 && DECL_BUILT_IN (fndecl)
7959 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7960 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7961 && (arglist = TREE_OPERAND (arg0, 1))
7962 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7963 && ! TREE_CHAIN (arglist))
7964 return fold (build2 (code, type,
7965 build1 (INDIRECT_REF, char_type_node,
7966 TREE_VALUE(arglist)),
7967 integer_zero_node));
7970 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
7971 into a single range test. */
7972 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
7973 && TREE_CODE (arg1) == INTEGER_CST
7974 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7975 && !integer_zerop (TREE_OPERAND (arg0, 1))
7976 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7977 && !TREE_OVERFLOW (arg1))
7979 t1 = fold_div_compare (code, type, arg0, arg1);
7980 if (t1 != NULL_TREE)
7981 return t1;
7984 /* Both ARG0 and ARG1 are known to be constants at this point. */
7985 t1 = fold_relational_const (code, type, arg0, arg1);
7986 return (t1 == NULL_TREE ? t : t1);
7988 case COND_EXPR:
7989 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7990 so all simple results must be passed through pedantic_non_lvalue. */
7991 if (TREE_CODE (arg0) == INTEGER_CST)
7993 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
7994 /* Only optimize constant conditions when the selected branch
7995 has the same type as the COND_EXPR. This avoids optimizing
7996 away "c ? x : throw", where the throw has a void type. */
7997 if (! VOID_TYPE_P (TREE_TYPE (tem))
7998 || VOID_TYPE_P (type))
7999 return pedantic_non_lvalue (tem);
8000 return t;
8002 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8003 return pedantic_omit_one_operand (type, arg1, arg0);
8005 /* If we have A op B ? A : C, we may be able to convert this to a
8006 simpler expression, depending on the operation and the values
8007 of B and C. Signed zeros prevent all of these transformations,
8008 for reasons given above each one. */
8010 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8011 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8012 arg1, TREE_OPERAND (arg0, 1))
8013 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8015 tree arg2 = TREE_OPERAND (t, 2);
8016 enum tree_code comp_code = TREE_CODE (arg0);
8018 STRIP_NOPS (arg2);
8020 /* If we have A op 0 ? A : -A, consider applying the following
8021 transformations:
8023 A == 0? A : -A same as -A
8024 A != 0? A : -A same as A
8025 A >= 0? A : -A same as abs (A)
8026 A > 0? A : -A same as abs (A)
8027 A <= 0? A : -A same as -abs (A)
8028 A < 0? A : -A same as -abs (A)
8030 None of these transformations work for modes with signed
8031 zeros. If A is +/-0, the first two transformations will
8032 change the sign of the result (from +0 to -0, or vice
8033 versa). The last four will fix the sign of the result,
8034 even though the original expressions could be positive or
8035 negative, depending on the sign of A.
8037 Note that all these transformations are correct if A is
8038 NaN, since the two alternatives (A and -A) are also NaNs. */
8039 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8040 ? real_zerop (TREE_OPERAND (arg0, 1))
8041 : integer_zerop (TREE_OPERAND (arg0, 1)))
8042 && TREE_CODE (arg2) == NEGATE_EXPR
8043 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8044 switch (comp_code)
8046 case EQ_EXPR:
8047 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8048 tem = fold_convert (type, negate_expr (tem));
8049 return pedantic_non_lvalue (tem);
8050 case NE_EXPR:
8051 return pedantic_non_lvalue (fold_convert (type, arg1));
8052 case GE_EXPR:
8053 case GT_EXPR:
8054 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8055 arg1 = fold_convert (lang_hooks.types.signed_type
8056 (TREE_TYPE (arg1)), arg1);
8057 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8058 return pedantic_non_lvalue (fold_convert (type, arg1));
8059 case LE_EXPR:
8060 case LT_EXPR:
8061 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8062 arg1 = fold_convert (lang_hooks.types.signed_type
8063 (TREE_TYPE (arg1)), arg1);
8064 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8065 arg1 = negate_expr (fold_convert (type, arg1));
8066 return pedantic_non_lvalue (arg1);
8067 default:
8068 abort ();
8071 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8072 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8073 both transformations are correct when A is NaN: A != 0
8074 is then true, and A == 0 is false. */
8076 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8078 if (comp_code == NE_EXPR)
8079 return pedantic_non_lvalue (fold_convert (type, arg1));
8080 else if (comp_code == EQ_EXPR)
8081 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8084 /* Try some transformations of A op B ? A : B.
8086 A == B? A : B same as B
8087 A != B? A : B same as A
8088 A >= B? A : B same as max (A, B)
8089 A > B? A : B same as max (B, A)
8090 A <= B? A : B same as min (A, B)
8091 A < B? A : B same as min (B, A)
8093 As above, these transformations don't work in the presence
8094 of signed zeros. For example, if A and B are zeros of
8095 opposite sign, the first two transformations will change
8096 the sign of the result. In the last four, the original
8097 expressions give different results for (A=+0, B=-0) and
8098 (A=-0, B=+0), but the transformed expressions do not.
8100 The first two transformations are correct if either A or B
8101 is a NaN. In the first transformation, the condition will
8102 be false, and B will indeed be chosen. In the case of the
8103 second transformation, the condition A != B will be true,
8104 and A will be chosen.
8106 The conversions to max() and min() are not correct if B is
8107 a number and A is not. The conditions in the original
8108 expressions will be false, so all four give B. The min()
8109 and max() versions would give a NaN instead. */
8110 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8111 arg2, TREE_OPERAND (arg0, 0)))
8113 tree comp_op0 = TREE_OPERAND (arg0, 0);
8114 tree comp_op1 = TREE_OPERAND (arg0, 1);
8115 tree comp_type = TREE_TYPE (comp_op0);
8117 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8118 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8120 comp_type = type;
8121 comp_op0 = arg1;
8122 comp_op1 = arg2;
8125 switch (comp_code)
8127 case EQ_EXPR:
8128 return pedantic_non_lvalue (fold_convert (type, arg2));
8129 case NE_EXPR:
8130 return pedantic_non_lvalue (fold_convert (type, arg1));
8131 case LE_EXPR:
8132 case LT_EXPR:
8133 /* In C++ a ?: expression can be an lvalue, so put the
8134 operand which will be used if they are equal first
8135 so that we can convert this back to the
8136 corresponding COND_EXPR. */
8137 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8138 return pedantic_non_lvalue (fold_convert
8139 (type, fold (build2 (MIN_EXPR, comp_type,
8140 (comp_code == LE_EXPR
8141 ? comp_op0 : comp_op1),
8142 (comp_code == LE_EXPR
8143 ? comp_op1 : comp_op0)))));
8144 break;
8145 case GE_EXPR:
8146 case GT_EXPR:
8147 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8148 return pedantic_non_lvalue (fold_convert
8149 (type, fold (build2 (MAX_EXPR, comp_type,
8150 (comp_code == GE_EXPR
8151 ? comp_op0 : comp_op1),
8152 (comp_code == GE_EXPR
8153 ? comp_op1 : comp_op0)))));
8154 break;
8155 default:
8156 abort ();
8160 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8161 we might still be able to simplify this. For example,
8162 if C1 is one less or one more than C2, this might have started
8163 out as a MIN or MAX and been transformed by this function.
8164 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8166 if (INTEGRAL_TYPE_P (type)
8167 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8168 && TREE_CODE (arg2) == INTEGER_CST)
8169 switch (comp_code)
8171 case EQ_EXPR:
8172 /* We can replace A with C1 in this case. */
8173 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8174 return fold (build3 (code, type, TREE_OPERAND (t, 0), arg1,
8175 TREE_OPERAND (t, 2)));
8177 case LT_EXPR:
8178 /* If C1 is C2 + 1, this is min(A, C2). */
8179 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8180 OEP_ONLY_CONST)
8181 && operand_equal_p (TREE_OPERAND (arg0, 1),
8182 const_binop (PLUS_EXPR, arg2,
8183 integer_one_node, 0),
8184 OEP_ONLY_CONST))
8185 return pedantic_non_lvalue
8186 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8187 break;
8189 case LE_EXPR:
8190 /* If C1 is C2 - 1, this is min(A, C2). */
8191 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8192 OEP_ONLY_CONST)
8193 && operand_equal_p (TREE_OPERAND (arg0, 1),
8194 const_binop (MINUS_EXPR, arg2,
8195 integer_one_node, 0),
8196 OEP_ONLY_CONST))
8197 return pedantic_non_lvalue
8198 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8199 break;
8201 case GT_EXPR:
8202 /* If C1 is C2 - 1, this is max(A, C2). */
8203 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8204 OEP_ONLY_CONST)
8205 && operand_equal_p (TREE_OPERAND (arg0, 1),
8206 const_binop (MINUS_EXPR, arg2,
8207 integer_one_node, 0),
8208 OEP_ONLY_CONST))
8209 return pedantic_non_lvalue
8210 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
8211 break;
8213 case GE_EXPR:
8214 /* If C1 is C2 + 1, this is max(A, C2). */
8215 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8216 OEP_ONLY_CONST)
8217 && operand_equal_p (TREE_OPERAND (arg0, 1),
8218 const_binop (PLUS_EXPR, arg2,
8219 integer_one_node, 0),
8220 OEP_ONLY_CONST))
8221 return pedantic_non_lvalue
8222 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
8223 break;
8224 case NE_EXPR:
8225 break;
8226 default:
8227 abort ();
8231 /* If the second operand is simpler than the third, swap them
8232 since that produces better jump optimization results. */
8233 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8234 TREE_OPERAND (t, 2), false))
8236 /* See if this can be inverted. If it can't, possibly because
8237 it was a floating-point inequality comparison, don't do
8238 anything. */
8239 tem = invert_truthvalue (arg0);
8241 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8242 return fold (build3 (code, type, tem,
8243 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8246 /* Convert A ? 1 : 0 to simply A. */
8247 if (integer_onep (TREE_OPERAND (t, 1))
8248 && integer_zerop (TREE_OPERAND (t, 2))
8249 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8250 call to fold will try to move the conversion inside
8251 a COND, which will recurse. In that case, the COND_EXPR
8252 is probably the best choice, so leave it alone. */
8253 && type == TREE_TYPE (arg0))
8254 return pedantic_non_lvalue (arg0);
8256 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8257 over COND_EXPR in cases such as floating point comparisons. */
8258 if (integer_zerop (TREE_OPERAND (t, 1))
8259 && integer_onep (TREE_OPERAND (t, 2))
8260 && truth_value_p (TREE_CODE (arg0)))
8261 return pedantic_non_lvalue (fold_convert (type,
8262 invert_truthvalue (arg0)));
8264 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8265 operation is simply A & 2. */
8267 if (integer_zerop (TREE_OPERAND (t, 2))
8268 && TREE_CODE (arg0) == NE_EXPR
8269 && integer_zerop (TREE_OPERAND (arg0, 1))
8270 && integer_pow2p (arg1)
8271 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8272 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8273 arg1, OEP_ONLY_CONST))
8274 return pedantic_non_lvalue (fold_convert (type,
8275 TREE_OPERAND (arg0, 0)));
8277 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8278 if (integer_zerop (TREE_OPERAND (t, 2))
8279 && truth_value_p (TREE_CODE (arg0))
8280 && truth_value_p (TREE_CODE (arg1)))
8281 return pedantic_non_lvalue (fold (build2 (TRUTH_ANDIF_EXPR, type,
8282 arg0, arg1)));
8284 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8285 if (integer_onep (TREE_OPERAND (t, 2))
8286 && truth_value_p (TREE_CODE (arg0))
8287 && truth_value_p (TREE_CODE (arg1)))
8289 /* Only perform transformation if ARG0 is easily inverted. */
8290 tem = invert_truthvalue (arg0);
8291 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8292 return pedantic_non_lvalue (fold (build2 (TRUTH_ORIF_EXPR, type,
8293 tem, arg1)));
8296 return t;
8298 case COMPOUND_EXPR:
8299 /* When pedantic, a compound expression can be neither an lvalue
8300 nor an integer constant expression. */
8301 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8302 return t;
8303 /* Don't let (0, 0) be null pointer constant. */
8304 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8305 : fold_convert (type, arg1);
8306 return pedantic_non_lvalue (tem);
8308 case COMPLEX_EXPR:
8309 if (wins)
8310 return build_complex (type, arg0, arg1);
8311 return t;
8313 case REALPART_EXPR:
8314 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8315 return t;
8316 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8317 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8318 TREE_OPERAND (arg0, 1));
8319 else if (TREE_CODE (arg0) == COMPLEX_CST)
8320 return TREE_REALPART (arg0);
8321 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8322 return fold (build2 (TREE_CODE (arg0), type,
8323 fold (build1 (REALPART_EXPR, type,
8324 TREE_OPERAND (arg0, 0))),
8325 fold (build1 (REALPART_EXPR, type,
8326 TREE_OPERAND (arg0, 1)))));
8327 return t;
8329 case IMAGPART_EXPR:
8330 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8331 return fold_convert (type, integer_zero_node);
8332 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8333 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8334 TREE_OPERAND (arg0, 0));
8335 else if (TREE_CODE (arg0) == COMPLEX_CST)
8336 return TREE_IMAGPART (arg0);
8337 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8338 return fold (build2 (TREE_CODE (arg0), type,
8339 fold (build1 (IMAGPART_EXPR, type,
8340 TREE_OPERAND (arg0, 0))),
8341 fold (build1 (IMAGPART_EXPR, type,
8342 TREE_OPERAND (arg0, 1)))));
8343 return t;
8345 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8346 appropriate. */
8347 case CLEANUP_POINT_EXPR:
8348 if (! has_cleanups (arg0))
8349 return TREE_OPERAND (t, 0);
8352 enum tree_code code0 = TREE_CODE (arg0);
8353 int kind0 = TREE_CODE_CLASS (code0);
8354 tree arg00 = TREE_OPERAND (arg0, 0);
8355 tree arg01;
8357 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8358 return fold (build1 (code0, type,
8359 fold (build1 (CLEANUP_POINT_EXPR,
8360 TREE_TYPE (arg00), arg00))));
8362 if (kind0 == '<' || kind0 == '2'
8363 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8364 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8365 || code0 == TRUTH_XOR_EXPR)
8367 arg01 = TREE_OPERAND (arg0, 1);
8369 if (TREE_CONSTANT (arg00)
8370 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8371 && ! has_cleanups (arg00)))
8372 return fold (build2 (code0, type, arg00,
8373 fold (build1 (CLEANUP_POINT_EXPR,
8374 TREE_TYPE (arg01), arg01))));
8376 if (TREE_CONSTANT (arg01))
8377 return fold (build2 (code0, type,
8378 fold (build1 (CLEANUP_POINT_EXPR,
8379 TREE_TYPE (arg00), arg00)),
8380 arg01));
8383 return t;
8386 case CALL_EXPR:
8387 /* Check for a built-in function. */
8388 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8389 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8390 == FUNCTION_DECL)
8391 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8393 tree tmp = fold_builtin (t);
8394 if (tmp)
8395 return tmp;
8397 return t;
8399 default:
8400 return t;
8401 } /* switch (code) */
8404 #ifdef ENABLE_FOLD_CHECKING
8405 #undef fold
8407 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8408 static void fold_check_failed (tree, tree);
8409 void print_fold_checksum (tree);
8411 /* When --enable-checking=fold, compute a digest of expr before
8412 and after actual fold call to see if fold did not accidentally
8413 change original expr. */
8415 tree
8416 fold (tree expr)
8418 tree ret;
8419 struct md5_ctx ctx;
8420 unsigned char checksum_before[16], checksum_after[16];
8421 htab_t ht;
8423 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8424 md5_init_ctx (&ctx);
8425 fold_checksum_tree (expr, &ctx, ht);
8426 md5_finish_ctx (&ctx, checksum_before);
8427 htab_empty (ht);
8429 ret = fold_1 (expr);
8431 md5_init_ctx (&ctx);
8432 fold_checksum_tree (expr, &ctx, ht);
8433 md5_finish_ctx (&ctx, checksum_after);
8434 htab_delete (ht);
8436 if (memcmp (checksum_before, checksum_after, 16))
8437 fold_check_failed (expr, ret);
8439 return ret;
8442 void
8443 print_fold_checksum (tree expr)
8445 struct md5_ctx ctx;
8446 unsigned char checksum[16], cnt;
8447 htab_t ht;
8449 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8450 md5_init_ctx (&ctx);
8451 fold_checksum_tree (expr, &ctx, ht);
8452 md5_finish_ctx (&ctx, checksum);
8453 htab_delete (ht);
8454 for (cnt = 0; cnt < 16; ++cnt)
8455 fprintf (stderr, "%02x", checksum[cnt]);
8456 putc ('\n', stderr);
8459 static void
8460 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8462 internal_error ("fold check: original tree changed by fold");
8465 static void
8466 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8468 void **slot;
8469 enum tree_code code;
8470 char buf[sizeof (struct tree_decl)];
8471 int i, len;
8473 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8474 > sizeof (struct tree_decl)
8475 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8476 abort ();
8477 if (expr == NULL)
8478 return;
8479 slot = htab_find_slot (ht, expr, INSERT);
8480 if (*slot != NULL)
8481 return;
8482 *slot = expr;
8483 code = TREE_CODE (expr);
8484 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8486 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8487 memcpy (buf, expr, tree_size (expr));
8488 expr = (tree) buf;
8489 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8491 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8493 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8494 memcpy (buf, expr, tree_size (expr));
8495 expr = (tree) buf;
8496 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8498 else if (TREE_CODE_CLASS (code) == 't'
8499 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8501 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8502 memcpy (buf, expr, tree_size (expr));
8503 expr = (tree) buf;
8504 TYPE_POINTER_TO (expr) = NULL;
8505 TYPE_REFERENCE_TO (expr) = NULL;
8507 md5_process_bytes (expr, tree_size (expr), ctx);
8508 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8509 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8510 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8511 len = TREE_CODE_LENGTH (code);
8512 switch (TREE_CODE_CLASS (code))
8514 case 'c':
8515 switch (code)
8517 case STRING_CST:
8518 md5_process_bytes (TREE_STRING_POINTER (expr),
8519 TREE_STRING_LENGTH (expr), ctx);
8520 break;
8521 case COMPLEX_CST:
8522 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8523 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8524 break;
8525 case VECTOR_CST:
8526 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8527 break;
8528 default:
8529 break;
8531 break;
8532 case 'x':
8533 switch (code)
8535 case TREE_LIST:
8536 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8537 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8538 break;
8539 case TREE_VEC:
8540 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8541 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8542 break;
8543 default:
8544 break;
8546 break;
8547 case 'e':
8548 switch (code)
8550 case SAVE_EXPR: len = 2; break;
8551 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8552 case RTL_EXPR: len = 0; break;
8553 case WITH_CLEANUP_EXPR: len = 2; break;
8554 default: break;
8556 /* Fall through. */
8557 case 'r':
8558 case '<':
8559 case '1':
8560 case '2':
8561 case 's':
8562 for (i = 0; i < len; ++i)
8563 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8564 break;
8565 case 'd':
8566 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8567 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8568 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8569 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8570 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8571 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8572 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8573 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8574 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8575 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8576 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8577 break;
8578 case 't':
8579 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8580 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8581 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8582 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8583 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8584 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8585 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8586 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8587 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8588 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8589 break;
8590 default:
8591 break;
8595 #endif
8597 /* Perform constant folding and related simplification of initializer
8598 expression EXPR. This behaves identically to "fold" but ignores
8599 potential run-time traps and exceptions that fold must preserve. */
8601 tree
8602 fold_initializer (tree expr)
8604 int saved_signaling_nans = flag_signaling_nans;
8605 int saved_trapping_math = flag_trapping_math;
8606 int saved_trapv = flag_trapv;
8607 tree result;
8609 flag_signaling_nans = 0;
8610 flag_trapping_math = 0;
8611 flag_trapv = 0;
8613 result = fold (expr);
8615 flag_signaling_nans = saved_signaling_nans;
8616 flag_trapping_math = saved_trapping_math;
8617 flag_trapv = saved_trapv;
8619 return result;
8622 /* Determine if first argument is a multiple of second argument. Return 0 if
8623 it is not, or we cannot easily determined it to be.
8625 An example of the sort of thing we care about (at this point; this routine
8626 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8627 fold cases do now) is discovering that
8629 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8631 is a multiple of
8633 SAVE_EXPR (J * 8)
8635 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8637 This code also handles discovering that
8639 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8641 is a multiple of 8 so we don't have to worry about dealing with a
8642 possible remainder.
8644 Note that we *look* inside a SAVE_EXPR only to determine how it was
8645 calculated; it is not safe for fold to do much of anything else with the
8646 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8647 at run time. For example, the latter example above *cannot* be implemented
8648 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8649 evaluation time of the original SAVE_EXPR is not necessarily the same at
8650 the time the new expression is evaluated. The only optimization of this
8651 sort that would be valid is changing
8653 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8655 divided by 8 to
8657 SAVE_EXPR (I) * SAVE_EXPR (J)
8659 (where the same SAVE_EXPR (J) is used in the original and the
8660 transformed version). */
8662 static int
8663 multiple_of_p (tree type, tree top, tree bottom)
8665 if (operand_equal_p (top, bottom, 0))
8666 return 1;
8668 if (TREE_CODE (type) != INTEGER_TYPE)
8669 return 0;
8671 switch (TREE_CODE (top))
8673 case MULT_EXPR:
8674 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8675 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8677 case PLUS_EXPR:
8678 case MINUS_EXPR:
8679 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8680 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8682 case LSHIFT_EXPR:
8683 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8685 tree op1, t1;
8687 op1 = TREE_OPERAND (top, 1);
8688 /* const_binop may not detect overflow correctly,
8689 so check for it explicitly here. */
8690 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8691 > TREE_INT_CST_LOW (op1)
8692 && TREE_INT_CST_HIGH (op1) == 0
8693 && 0 != (t1 = fold_convert (type,
8694 const_binop (LSHIFT_EXPR,
8695 size_one_node,
8696 op1, 0)))
8697 && ! TREE_OVERFLOW (t1))
8698 return multiple_of_p (type, t1, bottom);
8700 return 0;
8702 case NOP_EXPR:
8703 /* Can't handle conversions from non-integral or wider integral type. */
8704 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8705 || (TYPE_PRECISION (type)
8706 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8707 return 0;
8709 /* .. fall through ... */
8711 case SAVE_EXPR:
8712 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8714 case INTEGER_CST:
8715 if (TREE_CODE (bottom) != INTEGER_CST
8716 || (TYPE_UNSIGNED (type)
8717 && (tree_int_cst_sgn (top) < 0
8718 || tree_int_cst_sgn (bottom) < 0)))
8719 return 0;
8720 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8721 top, bottom, 0));
8723 default:
8724 return 0;
8728 /* Return true if `t' is known to be non-negative. */
8731 tree_expr_nonnegative_p (tree t)
8733 switch (TREE_CODE (t))
8735 case ABS_EXPR:
8736 return 1;
8738 case INTEGER_CST:
8739 return tree_int_cst_sgn (t) >= 0;
8741 case REAL_CST:
8742 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8744 case PLUS_EXPR:
8745 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8746 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8747 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8749 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8750 both unsigned and at least 2 bits shorter than the result. */
8751 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8752 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8753 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8755 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8756 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8757 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8758 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8760 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8761 TYPE_PRECISION (inner2)) + 1;
8762 return prec < TYPE_PRECISION (TREE_TYPE (t));
8765 break;
8767 case MULT_EXPR:
8768 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8770 /* x * x for floating point x is always non-negative. */
8771 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8772 return 1;
8773 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8774 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8777 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8778 both unsigned and their total bits is shorter than the result. */
8779 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8780 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8781 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8783 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8784 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8785 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8786 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8787 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8788 < TYPE_PRECISION (TREE_TYPE (t));
8790 return 0;
8792 case TRUNC_DIV_EXPR:
8793 case CEIL_DIV_EXPR:
8794 case FLOOR_DIV_EXPR:
8795 case ROUND_DIV_EXPR:
8796 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8797 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8799 case TRUNC_MOD_EXPR:
8800 case CEIL_MOD_EXPR:
8801 case FLOOR_MOD_EXPR:
8802 case ROUND_MOD_EXPR:
8803 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8805 case RDIV_EXPR:
8806 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8807 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8809 case BIT_AND_EXPR:
8810 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8811 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8812 case BIT_IOR_EXPR:
8813 case BIT_XOR_EXPR:
8814 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8815 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8817 case NOP_EXPR:
8819 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8820 tree outer_type = TREE_TYPE (t);
8822 if (TREE_CODE (outer_type) == REAL_TYPE)
8824 if (TREE_CODE (inner_type) == REAL_TYPE)
8825 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8826 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8828 if (TYPE_UNSIGNED (inner_type))
8829 return 1;
8830 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8833 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8835 if (TREE_CODE (inner_type) == REAL_TYPE)
8836 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8837 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8838 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8839 && TYPE_UNSIGNED (inner_type);
8842 break;
8844 case COND_EXPR:
8845 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8846 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8847 case COMPOUND_EXPR:
8848 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8849 case MIN_EXPR:
8850 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8851 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8852 case MAX_EXPR:
8853 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8854 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8855 case MODIFY_EXPR:
8856 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8857 case BIND_EXPR:
8858 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8859 case SAVE_EXPR:
8860 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8861 case NON_LVALUE_EXPR:
8862 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8863 case FLOAT_EXPR:
8864 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8865 case RTL_EXPR:
8866 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8868 case CALL_EXPR:
8870 tree fndecl = get_callee_fndecl (t);
8871 tree arglist = TREE_OPERAND (t, 1);
8872 if (fndecl
8873 && DECL_BUILT_IN (fndecl)
8874 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8875 switch (DECL_FUNCTION_CODE (fndecl))
8877 #define CASE_BUILTIN_F(BUILT_IN_FN) \
8878 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
8879 #define CASE_BUILTIN_I(BUILT_IN_FN) \
8880 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
8882 CASE_BUILTIN_F (BUILT_IN_ACOS)
8883 CASE_BUILTIN_F (BUILT_IN_ACOSH)
8884 CASE_BUILTIN_F (BUILT_IN_CABS)
8885 CASE_BUILTIN_F (BUILT_IN_COSH)
8886 CASE_BUILTIN_F (BUILT_IN_ERFC)
8887 CASE_BUILTIN_F (BUILT_IN_EXP)
8888 CASE_BUILTIN_F (BUILT_IN_EXP10)
8889 CASE_BUILTIN_F (BUILT_IN_EXP2)
8890 CASE_BUILTIN_F (BUILT_IN_FABS)
8891 CASE_BUILTIN_F (BUILT_IN_FDIM)
8892 CASE_BUILTIN_F (BUILT_IN_FREXP)
8893 CASE_BUILTIN_F (BUILT_IN_HYPOT)
8894 CASE_BUILTIN_F (BUILT_IN_POW10)
8895 CASE_BUILTIN_F (BUILT_IN_SQRT)
8896 CASE_BUILTIN_I (BUILT_IN_FFS)
8897 CASE_BUILTIN_I (BUILT_IN_PARITY)
8898 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
8899 /* Always true. */
8900 return 1;
8902 CASE_BUILTIN_F (BUILT_IN_ASINH)
8903 CASE_BUILTIN_F (BUILT_IN_ATAN)
8904 CASE_BUILTIN_F (BUILT_IN_ATANH)
8905 CASE_BUILTIN_F (BUILT_IN_CBRT)
8906 CASE_BUILTIN_F (BUILT_IN_CEIL)
8907 CASE_BUILTIN_F (BUILT_IN_ERF)
8908 CASE_BUILTIN_F (BUILT_IN_EXPM1)
8909 CASE_BUILTIN_F (BUILT_IN_FLOOR)
8910 CASE_BUILTIN_F (BUILT_IN_FMOD)
8911 CASE_BUILTIN_F (BUILT_IN_LDEXP)
8912 CASE_BUILTIN_F (BUILT_IN_LLRINT)
8913 CASE_BUILTIN_F (BUILT_IN_LLROUND)
8914 CASE_BUILTIN_F (BUILT_IN_LRINT)
8915 CASE_BUILTIN_F (BUILT_IN_LROUND)
8916 CASE_BUILTIN_F (BUILT_IN_MODF)
8917 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
8918 CASE_BUILTIN_F (BUILT_IN_POW)
8919 CASE_BUILTIN_F (BUILT_IN_RINT)
8920 CASE_BUILTIN_F (BUILT_IN_ROUND)
8921 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
8922 CASE_BUILTIN_F (BUILT_IN_SINH)
8923 CASE_BUILTIN_F (BUILT_IN_TANH)
8924 CASE_BUILTIN_F (BUILT_IN_TRUNC)
8925 /* True if the 1st argument is nonnegative. */
8926 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8928 CASE_BUILTIN_F(BUILT_IN_FMAX)
8929 /* True if the 1st OR 2nd arguments are nonnegative. */
8930 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
8931 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8933 CASE_BUILTIN_F(BUILT_IN_FMIN)
8934 /* True if the 1st AND 2nd arguments are nonnegative. */
8935 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
8936 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8938 CASE_BUILTIN_F(BUILT_IN_COPYSIGN)
8939 /* True if the 2nd argument is nonnegative. */
8940 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8942 default:
8943 break;
8944 #undef CASE_BUILTIN_F
8945 #undef CASE_BUILTIN_I
8949 /* ... fall through ... */
8951 default:
8952 if (truth_value_p (TREE_CODE (t)))
8953 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8954 return 1;
8957 /* We don't know sign of `t', so be conservative and return false. */
8958 return 0;
8961 /* Return true when T is an address and is known to be nonzero.
8962 For floating point we further ensure that T is not denormal.
8963 Similar logic is present in nonzero_address in rtlanal.h */
8965 static bool
8966 tree_expr_nonzero_p (tree t)
8968 tree type = TREE_TYPE (t);
8970 /* Doing something useful for floating point would need more work. */
8971 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8972 return false;
8974 switch (TREE_CODE (t))
8976 case ABS_EXPR:
8977 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
8978 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
8980 case INTEGER_CST:
8981 return !integer_zerop (t);
8983 case PLUS_EXPR:
8984 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
8986 /* With the presence of negative values it is hard
8987 to say something. */
8988 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8989 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
8990 return false;
8991 /* One of operands must be positive and the other non-negative. */
8992 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
8993 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
8995 break;
8997 case MULT_EXPR:
8998 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9000 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9001 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9003 break;
9005 case NOP_EXPR:
9007 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9008 tree outer_type = TREE_TYPE (t);
9010 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9011 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9013 break;
9015 case ADDR_EXPR:
9016 /* Weak declarations may link to NULL. */
9017 if (DECL_P (TREE_OPERAND (t, 0)))
9018 return !DECL_WEAK (TREE_OPERAND (t, 0));
9019 /* Constants and all other cases are never weak. */
9020 return true;
9022 case COND_EXPR:
9023 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9024 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9026 case MIN_EXPR:
9027 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9028 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9030 case MAX_EXPR:
9031 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9033 /* When both operands are nonzero, then MAX must be too. */
9034 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9035 return true;
9037 /* MAX where operand 0 is positive is positive. */
9038 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9040 /* MAX where operand 1 is positive is positive. */
9041 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9042 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9043 return true;
9044 break;
9046 case COMPOUND_EXPR:
9047 case MODIFY_EXPR:
9048 case BIND_EXPR:
9049 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9051 case SAVE_EXPR:
9052 case NON_LVALUE_EXPR:
9053 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9055 case BIT_IOR_EXPR:
9056 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9057 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9059 default:
9060 break;
9062 return false;
/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      /* VOIDmode CONST_DOUBLEs hold wide integers; the sign lives in
	 the high word.  For floating-point modes be conservative.  */
      if (GET_MODE (r) == VOIDmode)
	return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
	/* A vector constant is nonnegative iff every element is.  */
	int units, i;
	rtx elt;

	units = CONST_VECTOR_NUNITS (r);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (r, i);
	    if (!rtl_expr_nonnegative_p (elt))
	      return 0;
	  }

	return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      /* Unknown RTL: assume nothing.  */
      return 0;
    }
}
/* See if we are applying CODE, a relational to the highest or lowest
   possible integer of TYPE.  If so, then the result is a compile
   time constant.

   *CODE_P, *OP0_P and *OP1_P may be updated in place to an equivalent
   comparison when the result is not a compile-time constant (e.g.
   X >= MAX becomes X == MAX); the caller re-folds with the new code.
   Returns the folded constant, or NULL_TREE when nothing was proved.  */

static tree
fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
		       tree *op1_p)
{
  tree op0 = *op0_p;
  tree op1 = *op1_p;
  enum tree_code code = *code_p;
  int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));

  /* Only a single-word integer (or pointer) constant without overflow
     can be matched against its type's extreme values.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && ! TREE_CONSTANT_OVERFLOW (op1)
      && width <= HOST_BITS_PER_WIDE_INT
      && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
	  || POINTER_TYPE_P (TREE_TYPE (op1))))
    {
      unsigned HOST_WIDE_INT signed_max;
      unsigned HOST_WIDE_INT max, min;

      /* Largest representable signed value of this width.  */
      signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
	{
	  max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
	  min = 0;
	}
      else
	{
	  max = signed_max;
	  min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	}

      /* Comparison against the maximum value of the type.  */
      if (TREE_INT_CST_HIGH (op1) == 0
	  && TREE_INT_CST_LOW (op1) == max)
	switch (code)
	  {
	  case GT_EXPR:
	    /* X > MAX is always false.  */
	    return omit_one_operand (type,
				     convert (type, integer_zero_node),
				     op0);
	  case GE_EXPR:
	    /* X >= MAX is X == MAX.  */
	    *code_p = EQ_EXPR;
	    break;
	  case LE_EXPR:
	    /* X <= MAX is always true.  */
	    return omit_one_operand (type,
				     convert (type, integer_one_node),
				     op0);
	  case LT_EXPR:
	    /* X < MAX is X != MAX.  */
	    *code_p = NE_EXPR;
	    break;

	  /* The GE_EXPR and LT_EXPR cases above are not normally
	     reached because of previous transformations.  */

	  default:
	    break;
	  }
      /* Comparison against MAX - 1.  */
      else if (TREE_INT_CST_HIGH (op1) == 0
	       && TREE_INT_CST_LOW (op1) == max - 1)
	switch (code)
	  {
	  case GT_EXPR:
	    /* X > MAX-1 is X == MAX.  */
	    *code_p = EQ_EXPR;
	    *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
	    break;
	  case LE_EXPR:
	    /* X <= MAX-1 is X != MAX.  */
	    *code_p = NE_EXPR;
	    *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
	    break;
	  default:
	    break;
	  }
      /* Comparison against the minimum value of the type.  */
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
	       && TREE_INT_CST_LOW (op1) == min)
	switch (code)
	  {
	  case LT_EXPR:
	    /* X < MIN is always false.  */
	    return omit_one_operand (type,
				     convert (type, integer_zero_node),
				     op0);
	  case LE_EXPR:
	    /* X <= MIN is X == MIN.  */
	    *code_p = EQ_EXPR;
	    break;

	  case GE_EXPR:
	    /* X >= MIN is always true.  */
	    return omit_one_operand (type,
				     convert (type, integer_one_node),
				     op0);
	  case GT_EXPR:
	    /* X > MIN is X != MIN.  */
	    *code_p = NE_EXPR;
	    break;

	  default:
	    break;
	  }
      /* Comparison against MIN + 1.  */
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
	       && TREE_INT_CST_LOW (op1) == min + 1)
	switch (code)
	  {
	  case GE_EXPR:
	    /* X >= MIN+1 is X != MIN.  */
	    *code_p = NE_EXPR;
	    *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	    break;
	  case LT_EXPR:
	    /* X < MIN+1 is X == MIN.  */
	    *code_p = EQ_EXPR;
	    *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	    break;
	  default:
	    break;
	  }

      /* Unsigned X compared with the largest signed value: equivalent
	 to a sign test of X reinterpreted as signed.  */
      else if (TREE_INT_CST_HIGH (op1) == 0
	       && TREE_INT_CST_LOW (op1) == signed_max
	       && TYPE_UNSIGNED (TREE_TYPE (op1))
	       /* signed_type does not work on pointer types.  */
	       && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	{
	  /* The following case also applies to X < signed_max+1
	     and X >= signed_max+1 because previous transformations.  */
	  if (code == LE_EXPR || code == GT_EXPR)
	    {
	      tree st0, st1, exp, retval;
	      st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
	      st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));

	      exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
			    type,
			    convert (st0, op0),
			    convert (st1, integer_zero_node));

	      retval
		= nondestructive_fold_binary_to_constant (TREE_CODE (exp),
							  TREE_TYPE (exp),
							  TREE_OPERAND (exp, 0),
							  TREE_OPERAND (exp, 1));

	      /* If we are in gimple form, then returning EXP would create
		 non-gimple expressions.  Clearing it is safe and insures
		 we do not allow a non-gimple expression to escape.  */
	      if (in_gimple_form)
		exp = NULL;

	      return (retval ? retval : exp);
	    }
	}
    }

  return NULL_TREE;
}
9263 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9264 attempt to fold the expression to a constant without modifying TYPE,
9265 OP0 or OP1.
9267 If the expression could be simplified to a constant, then return
9268 the constant. If the expression would not be simplified to a
9269 constant, then return NULL_TREE.
9271 Note this is primarily designed to be called after gimplification
9272 of the tree structures and when at least one operand is a constant.
9273 As a result of those simplifying assumptions this routine is far
9274 simpler than the generic fold routine. */
9276 tree
9277 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9278 tree op0, tree op1)
9280 int wins = 1;
9281 tree subop0;
9282 tree subop1;
9283 tree tem;
9285 /* If this is a commutative operation, and ARG0 is a constant, move it
9286 to ARG1 to reduce the number of tests below. */
9287 if (commutative_tree_code (code)
9288 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9290 tem = op0;
9291 op0 = op1;
9292 op1 = tem;
9295 /* If either operand is a complex type, extract its real component. */
9296 if (TREE_CODE (op0) == COMPLEX_CST)
9297 subop0 = TREE_REALPART (op0);
9298 else
9299 subop0 = op0;
9301 if (TREE_CODE (op1) == COMPLEX_CST)
9302 subop1 = TREE_REALPART (op1);
9303 else
9304 subop1 = op1;
9306 /* Note if either argument is not a real or integer constant.
9307 With a few exceptions, simplification is limited to cases
9308 where both arguments are constants. */
9309 if ((TREE_CODE (subop0) != INTEGER_CST
9310 && TREE_CODE (subop0) != REAL_CST)
9311 || (TREE_CODE (subop1) != INTEGER_CST
9312 && TREE_CODE (subop1) != REAL_CST))
9313 wins = 0;
9315 switch (code)
9317 case PLUS_EXPR:
9318 /* (plus (address) (const_int)) is a constant. */
9319 if (TREE_CODE (op0) == PLUS_EXPR
9320 && TREE_CODE (op1) == INTEGER_CST
9321 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9322 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9323 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9324 == ADDR_EXPR)))
9325 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9327 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9328 const_binop (PLUS_EXPR, op1,
9329 TREE_OPERAND (op0, 1), 0));
9331 case BIT_XOR_EXPR:
9333 binary:
9334 if (!wins)
9335 return NULL_TREE;
9337 /* Both arguments are constants. Simplify. */
9338 tem = const_binop (code, op0, op1, 0);
9339 if (tem != NULL_TREE)
9341 /* The return value should always have the same type as
9342 the original expression. */
9343 if (TREE_TYPE (tem) != type)
9344 tem = convert (type, tem);
9346 return tem;
9348 return NULL_TREE;
9350 case MINUS_EXPR:
9351 /* Fold &x - &x. This can happen from &x.foo - &x.
9352 This is unsafe for certain floats even in non-IEEE formats.
9353 In IEEE, it is unsafe because it does wrong for NaNs.
9354 Also note that operand_equal_p is always false if an
9355 operand is volatile. */
9356 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9357 return convert (type, integer_zero_node);
9359 goto binary;
9361 case MULT_EXPR:
9362 case BIT_AND_EXPR:
9363 /* Special case multiplication or bitwise AND where one argument
9364 is zero. */
9365 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9366 return omit_one_operand (type, op1, op0);
9367 else
9368 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9369 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9370 && real_zerop (op1))
9371 return omit_one_operand (type, op1, op0);
9373 goto binary;
9375 case BIT_IOR_EXPR:
9376 /* Special case when we know the result will be all ones. */
9377 if (integer_all_onesp (op1))
9378 return omit_one_operand (type, op1, op0);
9380 goto binary;
9382 case TRUNC_DIV_EXPR:
9383 case ROUND_DIV_EXPR:
9384 case FLOOR_DIV_EXPR:
9385 case CEIL_DIV_EXPR:
9386 case EXACT_DIV_EXPR:
9387 case TRUNC_MOD_EXPR:
9388 case ROUND_MOD_EXPR:
9389 case FLOOR_MOD_EXPR:
9390 case CEIL_MOD_EXPR:
9391 case RDIV_EXPR:
9392 /* Division by zero is undefined. */
9393 if (integer_zerop (op1))
9394 return NULL_TREE;
9396 if (TREE_CODE (op1) == REAL_CST
9397 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9398 && real_zerop (op1))
9399 return NULL_TREE;
9401 goto binary;
9403 case MIN_EXPR:
9404 if (INTEGRAL_TYPE_P (type)
9405 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9406 return omit_one_operand (type, op1, op0);
9408 goto binary;
9410 case MAX_EXPR:
9411 if (INTEGRAL_TYPE_P (type)
9412 && TYPE_MAX_VALUE (type)
9413 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9414 return omit_one_operand (type, op1, op0);
9416 goto binary;
9418 case RSHIFT_EXPR:
9419 /* Optimize -1 >> x for arithmetic right shifts. */
9420 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9421 return omit_one_operand (type, op0, op1);
9422 /* ... fall through ... */
9424 case LSHIFT_EXPR:
9425 if (integer_zerop (op0))
9426 return omit_one_operand (type, op0, op1);
9428 /* Since negative shift count is not well-defined, don't
9429 try to compute it in the compiler. */
9430 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
9431 return NULL_TREE;
9433 goto binary;
9435 case LROTATE_EXPR:
9436 case RROTATE_EXPR:
9437 /* -1 rotated either direction by any amount is still -1. */
9438 if (integer_all_onesp (op0))
9439 return omit_one_operand (type, op0, op1);
9441 /* 0 rotated either direction by any amount is still zero. */
9442 if (integer_zerop (op0))
9443 return omit_one_operand (type, op0, op1);
9445 goto binary;
9447 case COMPLEX_EXPR:
9448 if (wins)
9449 return build_complex (type, op0, op1);
9450 return NULL_TREE;
9452 case LT_EXPR:
9453 case LE_EXPR:
9454 case GT_EXPR:
9455 case GE_EXPR:
9456 case EQ_EXPR:
9457 case NE_EXPR:
9458 /* If one arg is a real or integer constant, put it last. */
9459 if ((TREE_CODE (op0) == INTEGER_CST
9460 && TREE_CODE (op1) != INTEGER_CST)
9461 || (TREE_CODE (op0) == REAL_CST
9462 && TREE_CODE (op0) != REAL_CST))
9464 tree temp;
9466 temp = op0;
9467 op0 = op1;
9468 op1 = temp;
9469 code = swap_tree_comparison (code);
9472 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9473 This transformation affects the cases which are handled in later
9474 optimizations involving comparisons with non-negative constants. */
9475 if (TREE_CODE (op1) == INTEGER_CST
9476 && TREE_CODE (op0) != INTEGER_CST
9477 && tree_int_cst_sgn (op1) > 0)
9479 switch (code)
9481 case GE_EXPR:
9482 code = GT_EXPR;
9483 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9484 break;
9486 case LT_EXPR:
9487 code = LE_EXPR;
9488 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9489 break;
9491 default:
9492 break;
9496 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
9497 if (tem)
9498 return tem;
9500 if (!wins)
9501 return NULL_TREE;
9503 return fold_relational_const (code, type, op0, op1);
9505 case RANGE_EXPR:
9506 /* This could probably be handled. */
9507 return NULL_TREE;
9509 case TRUTH_AND_EXPR:
9510 /* If second arg is constant zero, result is zero, but first arg
9511 must be evaluated. */
9512 if (integer_zerop (op1))
9513 return omit_one_operand (type, op1, op0);
9514 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9515 case will be handled here. */
9516 if (integer_zerop (op0))
9517 return omit_one_operand (type, op0, op1);
9518 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9520 int x1 = ! integer_zerop (op0);
9521 int x2 = ! integer_zerop (op1);
9523 return ((x1 & x2) ? integer_one_node : integer_zero_node);
9525 return NULL_TREE;
9527 case TRUTH_OR_EXPR:
9528 /* If second arg is constant true, result is true, but we must
9529 evaluate first arg. */
9530 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
9531 return omit_one_operand (type, op1, op0);
9532 /* Likewise for first arg, but note this only occurs here for
9533 TRUTH_OR_EXPR. */
9534 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
9535 return omit_one_operand (type, op0, op1);
9536 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9538 int x1 = ! integer_zerop (op0);
9539 int x2 = ! integer_zerop (op1);
9541 return ((x1 | x2) ? integer_one_node : integer_zero_node);
9543 return NULL_TREE;
9545 case TRUTH_XOR_EXPR:
9546 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9548 int x1 = ! integer_zerop (op0);
9549 int x2 = ! integer_zerop (op1);
9551 return ((x1 ^ x2) ? integer_one_node : integer_zero_node);
9553 return NULL_TREE;
9555 default:
9556 return NULL_TREE;
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when op0 is a constant.  As a result
   of those simplifying assumptions this routine is far simpler than
   the generic fold routine.  */

tree
nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
				       tree op0)
{
  tree t;

  /* Make sure we have a suitable constant argument.  */
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      /* For a complex constant, conversion looks at the real part.  */
      if (TREE_CODE (op0) == COMPLEX_CST)
	subop = TREE_REALPART (op0);
      else
	subop = op0;

      if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
	return NULL_TREE;
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      return fold_convert_const (code, type, op0);

    case NEGATE_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	return fold_negate_const (op0, type);
      else
	return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	return fold_abs_const (op0, type);
      else
	return NULL_TREE;

    case BIT_NOT_EXPR:
      /* NOTE(review): the REAL_CST half of this test looks suspicious —
	 the TREE_INT_CST_* accessors below are only meaningful for
	 INTEGER_CST.  Presumably BIT_NOT_EXPR never reaches here with a
	 REAL_CST operand; confirm before relying on it.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	{
	  t = build_int_2 (~ TREE_INT_CST_LOW (op0), ~ TREE_INT_CST_HIGH (op0));
	  TREE_TYPE (t) = type;
	  force_fit_type (t, 0);
	  TREE_OVERFLOW (t) = TREE_OVERFLOW (op0);
	  TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (op0);
	  return t;
	}
      else
	return NULL_TREE;

    case REALPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
	return TREE_REALPART (op0);
      else
	return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
	return TREE_IMAGPART (op0);
      else
	return NULL_TREE;

    case CONJ_EXPR:
      /* Conjugate: negate the imaginary part.  */
      if (TREE_CODE (op0) == COMPLEX_CST
	  && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
	return build_complex (type, TREE_REALPART (op0),
			      negate_expr (TREE_IMAGPART (op0)));
      return NULL_TREE;

    default:
      return NULL_TREE;
    }
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	{
	  /* Pointer form: *(str + index).  string_constant extracts both
	     the STRING_CST and the offset.  */
	  string = string_constant (exp1, &index);
	}
      else
	{
	  /* Array form: str[index], adjusted for a nonzero lower bound.  */
	  tree domain = TYPE_DOMAIN (TREE_TYPE (exp1));
	  tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  index = convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, convert (sizetype, low_bound));

	  string = exp1;
	}

      /* Only fold in-bounds accesses to single-byte integer elements.  */
      if (string
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_2 ((TREE_STRING_POINTER (string)
			     [TREE_INT_CST_LOW (index)]), 0);
    }
  return NULL;
}
9701 /* Return the tree for neg (ARG0) when ARG0 is known to be either
9702 an integer constant or real constant.
9704 TYPE is the type of the result. */
9706 static tree
9707 fold_negate_const (tree arg0, tree type)
9709 tree t = NULL_TREE;
9711 if (TREE_CODE (arg0) == INTEGER_CST)
9713 unsigned HOST_WIDE_INT low;
9714 HOST_WIDE_INT high;
9715 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9716 TREE_INT_CST_HIGH (arg0),
9717 &low, &high);
9718 t = build_int_2 (low, high);
9719 TREE_TYPE (t) = type;
9720 TREE_OVERFLOW (t)
9721 = (TREE_OVERFLOW (arg0)
9722 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
9723 TREE_CONSTANT_OVERFLOW (t)
9724 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9726 else if (TREE_CODE (arg0) == REAL_CST)
9727 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9728 #ifdef ENABLE_CHECKING
9729 else
9730 abort ();
9731 #endif
9733 return t;
9736 /* Return the tree for abs (ARG0) when ARG0 is known to be either
9737 an integer constant or real constant.
9739 TYPE is the type of the result. */
9741 static tree
9742 fold_abs_const (tree arg0, tree type)
9744 tree t = NULL_TREE;
9746 if (TREE_CODE (arg0) == INTEGER_CST)
9748 /* If the value is unsigned, then the absolute value is
9749 the same as the ordinary value. */
9750 if (TYPE_UNSIGNED (type))
9751 return arg0;
9752 /* Similarly, if the value is non-negative. */
9753 else if (INT_CST_LT (integer_minus_one_node, arg0))
9754 return arg0;
9755 /* If the value is negative, then the absolute value is
9756 its negation. */
9757 else
9759 unsigned HOST_WIDE_INT low;
9760 HOST_WIDE_INT high;
9761 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9762 TREE_INT_CST_HIGH (arg0),
9763 &low, &high);
9764 t = build_int_2 (low, high);
9765 TREE_TYPE (t) = type;
9766 TREE_OVERFLOW (t)
9767 = (TREE_OVERFLOW (arg0)
9768 | force_fit_type (t, overflow));
9769 TREE_CONSTANT_OVERFLOW (t)
9770 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9771 return t;
9774 else if (TREE_CODE (arg0) == REAL_CST)
9776 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
9777 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9778 else
9779 return arg0;
9781 #ifdef ENABLE_CHECKING
9782 else
9783 abort ();
9784 #endif
9786 return t;
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem;
  int invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     will check below in the one case that it matters.  */

  tem = NULL_TREE;
  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
      else
	tem = build_int_2 ((TYPE_UNSIGNED (TREE_TYPE (op0))
			    ? INT_CST_LT_UNSIGNED (op0, op1)
			    : INT_CST_LT (op0, op1)),
			   0);
    }

  /* X == 0 folds to false when X is provably nonzero.  */
  else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
	   && integer_zerop (op1) && tree_expr_nonzero_p (op0))
    tem = build_int_2 (0, 0);

  /* Two real constants can be compared explicitly.  */
  else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      /* If either operand is a NaN, the result is false with two
	 exceptions: First, an NE_EXPR is true on NaNs, but that case
	 is already handled correctly since we will be inverting the
	 result for NE_EXPR.  Second, if we had inverted a LE_EXPR
	 or a GE_EXPR into a LT_EXPR, we must return true so that it
	 will be inverted into false.  */

      if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
	  || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
	tem = build_int_2 (invert && code == LT_EXPR, 0);

      else if (code == EQ_EXPR)
	tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
					      TREE_REAL_CST (op1)),
			   0);
      else
	tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
					     TREE_REAL_CST (op1)),
			   0);
    }

  if (tem == NULL_TREE)
    return NULL_TREE;

  /* Apply the deferred inversion (NE from EQ, GE from LT).  */
  if (invert)
    TREE_INT_CST_LOW (tem) ^= 1;

  TREE_TYPE (tem) = type;
  /* Boolean results must go through the language's truth-value
     conversion hook.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return lang_hooks.truthvalue_conversion (tem);
  return tem;
}
9880 #include "gt-fold-const.h"