* cp-tree.h (DECL_LOCAL_FUCNTION_P): New macro.
[official-gcc.git] / gcc / fold-const.c
blobb93f49e400e42bb55669ee8d49363264e8154a1f
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
25 @@ warn if precision et. al. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
#include "config.h"
#include "system.h"
#include <setjmp.h>
#include <string.h>
#include "flags.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
55 static void encode PROTO((HOST_WIDE_INT *,
56 HOST_WIDE_INT, HOST_WIDE_INT));
57 static void decode PROTO((HOST_WIDE_INT *,
58 HOST_WIDE_INT *, HOST_WIDE_INT *));
59 int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
60 HOST_WIDE_INT, HOST_WIDE_INT,
61 HOST_WIDE_INT, HOST_WIDE_INT *,
62 HOST_WIDE_INT *, HOST_WIDE_INT *,
63 HOST_WIDE_INT *));
64 static int split_tree PROTO((tree, enum tree_code, tree *,
65 tree *, int *));
66 static tree int_const_binop PROTO((enum tree_code, tree, tree, int, int));
67 static tree const_binop PROTO((enum tree_code, tree, tree, int));
68 static tree fold_convert PROTO((tree, tree));
69 static enum tree_code invert_tree_comparison PROTO((enum tree_code));
70 static enum tree_code swap_tree_comparison PROTO((enum tree_code));
71 static int truth_value_p PROTO((enum tree_code));
72 static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
73 static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
74 static tree eval_subst PROTO((tree, tree, tree, tree, tree));
75 static tree omit_one_operand PROTO((tree, tree, tree));
76 static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
77 static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
78 static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
79 static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
80 tree, tree));
81 static tree decode_field_reference PROTO((tree, int *, int *,
82 enum machine_mode *, int *,
83 int *, tree *, tree *));
84 static int all_ones_mask_p PROTO((tree, int));
85 static int simple_operand_p PROTO((tree));
86 static tree range_binop PROTO((enum tree_code, tree, tree, int,
87 tree, int));
88 static tree make_range PROTO((tree, int *, tree *, tree *));
89 static tree build_range_check PROTO((tree, tree, int, tree, tree));
90 static int merge_ranges PROTO((int *, tree *, tree *, int, tree, tree,
91 int, tree, tree));
92 static tree fold_range_test PROTO((tree));
93 static tree unextend PROTO((tree, int, int, tree));
94 static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
95 static tree optimize_minmax_comparison PROTO((tree));
96 static tree strip_compound_expr PROTO((tree, tree));
97 static int multiple_of_p PROTO((tree, tree, tree));
98 static tree constant_boolean_node PROTO((int, tree));
99 static int count_cond PROTO((tree, int));
100 static void const_binop_1 PROTO((PTR));
101 static void fold_convert_1 PROTO((PTR));
103 #ifndef BRANCH_COST
104 #define BRANCH_COST 1
105 #endif
107 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
108 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
109 Then this yields nonzero if overflow occurred during the addition.
110 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
111 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
112 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
114 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
115 We do that by representing the two-word integer in 4 words, with only
116 HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number. */
118 #define LOWPART(x) \
119 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
120 #define HIGHPART(x) \
121 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
122 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
124 /* Unpack a two-word integer into 4 words.
125 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
126 WORDS points to the array of HOST_WIDE_INTs. */
128 static void
129 encode (words, low, hi)
130 HOST_WIDE_INT *words;
131 HOST_WIDE_INT low, hi;
133 words[0] = LOWPART (low);
134 words[1] = HIGHPART (low);
135 words[2] = LOWPART (hi);
136 words[3] = HIGHPART (hi);
139 /* Pack an array of 4 words into a two-word integer.
140 WORDS points to the array of words.
141 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
143 static void
144 decode (words, low, hi)
145 HOST_WIDE_INT *words;
146 HOST_WIDE_INT *low, *hi;
148 *low = words[0] | words[1] * BASE;
149 *hi = words[2] | words[3] * BASE;
152 /* Make the integer constant T valid for its type
153 by setting to 0 or 1 all the bits in the constant
154 that don't belong in the type.
155 Yield 1 if a signed overflow occurs, 0 otherwise.
156 If OVERFLOW is nonzero, a signed overflow has already occurred
157 in calculating T, so propagate it.
159 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
160 if it exists. */
163 force_fit_type (t, overflow)
164 tree t;
165 int overflow;
167 HOST_WIDE_INT low, high;
168 register int prec;
170 if (TREE_CODE (t) == REAL_CST)
172 #ifdef CHECK_FLOAT_VALUE
173 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
174 overflow);
175 #endif
176 return overflow;
179 else if (TREE_CODE (t) != INTEGER_CST)
180 return overflow;
182 low = TREE_INT_CST_LOW (t);
183 high = TREE_INT_CST_HIGH (t);
185 if (POINTER_TYPE_P (TREE_TYPE (t)))
186 prec = POINTER_SIZE;
187 else
188 prec = TYPE_PRECISION (TREE_TYPE (t));
190 /* First clear all bits that are beyond the type's precision. */
192 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
194 else if (prec > HOST_BITS_PER_WIDE_INT)
196 TREE_INT_CST_HIGH (t)
197 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
199 else
201 TREE_INT_CST_HIGH (t) = 0;
202 if (prec < HOST_BITS_PER_WIDE_INT)
203 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
206 /* Unsigned types do not suffer sign extension or overflow. */
207 if (TREE_UNSIGNED (TREE_TYPE (t)))
208 return overflow;
210 /* If the value's sign bit is set, extend the sign. */
211 if (prec != 2 * HOST_BITS_PER_WIDE_INT
212 && (prec > HOST_BITS_PER_WIDE_INT
213 ? (TREE_INT_CST_HIGH (t)
214 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
215 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
217 /* Value is negative:
218 set to 1 all the bits that are outside this type's precision. */
219 if (prec > HOST_BITS_PER_WIDE_INT)
221 TREE_INT_CST_HIGH (t)
222 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
224 else
226 TREE_INT_CST_HIGH (t) = -1;
227 if (prec < HOST_BITS_PER_WIDE_INT)
228 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
232 /* Yield nonzero if signed overflow occurred. */
233 return
234 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
235 != 0);
238 /* Add two doubleword integers with doubleword result.
239 Each argument is given as two `HOST_WIDE_INT' pieces.
240 One argument is L1 and H1; the other, L2 and H2.
241 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
244 add_double (l1, h1, l2, h2, lv, hv)
245 HOST_WIDE_INT l1, h1, l2, h2;
246 HOST_WIDE_INT *lv, *hv;
248 HOST_WIDE_INT l, h;
250 l = l1 + l2;
251 h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
253 *lv = l;
254 *hv = h;
255 return overflow_sum_sign (h1, h2, h);
258 /* Negate a doubleword integer with doubleword result.
259 Return nonzero if the operation overflows, assuming it's signed.
260 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
261 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
264 neg_double (l1, h1, lv, hv)
265 HOST_WIDE_INT l1, h1;
266 HOST_WIDE_INT *lv, *hv;
268 if (l1 == 0)
270 *lv = 0;
271 *hv = - h1;
272 return (*hv & h1) < 0;
274 else
276 *lv = - l1;
277 *hv = ~ h1;
278 return 0;
282 /* Multiply two doubleword integers with doubleword result.
283 Return nonzero if the operation overflows, assuming it's signed.
284 Each argument is given as two `HOST_WIDE_INT' pieces.
285 One argument is L1 and H1; the other, L2 and H2.
286 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
289 mul_double (l1, h1, l2, h2, lv, hv)
290 HOST_WIDE_INT l1, h1, l2, h2;
291 HOST_WIDE_INT *lv, *hv;
293 HOST_WIDE_INT arg1[4];
294 HOST_WIDE_INT arg2[4];
295 HOST_WIDE_INT prod[4 * 2];
296 register unsigned HOST_WIDE_INT carry;
297 register int i, j, k;
298 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
300 encode (arg1, l1, h1);
301 encode (arg2, l2, h2);
303 bzero ((char *) prod, sizeof prod);
305 for (i = 0; i < 4; i++)
307 carry = 0;
308 for (j = 0; j < 4; j++)
310 k = i + j;
311 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
312 carry += arg1[i] * arg2[j];
313 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
314 carry += prod[k];
315 prod[k] = LOWPART (carry);
316 carry = HIGHPART (carry);
318 prod[i + 4] = carry;
321 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
323 /* Check for overflow by calculating the top half of the answer in full;
324 it should agree with the low half's sign bit. */
325 decode (prod+4, &toplow, &tophigh);
326 if (h1 < 0)
328 neg_double (l2, h2, &neglow, &neghigh);
329 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
331 if (h2 < 0)
333 neg_double (l1, h1, &neglow, &neghigh);
334 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
336 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
339 /* Shift the doubleword integer in L1, H1 left by COUNT places
340 keeping only PREC bits of result.
341 Shift right if COUNT is negative.
342 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
343 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
345 void
346 lshift_double (l1, h1, count, prec, lv, hv, arith)
347 HOST_WIDE_INT l1, h1, count;
348 int prec;
349 HOST_WIDE_INT *lv, *hv;
350 int arith;
352 if (count < 0)
354 rshift_double (l1, h1, - count, prec, lv, hv, arith);
355 return;
358 #ifdef SHIFT_COUNT_TRUNCATED
359 if (SHIFT_COUNT_TRUNCATED)
360 count %= prec;
361 #endif
363 if (count >= HOST_BITS_PER_WIDE_INT)
365 *hv = (unsigned HOST_WIDE_INT) l1 << (count - HOST_BITS_PER_WIDE_INT);
366 *lv = 0;
368 else
370 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
371 | ((unsigned HOST_WIDE_INT) l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
372 *lv = (unsigned HOST_WIDE_INT) l1 << count;
376 /* Shift the doubleword integer in L1, H1 right by COUNT places
377 keeping only PREC bits of result. COUNT must be positive.
378 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
379 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
381 void
382 rshift_double (l1, h1, count, prec, lv, hv, arith)
383 HOST_WIDE_INT l1, h1, count;
384 int prec ATTRIBUTE_UNUSED;
385 HOST_WIDE_INT *lv, *hv;
386 int arith;
388 unsigned HOST_WIDE_INT signmask;
389 signmask = (arith
390 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
391 : 0);
393 #ifdef SHIFT_COUNT_TRUNCATED
394 if (SHIFT_COUNT_TRUNCATED)
395 count %= prec;
396 #endif
398 if (count >= HOST_BITS_PER_WIDE_INT)
400 *hv = signmask;
401 *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
402 | ((unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT)));
404 else
406 *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
407 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
408 *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
409 | ((unsigned HOST_WIDE_INT) h1 >> count));
413 /* Rotate the doubleword integer in L1, H1 left by COUNT places
414 keeping only PREC bits of result.
415 Rotate right if COUNT is negative.
416 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
418 void
419 lrotate_double (l1, h1, count, prec, lv, hv)
420 HOST_WIDE_INT l1, h1, count;
421 int prec;
422 HOST_WIDE_INT *lv, *hv;
424 HOST_WIDE_INT s1l, s1h, s2l, s2h;
426 count %= prec;
427 if (count < 0)
428 count += prec;
430 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
431 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
432 *lv = s1l | s2l;
433 *hv = s1h | s2h;
436 /* Rotate the doubleword integer in L1, H1 left by COUNT places
437 keeping only PREC bits of result. COUNT must be positive.
438 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
440 void
441 rrotate_double (l1, h1, count, prec, lv, hv)
442 HOST_WIDE_INT l1, h1, count;
443 int prec;
444 HOST_WIDE_INT *lv, *hv;
446 HOST_WIDE_INT s1l, s1h, s2l, s2h;
448 count %= prec;
449 if (count < 0)
450 count += prec;
452 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
453 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
454 *lv = s1l | s2l;
455 *hv = s1h | s2h;
458 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
459 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
460 CODE is a tree code for a kind of division, one of
461 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
462 or EXACT_DIV_EXPR
463 It controls how the quotient is rounded to a integer.
464 Return nonzero if the operation overflows.
465 UNS nonzero says do unsigned division. */
468 div_and_round_double (code, uns,
469 lnum_orig, hnum_orig, lden_orig, hden_orig,
470 lquo, hquo, lrem, hrem)
471 enum tree_code code;
472 int uns;
473 HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
474 HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
475 HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
477 int quo_neg = 0;
478 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
479 HOST_WIDE_INT den[4], quo[4];
480 register int i, j;
481 unsigned HOST_WIDE_INT work;
482 register unsigned HOST_WIDE_INT carry = 0;
483 HOST_WIDE_INT lnum = lnum_orig;
484 HOST_WIDE_INT hnum = hnum_orig;
485 HOST_WIDE_INT lden = lden_orig;
486 HOST_WIDE_INT hden = hden_orig;
487 int overflow = 0;
489 if ((hden == 0) && (lden == 0))
490 overflow = 1, lden = 1;
492 /* calculate quotient sign and convert operands to unsigned. */
493 if (!uns)
495 if (hnum < 0)
497 quo_neg = ~ quo_neg;
498 /* (minimum integer) / (-1) is the only overflow case. */
499 if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
500 overflow = 1;
502 if (hden < 0)
504 quo_neg = ~ quo_neg;
505 neg_double (lden, hden, &lden, &hden);
509 if (hnum == 0 && hden == 0)
510 { /* single precision */
511 *hquo = *hrem = 0;
512 /* This unsigned division rounds toward zero. */
513 *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
514 goto finish_up;
517 if (hnum == 0)
518 { /* trivial case: dividend < divisor */
519 /* hden != 0 already checked. */
520 *hquo = *lquo = 0;
521 *hrem = hnum;
522 *lrem = lnum;
523 goto finish_up;
526 bzero ((char *) quo, sizeof quo);
528 bzero ((char *) num, sizeof num); /* to zero 9th element */
529 bzero ((char *) den, sizeof den);
531 encode (num, lnum, hnum);
532 encode (den, lden, hden);
534 /* Special code for when the divisor < BASE. */
535 if (hden == 0 && lden < (HOST_WIDE_INT) BASE)
537 /* hnum != 0 already checked. */
538 for (i = 4 - 1; i >= 0; i--)
540 work = num[i] + carry * BASE;
541 quo[i] = work / (unsigned HOST_WIDE_INT) lden;
542 carry = work % (unsigned HOST_WIDE_INT) lden;
545 else
547 /* Full double precision division,
548 with thanks to Don Knuth's "Seminumerical Algorithms". */
549 int num_hi_sig, den_hi_sig;
550 unsigned HOST_WIDE_INT quo_est, scale;
552 /* Find the highest non-zero divisor digit. */
553 for (i = 4 - 1; ; i--)
554 if (den[i] != 0) {
555 den_hi_sig = i;
556 break;
559 /* Insure that the first digit of the divisor is at least BASE/2.
560 This is required by the quotient digit estimation algorithm. */
562 scale = BASE / (den[den_hi_sig] + 1);
563 if (scale > 1) { /* scale divisor and dividend */
564 carry = 0;
565 for (i = 0; i <= 4 - 1; i++) {
566 work = (num[i] * scale) + carry;
567 num[i] = LOWPART (work);
568 carry = HIGHPART (work);
569 } num[4] = carry;
570 carry = 0;
571 for (i = 0; i <= 4 - 1; i++) {
572 work = (den[i] * scale) + carry;
573 den[i] = LOWPART (work);
574 carry = HIGHPART (work);
575 if (den[i] != 0) den_hi_sig = i;
579 num_hi_sig = 4;
581 /* Main loop */
582 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
583 /* guess the next quotient digit, quo_est, by dividing the first
584 two remaining dividend digits by the high order quotient digit.
585 quo_est is never low and is at most 2 high. */
586 unsigned HOST_WIDE_INT tmp;
588 num_hi_sig = i + den_hi_sig + 1;
589 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
590 if (num[num_hi_sig] != den[den_hi_sig])
591 quo_est = work / den[den_hi_sig];
592 else
593 quo_est = BASE - 1;
595 /* refine quo_est so it's usually correct, and at most one high. */
596 tmp = work - quo_est * den[den_hi_sig];
597 if (tmp < BASE
598 && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
599 quo_est--;
601 /* Try QUO_EST as the quotient digit, by multiplying the
602 divisor by QUO_EST and subtracting from the remaining dividend.
603 Keep in mind that QUO_EST is the I - 1st digit. */
605 carry = 0;
606 for (j = 0; j <= den_hi_sig; j++)
608 work = quo_est * den[j] + carry;
609 carry = HIGHPART (work);
610 work = num[i + j] - LOWPART (work);
611 num[i + j] = LOWPART (work);
612 carry += HIGHPART (work) != 0;
615 /* if quo_est was high by one, then num[i] went negative and
616 we need to correct things. */
618 if (num[num_hi_sig] < carry)
620 quo_est--;
621 carry = 0; /* add divisor back in */
622 for (j = 0; j <= den_hi_sig; j++)
624 work = num[i + j] + den[j] + carry;
625 carry = HIGHPART (work);
626 num[i + j] = LOWPART (work);
628 num [num_hi_sig] += carry;
631 /* store the quotient digit. */
632 quo[i] = quo_est;
636 decode (quo, lquo, hquo);
638 finish_up:
639 /* if result is negative, make it so. */
640 if (quo_neg)
641 neg_double (*lquo, *hquo, lquo, hquo);
643 /* compute trial remainder: rem = num - (quo * den) */
644 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
645 neg_double (*lrem, *hrem, lrem, hrem);
646 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
648 switch (code)
650 case TRUNC_DIV_EXPR:
651 case TRUNC_MOD_EXPR: /* round toward zero */
652 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
653 return overflow;
655 case FLOOR_DIV_EXPR:
656 case FLOOR_MOD_EXPR: /* round toward negative infinity */
657 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
659 /* quo = quo - 1; */
660 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
661 lquo, hquo);
663 else return overflow;
664 break;
666 case CEIL_DIV_EXPR:
667 case CEIL_MOD_EXPR: /* round toward positive infinity */
668 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
670 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
671 lquo, hquo);
673 else return overflow;
674 break;
676 case ROUND_DIV_EXPR:
677 case ROUND_MOD_EXPR: /* round to closest integer */
679 HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
680 HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
682 /* get absolute values */
683 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
684 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
686 /* if (2 * abs (lrem) >= abs (lden)) */
687 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
688 labs_rem, habs_rem, &ltwice, &htwice);
689 if (((unsigned HOST_WIDE_INT) habs_den
690 < (unsigned HOST_WIDE_INT) htwice)
691 || (((unsigned HOST_WIDE_INT) habs_den
692 == (unsigned HOST_WIDE_INT) htwice)
693 && ((HOST_WIDE_INT unsigned) labs_den
694 < (unsigned HOST_WIDE_INT) ltwice)))
696 if (*hquo < 0)
697 /* quo = quo - 1; */
698 add_double (*lquo, *hquo,
699 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
700 else
701 /* quo = quo + 1; */
702 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
703 lquo, hquo);
705 else return overflow;
707 break;
709 default:
710 abort ();
713 /* compute true remainder: rem = num - (quo * den) */
714 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
715 neg_double (*lrem, *hrem, lrem, hrem);
716 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
717 return overflow;
720 #ifndef REAL_ARITHMETIC
721 /* Effectively truncate a real value to represent the nearest possible value
722 in a narrower mode. The result is actually represented in the same data
723 type as the argument, but its value is usually different.
725 A trap may occur during the FP operations and it is the responsibility
726 of the calling function to have a handler established. */
728 REAL_VALUE_TYPE
729 real_value_truncate (mode, arg)
730 enum machine_mode mode;
731 REAL_VALUE_TYPE arg;
733 return REAL_VALUE_TRUNCATE (mode, arg);
736 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
738 /* Check for infinity in an IEEE double precision number. */
741 target_isinf (x)
742 REAL_VALUE_TYPE x;
744 /* The IEEE 64-bit double format. */
745 union {
746 REAL_VALUE_TYPE d;
747 struct {
748 unsigned sign : 1;
749 unsigned exponent : 11;
750 unsigned mantissa1 : 20;
751 unsigned mantissa2;
752 } little_endian;
753 struct {
754 unsigned mantissa2;
755 unsigned mantissa1 : 20;
756 unsigned exponent : 11;
757 unsigned sign : 1;
758 } big_endian;
759 } u;
761 u.d = dconstm1;
762 if (u.big_endian.sign == 1)
764 u.d = x;
765 return (u.big_endian.exponent == 2047
766 && u.big_endian.mantissa1 == 0
767 && u.big_endian.mantissa2 == 0);
769 else
771 u.d = x;
772 return (u.little_endian.exponent == 2047
773 && u.little_endian.mantissa1 == 0
774 && u.little_endian.mantissa2 == 0);
778 /* Check whether an IEEE double precision number is a NaN. */
781 target_isnan (x)
782 REAL_VALUE_TYPE x;
784 /* The IEEE 64-bit double format. */
785 union {
786 REAL_VALUE_TYPE d;
787 struct {
788 unsigned sign : 1;
789 unsigned exponent : 11;
790 unsigned mantissa1 : 20;
791 unsigned mantissa2;
792 } little_endian;
793 struct {
794 unsigned mantissa2;
795 unsigned mantissa1 : 20;
796 unsigned exponent : 11;
797 unsigned sign : 1;
798 } big_endian;
799 } u;
801 u.d = dconstm1;
802 if (u.big_endian.sign == 1)
804 u.d = x;
805 return (u.big_endian.exponent == 2047
806 && (u.big_endian.mantissa1 != 0
807 || u.big_endian.mantissa2 != 0));
809 else
811 u.d = x;
812 return (u.little_endian.exponent == 2047
813 && (u.little_endian.mantissa1 != 0
814 || u.little_endian.mantissa2 != 0));
818 /* Check for a negative IEEE double precision number. */
821 target_negative (x)
822 REAL_VALUE_TYPE x;
824 /* The IEEE 64-bit double format. */
825 union {
826 REAL_VALUE_TYPE d;
827 struct {
828 unsigned sign : 1;
829 unsigned exponent : 11;
830 unsigned mantissa1 : 20;
831 unsigned mantissa2;
832 } little_endian;
833 struct {
834 unsigned mantissa2;
835 unsigned mantissa1 : 20;
836 unsigned exponent : 11;
837 unsigned sign : 1;
838 } big_endian;
839 } u;
841 u.d = dconstm1;
842 if (u.big_endian.sign == 1)
844 u.d = x;
845 return u.big_endian.sign;
847 else
849 u.d = x;
850 return u.little_endian.sign;
853 #else /* Target not IEEE */
855 /* Let's assume other float formats don't have infinity.
856 (This can be overridden by redefining REAL_VALUE_ISINF.) */
858 target_isinf (x)
859 REAL_VALUE_TYPE x;
861 return 0;
864 /* Let's assume other float formats don't have NaNs.
865 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
867 target_isnan (x)
868 REAL_VALUE_TYPE x;
870 return 0;
873 /* Let's assume other float formats don't have minus zero.
874 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
876 target_negative (x)
877 REAL_VALUE_TYPE x;
879 return x < 0;
881 #endif /* Target not IEEE */
883 /* Try to change R into its exact multiplicative inverse in machine mode
884 MODE. Return nonzero function value if successful. */
887 exact_real_inverse (mode, r)
888 enum machine_mode mode;
889 REAL_VALUE_TYPE *r;
891 jmp_buf float_error;
892 union
894 double d;
895 unsigned short i[4];
896 }x, t, y;
897 int i;
899 /* Usually disable if bounds checks are not reliable. */
900 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
901 return 0;
903 /* Set array index to the less significant bits in the unions, depending
904 on the endian-ness of the host doubles.
905 Disable if insufficient information on the data structure. */
906 #if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
907 return 0;
908 #else
909 #if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
910 #define K 2
911 #else
912 #if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
913 #define K 2
914 #else
915 #define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
916 #endif
917 #endif
918 #endif
920 if (setjmp (float_error))
922 /* Don't do the optimization if there was an arithmetic error. */
923 fail:
924 set_float_handler (NULL_PTR);
925 return 0;
927 set_float_handler (float_error);
929 /* Domain check the argument. */
930 x.d = *r;
931 if (x.d == 0.0)
932 goto fail;
934 #ifdef REAL_INFINITY
935 if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
936 goto fail;
937 #endif
939 /* Compute the reciprocal and check for numerical exactness.
940 It is unnecessary to check all the significand bits to determine
941 whether X is a power of 2. If X is not, then it is impossible for
942 the bottom half significand of both X and 1/X to be all zero bits.
943 Hence we ignore the data structure of the top half and examine only
944 the low order bits of the two significands. */
945 t.d = 1.0 / x.d;
946 if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
947 goto fail;
949 /* Truncate to the required mode and range-check the result. */
950 y.d = REAL_VALUE_TRUNCATE (mode, t.d);
951 #ifdef CHECK_FLOAT_VALUE
952 i = 0;
953 if (CHECK_FLOAT_VALUE (mode, y.d, i))
954 goto fail;
955 #endif
957 /* Fail if truncation changed the value. */
958 if (y.d != t.d || y.d == 0.0)
959 goto fail;
961 #ifdef REAL_INFINITY
962 if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
963 goto fail;
964 #endif
966 /* Output the reciprocal and return success flag. */
967 set_float_handler (NULL_PTR);
968 *r = y.d;
969 return 1;
973 /* Convert C9X hexadecimal floating point string constant S. Return
974 real value type in mode MODE. This function uses the host computer's
975 fp arithmetic when there is no REAL_ARITHMETIC. */
977 REAL_VALUE_TYPE
978 real_hex_to_f (s, mode)
979 char *s;
980 enum machine_mode mode;
982 REAL_VALUE_TYPE ip;
983 char *p = s;
984 unsigned HOST_WIDE_INT low, high;
985 int frexpon, expon, shcount, nrmcount, k;
986 int sign, expsign, decpt, isfloat, isldouble, gotp, lost;
987 char c;
989 isldouble = 0;
990 isfloat = 0;
991 frexpon = 0;
992 expon = 0;
993 expsign = 1;
994 ip = 0.0;
996 while (*p == ' ' || *p == '\t')
997 ++p;
999 /* Sign, if any, comes first. */
1000 sign = 1;
1001 if (*p == '-')
1003 sign = -1;
1004 ++p;
1007 /* The string is supposed to start with 0x or 0X . */
1008 if (*p == '0')
1010 ++p;
1011 if (*p == 'x' || *p == 'X')
1012 ++p;
1013 else
1014 abort ();
1016 else
1017 abort ();
1019 while (*p == '0')
1020 ++p;
1022 high = 0;
1023 low = 0;
1024 lost = 0; /* Nonzero low order bits shifted out and discarded. */
1025 frexpon = 0; /* Bits after the decimal point. */
1026 expon = 0; /* Value of exponent. */
1027 decpt = 0; /* How many decimal points. */
1028 gotp = 0; /* How many P's. */
1029 shcount = 0;
1030 while ((c = *p) != '\0')
1032 if ((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F')
1033 || (c >= 'a' && c <= 'f'))
1035 k = c & 0x7f;
1036 if (k >= 'a')
1037 k = k - 'a' + 10;
1038 else if (k >= 'A')
1039 k = k - 'A' + 10;
1040 else
1041 k = k - '0';
1043 if ((high & 0xf0000000) == 0)
1045 high = (high << 4) + ((low >> 28) & 15);
1046 low = (low << 4) + k;
1047 shcount += 4;
1048 if (decpt)
1049 frexpon += 4;
1051 else
1053 /* Record nonzero lost bits. */
1054 lost |= k;
1055 if (!decpt)
1056 frexpon -= 4;
1058 ++p;
1060 else if ( c == '.')
1062 ++decpt;
1063 ++p;
1065 else if (c == 'p' || c == 'P')
1067 ++gotp;
1068 ++p;
1069 /* Sign of exponent. */
1070 if (*p == '-')
1072 expsign = -1;
1073 ++p;
1075 /* Value of exponent.
1076 The exponent field is a decimal integer. */
1077 while (ISDIGIT(*p))
1079 k = (*p++ & 0x7f) - '0';
1080 expon = 10 * expon + k;
1082 expon *= expsign;
1083 /* F suffix is ambiguous in the significand part
1084 so it must appear after the decimal exponent field. */
1085 if (*p == 'f' || *p == 'F')
1087 isfloat = 1;
1088 ++p;
1089 break;
1092 else if (c == 'l' || c == 'L')
1094 isldouble = 1;
1095 ++p;
1096 break;
1098 else
1099 break;
1101 /* Abort if last character read was not legitimate. */
1102 c = *p;
1103 if ((c != '\0' && c != ' ' && c != '\n' && c != '\r') || (decpt > 1))
1104 abort ();
1105 /* There must be either one decimal point or one p. */
1106 if (decpt == 0 && gotp == 0)
1107 abort ();
1108 shcount -= 4;
1109 if ((high == 0) && (low == 0))
1111 return dconst0;
1114 /* Normalize. */
1115 nrmcount = 0;
1116 if (high == 0)
1118 high = low;
1119 low = 0;
1120 nrmcount += 32;
1122 /* Leave a high guard bit for carry-out. */
1123 if ((high & 0x80000000) != 0)
1125 lost |= low & 1;
1126 low = (low >> 1) | (high << 31);
1127 high = high >> 1;
1128 nrmcount -= 1;
1130 if ((high & 0xffff8000) == 0)
1132 high = (high << 16) + ((low >> 16) & 0xffff);
1133 low = low << 16;
1134 nrmcount += 16;
1136 while ((high & 0xc0000000) == 0)
1138 high = (high << 1) + ((low >> 31) & 1);
1139 low = low << 1;
1140 nrmcount += 1;
1142 if (isfloat || GET_MODE_SIZE(mode) == UNITS_PER_WORD)
1144 /* Keep 24 bits precision, bits 0x7fffff80.
1145 Rounding bit is 0x40. */
1146 lost = lost | low | (high & 0x3f);
1147 low = 0;
1148 if (high & 0x40)
1150 if ((high & 0x80) || lost)
1151 high += 0x40;
1153 high &= 0xffffff80;
1155 else
1157 /* We need real.c to do long double formats, so here default
1158 to double precision. */
1159 #if HOST_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1160 /* IEEE double.
1161 Keep 53 bits precision, bits 0x7fffffff fffffc00.
1162 Rounding bit is low word 0x200. */
1163 lost = lost | (low & 0x1ff);
1164 if (low & 0x200)
1166 if ((low & 0x400) || lost)
1168 low = (low + 0x200) & 0xfffffc00;
1169 if (low == 0)
1170 high += 1;
1173 low &= 0xfffffc00;
1174 #else
1175 /* Assume it's a VAX with 56-bit significand,
1176 bits 0x7fffffff ffffff80. */
1177 lost = lost | (low & 0x7f);
1178 if (low & 0x40)
1180 if ((low & 0x80) || lost)
1182 low = (low + 0x40) & 0xffffff80;
1183 if (low == 0)
1184 high += 1;
1187 low &= 0xffffff80;
1188 #endif
1190 ip = (double) high;
1191 ip = REAL_VALUE_LDEXP (ip, 32) + (double) low;
1192 /* Apply shifts and exponent value as power of 2. */
1193 ip = REAL_VALUE_LDEXP (ip, expon - (nrmcount + frexpon));
1195 if (sign < 0)
1196 ip = -ip;
1197 return ip;
1200 #endif /* no REAL_ARITHMETIC */
1202 /* Split a tree IN into a constant and a variable part
1203 that could be combined with CODE to make IN.
1204 CODE must be a commutative arithmetic operation.
1205 Store the constant part into *CONP and the variable part into *VARP.
1206 Return 1 if this was done; zero means the tree IN did not decompose
1207 this way.
1209 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
1210 Therefore, we must tell the caller whether the variable part
1211 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
1212 The value stored is the coefficient for the variable term.
1213 The constant term we return should always be added;
1214 we negate it if necessary. */
1216 static int
1217 split_tree (in, code, varp, conp, varsignp)
1218 tree in;
1219 enum tree_code code;
1220 tree *varp, *conp;
1221 int *varsignp;
1223 register tree outtype = TREE_TYPE (in);
/* Assume failure: both outputs cleared until we find a decomposition.  */
1224 *varp = 0;
1225 *conp = 0;
1227 /* Strip any conversions that don't change the machine mode. */
1228 while ((TREE_CODE (in) == NOP_EXPR
1229 || TREE_CODE (in) == CONVERT_EXPR)
1230 && (TYPE_MODE (TREE_TYPE (in))
1231 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
1232 in = TREE_OPERAND (in, 0);
1234 if (TREE_CODE (in) == code
1235 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1236 /* We can associate addition and subtraction together
1237 (even though the C standard doesn't say so)
1238 for integers because the value is not affected.
1239 For reals, the value might be affected, so we can't. */
1240 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1241 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
/* NOTE: this inner `code' deliberately shadows the parameter `code';
   from here on it is the tree code of IN's first operand.  */
1243 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
/* Case 1: constant is operand 0 and is a manifest INTEGER_CST.  */
1244 if (code == INTEGER_CST)
1246 *conp = TREE_OPERAND (in, 0);
1247 *varp = TREE_OPERAND (in, 1);
/* Reconvert the variable part if it no longer has OUTTYPE's mode/type,
   so the caller receives a term of the expected type.  */
1248 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1249 && TREE_TYPE (*varp) != outtype)
1250 *varp = convert (outtype, *varp);
/* A second operand of a MINUS_EXPR is subtracted, hence sign -1.  */
1251 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1252 return 1;
/* Case 2: constant is operand 1 (any TREE_CONSTANT, e.g. an address).  */
1254 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
1256 *conp = TREE_OPERAND (in, 1);
1257 *varp = TREE_OPERAND (in, 0);
1258 *varsignp = 1;
1259 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1260 && TREE_TYPE (*varp) != outtype)
1261 *varp = convert (outtype, *varp);
1262 if (TREE_CODE (in) == MINUS_EXPR)
1264 /* If operation is subtraction and constant is second,
1265 must negate it to get an additive constant.
1266 And this cannot be done unless it is a manifest constant.
1267 It could also be the address of a static variable.
1268 We cannot negate that, so give up. */
1269 if (TREE_CODE (*conp) == INTEGER_CST)
1270 /* Subtracting from integer_zero_node loses for long long. */
1271 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
1272 else
1273 return 0;
1275 return 1;
/* Case 3: constant is operand 0 but not an INTEGER_CST (e.g. address).  */
1277 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
1279 *conp = TREE_OPERAND (in, 0);
1280 *varp = TREE_OPERAND (in, 1);
1281 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1282 && TREE_TYPE (*varp) != outtype)
1283 *varp = convert (outtype, *varp);
1284 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1285 return 1;
/* IN did not decompose under CODE.  */
1288 return 0;
1291 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1292 to produce a new constant.
1294 If NOTRUNC is nonzero, do not truncate the result to fit the data type.
1295 If FORSIZE is nonzero, compute overflow for unsigned types. */
1297 static tree
1298 int_const_binop (code, arg1, arg2, notrunc, forsize)
1299 enum tree_code code;
1300 register tree arg1, arg2;
1301 int notrunc, forsize;
/* Operands and result are held as double-word (high, low) pairs.  */
1303 HOST_WIDE_INT int1l, int1h, int2l, int2h;
1304 HOST_WIDE_INT low, hi;
1305 HOST_WIDE_INT garbagel, garbageh;
1306 register tree t;
/* Signedness is taken from ARG1's type; ARG2 is assumed compatible.  */
1307 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
1308 int overflow = 0;
1309 int no_overflow = 0;
1311 int1l = TREE_INT_CST_LOW (arg1);
1312 int1h = TREE_INT_CST_HIGH (arg1);
1313 int2l = TREE_INT_CST_LOW (arg2);
1314 int2h = TREE_INT_CST_HIGH (arg2);
1316 switch (code)
1318 case BIT_IOR_EXPR:
1319 low = int1l | int2l, hi = int1h | int2h;
1320 break;
1322 case BIT_XOR_EXPR:
1323 low = int1l ^ int2l, hi = int1h ^ int2h;
1324 break;
1326 case BIT_AND_EXPR:
1327 low = int1l & int2l, hi = int1h & int2h;
1328 break;
1330 case BIT_ANDTC_EXPR:
1331 low = int1l & ~int2l, hi = int1h & ~int2h;
1332 break;
1334 case RSHIFT_EXPR:
1335 int2l = - int2l;
/* ... fall through: a right shift is a left shift by a negated count.  */
1336 case LSHIFT_EXPR:
1337 /* It's unclear from the C standard whether shifts can overflow.
1338 The following code ignores overflow; perhaps a C standard
1339 interpretation ruling is needed. */
1340 lshift_double (int1l, int1h, int2l,
1341 TYPE_PRECISION (TREE_TYPE (arg1)),
1342 &low, &hi,
1343 !uns);
1344 no_overflow = 1;
1345 break;
1347 case RROTATE_EXPR:
1348 int2l = - int2l;
/* ... fall through: a right rotate is a left rotate by a negated count.  */
1349 case LROTATE_EXPR:
1350 lrotate_double (int1l, int1h, int2l,
1351 TYPE_PRECISION (TREE_TYPE (arg1)),
1352 &low, &hi);
1353 break;
1355 case PLUS_EXPR:
1356 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1357 break;
1359 case MINUS_EXPR:
/* Subtraction is done as addition of the negated second operand.  */
1360 neg_double (int2l, int2h, &low, &hi);
1361 add_double (int1l, int1h, low, hi, &low, &hi);
1362 overflow = overflow_sum_sign (hi, int2h, int1h);
1363 break;
1365 case MULT_EXPR:
1366 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1367 break;
1369 case TRUNC_DIV_EXPR:
1370 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1371 case EXACT_DIV_EXPR:
1372 /* This is a shortcut for a common special case. */
1373 if (int2h == 0 && int2l > 0
1374 && ! TREE_CONSTANT_OVERFLOW (arg1)
1375 && ! TREE_CONSTANT_OVERFLOW (arg2)
1376 && int1h == 0 && int1l >= 0)
1378 if (code == CEIL_DIV_EXPR)
1379 int1l += int2l - 1;
1380 low = int1l / int2l, hi = 0;
1381 break;
1384 /* ... fall through ... */
1386 case ROUND_DIV_EXPR:
/* X / 1 == X, no division needed.  */
1387 if (int2h == 0 && int2l == 1)
1389 low = int1l, hi = int1h;
1390 break;
/* X / X == 1 (for nonzero X), no division needed.  */
1392 if (int1l == int2l && int1h == int2h
1393 && ! (int1l == 0 && int1h == 0))
1395 low = 1, hi = 0;
1396 break;
1398 overflow = div_and_round_double (code, uns,
1399 int1l, int1h, int2l, int2h,
1400 &low, &hi, &garbagel, &garbageh);
1401 break;
1403 case TRUNC_MOD_EXPR:
1404 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1405 /* This is a shortcut for a common special case. */
1406 if (int2h == 0 && int2l > 0
1407 && ! TREE_CONSTANT_OVERFLOW (arg1)
1408 && ! TREE_CONSTANT_OVERFLOW (arg2)
1409 && int1h == 0 && int1l >= 0)
1411 if (code == CEIL_MOD_EXPR)
1412 int1l += int2l - 1;
1413 low = int1l % int2l, hi = 0;
1414 break;
1417 /* ... fall through ... */
1419 case ROUND_MOD_EXPR:
/* For MOD the quotient words are the "garbage" outputs.  */
1420 overflow = div_and_round_double (code, uns,
1421 int1l, int1h, int2l, int2h,
1422 &garbagel, &garbageh, &low, &hi);
1423 break;
1425 case MIN_EXPR:
1426 case MAX_EXPR:
/* LOW temporarily holds the result of the comparison arg1 < arg2,
   done word-by-word with the appropriate signedness.  */
1427 if (uns)
1429 low = (((unsigned HOST_WIDE_INT) int1h
1430 < (unsigned HOST_WIDE_INT) int2h)
1431 || (((unsigned HOST_WIDE_INT) int1h
1432 == (unsigned HOST_WIDE_INT) int2h)
1433 && ((unsigned HOST_WIDE_INT) int1l
1434 < (unsigned HOST_WIDE_INT) int2l)));
1436 else
1438 low = ((int1h < int2h)
1439 || ((int1h == int2h)
1440 && ((unsigned HOST_WIDE_INT) int1l
1441 < (unsigned HOST_WIDE_INT) int2l)));
1443 if (low == (code == MIN_EXPR))
1444 low = int1l, hi = int1h;
1445 else
1446 low = int2l, hi = int2h;
1447 break;
1449 default:
1450 abort ();
/* For small nonnegative sizetype results with no overflow, reuse the
   cached permanent nodes from size_int.  */
1453 if (TREE_TYPE (arg1) == sizetype && hi == 0
1454 && low >= 0
1455 && (TYPE_MAX_VALUE (sizetype) == NULL
1456 || low <= TREE_INT_CST_LOW (TYPE_MAX_VALUE (sizetype)))
1457 && ! overflow
1458 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1459 t = size_int (low);
1460 else
1462 t = build_int_2 (low, hi);
1463 TREE_TYPE (t) = TREE_TYPE (arg1);
/* Propagate overflow: unless NOTRUNC, force_fit_type truncates the
   value to the type and reports whether that changed it.  Unsigned
   overflow only counts when FORSIZE (size computations) is set.  */
1466 TREE_OVERFLOW (t)
1467 = ((notrunc ? (!uns || forsize) && overflow
1468 : force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
1469 | TREE_OVERFLOW (arg1)
1470 | TREE_OVERFLOW (arg2));
1471 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1472 So check if force_fit_type truncated the value. */
1473 if (forsize
1474 && ! TREE_OVERFLOW (t)
1475 && (TREE_INT_CST_HIGH (t) != hi
1476 || TREE_INT_CST_LOW (t) != low))
1477 TREE_OVERFLOW (t) = 1;
1478 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1479 | TREE_CONSTANT_OVERFLOW (arg1)
1480 | TREE_CONSTANT_OVERFLOW (arg2));
1481 return t;
/* Communication block between const_binop and const_binop_1, which is
   run under do_float_handler so floating-point traps raised while
   folding REAL_CST arithmetic can be caught.  */
1484 struct cb_args
1486 /* Input */
1487 tree arg1; /* Left operand; also supplies the result's type.  */
1488 REAL_VALUE_TYPE d1, d2; /* The two operand values.  */
1489 enum tree_code code; /* Operation to perform.  */
1490 /* Output */
1491 tree t; /* Resulting REAL_CST node.  */
/* Worker for const_binop: compute ARGS->code applied to ARGS->d1 and
   ARGS->d2 and store the truncated REAL_CST result in ARGS->t.
   Run via do_float_handler so a floating-point trap aborts cleanly.  */
1494 static void
1495 const_binop_1 (data)
1496 PTR data;
1498 struct cb_args * args = (struct cb_args *) data;
1499 REAL_VALUE_TYPE value;
1501 #ifdef REAL_ARITHMETIC
/* Target-independent software floating point.  */
1502 REAL_ARITHMETIC (value, args->code, args->d1, args->d2);
1503 #else
/* Fall back to host floating-point arithmetic.  */
1504 switch (args->code)
1506 case PLUS_EXPR:
1507 value = args->d1 + args->d2;
1508 break;
1510 case MINUS_EXPR:
1511 value = args->d1 - args->d2;
1512 break;
1514 case MULT_EXPR:
1515 value = args->d1 * args->d2;
1516 break;
1518 case RDIV_EXPR:
1519 #ifndef REAL_INFINITY
1520 if (args->d2 == 0)
1521 abort ();
1522 #endif
1524 value = args->d1 / args->d2;
1525 break;
1527 case MIN_EXPR:
1528 value = MIN (args->d1, args->d2);
1529 break;
1531 case MAX_EXPR:
1532 value = MAX (args->d1, args->d2);
1533 break;
1535 default:
1536 abort ();
1538 #endif /* no REAL_ARITHMETIC */
/* Truncate to the precision of ARG1's mode before building the node.  */
1539 args->t =
1540 build_real (TREE_TYPE (args->arg1),
1541 real_value_truncate (TYPE_MODE (TREE_TYPE (args->arg1)),
1542 value));
1545 /* Combine two constants ARG1 and ARG2 under operation CODE
1546 to produce a new constant.
1547 We assume ARG1 and ARG2 have the same data type,
1548 or at least are the same kind of constant and the same machine mode.
1550 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1552 static tree
1553 const_binop (code, arg1, arg2, notrunc)
1554 enum tree_code code;
1555 register tree arg1, arg2;
1556 int notrunc;
1558 STRIP_NOPS (arg1); STRIP_NOPS (arg2);
/* Integer constants are handled by the specialized double-word code.  */
1560 if (TREE_CODE (arg1) == INTEGER_CST)
1561 return int_const_binop (code, arg1, arg2, notrunc, 0);
1563 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1564 if (TREE_CODE (arg1) == REAL_CST)
1566 REAL_VALUE_TYPE d1;
1567 REAL_VALUE_TYPE d2;
1568 int overflow = 0;
1569 tree t;
1570 struct cb_args args;
1572 d1 = TREE_REAL_CST (arg1);
1573 d2 = TREE_REAL_CST (arg2);
1575 /* If either operand is a NaN, just return it. Otherwise, set up
1576 for floating-point trap; we return an overflow. */
1577 if (REAL_VALUE_ISNAN (d1))
1578 return arg1;
1579 else if (REAL_VALUE_ISNAN (d2))
1580 return arg2;
1582 /* Setup input for const_binop_1() */
1583 args.arg1 = arg1;
1584 args.d1 = d1;
1585 args.d2 = d2;
1586 args.code = code;
1588 if (do_float_handler (const_binop_1, (PTR) &args))
1590 /* Receive output from const_binop_1() */
1591 t = args.t;
1593 else
1595 /* We got an exception from const_binop_1() */
1596 t = copy_node (arg1);
1597 overflow = 1;
/* Propagate overflow and constant-overflow flags from the operands.  */
1600 TREE_OVERFLOW (t)
1601 = (force_fit_type (t, overflow)
1602 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1603 TREE_CONSTANT_OVERFLOW (t)
1604 = TREE_OVERFLOW (t)
1605 | TREE_CONSTANT_OVERFLOW (arg1)
1606 | TREE_CONSTANT_OVERFLOW (arg2);
1607 return t;
1609 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1610 if (TREE_CODE (arg1) == COMPLEX_CST)
1612 register tree type = TREE_TYPE (arg1);
1613 register tree r1 = TREE_REALPART (arg1);
1614 register tree i1 = TREE_IMAGPART (arg1);
1615 register tree r2 = TREE_REALPART (arg2);
1616 register tree i2 = TREE_IMAGPART (arg2);
1617 register tree t;
1619 switch (code)
1621 case PLUS_EXPR:
1622 t = build_complex (type,
1623 const_binop (PLUS_EXPR, r1, r2, notrunc),
1624 const_binop (PLUS_EXPR, i1, i2, notrunc));
1625 break;
1627 case MINUS_EXPR:
1628 t = build_complex (type,
1629 const_binop (MINUS_EXPR, r1, r2, notrunc),
1630 const_binop (MINUS_EXPR, i1, i2, notrunc));
1631 break;
1633 case MULT_EXPR:
/* (r1 + i1*i) * (r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
1634 t = build_complex (type,
1635 const_binop (MINUS_EXPR,
1636 const_binop (MULT_EXPR,
1637 r1, r2, notrunc),
1638 const_binop (MULT_EXPR,
1639 i1, i2, notrunc),
1640 notrunc),
1641 const_binop (PLUS_EXPR,
1642 const_binop (MULT_EXPR,
1643 r1, i2, notrunc),
1644 const_binop (MULT_EXPR,
1645 i1, r2, notrunc),
1646 notrunc));
1647 break;
1649 case RDIV_EXPR:
/* Division by the squared magnitude of the divisor:
   (r1*r2 + i1*i2) / |z2|^2 + ((i1*r2 - r1*i2) / |z2|^2)*i.
   Integral complex types divide with TRUNC_DIV_EXPR instead.  */
1651 register tree magsquared
1652 = const_binop (PLUS_EXPR,
1653 const_binop (MULT_EXPR, r2, r2, notrunc),
1654 const_binop (MULT_EXPR, i2, i2, notrunc),
1655 notrunc);
1657 t = build_complex (type,
1658 const_binop
1659 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1660 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1661 const_binop (PLUS_EXPR,
1662 const_binop (MULT_EXPR, r1, r2,
1663 notrunc),
1664 const_binop (MULT_EXPR, i1, i2,
1665 notrunc),
1666 notrunc),
1667 magsquared, notrunc),
1668 const_binop
1669 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1670 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1671 const_binop (MINUS_EXPR,
1672 const_binop (MULT_EXPR, i1, r2,
1673 notrunc),
1674 const_binop (MULT_EXPR, r1, i2,
1675 notrunc),
1676 notrunc),
1677 magsquared, notrunc));
1679 break;
1681 default:
1682 abort ();
1684 return t;
/* Not a kind of constant we can fold here.  */
1686 return 0;
1689 /* Return an INTEGER_CST whose value is formed from NUMBER (low part)
1690 and HIGH (high part). The type is determined by BIT_P: if it is zero,
1691 the type is taken from sizetype; if it is one, from bitsizetype. */
1693 tree
1694 size_int_wide (number, high, bit_p)
1695 unsigned HOST_WIDE_INT number, high;
1696 int bit_p;
1698 tree t;
/* With obstack allocation (no garbage collector) we can cache the
   commonly used small size nodes as permanent trees.  */
1700 if (!ggc_p)
1702 /* Type-size nodes already made for small sizes. */
1703 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1][2];
1705 if (number < 2*HOST_BITS_PER_WIDE_INT + 1 && ! high
1706 && size_table[number][bit_p] != 0)
1707 return size_table[number][bit_p];
1708 if (number < 2*HOST_BITS_PER_WIDE_INT + 1 && ! high)
/* First request for this value: build it on the permanent obstack
   and remember it in the cache.  */
1710 push_obstacks_nochange ();
1711 /* Make this a permanent node. */
1712 end_temporary_allocation ();
1713 t = build_int_2 (number, 0);
1714 TREE_TYPE (t) = bit_p ? bitsizetype : sizetype;
1715 size_table[number][bit_p] = t;
1716 pop_obstacks ();
1717 return t;
/* Large value (or GC in use): build an uncached node and let
   force_fit_type record any truncation as overflow.  */
1721 t = build_int_2 (number, high);
1722 TREE_TYPE (t) = bit_p ? bitsizetype : sizetype;
1723 TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
1724 return t;
1727 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1728 CODE is a tree code. Data type is taken from `sizetype',
1729 If the operands are constant, so is the result. */
1731 tree
1732 size_binop (code, arg0, arg1)
1733 enum tree_code code;
1734 tree arg0, arg1;
1736 /* Handle the special case of two integer constants faster. */
1737 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1739 /* And some specific cases even faster than that. */
1740 if (code == PLUS_EXPR && integer_zerop (arg0))
1741 return arg1;
1742 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1743 && integer_zerop (arg1))
1744 return arg0;
1745 else if (code == MULT_EXPR && integer_onep (arg0))
1746 return arg1;
1748 /* Handle general case of two integer constants. */
1749 return int_const_binop (code, arg0, arg1, 0, 1);
1752 if (arg0 == error_mark_node || arg1 == error_mark_node)
1753 return error_mark_node;
1755 return fold (build (code, sizetype, arg0, arg1));
1758 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1759 CODE is a tree code. Data type is taken from `ssizetype',
1760 If the operands are constant, so is the result. */
1762 tree
1763 ssize_binop (code, arg0, arg1)
1764 enum tree_code code;
1765 tree arg0, arg1;
1767 /* Handle the special case of two integer constants faster. */
1768 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1770 /* And some specific cases even faster than that. */
1771 if (code == PLUS_EXPR && integer_zerop (arg0))
1772 return arg1;
1773 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1774 && integer_zerop (arg1))
1775 return arg0;
1776 else if (code == MULT_EXPR && integer_onep (arg0))
1777 return arg1;
1779 /* Handle general case of two integer constants. We convert
1780 arg0 to ssizetype because int_const_binop uses its type for the
1781 return value. */
1782 arg0 = convert (ssizetype, arg0);
1783 return int_const_binop (code, arg0, arg1, 0, 0);
1786 if (arg0 == error_mark_node || arg1 == error_mark_node)
1787 return error_mark_node;
1789 return fold (build (code, ssizetype, arg0, arg1));
/* Communication block between fold_convert and fold_convert_1, which
   is run under do_float_handler so floating-point traps raised while
   converting a REAL_CST can be caught.  */
1792 struct fc_args
1794 /* Input */
1795 tree arg1, type; /* Constant to convert, and the target type.  */
1796 /* Output */
1797 tree t; /* Resulting REAL_CST of TYPE.  */
1800 static void
1801 fold_convert_1 (data)
1802 PTR data;
1804 struct fc_args * args = (struct fc_args *) data;
1806 args->t = build_real (args->type,
1807 real_value_truncate (TYPE_MODE (args->type),
1808 TREE_REAL_CST (args->arg1)));
1811 /* Given T, a tree representing type conversion of ARG1, a constant,
1812 return a constant tree representing the result of conversion. */
1814 static tree
1815 fold_convert (t, arg1)
1816 register tree t;
1817 register tree arg1;
1819 register tree type = TREE_TYPE (t);
1820 int overflow = 0;
1822 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1824 if (TREE_CODE (arg1) == INTEGER_CST)
1826 /* If we would build a constant wider than GCC supports,
1827 leave the conversion unfolded. */
1828 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1829 return t;
1831 /* Given an integer constant, make new constant with new type,
1832 appropriately sign-extended or truncated. */
1833 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1834 TREE_INT_CST_HIGH (arg1));
1835 TREE_TYPE (t) = type;
1836 /* Indicate an overflow if (1) ARG1 already overflowed,
1837 or (2) force_fit_type indicates an overflow.
1838 Tell force_fit_type that an overflow has already occurred
1839 if ARG1 is a too-large unsigned value and T is signed.
1840 But don't indicate an overflow if converting a pointer. */
1841 TREE_OVERFLOW (t)
1842 = ((force_fit_type (t,
1843 (TREE_INT_CST_HIGH (arg1) < 0
1844 && (TREE_UNSIGNED (type)
1845 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1846 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1847 || TREE_OVERFLOW (arg1));
1848 TREE_CONSTANT_OVERFLOW (t)
1849 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1851 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1852 else if (TREE_CODE (arg1) == REAL_CST)
1854 /* Don't initialize these, use assignments.
1855 Initialized local aggregates don't work on old compilers. */
1856 REAL_VALUE_TYPE x;
1857 REAL_VALUE_TYPE l;
1858 REAL_VALUE_TYPE u;
1859 tree type1 = TREE_TYPE (arg1);
1860 int no_upper_bound;
1862 x = TREE_REAL_CST (arg1);
1863 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1865 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1866 if (!no_upper_bound)
1867 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1869 /* See if X will be in range after truncation towards 0.
1870 To compensate for truncation, move the bounds away from 0,
1871 but reject if X exactly equals the adjusted bounds. */
1872 #ifdef REAL_ARITHMETIC
1873 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1874 if (!no_upper_bound)
1875 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1876 #else
1877 l--;
1878 if (!no_upper_bound)
1879 u++;
1880 #endif
1881 /* If X is a NaN, use zero instead and show we have an overflow.
1882 Otherwise, range check. */
1883 if (REAL_VALUE_ISNAN (x))
1884 overflow = 1, x = dconst0;
/* NOTE(review): when NO_UPPER_BOUND is set, the `!no_upper_bound' term
   makes this conjunction always false, so OVERFLOW is set even for
   in-range X; the test likely intends
   (no_upper_bound || REAL_VALUES_LESS (x, u)) -- confirm.  */
1885 else if (! (REAL_VALUES_LESS (l, x)
1886 && !no_upper_bound
1887 && REAL_VALUES_LESS (x, u)))
1888 overflow = 1;
1890 #ifndef REAL_ARITHMETIC
/* Host arithmetic: split |X| into two host words by dividing by
   half-word powers, then negate the pair if X was negative.  */
1892 HOST_WIDE_INT low, high;
1893 HOST_WIDE_INT half_word
1894 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1896 if (x < 0)
1897 x = -x;
1899 high = (HOST_WIDE_INT) (x / half_word / half_word);
1900 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
1901 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1903 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
1904 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1906 else
1907 low = (HOST_WIDE_INT) x;
1908 if (TREE_REAL_CST (arg1) < 0)
1909 neg_double (low, high, &low, &high);
1910 t = build_int_2 (low, high);
1912 #else
1914 HOST_WIDE_INT low, high;
1915 REAL_VALUE_TO_INT (&low, &high, x);
1916 t = build_int_2 (low, high);
1918 #endif
1919 TREE_TYPE (t) = type;
1920 TREE_OVERFLOW (t)
1921 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1922 TREE_CONSTANT_OVERFLOW (t)
1923 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1925 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1926 TREE_TYPE (t) = type;
1928 else if (TREE_CODE (type) == REAL_TYPE)
1930 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1931 if (TREE_CODE (arg1) == INTEGER_CST)
1932 return build_real_from_int_cst (type, arg1);
1933 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1934 if (TREE_CODE (arg1) == REAL_CST)
1936 struct fc_args args;
/* A NaN survives any float-to-float conversion: just retype it.  */
1938 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1940 t = arg1;
1941 TREE_TYPE (arg1) = type;
1942 return t;
1945 /* Setup input for fold_convert_1() */
1946 args.arg1 = arg1;
1947 args.type = type;
1949 if (do_float_handler (fold_convert_1, (PTR) &args))
1951 /* Receive output from fold_convert_1() */
1952 t = args.t;
1954 else
1956 /* We got an exception from fold_convert_1() */
1957 overflow = 1;
1958 t = copy_node (arg1);
1961 TREE_OVERFLOW (t)
1962 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1963 TREE_CONSTANT_OVERFLOW (t)
1964 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1965 return t;
1968 TREE_CONSTANT (t) = 1;
1969 return t;
1972 /* Return an expr equal to X but certainly not valid as an lvalue. */
1974 tree
1975 non_lvalue (x)
1976 tree x;
1978 tree result;
1980 /* These things are certainly not lvalues. */
1981 if (TREE_CODE (x) == NON_LVALUE_EXPR
1982 || TREE_CODE (x) == INTEGER_CST
1983 || TREE_CODE (x) == REAL_CST
1984 || TREE_CODE (x) == STRING_CST
1985 || TREE_CODE (x) == ADDR_EXPR)
1986 return x;
1988 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1989 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1990 return result;
1993 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1994 Zero means allow extended lvalues. */
1996 int pedantic_lvalues;
1998 /* When pedantic, return an expr equal to X but certainly not valid as a
1999 pedantic lvalue. Otherwise, return X. */
2001 tree
2002 pedantic_non_lvalue (x)
2003 tree x;
2005 if (pedantic_lvalues)
2006 return non_lvalue (x);
2007 else
2008 return x;
2011 /* Given a tree comparison code, return the code that is the logical inverse
2012 of the given code. It is not safe to do this for floating-point
2013 comparisons, except for NE_EXPR and EQ_EXPR. */
2015 static enum tree_code
2016 invert_tree_comparison (code)
2017 enum tree_code code;
2019 switch (code)
2021 case EQ_EXPR:
2022 return NE_EXPR;
2023 case NE_EXPR:
2024 return EQ_EXPR;
2025 case GT_EXPR:
2026 return LE_EXPR;
2027 case GE_EXPR:
2028 return LT_EXPR;
2029 case LT_EXPR:
2030 return GE_EXPR;
2031 case LE_EXPR:
2032 return GT_EXPR;
2033 default:
2034 abort ();
2038 /* Similar, but return the comparison that results if the operands are
2039 swapped. This is safe for floating-point. */
2041 static enum tree_code
2042 swap_tree_comparison (code)
2043 enum tree_code code;
2045 switch (code)
2047 case EQ_EXPR:
2048 case NE_EXPR:
2049 return code;
2050 case GT_EXPR:
2051 return LT_EXPR;
2052 case GE_EXPR:
2053 return LE_EXPR;
2054 case LT_EXPR:
2055 return GT_EXPR;
2056 case LE_EXPR:
2057 return GE_EXPR;
2058 default:
2059 abort ();
2063 /* Return nonzero if CODE is a tree code that represents a truth value. */
2065 static int
2066 truth_value_p (code)
2067 enum tree_code code;
2069 return (TREE_CODE_CLASS (code) == '<'
2070 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2071 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2072 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2075 /* Return nonzero if two operands are necessarily equal.
2076 If ONLY_CONST is non-zero, only return non-zero for constants.
2077 This function tests whether the operands are indistinguishable;
2078 it does not test whether they are equal using C's == operation.
2079 The distinction is important for IEEE floating point, because
2080 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2081 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
2084 operand_equal_p (arg0, arg1, only_const)
2085 tree arg0, arg1;
2086 int only_const;
2088 /* If both types don't have the same signedness, then we can't consider
2089 them equal. We must check this before the STRIP_NOPS calls
2090 because they may change the signedness of the arguments. */
2091 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2092 return 0;
2094 STRIP_NOPS (arg0);
2095 STRIP_NOPS (arg1);
2097 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2098 /* This is needed for conversions and for COMPONENT_REF.
2099 Might as well play it safe and always test this. */
2100 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2101 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2102 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2103 return 0;
2105 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2106 We don't care about side effects in that case because the SAVE_EXPR
2107 takes care of that for us. In all other cases, two expressions are
2108 equal if they have no side effects. If we have two identical
2109 expressions with side effects that should be treated the same due
2110 to the only side effects being identical SAVE_EXPR's, that will
2111 be detected in the recursive calls below. */
2112 if (arg0 == arg1 && ! only_const
2113 && (TREE_CODE (arg0) == SAVE_EXPR
2114 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2115 return 1;
2117 /* Next handle constant cases, those for which we can return 1 even
2118 if ONLY_CONST is set. */
2119 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2120 switch (TREE_CODE (arg0))
2122 case INTEGER_CST:
/* Constants that overflowed are never considered equal.  */
2123 return (! TREE_CONSTANT_OVERFLOW (arg0)
2124 && ! TREE_CONSTANT_OVERFLOW (arg1)
2125 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
2126 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1));
2128 case REAL_CST:
/* REAL_VALUES_IDENTICAL distinguishes -0.0 from 0.0 and treats
   identical NaN bit patterns as equal, unlike C's `=='.  */
2129 return (! TREE_CONSTANT_OVERFLOW (arg0)
2130 && ! TREE_CONSTANT_OVERFLOW (arg1)
2131 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2132 TREE_REAL_CST (arg1)));
2134 case COMPLEX_CST:
2135 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2136 only_const)
2137 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2138 only_const));
2140 case STRING_CST:
2141 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2142 && ! strncmp (TREE_STRING_POINTER (arg0),
2143 TREE_STRING_POINTER (arg1),
2144 TREE_STRING_LENGTH (arg0)));
2146 case ADDR_EXPR:
/* NOTE(review): the final-argument line of this call appears to have
   been lost in this copy of the file -- confirm against upstream.  */
2147 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2149 default:
2150 break;
/* From here on, a match requires structural recursion, which the
   ONLY_CONST caller has ruled out.  */
2153 if (only_const)
2154 return 0;
2156 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2158 case '1':
2159 /* Two conversions are equal only if signedness and modes match. */
2160 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2161 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2162 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2163 return 0;
2165 return operand_equal_p (TREE_OPERAND (arg0, 0),
2166 TREE_OPERAND (arg1, 0), 0);
2168 case '<':
2169 case '2':
2170 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2171 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2173 return 1;
2175 /* For commutative ops, allow the other order. */
2176 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2177 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2178 || TREE_CODE (arg0) == BIT_IOR_EXPR
2179 || TREE_CODE (arg0) == BIT_XOR_EXPR
2180 || TREE_CODE (arg0) == BIT_AND_EXPR
2181 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2182 && operand_equal_p (TREE_OPERAND (arg0, 0),
2183 TREE_OPERAND (arg1, 1), 0)
2184 && operand_equal_p (TREE_OPERAND (arg0, 1),
2185 TREE_OPERAND (arg1, 0), 0));
2187 case 'r':
2188 /* If either of the pointer (or reference) expressions we are dereferencing
2189 contain a side effect, these cannot be equal. */
2190 if (TREE_SIDE_EFFECTS (arg0)
2191 || TREE_SIDE_EFFECTS (arg1))
2192 return 0;
2194 switch (TREE_CODE (arg0))
2196 case INDIRECT_REF:
2197 return operand_equal_p (TREE_OPERAND (arg0, 0),
2198 TREE_OPERAND (arg1, 0), 0);
2200 case COMPONENT_REF:
2201 case ARRAY_REF:
2202 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2203 TREE_OPERAND (arg1, 0), 0)
2204 && operand_equal_p (TREE_OPERAND (arg0, 1),
2205 TREE_OPERAND (arg1, 1), 0));
2207 case BIT_FIELD_REF:
2208 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2209 TREE_OPERAND (arg1, 0), 0)
2210 && operand_equal_p (TREE_OPERAND (arg0, 1),
2211 TREE_OPERAND (arg1, 1), 0)
2212 && operand_equal_p (TREE_OPERAND (arg0, 2),
2213 TREE_OPERAND (arg1, 2), 0));
2214 default:
2215 return 0;
2218 case 'e':
/* Expressions already expanded to RTL compare by their RTL.  */
2219 if (TREE_CODE (arg0) == RTL_EXPR)
2220 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2221 return 0;
2223 default:
2224 return 0;
2228 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2229 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2231 When in doubt, return 0. */
2233 static int
2234 operand_equal_for_comparison_p (arg0, arg1, other)
2235 tree arg0, arg1;
2236 tree other;
2238 int unsignedp1, unsignedpo;
2239 tree primarg0, primarg1, primother;
2240 unsigned correct_width;
/* Exact structural equality always suffices.  */
2242 if (operand_equal_p (arg0, arg1, 0))
2243 return 1;
/* The shortening transformations below only apply to integers.  */
2245 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2246 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2247 return 0;
2249 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2250 and see if the inner values are the same. This removes any
2251 signedness comparison, which doesn't matter here. */
2252 primarg0 = arg0, primarg1 = arg1;
2253 STRIP_NOPS (primarg0); STRIP_NOPS (primarg1);
2254 if (operand_equal_p (primarg0, primarg1, 0))
2255 return 1;
2257 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2258 actual comparison operand, ARG0.
2260 First throw away any conversions to wider types
2261 already present in the operands. */
2263 primarg1 = get_narrower (arg1, &unsignedp1);
2264 primother = get_narrower (other, &unsignedpo);
2266 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2267 if (unsignedp1 == unsignedpo
2268 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2269 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2271 tree type = TREE_TYPE (arg0);
2273 /* Make sure shorter operand is extended the right way
2274 to match the longer operand. */
2275 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
2276 TREE_TYPE (primarg1)),
2277 primarg1);
2279 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2280 return 1;
/* Could not show that ARG0 derives from ARG1; be conservative.  */
2283 return 0;
2286 /* See if ARG is an expression that is either a comparison or is performing
2287 arithmetic on comparisons. The comparisons must only be comparing
2288 two different values, which will be stored in *CVAL1 and *CVAL2; if
2289 they are non-zero it means that some operands have already been found.
2290 No variables may be used anywhere else in the expression except in the
2291 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2292 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2294 If this is true, return 1. Otherwise, return zero. */
2296 static int
2297 twoval_comparison_p (arg, cval1, cval2, save_p)
2298 tree arg;
2299 tree *cval1, *cval2;
2300 int *save_p;
2302 enum tree_code code = TREE_CODE (arg);
2303 char class = TREE_CODE_CLASS (code);
2305 /* We can handle some of the 'e' cases here. */
2306 if (class == 'e' && code == TRUTH_NOT_EXPR)
2307 class = '1';
2308 else if (class == 'e'
2309 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2310 || code == COMPOUND_EXPR))
2311 class = '2';
2313 /* ??? Disable this since the SAVE_EXPR might already be in use outside
2314 the expression. There may be no way to make this work, but it needs
2315 to be looked at again for 2.6. */
2316 #if 0
2317 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
2319 /* If we've already found a CVAL1 or CVAL2, this expression is
2320 too complex to handle. */
2321 if (*cval1 || *cval2)
2322 return 0;
2324 class = '1';
2325 *save_p = 1;
2327 #endif
/* Recurse by operand class: unary and binary nodes require every
   operand to qualify, constants always qualify, and comparisons record
   their operands into *CVAL1 / *CVAL2 below.  Anything else fails.  */
2329 switch (class)
2331 case '1':
2332 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2334 case '2':
2335 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2336 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2337 cval1, cval2, save_p));
2339 case 'c':
2340 return 1;
2342 case 'e':
/* COND_EXPR is the only 'e' code handled: all three operands must
   themselves qualify.  */
2343 if (code == COND_EXPR)
2344 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2345 cval1, cval2, save_p)
2346 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2347 cval1, cval2, save_p)
2348 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2349 cval1, cval2, save_p));
2350 return 0;
2352 case '<':
2353 /* First see if we can handle the first operand, then the second. For
2354 the second operand, we know *CVAL1 can't be zero. It must be that
2355 one side of the comparison is each of the values; test for the
2356 case where this isn't true by failing if the two operands
2357 are the same. */
2359 if (operand_equal_p (TREE_OPERAND (arg, 0),
2360 TREE_OPERAND (arg, 1), 0))
2361 return 0;
2363 if (*cval1 == 0)
2364 *cval1 = TREE_OPERAND (arg, 0);
2365 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2367 else if (*cval2 == 0)
2368 *cval2 = TREE_OPERAND (arg, 0);
2369 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2371 else
2372 return 0;
2374 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2376 else if (*cval2 == 0)
2377 *cval2 = TREE_OPERAND (arg, 1);
2378 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2380 else
2381 return 0;
2383 return 1;
2385 default:
2386 return 0;
2390 /* ARG is a tree that is known to contain just arithmetic operations and
2391 comparisons. Evaluate the operations in the tree substituting NEW0 for
2392 any occurrence of OLD0 as an operand of a comparison and likewise for
2393 NEW1 and OLD1. */
2395 static tree
2396 eval_subst (arg, old0, new0, old1, new1)
2397 tree arg;
2398 tree old0, new0, old1, new1;
2400 tree type = TREE_TYPE (arg);
2401 enum tree_code code = TREE_CODE (arg);
2402 char class = TREE_CODE_CLASS (code);
2404 /* We can handle some of the 'e' cases here. */
2405 if (class == 'e' && code == TRUTH_NOT_EXPR)
2406 class = '1';
2407 else if (class == 'e'
2408 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2409 class = '2';
/* Rebuild ARG bottom-up with fold; the actual substitution happens
   only at the comparison leaves in the '<' case below.  */
2411 switch (class)
2413 case '1':
2414 return fold (build1 (code, type,
2415 eval_subst (TREE_OPERAND (arg, 0),
2416 old0, new0, old1, new1)));
2418 case '2':
2419 return fold (build (code, type,
2420 eval_subst (TREE_OPERAND (arg, 0),
2421 old0, new0, old1, new1),
2422 eval_subst (TREE_OPERAND (arg, 1),
2423 old0, new0, old1, new1)));
2425 case 'e':
2426 switch (code)
2428 case SAVE_EXPR:
2429 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2431 case COMPOUND_EXPR:
/* Only the second operand of a COMPOUND_EXPR carries the value.  */
2432 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2434 case COND_EXPR:
2435 return fold (build (code, type,
2436 eval_subst (TREE_OPERAND (arg, 0),
2437 old0, new0, old1, new1),
2438 eval_subst (TREE_OPERAND (arg, 1),
2439 old0, new0, old1, new1),
2440 eval_subst (TREE_OPERAND (arg, 2),
2441 old0, new0, old1, new1)));
2442 default:
2443 break;
2445 /* fall through - ??? */
2447 case '<':
2449 tree arg0 = TREE_OPERAND (arg, 0);
2450 tree arg1 = TREE_OPERAND (arg, 1);
2452 /* We need to check both for exact equality and tree equality. The
2453 former will be true if the operand has a side-effect. In that
2454 case, we know the operand occurred exactly once. */
2456 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2457 arg0 = new0;
2458 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2459 arg0 = new1;
2461 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2462 arg1 = new0;
2463 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2464 arg1 = new1;
2466 return fold (build (code, type, arg0, arg1));
2469 default:
2470 return arg;
2474 /* Return a tree for the case when the result of an expression is RESULT
2475 converted to TYPE and OMITTED was previously an operand of the expression
2476 but is now not needed (e.g., we folded OMITTED * 0).
2478 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2479 the conversion of RESULT to TYPE. */
2481 static tree
2482 omit_one_operand (type, result, omitted)
2483 tree type, result, omitted;
2485 tree t = convert (type, result);
2487 if (TREE_SIDE_EFFECTS (omitted))
2488 return build (COMPOUND_EXPR, type, omitted, t);
2490 return non_lvalue (t);
2493 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2495 static tree
2496 pedantic_omit_one_operand (type, result, omitted)
2497 tree type, result, omitted;
2499 tree t = convert (type, result);
2501 if (TREE_SIDE_EFFECTS (omitted))
2502 return build (COMPOUND_EXPR, type, omitted, t);
2504 return pedantic_non_lvalue (t);
2509 /* Return a simplified tree node for the truth-negation of ARG. This
2510 never alters ARG itself. We assume that ARG is an operation that
2511 returns a truth value (0 or 1). */
2513 tree
2514 invert_truthvalue (arg)
2515 tree arg;
2517 tree type = TREE_TYPE (arg);
2518 enum tree_code code = TREE_CODE (arg);
2520 if (code == ERROR_MARK)
2521 return arg;
2523 /* If this is a comparison, we can simply invert it, except for
2524 floating-point non-equality comparisons, in which case we just
2525 enclose a TRUTH_NOT_EXPR around what we have. */
2527 if (TREE_CODE_CLASS (code) == '<')
2529 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2530 && !flag_fast_math && code != NE_EXPR && code != EQ_EXPR)
2531 return build1 (TRUTH_NOT_EXPR, type, arg);
2532 else
2533 return build (invert_tree_comparison (code), type,
2534 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Logical operators are inverted with De Morgan's laws; conversions
   and wrapper nodes recurse into the operand that carries the truth
   value.  */
2537 switch (code)
2539 case INTEGER_CST:
2540 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2541 && TREE_INT_CST_HIGH (arg) == 0, 0));
2543 case TRUTH_AND_EXPR:
2544 return build (TRUTH_OR_EXPR, type,
2545 invert_truthvalue (TREE_OPERAND (arg, 0)),
2546 invert_truthvalue (TREE_OPERAND (arg, 1)));
2548 case TRUTH_OR_EXPR:
2549 return build (TRUTH_AND_EXPR, type,
2550 invert_truthvalue (TREE_OPERAND (arg, 0)),
2551 invert_truthvalue (TREE_OPERAND (arg, 1)));
2553 case TRUTH_XOR_EXPR:
2554 /* Here we can invert either operand. We invert the first operand
2555 unless the second operand is a TRUTH_NOT_EXPR in which case our
2556 result is the XOR of the first operand with the inside of the
2557 negation of the second operand. */
2559 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2560 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2561 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2562 else
2563 return build (TRUTH_XOR_EXPR, type,
2564 invert_truthvalue (TREE_OPERAND (arg, 0)),
2565 TREE_OPERAND (arg, 1));
2567 case TRUTH_ANDIF_EXPR:
2568 return build (TRUTH_ORIF_EXPR, type,
2569 invert_truthvalue (TREE_OPERAND (arg, 0)),
2570 invert_truthvalue (TREE_OPERAND (arg, 1)));
2572 case TRUTH_ORIF_EXPR:
2573 return build (TRUTH_ANDIF_EXPR, type,
2574 invert_truthvalue (TREE_OPERAND (arg, 0)),
2575 invert_truthvalue (TREE_OPERAND (arg, 1)));
2577 case TRUTH_NOT_EXPR:
2578 return TREE_OPERAND (arg, 0);
2580 case COND_EXPR:
2581 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2582 invert_truthvalue (TREE_OPERAND (arg, 1)),
2583 invert_truthvalue (TREE_OPERAND (arg, 2)));
2585 case COMPOUND_EXPR:
2586 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2587 invert_truthvalue (TREE_OPERAND (arg, 1)));
2589 case WITH_RECORD_EXPR:
2590 return build (WITH_RECORD_EXPR, type,
2591 invert_truthvalue (TREE_OPERAND (arg, 0)),
2592 TREE_OPERAND (arg, 1));
2594 case NON_LVALUE_EXPR:
2595 return invert_truthvalue (TREE_OPERAND (arg, 0));
2597 case NOP_EXPR:
2598 case CONVERT_EXPR:
2599 case FLOAT_EXPR:
2600 return build1 (TREE_CODE (arg), type,
2601 invert_truthvalue (TREE_OPERAND (arg, 0)));
2603 case BIT_AND_EXPR:
/* (X & 1) is inverted as (X & 1) == 0; other masks are left for the
   generic fallback below.  */
2604 if (!integer_onep (TREE_OPERAND (arg, 1)))
2605 break;
2606 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2608 case SAVE_EXPR:
2609 return build1 (TRUTH_NOT_EXPR, type, arg);
2611 case CLEANUP_POINT_EXPR:
2612 return build1 (CLEANUP_POINT_EXPR, type,
2613 invert_truthvalue (TREE_OPERAND (arg, 0)));
2615 default:
2616 break;
/* Nothing matched: only a genuine boolean may be negated with a bare
   TRUTH_NOT_EXPR; anything else here indicates a caller bug.  */
2618 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2619 abort ();
2620 return build1 (TRUTH_NOT_EXPR, type, arg);
2623 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2624 operands are another bit-wise operation with a common input. If so,
2625 distribute the bit operations to save an operation and possibly two if
2626 constants are involved. For example, convert
2627 (A | B) & (A | C) into A | (B & C)
2628 Further simplification will occur if B and C are constants.
2630 If this optimization cannot be done, 0 will be returned. */
2632 static tree
2633 distribute_bit_expr (code, type, arg0, arg1)
2634 enum tree_code code;
2635 tree type;
2636 tree arg0, arg1;
2638 tree common;
2639 tree left, right;
2641 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2642 || TREE_CODE (arg0) == code
2643 || (TREE_CODE (arg0) != BIT_AND_EXPR
2644 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2645 return 0;
2647 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2649 common = TREE_OPERAND (arg0, 0);
2650 left = TREE_OPERAND (arg0, 1);
2651 right = TREE_OPERAND (arg1, 1);
2653 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2655 common = TREE_OPERAND (arg0, 0);
2656 left = TREE_OPERAND (arg0, 1);
2657 right = TREE_OPERAND (arg1, 0);
2659 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2661 common = TREE_OPERAND (arg0, 1);
2662 left = TREE_OPERAND (arg0, 0);
2663 right = TREE_OPERAND (arg1, 1);
2665 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2667 common = TREE_OPERAND (arg0, 1);
2668 left = TREE_OPERAND (arg0, 0);
2669 right = TREE_OPERAND (arg1, 0);
2671 else
2672 return 0;
2674 return fold (build (TREE_CODE (arg0), type, common,
2675 fold (build (code, type, left, right))));
2678 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2679 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2681 static tree
2682 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2683 tree inner;
2684 tree type;
2685 int bitsize, bitpos;
2686 int unsignedp;
2688 tree result = build (BIT_FIELD_REF, type, inner,
2689 size_int (bitsize), bitsize_int (bitpos, 0L));
2691 TREE_UNSIGNED (result) = unsignedp;
2693 return result;
2696 /* Optimize a bit-field compare.
2698 There are two cases: First is a compare against a constant and the
2699 second is a comparison of two items where the fields are at the same
2700 bit position relative to the start of a chunk (byte, halfword, word)
2701 large enough to contain it. In these cases we can avoid the shift
2702 implicit in bitfield extractions.
2704 For constants, we emit a compare of the shifted constant with the
2705 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2706 compared. For two fields at the same position, we do the ANDs with the
2707 similar mask and compare the result of the ANDs.
2709 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2710 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2711 are the left and right operands of the comparison, respectively.
2713 If the optimization described above can be done, we return the resulting
2714 tree. Otherwise we return zero. */
2716 static tree
2717 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2718 enum tree_code code;
2719 tree compare_type;
2720 tree lhs, rhs;
2722 int lbitpos, lbitsize, rbitpos, rbitsize;
2723 int lnbitpos, lnbitsize, rnbitpos = 0, rnbitsize = 0;
2724 tree type = TREE_TYPE (lhs);
2725 tree signed_type, unsigned_type;
2726 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2727 enum machine_mode lmode, rmode, lnmode, rnmode = VOIDmode;
2728 int lunsignedp, runsignedp;
2729 int lvolatilep = 0, rvolatilep = 0;
2730 int alignment;
2731 tree linner, rinner = NULL_TREE;
2732 tree mask;
2733 tree offset;
2735 /* Get all the information about the extractions being done. If the bit size
2736 is the same as the size of the underlying object, we aren't doing an
2737 extraction at all and so can do nothing. We also don't want to
2738 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2739 then will no longer be able to replace it. */
2740 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2741 &lunsignedp, &lvolatilep, &alignment);
2742 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2743 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2744 return 0;
2746 if (!const_p)
2748 /* If this is not a constant, we can only do something if bit positions,
2749 sizes, and signedness are the same. */
2750 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2751 &runsignedp, &rvolatilep, &alignment);
2753 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2754 || lunsignedp != runsignedp || offset != 0
2755 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2756 return 0;
2759 /* See if we can find a mode to refer to this field. We should be able to,
2760 but fail if we can't. */
2761 lnmode = get_best_mode (lbitsize, lbitpos,
2762 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2763 lvolatilep);
2764 if (lnmode == VOIDmode)
2765 return 0;
2767 /* Set signed and unsigned types of the precision of this mode for the
2768 shifts below. */
2769 signed_type = type_for_mode (lnmode, 0);
2770 unsigned_type = type_for_mode (lnmode, 1);
2772 if (! const_p)
2774 rnmode = get_best_mode (rbitsize, rbitpos,
2775 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2776 rvolatilep);
2777 if (rnmode == VOIDmode)
2778 return 0;
2781 /* Compute the bit position and size for the new reference and our offset
2782 within it. If the new reference is the same size as the original, we
2783 won't optimize anything, so return zero. */
2784 lnbitsize = GET_MODE_BITSIZE (lnmode);
2785 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2786 lbitpos -= lnbitpos;
2787 if (lnbitsize == lbitsize)
2788 return 0;
2790 if (! const_p)
2792 rnbitsize = GET_MODE_BITSIZE (rnmode);
2793 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2794 rbitpos -= rnbitpos;
2795 if (rnbitsize == rbitsize)
2796 return 0;
/* Account for big-endian bit numbering within the containing unit.  */
2799 if (BYTES_BIG_ENDIAN)
2800 lbitpos = lnbitsize - lbitsize - lbitpos;
2802 /* Make the mask to be used against the extracted field. */
2803 mask = build_int_2 (~0, ~0);
2804 TREE_TYPE (mask) = unsigned_type;
2805 force_fit_type (mask, 0);
2806 mask = convert (unsigned_type, mask);
2807 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2808 mask = const_binop (RSHIFT_EXPR, mask,
2809 size_int (lnbitsize - lbitsize - lbitpos), 0);
2811 if (! const_p)
2812 /* If not comparing with constant, just rework the comparison
2813 and return. */
2814 return build (code, compare_type,
2815 build (BIT_AND_EXPR, unsigned_type,
2816 make_bit_field_ref (linner, unsigned_type,
2817 lnbitsize, lnbitpos, 1),
2818 mask),
2819 build (BIT_AND_EXPR, unsigned_type,
2820 make_bit_field_ref (rinner, unsigned_type,
2821 rnbitsize, rnbitpos, 1),
2822 mask));
2824 /* Otherwise, we are handling the constant case. See if the constant is too
2825 big for the field. Warn and return a tree for 0 (false) if so. We do
2826 this not only for its own sake, but to avoid having to test for this
2827 error case below. If we didn't, we might generate wrong code.
2829 For unsigned fields, the constant shifted right by the field length should
2830 be all zero. For signed fields, the high-order bits should agree with
2831 the sign bit. */
2833 if (lunsignedp)
2835 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2836 convert (unsigned_type, rhs),
2837 size_int (lbitsize), 0)))
2839 warning ("comparison is always %d due to width of bitfield",
2840 code == NE_EXPR);
2841 return convert (compare_type,
2842 (code == NE_EXPR
2843 ? integer_one_node : integer_zero_node));
2846 else
2848 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2849 size_int (lbitsize - 1), 0);
2850 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2852 warning ("comparison is always %d due to width of bitfield",
2853 code == NE_EXPR);
2854 return convert (compare_type,
2855 (code == NE_EXPR
2856 ? integer_one_node : integer_zero_node));
2860 /* Single-bit compares should always be against zero. */
2861 if (lbitsize == 1 && ! integer_zerop (rhs))
2863 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2864 rhs = convert (type, integer_zero_node);
2867 /* Make a new bitfield reference, shift the constant over the
2868 appropriate number of bits and mask it with the computed mask
2869 (in case this was a signed field). If we changed it, make a new one. */
2870 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
2871 if (lvolatilep)
2873 TREE_SIDE_EFFECTS (lhs) = 1;
2874 TREE_THIS_VOLATILE (lhs) = 1;
2877 rhs = fold (const_binop (BIT_AND_EXPR,
2878 const_binop (LSHIFT_EXPR,
2879 convert (unsigned_type, rhs),
2880 size_int (lbitpos), 0),
2881 mask, 0));
2883 return build (code, compare_type,
2884 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2885 rhs);
2888 /* Subroutine for fold_truthop: decode a field reference.
2890 If EXP is a comparison reference, we return the innermost reference.
2892 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2893 set to the starting bit number.
2895 If the innermost field can be completely contained in a mode-sized
2896 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2898 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2899 otherwise it is not changed.
2901 *PUNSIGNEDP is set to the signedness of the field.
2903 *PMASK is set to the mask used. This is either contained in a
2904 BIT_AND_EXPR or derived from the width of the field.
2906 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2908 Return 0 if this is not a component reference or is one that we can't
2909 do anything with. */
2911 static tree
2912 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2913 pvolatilep, pmask, pand_mask)
2914 tree exp;
2915 int *pbitsize, *pbitpos;
2916 enum machine_mode *pmode;
2917 int *punsignedp, *pvolatilep;
2918 tree *pmask;
2919 tree *pand_mask;
2921 tree and_mask = 0;
2922 tree mask, inner, offset;
2923 tree unsigned_type;
2924 int precision;
2925 int alignment;
2927 /* All the optimizations using this function assume integer fields.
2928 There are problems with FP fields since the type_for_size call
2929 below can fail for, e.g., XFmode. */
2930 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2931 return 0;
2933 STRIP_NOPS (exp);
/* Strip an outer BIT_AND_EXPR with a constant mask; the mask is merged
   into *PMASK below.  */
2935 if (TREE_CODE (exp) == BIT_AND_EXPR)
2937 and_mask = TREE_OPERAND (exp, 1);
2938 exp = TREE_OPERAND (exp, 0);
2939 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2940 if (TREE_CODE (and_mask) != INTEGER_CST)
2941 return 0;
2945 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2946 punsignedp, pvolatilep, &alignment);
/* Fail when EXP is not actually a reference (unless a mask was
   stripped), has a variable offset, or wraps a PLACEHOLDER_EXPR.  */
2947 if ((inner == exp && and_mask == 0)
2948 || *pbitsize < 0 || offset != 0
2949 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2950 return 0;
2952 /* Compute the mask to access the bitfield. */
2953 unsigned_type = type_for_size (*pbitsize, 1);
2954 precision = TYPE_PRECISION (unsigned_type);
2956 mask = build_int_2 (~0, ~0);
2957 TREE_TYPE (mask) = unsigned_type;
2958 force_fit_type (mask, 0);
2959 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2960 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2962 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2963 if (and_mask != 0)
2964 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2965 convert (unsigned_type, and_mask), mask));
2967 *pmask = mask;
2968 *pand_mask = and_mask;
2969 return inner;
2972 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2973 bit positions. */
2975 static int
2976 all_ones_mask_p (mask, size)
2977 tree mask;
2978 int size;
2980 tree type = TREE_TYPE (mask);
2981 int precision = TYPE_PRECISION (type);
2982 tree tmask;
2984 tmask = build_int_2 (~0, ~0);
2985 TREE_TYPE (tmask) = signed_type (type);
2986 force_fit_type (tmask, 0);
2987 return
2988 tree_int_cst_equal (mask,
2989 const_binop (RSHIFT_EXPR,
2990 const_binop (LSHIFT_EXPR, tmask,
2991 size_int (precision - size),
2993 size_int (precision - size), 0));
2996 /* Subroutine for fold_truthop: determine if an operand is simple enough
2997 to be evaluated unconditionally. */
2999 static int
3000 simple_operand_p (exp)
3001 tree exp;
3003 /* Strip any conversions that don't change the machine mode. */
3004 while ((TREE_CODE (exp) == NOP_EXPR
3005 || TREE_CODE (exp) == CONVERT_EXPR)
3006 && (TYPE_MODE (TREE_TYPE (exp))
3007 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3008 exp = TREE_OPERAND (exp, 0);
3010 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3011 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
3012 && ! TREE_ADDRESSABLE (exp)
3013 && ! TREE_THIS_VOLATILE (exp)
3014 && ! DECL_NONLOCAL (exp)
3015 /* Don't regard global variables as simple. They may be
3016 allocated in ways unknown to the compiler (shared memory,
3017 #pragma weak, etc). */
3018 && ! TREE_PUBLIC (exp)
3019 && ! DECL_EXTERNAL (exp)
3020 /* Loading a static variable is unduly expensive, but global
3021 registers aren't expensive. */
3022 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3025 /* The following functions are subroutines to fold_range_test and allow it to
3026 try to change a logical combination of comparisons into a range test.
3028 For example, both
3029 X == 2 || X == 3 || X == 4 || X == 5
3031 X >= 2 && X <= 5
3032 are converted to
3033 (unsigned) (X - 2) <= 3
3035 We describe each set of comparisons as being either inside or outside
3036 a range, using a variable named like IN_P, and then describe the
3037 range with a lower and upper bound. If one of the bounds is omitted,
3038 it represents either the highest or lowest value of the type.
3040 In the comments below, we represent a range by two numbers in brackets
3041 preceded by a "+" to designate being inside that range, or a "-" to
3042 designate being outside that range, so the condition can be inverted by
3043 flipping the prefix. An omitted bound is represented by a "-". For
3044 example, "- [-, 10]" means being outside the range starting at the lowest
3045 possible value and ending at 10, in other words, being greater than 10.
3046 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3047 always false.
3049 We set up things so that the missing bounds are handled in a consistent
3050 manner so neither a missing bound nor "true" and "false" need to be
3051 handled using a special case. */
3053 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3054 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3055 and UPPER1_P are nonzero if the respective argument is an upper bound
3056 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3057 must be specified for a comparison. ARG1 will be converted to ARG0's
3058 type if both are specified. */
3060 static tree
3061 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
3062 enum tree_code code;
3063 tree type;
3064 tree arg0, arg1;
3065 int upper0_p, upper1_p;
3067 tree tem;
3068 int result;
3069 int sgn0, sgn1;
3071 /* If neither arg represents infinity, do the normal operation.
3072 Else, if not a comparison, return infinity. Else handle the special
3073 comparison rules. Note that most of the cases below won't occur, but
3074 are handled for consistency. */
3076 if (arg0 != 0 && arg1 != 0)
3078 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3079 arg0, convert (TREE_TYPE (arg0), arg1)));
3080 STRIP_NOPS (tem);
3081 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3084 if (TREE_CODE_CLASS (code) != '<')
3085 return 0;
3087 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3088 for neither. In real maths, we cannot assume open ended ranges are
3089 the same. But, this is computer arithmetic, where numbers are finite.
3090 We can therefore make the transformation of any unbounded range with
3091 the value Z, Z being greater than any representable number. This permits
3092 us to treat unbounded ranges as equal. */
3093 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3094 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3095 switch (code)
3097 case EQ_EXPR:
3098 result = sgn0 == sgn1;
3099 break;
3100 case NE_EXPR:
3101 result = sgn0 != sgn1;
3102 break;
3103 case LT_EXPR:
3104 result = sgn0 < sgn1;
3105 break;
3106 case LE_EXPR:
3107 result = sgn0 <= sgn1;
3108 break;
3109 case GT_EXPR:
3110 result = sgn0 > sgn1;
3111 break;
3112 case GE_EXPR:
3113 result = sgn0 >= sgn1;
3114 break;
3115 default:
3116 abort ();
/* RESULT is a host 0/1; hand back the corresponding constant node.  */
3119 return convert (type, result ? integer_one_node : integer_zero_node);
3122 /* Given EXP, a logical expression, set the range it is testing into
3123 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3124 actually being tested. *PLOW and *PHIGH will have been made the same type
3125 as the returned expression. If EXP is not a comparison, we will most
3126 likely not be returning a useful value and range. */
3128 static tree
3129 make_range (exp, pin_p, plow, phigh)
3130 tree exp;
3131 int *pin_p;
3132 tree *plow, *phigh;
3134 enum tree_code code;
3135 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3136 tree orig_type = NULL_TREE;
3137 int in_p, n_in_p;
3138 tree low, high, n_low, n_high;
3140 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3141 and see if we can refine the range. Some of the cases below may not
3142 happen, but it doesn't seem worth worrying about this. We "continue"
3143 the outer loop when we've changed something; otherwise we "break"
3144 the switch, which will "break" the while. */
3146 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
/* Peel one level off EXP per iteration, updating the tracked range to
   compensate, until no further refinement applies.  */
3148 while (1)
3150 code = TREE_CODE (exp);
3152 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3154 arg0 = TREE_OPERAND (exp, 0);
3155 if (TREE_CODE_CLASS (code) == '<'
3156 || TREE_CODE_CLASS (code) == '1'
3157 || TREE_CODE_CLASS (code) == '2')
3158 type = TREE_TYPE (arg0);
3159 if (TREE_CODE_CLASS (code) == '2'
3160 || TREE_CODE_CLASS (code) == '<'
3161 || (TREE_CODE_CLASS (code) == 'e'
3162 && tree_code_length[(int) code] > 1)
3163 arg1 = TREE_OPERAND (exp, 1);
3166 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3167 lose a cast by accident. */
3168 if (type != NULL_TREE && orig_type == NULL_TREE)
3169 orig_type = type;
3171 switch (code)
3173 case TRUTH_NOT_EXPR:
3174 in_p = ! in_p, exp = arg0;
3175 continue;
3177 case EQ_EXPR: case NE_EXPR:
3178 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3179 /* We can only do something if the range is testing for zero
3180 and if the second operand is an integer constant. Note that
3181 saying something is "in" the range we make is done by
3182 complementing IN_P since it will set in the initial case of
3183 being not equal to zero; "out" is leaving it alone. */
3184 if (low == 0 || high == 0
3185 || ! integer_zerop (low) || ! integer_zerop (high)
3186 || TREE_CODE (arg1) != INTEGER_CST)
3187 break;
3189 switch (code)
3191 case NE_EXPR: /* - [c, c] */
3192 low = high = arg1;
3193 break;
3194 case EQ_EXPR: /* + [c, c] */
3195 in_p = ! in_p, low = high = arg1;
3196 break;
3197 case GT_EXPR: /* - [-, c] */
3198 low = 0, high = arg1;
3199 break;
3200 case GE_EXPR: /* + [c, -] */
3201 in_p = ! in_p, low = arg1, high = 0;
3202 break;
3203 case LT_EXPR: /* - [c, -] */
3204 low = arg1, high = 0;
3205 break;
3206 case LE_EXPR: /* + [-, c] */
3207 in_p = ! in_p, low = 0, high = arg1;
3208 break;
3209 default:
3210 abort ();
3213 exp = arg0;
3215 /* If this is an unsigned comparison, we also know that EXP is
3216 greater than or equal to zero. We base the range tests we make
3217 on that fact, so we record it here so we can parse existing
3218 range tests. */
3219 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3221 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3222 1, convert (type, integer_zero_node),
3223 NULL_TREE))
3224 break;
3226 in_p = n_in_p, low = n_low, high = n_high;
3228 /* If the high bound is missing, reverse the range so it
3229 goes from zero to the low bound minus 1. */
3230 if (high == 0)
3232 in_p = ! in_p;
3233 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3234 integer_one_node, 0);
3235 low = convert (type, integer_zero_node);
3238 continue;
3240 case NEGATE_EXPR:
3241 /* (-x) IN [a,b] -> x in [-b, -a] */
3242 n_low = range_binop (MINUS_EXPR, type,
3243 convert (type, integer_zero_node), 0, high, 1);
3244 n_high = range_binop (MINUS_EXPR, type,
3245 convert (type, integer_zero_node), 0, low, 0);
3246 low = n_low, high = n_high;
3247 exp = arg0;
3248 continue;
3250 case BIT_NOT_EXPR:
3251 /* ~ X -> -X - 1 */
3252 exp = build (MINUS_EXPR, type, build1 (NEGATE_EXPR, type, arg0),
3253 convert (type, integer_one_node));
3254 continue;
3256 case PLUS_EXPR: case MINUS_EXPR:
3257 if (TREE_CODE (arg1) != INTEGER_CST)
3258 break;
3260 /* If EXP is signed, any overflow in the computation is undefined,
3261 so we don't worry about it so long as our computations on
3262 the bounds don't overflow. For unsigned, overflow is defined
3263 and this is exactly the right thing. */
3264 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3265 type, low, 0, arg1, 0);
3266 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3267 type, high, 1, arg1, 0);
3268 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3269 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3270 break;
3272 /* Check for an unsigned range which has wrapped around the maximum
3273 value thus making n_high < n_low, and normalize it. */
3274 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3276 low = range_binop (PLUS_EXPR, type, n_high, 0,
3277 integer_one_node, 0);
3278 high = range_binop (MINUS_EXPR, type, n_low, 0,
3279 integer_one_node, 0);
3280 in_p = ! in_p;
3282 else
3283 low = n_low, high = n_high;
3285 exp = arg0;
3286 continue;
3288 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3289 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3290 break;
3292 if (! INTEGRAL_TYPE_P (type)
3293 || (low != 0 && ! int_fits_type_p (low, type))
3294 || (high != 0 && ! int_fits_type_p (high, type)))
3295 break;
3297 n_low = low, n_high = high;
3299 if (n_low != 0)
3300 n_low = convert (type, n_low);
3302 if (n_high != 0)
3303 n_high = convert (type, n_high);
3305 /* If we're converting from an unsigned to a signed type,
3306 we will be doing the comparison as unsigned. The tests above
3307 have already verified that LOW and HIGH are both positive.
3309 So we have to make sure that the original unsigned value will
3310 be interpreted as positive. */
3311 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3313 tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
3314 tree high_positive;
3316 /* A range without an upper bound is, naturally, unbounded.
3317 Since convert would have cropped a very large value, use
3318 the max value for the destination type. */
3319 high_positive
3320 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3321 : TYPE_MAX_VALUE (type);
3323 high_positive = fold (build (RSHIFT_EXPR, type,
3324 convert (type, high_positive),
3325 convert (type, integer_one_node)));
3327 /* If the low bound is specified, "and" the range with the
3328 range for which the original unsigned value will be
3329 positive. */
3330 if (low != 0)
3332 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3333 1, n_low, n_high,
3334 1, convert (type, integer_zero_node),
3335 high_positive))
3336 break;
3338 in_p = (n_in_p == in_p);
3340 else
3342 /* Otherwise, "or" the range with the range of the input
3343 that will be interpreted as negative. */
3344 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3345 0, n_low, n_high,
3346 1, convert (type, integer_zero_node),
3347 high_positive))
3348 break;
3350 in_p = (in_p != n_in_p);
3354 exp = arg0;
3355 low = n_low, high = n_high;
3356 continue;
3358 default:
3359 break;
3362 break;
3365 /* If EXP is a constant, we can evaluate whether this is true or false. */
3366 if (TREE_CODE (exp) == INTEGER_CST)
3368 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3369 exp, 0, low, 0))
3370 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3371 exp, 1, high, 1)));
3372 low = high = 0;
3373 exp = 0;
3376 *pin_p = in_p, *plow = low, *phigh = high;
3377 return exp;
3380 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3381 type, TYPE, return an expression to test if EXP is in (or out of, depending
3382 on IN_P) the range. */
3384 static tree
3385 build_range_check (type, exp, in_p, low, high)
3386 tree type;
3387 tree exp;
3388 int in_p;
3389 tree low, high;
3391 tree etype = TREE_TYPE (exp);
3392 tree utype, value;
3394 if (! in_p
3395 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3396 return invert_truthvalue (value);
3398 else if (low == 0 && high == 0)
3399 return convert (type, integer_one_node);
3401 else if (low == 0)
3402 return fold (build (LE_EXPR, type, exp, high));
3404 else if (high == 0)
3405 return fold (build (GE_EXPR, type, exp, low));
3407 else if (operand_equal_p (low, high, 0))
3408 return fold (build (EQ_EXPR, type, exp, low));
3410 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3411 return build_range_check (type, exp, 1, 0, high);
3413 else if (integer_zerop (low))
3415 utype = unsigned_type (etype);
3416 return build_range_check (type, convert (utype, exp), 1, 0,
3417 convert (utype, high));
3420 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3421 && ! TREE_OVERFLOW (value))
3422 return build_range_check (type,
3423 fold (build (MINUS_EXPR, etype, exp, low)),
3424 1, convert (etype, integer_zero_node), value);
3425 else
3426 return 0;
3429 /* Given two ranges, see if we can merge them into one. Return 1 if we
3430 can, 0 if we can't. Set the output range into the specified parameters. */
3432 static int
3433 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3434 int *pin_p;
3435 tree *plow, *phigh;
3436 int in0_p, in1_p;
3437 tree low0, high0, low1, high1;
3439 int no_overlap;
3440 int subset;
3441 int temp;
3442 tree tem;
3443 int in_p;
3444 tree low, high;
3445 int lowequal = ((low0 == 0 && low1 == 0)
3446 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3447 low0, 0, low1, 0)));
3448 int highequal = ((high0 == 0 && high1 == 0)
3449 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3450 high0, 1, high1, 1)));
3452 /* Make range 0 be the range that starts first, or ends last if they
3453 start at the same value. Swap them if it isn't. */
3454 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3455 low0, 0, low1, 0))
3456 || (lowequal
3457 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3458 high1, 1, high0, 1))))
3460 temp = in0_p, in0_p = in1_p, in1_p = temp;
3461 tem = low0, low0 = low1, low1 = tem;
3462 tem = high0, high0 = high1, high1 = tem;
3465 /* Now flag two cases, whether the ranges are disjoint or whether the
3466 second range is totally subsumed in the first. Note that the tests
3467 below are simplified by the ones above. */
3468 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3469 high0, 1, low1, 0));
3470 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3471 high1, 1, high0, 1));
3473 /* We now have four cases, depending on whether we are including or
3474 excluding the two ranges. */
3475 if (in0_p && in1_p)
3477 /* If they don't overlap, the result is false. If the second range
3478 is a subset it is the result. Otherwise, the range is from the start
3479 of the second to the end of the first. */
3480 if (no_overlap)
3481 in_p = 0, low = high = 0;
3482 else if (subset)
3483 in_p = 1, low = low1, high = high1;
3484 else
3485 in_p = 1, low = low1, high = high0;
3488 else if (in0_p && ! in1_p)
3490 /* If they don't overlap, the result is the first range. If they are
3491 equal, the result is false. If the second range is a subset of the
3492 first, and the ranges begin at the same place, we go from just after
3493 the end of the first range to the end of the second. If the second
3494 range is not a subset of the first, or if it is a subset and both
3495 ranges end at the same place, the range starts at the start of the
3496 first range and ends just before the second range.
3497 Otherwise, we can't describe this as a single range. */
3498 if (no_overlap)
3499 in_p = 1, low = low0, high = high0;
3500 else if (lowequal && highequal)
3501 in_p = 0, low = high = 0;
3502 else if (subset && lowequal)
3504 in_p = 1, high = high0;
3505 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3506 integer_one_node, 0);
3508 else if (! subset || highequal)
3510 in_p = 1, low = low0;
3511 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3512 integer_one_node, 0);
3514 else
3515 return 0;
3518 else if (! in0_p && in1_p)
3520 /* If they don't overlap, the result is the second range. If the second
3521 is a subset of the first, the result is false. Otherwise,
3522 the range starts just after the first range and ends at the
3523 end of the second. */
3524 if (no_overlap)
3525 in_p = 1, low = low1, high = high1;
3526 else if (subset || highequal)
3527 in_p = 0, low = high = 0;
3528 else
3530 in_p = 1, high = high1;
3531 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3532 integer_one_node, 0);
3536 else
3538 /* The case where we are excluding both ranges. Here the complex case
3539 is if they don't overlap. In that case, the only time we have a
3540 range is if they are adjacent. If the second is a subset of the
3541 first, the result is the first. Otherwise, the range to exclude
3542 starts at the beginning of the first range and ends at the end of the
3543 second. */
3544 if (no_overlap)
3546 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3547 range_binop (PLUS_EXPR, NULL_TREE,
3548 high0, 1,
3549 integer_one_node, 1),
3550 1, low1, 0)))
3551 in_p = 0, low = low0, high = high1;
3552 else
3553 return 0;
3555 else if (subset)
3556 in_p = 0, low = low0, high = high0;
3557 else
3558 in_p = 0, low = low0, high = high1;
3561 *pin_p = in_p, *plow = low, *phigh = high;
3562 return 1;
3565 /* EXP is some logical combination of boolean tests. See if we can
3566 merge it into some range test. Return the new tree if so. */
3568 static tree
3569 fold_range_test (exp)
3570 tree exp;
3572 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3573 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3574 int in0_p, in1_p, in_p;
3575 tree low0, low1, low, high0, high1, high;
3576 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3577 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3578 tree tem;
3580 /* If this is an OR operation, invert both sides; we will invert
3581 again at the end. */
3582 if (or_op)
3583 in0_p = ! in0_p, in1_p = ! in1_p;
3585 /* If both expressions are the same, if we can merge the ranges, and we
3586 can build the range test, return it or it inverted. If one of the
3587 ranges is always true or always false, consider it to be the same
3588 expression as the other. */
3589 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3590 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3591 in1_p, low1, high1)
3592 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3593 lhs != 0 ? lhs
3594 : rhs != 0 ? rhs : integer_zero_node,
3595 in_p, low, high))))
3596 return or_op ? invert_truthvalue (tem) : tem;
3598 /* On machines where the branch cost is expensive, if this is a
3599 short-circuited branch and the underlying object on both sides
3600 is the same, make a non-short-circuit operation. */
3601 else if (BRANCH_COST >= 2
3602 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3603 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3604 && operand_equal_p (lhs, rhs, 0))
3606 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3607 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3608 which cases we can't do this. */
3609 if (simple_operand_p (lhs))
3610 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3611 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3612 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3613 TREE_OPERAND (exp, 1));
3615 else if (global_bindings_p () == 0
3616 && ! contains_placeholder_p (lhs))
3618 tree common = save_expr (lhs);
3620 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3621 or_op ? ! in0_p : in0_p,
3622 low0, high0))
3623 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3624 or_op ? ! in1_p : in1_p,
3625 low1, high1))))
3626 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3627 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3628 TREE_TYPE (exp), lhs, rhs);
3632 return 0;
3635 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3636 bit value. Arrange things so the extra bits will be set to zero if and
3637 only if C is signed-extended to its full width. If MASK is nonzero,
3638 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3640 static tree
3641 unextend (c, p, unsignedp, mask)
3642 tree c;
3643 int p;
3644 int unsignedp;
3645 tree mask;
3647 tree type = TREE_TYPE (c);
3648 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3649 tree temp;
3651 if (p == modesize || unsignedp)
3652 return c;
3654 /* We work by getting just the sign bit into the low-order bit, then
3655 into the high-order bit, then sign-extend. We then XOR that value
3656 with C. */
3657 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3658 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3660 /* We must use a signed type in order to get an arithmetic right shift.
3661 However, we must also avoid introducing accidental overflows, so that
3662 a subsequent call to integer_zerop will work. Hence we must
3663 do the type conversion here. At this point, the constant is either
3664 zero or one, and the conversion to a signed type can never overflow.
3665 We could get an overflow if this conversion is done anywhere else. */
3666 if (TREE_UNSIGNED (type))
3667 temp = convert (signed_type (type), temp);
3669 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3670 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3671 if (mask != 0)
3672 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3673 /* If necessary, convert the type back to match the type of C. */
3674 if (TREE_UNSIGNED (type))
3675 temp = convert (type, temp);
3677 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3680 /* Find ways of folding logical expressions of LHS and RHS:
3681 Try to merge two comparisons to the same innermost item.
3682 Look for range tests like "ch >= '0' && ch <= '9'".
3683 Look for combinations of simple terms on machines with expensive branches
3684 and evaluate the RHS unconditionally.
3686 For example, if we have p->a == 2 && p->b == 4 and we can make an
3687 object large enough to span both A and B, we can do this with a comparison
3688 against the object ANDed with the a mask.
3690 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3691 operations to do this with one comparison.
3693 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3694 function and the one above.
3696 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3697 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3699 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3700 two operands.
3702 We return the simplified tree or 0 if no optimization is possible. */
3704 static tree
3705 fold_truthop (code, truth_type, lhs, rhs)
3706 enum tree_code code;
3707 tree truth_type, lhs, rhs;
3709 /* If this is the "or" of two comparisons, we can do something if we
3710 the comparisons are NE_EXPR. If this is the "and", we can do something
3711 if the comparisons are EQ_EXPR. I.e.,
3712 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3714 WANTED_CODE is this operation code. For single bit fields, we can
3715 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3716 comparison for one-bit fields. */
3718 enum tree_code wanted_code;
3719 enum tree_code lcode, rcode;
3720 tree ll_arg, lr_arg, rl_arg, rr_arg;
3721 tree ll_inner, lr_inner, rl_inner, rr_inner;
3722 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3723 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3724 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3725 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3726 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3727 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3728 enum machine_mode lnmode, rnmode;
3729 tree ll_mask, lr_mask, rl_mask, rr_mask;
3730 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3731 tree l_const, r_const;
3732 tree lntype, rntype, result;
3733 int first_bit, end_bit;
3734 int volatilep;
3736 /* Start by getting the comparison codes. Fail if anything is volatile.
3737 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3738 it were surrounded with a NE_EXPR. */
3740 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3741 return 0;
3743 lcode = TREE_CODE (lhs);
3744 rcode = TREE_CODE (rhs);
3746 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3747 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3749 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3750 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3752 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3753 return 0;
3755 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3756 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3758 ll_arg = TREE_OPERAND (lhs, 0);
3759 lr_arg = TREE_OPERAND (lhs, 1);
3760 rl_arg = TREE_OPERAND (rhs, 0);
3761 rr_arg = TREE_OPERAND (rhs, 1);
3763 /* If the RHS can be evaluated unconditionally and its operands are
3764 simple, it wins to evaluate the RHS unconditionally on machines
3765 with expensive branches. In this case, this isn't a comparison
3766 that can be merged. */
3768 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
3769 are with zero (tmw). */
3771 if (BRANCH_COST >= 2
3772 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
3773 && simple_operand_p (rl_arg)
3774 && simple_operand_p (rr_arg))
3775 return build (code, truth_type, lhs, rhs);
3777 /* See if the comparisons can be merged. Then get all the parameters for
3778 each side. */
3780 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3781 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3782 return 0;
3784 volatilep = 0;
3785 ll_inner = decode_field_reference (ll_arg,
3786 &ll_bitsize, &ll_bitpos, &ll_mode,
3787 &ll_unsignedp, &volatilep, &ll_mask,
3788 &ll_and_mask);
3789 lr_inner = decode_field_reference (lr_arg,
3790 &lr_bitsize, &lr_bitpos, &lr_mode,
3791 &lr_unsignedp, &volatilep, &lr_mask,
3792 &lr_and_mask);
3793 rl_inner = decode_field_reference (rl_arg,
3794 &rl_bitsize, &rl_bitpos, &rl_mode,
3795 &rl_unsignedp, &volatilep, &rl_mask,
3796 &rl_and_mask);
3797 rr_inner = decode_field_reference (rr_arg,
3798 &rr_bitsize, &rr_bitpos, &rr_mode,
3799 &rr_unsignedp, &volatilep, &rr_mask,
3800 &rr_and_mask);
3802 /* It must be true that the inner operation on the lhs of each
3803 comparison must be the same if we are to be able to do anything.
3804 Then see if we have constants. If not, the same must be true for
3805 the rhs's. */
3806 if (volatilep || ll_inner == 0 || rl_inner == 0
3807 || ! operand_equal_p (ll_inner, rl_inner, 0))
3808 return 0;
3810 if (TREE_CODE (lr_arg) == INTEGER_CST
3811 && TREE_CODE (rr_arg) == INTEGER_CST)
3812 l_const = lr_arg, r_const = rr_arg;
3813 else if (lr_inner == 0 || rr_inner == 0
3814 || ! operand_equal_p (lr_inner, rr_inner, 0))
3815 return 0;
3816 else
3817 l_const = r_const = 0;
3819 /* If either comparison code is not correct for our logical operation,
3820 fail. However, we can convert a one-bit comparison against zero into
3821 the opposite comparison against that bit being set in the field. */
3823 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3824 if (lcode != wanted_code)
3826 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3828 /* Make the left operand unsigned, since we are only interested
3829 in the value of one bit. Otherwise we are doing the wrong
3830 thing below. */
3831 ll_unsignedp = 1;
3832 l_const = ll_mask;
3834 else
3835 return 0;
3838 /* This is analogous to the code for l_const above. */
3839 if (rcode != wanted_code)
3841 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3843 rl_unsignedp = 1;
3844 r_const = rl_mask;
3846 else
3847 return 0;
3850 /* See if we can find a mode that contains both fields being compared on
3851 the left. If we can't, fail. Otherwise, update all constants and masks
3852 to be relative to a field of that size. */
3853 first_bit = MIN (ll_bitpos, rl_bitpos);
3854 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3855 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3856 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3857 volatilep);
3858 if (lnmode == VOIDmode)
3859 return 0;
3861 lnbitsize = GET_MODE_BITSIZE (lnmode);
3862 lnbitpos = first_bit & ~ (lnbitsize - 1);
3863 lntype = type_for_size (lnbitsize, 1);
3864 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3866 if (BYTES_BIG_ENDIAN)
3868 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3869 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3872 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3873 size_int (xll_bitpos), 0);
3874 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3875 size_int (xrl_bitpos), 0);
3877 if (l_const)
3879 l_const = convert (lntype, l_const);
3880 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3881 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3882 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3883 fold (build1 (BIT_NOT_EXPR,
3884 lntype, ll_mask)),
3885 0)))
3887 warning ("comparison is always %d", wanted_code == NE_EXPR);
3889 return convert (truth_type,
3890 wanted_code == NE_EXPR
3891 ? integer_one_node : integer_zero_node);
3894 if (r_const)
3896 r_const = convert (lntype, r_const);
3897 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3898 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3899 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3900 fold (build1 (BIT_NOT_EXPR,
3901 lntype, rl_mask)),
3902 0)))
3904 warning ("comparison is always %d", wanted_code == NE_EXPR);
3906 return convert (truth_type,
3907 wanted_code == NE_EXPR
3908 ? integer_one_node : integer_zero_node);
3912 /* If the right sides are not constant, do the same for it. Also,
3913 disallow this optimization if a size or signedness mismatch occurs
3914 between the left and right sides. */
3915 if (l_const == 0)
3917 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3918 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3919 /* Make sure the two fields on the right
3920 correspond to the left without being swapped. */
3921 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3922 return 0;
3924 first_bit = MIN (lr_bitpos, rr_bitpos);
3925 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3926 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3927 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3928 volatilep);
3929 if (rnmode == VOIDmode)
3930 return 0;
3932 rnbitsize = GET_MODE_BITSIZE (rnmode);
3933 rnbitpos = first_bit & ~ (rnbitsize - 1);
3934 rntype = type_for_size (rnbitsize, 1);
3935 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3937 if (BYTES_BIG_ENDIAN)
3939 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3940 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3943 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3944 size_int (xlr_bitpos), 0);
3945 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3946 size_int (xrr_bitpos), 0);
3948 /* Make a mask that corresponds to both fields being compared.
3949 Do this for both items being compared. If the operands are the
3950 same size and the bits being compared are in the same position
3951 then we can do this by masking both and comparing the masked
3952 results. */
3953 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3954 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3955 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3957 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3958 ll_unsignedp || rl_unsignedp);
3959 if (! all_ones_mask_p (ll_mask, lnbitsize))
3960 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3962 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3963 lr_unsignedp || rr_unsignedp);
3964 if (! all_ones_mask_p (lr_mask, rnbitsize))
3965 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3967 return build (wanted_code, truth_type, lhs, rhs);
3970 /* There is still another way we can do something: If both pairs of
3971 fields being compared are adjacent, we may be able to make a wider
3972 field containing them both.
3974 Note that we still must mask the lhs/rhs expressions. Furthermore,
3975 the mask must be shifted to account for the shift done by
3976 make_bit_field_ref. */
3977 if ((ll_bitsize + ll_bitpos == rl_bitpos
3978 && lr_bitsize + lr_bitpos == rr_bitpos)
3979 || (ll_bitpos == rl_bitpos + rl_bitsize
3980 && lr_bitpos == rr_bitpos + rr_bitsize))
3982 tree type;
3984 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3985 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3986 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3987 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3989 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3990 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3991 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3992 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3994 /* Convert to the smaller type before masking out unwanted bits. */
3995 type = lntype;
3996 if (lntype != rntype)
3998 if (lnbitsize > rnbitsize)
4000 lhs = convert (rntype, lhs);
4001 ll_mask = convert (rntype, ll_mask);
4002 type = rntype;
4004 else if (lnbitsize < rnbitsize)
4006 rhs = convert (lntype, rhs);
4007 lr_mask = convert (lntype, lr_mask);
4008 type = lntype;
4012 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4013 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4015 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4016 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4018 return build (wanted_code, truth_type, lhs, rhs);
4021 return 0;
4024 /* Handle the case of comparisons with constants. If there is something in
4025 common between the masks, those bits of the constants must be the same.
4026 If not, the condition is always false. Test for this to avoid generating
4027 incorrect code below. */
4028 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4029 if (! integer_zerop (result)
4030 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4031 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4033 if (wanted_code == NE_EXPR)
4035 warning ("`or' of unmatched not-equal tests is always 1");
4036 return convert (truth_type, integer_one_node);
4038 else
4040 warning ("`and' of mutually exclusive equal-tests is always 0");
4041 return convert (truth_type, integer_zero_node);
4045 /* Construct the expression we will return. First get the component
4046 reference we will make. Unless the mask is all ones the width of
4047 that field, perform the mask operation. Then compare with the
4048 merged constant. */
4049 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4050 ll_unsignedp || rl_unsignedp);
4052 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4053 if (! all_ones_mask_p (ll_mask, lnbitsize))
4054 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4056 return build (wanted_code, truth_type, result,
4057 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4060 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4061 constant. */
4063 static tree
4064 optimize_minmax_comparison (t)
4065 tree t;
4067 tree type = TREE_TYPE (t);
4068 tree arg0 = TREE_OPERAND (t, 0);
4069 enum tree_code op_code;
4070 tree comp_const = TREE_OPERAND (t, 1);
4071 tree minmax_const;
4072 int consts_equal, consts_lt;
4073 tree inner;
4075 STRIP_SIGN_NOPS (arg0);
4077 op_code = TREE_CODE (arg0);
4078 minmax_const = TREE_OPERAND (arg0, 1);
4079 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4080 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4081 inner = TREE_OPERAND (arg0, 0);
4083 /* If something does not permit us to optimize, return the original tree. */
4084 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4085 || TREE_CODE (comp_const) != INTEGER_CST
4086 || TREE_CONSTANT_OVERFLOW (comp_const)
4087 || TREE_CODE (minmax_const) != INTEGER_CST
4088 || TREE_CONSTANT_OVERFLOW (minmax_const))
4089 return t;
4091 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4092 and GT_EXPR, doing the rest with recursive calls using logical
4093 simplifications. */
4094 switch (TREE_CODE (t))
4096 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4097 return
4098 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4100 case GE_EXPR:
4101 return
4102 fold (build (TRUTH_ORIF_EXPR, type,
4103 optimize_minmax_comparison
4104 (build (EQ_EXPR, type, arg0, comp_const)),
4105 optimize_minmax_comparison
4106 (build (GT_EXPR, type, arg0, comp_const))));
4108 case EQ_EXPR:
4109 if (op_code == MAX_EXPR && consts_equal)
4110 /* MAX (X, 0) == 0 -> X <= 0 */
4111 return fold (build (LE_EXPR, type, inner, comp_const));
4113 else if (op_code == MAX_EXPR && consts_lt)
4114 /* MAX (X, 0) == 5 -> X == 5 */
4115 return fold (build (EQ_EXPR, type, inner, comp_const));
4117 else if (op_code == MAX_EXPR)
4118 /* MAX (X, 0) == -1 -> false */
4119 return omit_one_operand (type, integer_zero_node, inner);
4121 else if (consts_equal)
4122 /* MIN (X, 0) == 0 -> X >= 0 */
4123 return fold (build (GE_EXPR, type, inner, comp_const));
4125 else if (consts_lt)
4126 /* MIN (X, 0) == 5 -> false */
4127 return omit_one_operand (type, integer_zero_node, inner);
4129 else
4130 /* MIN (X, 0) == -1 -> X == -1 */
4131 return fold (build (EQ_EXPR, type, inner, comp_const));
4133 case GT_EXPR:
4134 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4135 /* MAX (X, 0) > 0 -> X > 0
4136 MAX (X, 0) > 5 -> X > 5 */
4137 return fold (build (GT_EXPR, type, inner, comp_const));
4139 else if (op_code == MAX_EXPR)
4140 /* MAX (X, 0) > -1 -> true */
4141 return omit_one_operand (type, integer_one_node, inner);
4143 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4144 /* MIN (X, 0) > 0 -> false
4145 MIN (X, 0) > 5 -> false */
4146 return omit_one_operand (type, integer_zero_node, inner);
4148 else
4149 /* MIN (X, 0) > -1 -> X > -1 */
4150 return fold (build (GT_EXPR, type, inner, comp_const));
4152 default:
4153 return t;
4157 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4158 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4159 that we may sometimes modify the tree. */
4161 static tree
4162 strip_compound_expr (t, s)
4163 tree t;
4164 tree s;
4166 enum tree_code code = TREE_CODE (t);
4168 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4169 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4170 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4171 return TREE_OPERAND (t, 1);
4173 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4174 don't bother handling any other types. */
4175 else if (code == COND_EXPR)
4177 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4178 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4179 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4181 else if (TREE_CODE_CLASS (code) == '1')
4182 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4183 else if (TREE_CODE_CLASS (code) == '<'
4184 || TREE_CODE_CLASS (code) == '2')
4186 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4187 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4190 return t;
4193 /* Return a node which has the indicated constant VALUE (either 0 or
4194 1), and is of the indicated TYPE. */
4196 static tree
4197 constant_boolean_node (value, type)
4198 int value;
4199 tree type;
4201 if (type == integer_type_node)
4202 return value ? integer_one_node : integer_zero_node;
4203 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4204 return truthvalue_conversion (value ? integer_one_node :
4205 integer_zero_node);
4206 else
4208 tree t = build_int_2 (value, 0);
4209 TREE_TYPE (t) = type;
4210 return t;
4214 /* Utility function for the following routine, to see how complex a nesting of
4215 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4216 we don't care (to avoid spending too much time on complex expressions.). */
4218 static int
4219 count_cond (expr, lim)
4220 tree expr;
4221 int lim;
4223 int true, false;
4225 if (TREE_CODE (expr) != COND_EXPR)
4226 return 0;
4227 else if (lim <= 0)
4228 return 0;
4230 true = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4231 false = count_cond (TREE_OPERAND (expr, 2), lim - 1 - true);
4232 return MIN (lim, 1 + true + false);
4235 /* Perform constant folding and related simplification of EXPR.
4236 The related simplifications include x*1 => x, x*0 => 0, etc.,
4237 and application of the associative law.
4238 NOP_EXPR conversions may be removed freely (as long as we
4239 are careful not to change the C type of the overall expression)
4240 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4241 but we can constant-fold them if they have constant operands. */
4243 tree
4244 fold (expr)
4245 tree expr;
4247 register tree t = expr;
4248 tree t1 = NULL_TREE;
4249 tree tem;
4250 tree type = TREE_TYPE (expr);
4251 register tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4252 register enum tree_code code = TREE_CODE (t);
4253 register int kind;
4254 int invert;
4256 /* WINS will be nonzero when the switch is done
4257 if all operands are constant. */
4259 int wins = 1;
4261 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4262 Likewise for a SAVE_EXPR that's already been evaluated. */
4263 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t)) != 0)
4264 return t;
4266 /* Return right away if already constant. */
4267 if (TREE_CONSTANT (t))
4269 if (code == CONST_DECL)
4270 return DECL_INITIAL (t);
4271 return t;
4274 #ifdef MAX_INTEGER_COMPUTATION_MODE
4275 check_max_integer_computation_mode (expr);
4276 #endif
4278 kind = TREE_CODE_CLASS (code);
4279 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4281 tree subop;
4283 /* Special case for conversion ops that can have fixed point args. */
4284 arg0 = TREE_OPERAND (t, 0);
4286 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4287 if (arg0 != 0)
4288 STRIP_SIGN_NOPS (arg0);
4290 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4291 subop = TREE_REALPART (arg0);
4292 else
4293 subop = arg0;
4295 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4296 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4297 && TREE_CODE (subop) != REAL_CST
4298 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4300 /* Note that TREE_CONSTANT isn't enough:
4301 static var addresses are constant but we can't
4302 do arithmetic on them. */
4303 wins = 0;
4305 else if (kind == 'e' || kind == '<'
4306 || kind == '1' || kind == '2' || kind == 'r')
4308 register int len = tree_code_length[(int) code];
4309 register int i;
4310 for (i = 0; i < len; i++)
4312 tree op = TREE_OPERAND (t, i);
4313 tree subop;
4315 if (op == 0)
4316 continue; /* Valid for CALL_EXPR, at least. */
4318 if (kind == '<' || code == RSHIFT_EXPR)
4320 /* Signedness matters here. Perhaps we can refine this
4321 later. */
4322 STRIP_SIGN_NOPS (op);
4324 else
4326 /* Strip any conversions that don't change the mode. */
4327 STRIP_NOPS (op);
4330 if (TREE_CODE (op) == COMPLEX_CST)
4331 subop = TREE_REALPART (op);
4332 else
4333 subop = op;
4335 if (TREE_CODE (subop) != INTEGER_CST
4336 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4337 && TREE_CODE (subop) != REAL_CST
4338 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4340 /* Note that TREE_CONSTANT isn't enough:
4341 static var addresses are constant but we can't
4342 do arithmetic on them. */
4343 wins = 0;
4345 if (i == 0)
4346 arg0 = op;
4347 else if (i == 1)
4348 arg1 = op;
4352 /* If this is a commutative operation, and ARG0 is a constant, move it
4353 to ARG1 to reduce the number of tests below. */
4354 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4355 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4356 || code == BIT_AND_EXPR)
4357 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4359 tem = arg0; arg0 = arg1; arg1 = tem;
4361 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4362 TREE_OPERAND (t, 1) = tem;
4365 /* Now WINS is set as described above,
4366 ARG0 is the first operand of EXPR,
4367 and ARG1 is the second operand (if it has more than one operand).
4369 First check for cases where an arithmetic operation is applied to a
4370 compound, conditional, or comparison operation. Push the arithmetic
4371 operation inside the compound or conditional to see if any folding
4372 can then be done. Convert comparison to conditional for this purpose.
 4373 This also optimizes non-constant cases that used to be done in
4374 expand_expr.
4376 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
4377 one of the operands is a comparison and the other is a comparison, a
4378 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4379 code below would make the expression more complex. Change it to a
4380 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4381 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4383 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4384 || code == EQ_EXPR || code == NE_EXPR)
4385 && ((truth_value_p (TREE_CODE (arg0))
4386 && (truth_value_p (TREE_CODE (arg1))
4387 || (TREE_CODE (arg1) == BIT_AND_EXPR
4388 && integer_onep (TREE_OPERAND (arg1, 1)))))
4389 || (truth_value_p (TREE_CODE (arg1))
4390 && (truth_value_p (TREE_CODE (arg0))
4391 || (TREE_CODE (arg0) == BIT_AND_EXPR
4392 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4394 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4395 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4396 : TRUTH_XOR_EXPR,
4397 type, arg0, arg1));
4399 if (code == EQ_EXPR)
4400 t = invert_truthvalue (t);
4402 return t;
4405 if (TREE_CODE_CLASS (code) == '1')
4407 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4408 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4409 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4410 else if (TREE_CODE (arg0) == COND_EXPR)
4412 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4413 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4414 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4416 /* If this was a conversion, and all we did was to move into
4417 inside the COND_EXPR, bring it back out. But leave it if
4418 it is a conversion from integer to integer and the
4419 result precision is no wider than a word since such a
4420 conversion is cheap and may be optimized away by combine,
4421 while it couldn't if it were outside the COND_EXPR. Then return
4422 so we don't get into an infinite recursion loop taking the
4423 conversion out and then back in. */
4425 if ((code == NOP_EXPR || code == CONVERT_EXPR
4426 || code == NON_LVALUE_EXPR)
4427 && TREE_CODE (t) == COND_EXPR
4428 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4429 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4430 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4431 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4432 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4433 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
4434 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4435 t = build1 (code, type,
4436 build (COND_EXPR,
4437 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
4438 TREE_OPERAND (t, 0),
4439 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4440 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4441 return t;
4443 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4444 return fold (build (COND_EXPR, type, arg0,
4445 fold (build1 (code, type, integer_one_node)),
4446 fold (build1 (code, type, integer_zero_node))));
4448 else if (TREE_CODE_CLASS (code) == '2'
4449 || TREE_CODE_CLASS (code) == '<')
4451 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4452 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4453 fold (build (code, type,
4454 arg0, TREE_OPERAND (arg1, 1))));
4455 else if ((TREE_CODE (arg1) == COND_EXPR
4456 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4457 && TREE_CODE_CLASS (code) != '<'))
4458 && (TREE_CODE (arg0) != COND_EXPR
4459 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4460 && (! TREE_SIDE_EFFECTS (arg0)
4461 || (global_bindings_p () == 0
4462 && ! contains_placeholder_p (arg0))))
4464 tree test, true_value, false_value;
4465 tree lhs = 0, rhs = 0;
4467 if (TREE_CODE (arg1) == COND_EXPR)
4469 test = TREE_OPERAND (arg1, 0);
4470 true_value = TREE_OPERAND (arg1, 1);
4471 false_value = TREE_OPERAND (arg1, 2);
4473 else
4475 tree testtype = TREE_TYPE (arg1);
4476 test = arg1;
4477 true_value = convert (testtype, integer_one_node);
4478 false_value = convert (testtype, integer_zero_node);
4481 /* If ARG0 is complex we want to make sure we only evaluate
4482 it once. Though this is only required if it is volatile, it
4483 might be more efficient even if it is not. However, if we
4484 succeed in folding one part to a constant, we do not need
4485 to make this SAVE_EXPR. Since we do this optimization
4486 primarily to see if we do end up with constant and this
4487 SAVE_EXPR interferes with later optimizations, suppressing
4488 it when we can is important.
4490 If we are not in a function, we can't make a SAVE_EXPR, so don't
4491 try to do so. Don't try to see if the result is a constant
4492 if an arm is a COND_EXPR since we get exponential behavior
4493 in that case. */
4495 if (TREE_CODE (arg0) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4496 && global_bindings_p () == 0
4497 && ((TREE_CODE (arg0) != VAR_DECL
4498 && TREE_CODE (arg0) != PARM_DECL)
4499 || TREE_SIDE_EFFECTS (arg0)))
4501 if (TREE_CODE (true_value) != COND_EXPR)
4502 lhs = fold (build (code, type, arg0, true_value));
4504 if (TREE_CODE (false_value) != COND_EXPR)
4505 rhs = fold (build (code, type, arg0, false_value));
4507 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4508 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4509 arg0 = save_expr (arg0), lhs = rhs = 0;
4512 if (lhs == 0)
4513 lhs = fold (build (code, type, arg0, true_value));
4514 if (rhs == 0)
4515 rhs = fold (build (code, type, arg0, false_value));
4517 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4519 if (TREE_CODE (arg0) == SAVE_EXPR)
4520 return build (COMPOUND_EXPR, type,
4521 convert (void_type_node, arg0),
4522 strip_compound_expr (test, arg0));
4523 else
4524 return convert (type, test);
4527 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4528 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4529 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4530 else if ((TREE_CODE (arg0) == COND_EXPR
4531 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4532 && TREE_CODE_CLASS (code) != '<'))
4533 && (TREE_CODE (arg1) != COND_EXPR
4534 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4535 && (! TREE_SIDE_EFFECTS (arg1)
4536 || (global_bindings_p () == 0
4537 && ! contains_placeholder_p (arg1))))
4539 tree test, true_value, false_value;
4540 tree lhs = 0, rhs = 0;
4542 if (TREE_CODE (arg0) == COND_EXPR)
4544 test = TREE_OPERAND (arg0, 0);
4545 true_value = TREE_OPERAND (arg0, 1);
4546 false_value = TREE_OPERAND (arg0, 2);
4548 else
4550 tree testtype = TREE_TYPE (arg0);
4551 test = arg0;
4552 true_value = convert (testtype, integer_one_node);
4553 false_value = convert (testtype, integer_zero_node);
4556 if (TREE_CODE (arg1) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4557 && global_bindings_p () == 0
4558 && ((TREE_CODE (arg1) != VAR_DECL
4559 && TREE_CODE (arg1) != PARM_DECL)
4560 || TREE_SIDE_EFFECTS (arg1)))
4562 if (TREE_CODE (true_value) != COND_EXPR)
4563 lhs = fold (build (code, type, true_value, arg1));
4565 if (TREE_CODE (false_value) != COND_EXPR)
4566 rhs = fold (build (code, type, false_value, arg1));
4568 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4569 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4570 arg1 = save_expr (arg1), lhs = rhs = 0;
4573 if (lhs == 0)
4574 lhs = fold (build (code, type, true_value, arg1));
4576 if (rhs == 0)
4577 rhs = fold (build (code, type, false_value, arg1));
4579 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4580 if (TREE_CODE (arg1) == SAVE_EXPR)
4581 return build (COMPOUND_EXPR, type,
4582 convert (void_type_node, arg1),
4583 strip_compound_expr (test, arg1));
4584 else
4585 return convert (type, test);
4588 else if (TREE_CODE_CLASS (code) == '<'
4589 && TREE_CODE (arg0) == COMPOUND_EXPR)
4590 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4591 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4592 else if (TREE_CODE_CLASS (code) == '<'
4593 && TREE_CODE (arg1) == COMPOUND_EXPR)
4594 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4595 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4597 switch (code)
4599 case INTEGER_CST:
4600 case REAL_CST:
4601 case STRING_CST:
4602 case COMPLEX_CST:
4603 case CONSTRUCTOR:
4604 return t;
4606 case CONST_DECL:
4607 return fold (DECL_INITIAL (t));
4609 case NOP_EXPR:
4610 case FLOAT_EXPR:
4611 case CONVERT_EXPR:
4612 case FIX_TRUNC_EXPR:
4613 /* Other kinds of FIX are not handled properly by fold_convert. */
4615 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4616 return TREE_OPERAND (t, 0);
4618 /* Handle cases of two conversions in a row. */
4619 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4620 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4622 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4623 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4624 tree final_type = TREE_TYPE (t);
4625 int inside_int = INTEGRAL_TYPE_P (inside_type);
4626 int inside_ptr = POINTER_TYPE_P (inside_type);
4627 int inside_float = FLOAT_TYPE_P (inside_type);
4628 int inside_prec = TYPE_PRECISION (inside_type);
4629 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4630 int inter_int = INTEGRAL_TYPE_P (inter_type);
4631 int inter_ptr = POINTER_TYPE_P (inter_type);
4632 int inter_float = FLOAT_TYPE_P (inter_type);
4633 int inter_prec = TYPE_PRECISION (inter_type);
4634 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4635 int final_int = INTEGRAL_TYPE_P (final_type);
4636 int final_ptr = POINTER_TYPE_P (final_type);
4637 int final_float = FLOAT_TYPE_P (final_type);
4638 int final_prec = TYPE_PRECISION (final_type);
4639 int final_unsignedp = TREE_UNSIGNED (final_type);
4641 /* In addition to the cases of two conversions in a row
4642 handled below, if we are converting something to its own
4643 type via an object of identical or wider precision, neither
4644 conversion is needed. */
4645 if (inside_type == final_type
4646 && ((inter_int && final_int) || (inter_float && final_float))
4647 && inter_prec >= final_prec)
4648 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4650 /* Likewise, if the intermediate and final types are either both
4651 float or both integer, we don't need the middle conversion if
4652 it is wider than the final type and doesn't change the signedness
4653 (for integers). Avoid this if the final type is a pointer
4654 since then we sometimes need the inner conversion. Likewise if
4655 the outer has a precision not equal to the size of its mode. */
4656 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4657 || (inter_float && inside_float))
4658 && inter_prec >= inside_prec
4659 && (inter_float || inter_unsignedp == inside_unsignedp)
4660 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4661 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4662 && ! final_ptr)
4663 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4665 /* If we have a sign-extension of a zero-extended value, we can
4666 replace that by a single zero-extension. */
4667 if (inside_int && inter_int && final_int
4668 && inside_prec < inter_prec && inter_prec < final_prec
4669 && inside_unsignedp && !inter_unsignedp)
4670 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4672 /* Two conversions in a row are not needed unless:
4673 - some conversion is floating-point (overstrict for now), or
4674 - the intermediate type is narrower than both initial and
4675 final, or
4676 - the intermediate type and innermost type differ in signedness,
4677 and the outermost type is wider than the intermediate, or
4678 - the initial type is a pointer type and the precisions of the
4679 intermediate and final types differ, or
4680 - the final type is a pointer type and the precisions of the
4681 initial and intermediate types differ. */
4682 if (! inside_float && ! inter_float && ! final_float
4683 && (inter_prec > inside_prec || inter_prec > final_prec)
4684 && ! (inside_int && inter_int
4685 && inter_unsignedp != inside_unsignedp
4686 && inter_prec < final_prec)
4687 && ((inter_unsignedp && inter_prec > inside_prec)
4688 == (final_unsignedp && final_prec > inter_prec))
4689 && ! (inside_ptr && inter_prec != final_prec)
4690 && ! (final_ptr && inside_prec != inter_prec)
4691 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4692 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4693 && ! final_ptr)
4694 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4697 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4698 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4699 /* Detect assigning a bitfield. */
4700 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4701 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4703 /* Don't leave an assignment inside a conversion
4704 unless assigning a bitfield. */
4705 tree prev = TREE_OPERAND (t, 0);
4706 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4707 /* First do the assignment, then return converted constant. */
4708 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4709 TREE_USED (t) = 1;
4710 return t;
4712 if (!wins)
4714 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4715 return t;
4717 return fold_convert (t, arg0);
4719 #if 0 /* This loses on &"foo"[0]. */
4720 case ARRAY_REF:
4722 int i;
4724 /* Fold an expression like: "foo"[2] */
4725 if (TREE_CODE (arg0) == STRING_CST
4726 && TREE_CODE (arg1) == INTEGER_CST
4727 && !TREE_INT_CST_HIGH (arg1)
4728 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4730 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4731 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
4732 force_fit_type (t, 0);
4735 return t;
4736 #endif /* 0 */
4738 case COMPONENT_REF:
4739 if (TREE_CODE (arg0) == CONSTRUCTOR)
4741 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4742 if (m)
4743 t = TREE_VALUE (m);
4745 return t;
4747 case RANGE_EXPR:
4748 TREE_CONSTANT (t) = wins;
4749 return t;
4751 case NEGATE_EXPR:
4752 if (wins)
4754 if (TREE_CODE (arg0) == INTEGER_CST)
4756 HOST_WIDE_INT low, high;
4757 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4758 TREE_INT_CST_HIGH (arg0),
4759 &low, &high);
4760 t = build_int_2 (low, high);
4761 TREE_TYPE (t) = type;
4762 TREE_OVERFLOW (t)
4763 = (TREE_OVERFLOW (arg0)
4764 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4765 TREE_CONSTANT_OVERFLOW (t)
4766 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4768 else if (TREE_CODE (arg0) == REAL_CST)
4769 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4771 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4772 return TREE_OPERAND (arg0, 0);
4774 /* Convert - (a - b) to (b - a) for non-floating-point. */
4775 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
4776 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4777 TREE_OPERAND (arg0, 0));
4779 return t;
4781 case ABS_EXPR:
4782 if (wins)
4784 if (TREE_CODE (arg0) == INTEGER_CST)
4786 if (! TREE_UNSIGNED (type)
4787 && TREE_INT_CST_HIGH (arg0) < 0)
4789 HOST_WIDE_INT low, high;
4790 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4791 TREE_INT_CST_HIGH (arg0),
4792 &low, &high);
4793 t = build_int_2 (low, high);
4794 TREE_TYPE (t) = type;
4795 TREE_OVERFLOW (t)
4796 = (TREE_OVERFLOW (arg0)
4797 | force_fit_type (t, overflow));
4798 TREE_CONSTANT_OVERFLOW (t)
4799 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4802 else if (TREE_CODE (arg0) == REAL_CST)
4804 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4805 t = build_real (type,
4806 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4809 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4810 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4811 return t;
4813 case CONJ_EXPR:
4814 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4815 return arg0;
4816 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4817 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
4818 TREE_OPERAND (arg0, 0),
4819 fold (build1 (NEGATE_EXPR,
4820 TREE_TYPE (TREE_TYPE (arg0)),
4821 TREE_OPERAND (arg0, 1))));
4822 else if (TREE_CODE (arg0) == COMPLEX_CST)
4823 return build_complex (type, TREE_OPERAND (arg0, 0),
4824 fold (build1 (NEGATE_EXPR,
4825 TREE_TYPE (TREE_TYPE (arg0)),
4826 TREE_OPERAND (arg0, 1))));
4827 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4828 return fold (build (TREE_CODE (arg0), type,
4829 fold (build1 (CONJ_EXPR, type,
4830 TREE_OPERAND (arg0, 0))),
4831 fold (build1 (CONJ_EXPR,
4832 type, TREE_OPERAND (arg0, 1)))));
4833 else if (TREE_CODE (arg0) == CONJ_EXPR)
4834 return TREE_OPERAND (arg0, 0);
4835 return t;
4837 case BIT_NOT_EXPR:
4838 if (wins)
4840 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4841 ~ TREE_INT_CST_HIGH (arg0));
4842 TREE_TYPE (t) = type;
4843 force_fit_type (t, 0);
4844 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4845 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4847 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4848 return TREE_OPERAND (arg0, 0);
4849 return t;
4851 case PLUS_EXPR:
4852 /* A + (-B) -> A - B */
4853 if (TREE_CODE (arg1) == NEGATE_EXPR)
4854 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4855 /* (-A) + B -> B - A */
4856 if (TREE_CODE (arg0) == NEGATE_EXPR)
4857 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
4858 else if (! FLOAT_TYPE_P (type))
4860 if (integer_zerop (arg1))
4861 return non_lvalue (convert (type, arg0));
4863 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4864 with a constant, and the two constants have no bits in common,
4865 we should treat this as a BIT_IOR_EXPR since this may produce more
4866 simplifications. */
4867 if (TREE_CODE (arg0) == BIT_AND_EXPR
4868 && TREE_CODE (arg1) == BIT_AND_EXPR
4869 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4870 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
4871 && integer_zerop (const_binop (BIT_AND_EXPR,
4872 TREE_OPERAND (arg0, 1),
4873 TREE_OPERAND (arg1, 1), 0)))
4875 code = BIT_IOR_EXPR;
4876 goto bit_ior;
4879 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
4880 (plus (plus (mult) (mult)) (foo)) so that we can
4881 take advantage of the factoring cases below. */
4882 if ((TREE_CODE (arg0) == PLUS_EXPR
4883 && TREE_CODE (arg1) == MULT_EXPR)
4884 || (TREE_CODE (arg1) == PLUS_EXPR
4885 && TREE_CODE (arg0) == MULT_EXPR))
4887 tree parg0, parg1, parg, marg;
4889 if (TREE_CODE (arg0) == PLUS_EXPR)
4890 parg = arg0, marg = arg1;
4891 else
4892 parg = arg1, marg = arg0;
4893 parg0 = TREE_OPERAND (parg, 0);
4894 parg1 = TREE_OPERAND (parg, 1);
4895 STRIP_NOPS (parg0);
4896 STRIP_NOPS (parg1);
4898 if (TREE_CODE (parg0) == MULT_EXPR
4899 && TREE_CODE (parg1) != MULT_EXPR)
4900 return fold (build (PLUS_EXPR, type,
4901 fold (build (PLUS_EXPR, type, parg0, marg)),
4902 parg1));
4903 if (TREE_CODE (parg0) != MULT_EXPR
4904 && TREE_CODE (parg1) == MULT_EXPR)
4905 return fold (build (PLUS_EXPR, type,
4906 fold (build (PLUS_EXPR, type, parg1, marg)),
4907 parg0));
4910 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
4912 tree arg00, arg01, arg10, arg11;
4913 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
4915 /* (A * C) + (B * C) -> (A+B) * C.
4916 We are most concerned about the case where C is a constant,
4917 but other combinations show up during loop reduction. Since
4918 it is not difficult, try all four possibilities. */
4920 arg00 = TREE_OPERAND (arg0, 0);
4921 arg01 = TREE_OPERAND (arg0, 1);
4922 arg10 = TREE_OPERAND (arg1, 0);
4923 arg11 = TREE_OPERAND (arg1, 1);
4924 same = NULL_TREE;
4926 if (operand_equal_p (arg01, arg11, 0))
4927 same = arg01, alt0 = arg00, alt1 = arg10;
4928 else if (operand_equal_p (arg00, arg10, 0))
4929 same = arg00, alt0 = arg01, alt1 = arg11;
4930 else if (operand_equal_p (arg00, arg11, 0))
4931 same = arg00, alt0 = arg01, alt1 = arg10;
4932 else if (operand_equal_p (arg01, arg10, 0))
4933 same = arg01, alt0 = arg00, alt1 = arg11;
4935 /* No identical multiplicands; see if we can find a common
4936 power-of-two factor in non-power-of-two multiplies. This
4937 can help in multi-dimensional array access. */
4938 else if (TREE_CODE (arg01) == INTEGER_CST
4939 && TREE_CODE (arg11) == INTEGER_CST
4940 && TREE_INT_CST_HIGH (arg01) == 0
4941 && TREE_INT_CST_HIGH (arg11) == 0)
4943 HOST_WIDE_INT int01, int11, tmp;
4944 int01 = TREE_INT_CST_LOW (arg01);
4945 int11 = TREE_INT_CST_LOW (arg11);
4947 /* Move min of absolute values to int11. */
4948 if ((int01 >= 0 ? int01 : -int01)
4949 < (int11 >= 0 ? int11 : -int11))
4951 tmp = int01, int01 = int11, int11 = tmp;
4952 alt0 = arg00, arg00 = arg10, arg10 = alt0;
4953 alt0 = arg01, arg01 = arg11, arg11 = alt0;
4956 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
4958 alt0 = fold (build (MULT_EXPR, type, arg00,
4959 build_int_2 (int01 / int11, 0)));
4960 alt1 = arg10;
4961 same = arg11;
4965 if (same)
4966 return fold (build (MULT_EXPR, type,
4967 fold (build (PLUS_EXPR, type, alt0, alt1)),
4968 same));
4971 /* In IEEE floating point, x+0 may not equal x. */
4972 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4973 || flag_fast_math)
4974 && real_zerop (arg1))
4975 return non_lvalue (convert (type, arg0));
4976 /* x+(-0) equals x, even for IEEE. */
4977 else if (TREE_CODE (arg1) == REAL_CST
4978 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
4979 return non_lvalue (convert (type, arg0));
4981 bit_rotate:
4982 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
4983 is a rotate of A by C1 bits. */
4984 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
4985 is a rotate of A by B bits. */
4987 register enum tree_code code0, code1;
4988 code0 = TREE_CODE (arg0);
4989 code1 = TREE_CODE (arg1);
4990 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
4991 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
4992 && operand_equal_p (TREE_OPERAND (arg0, 0),
4993 TREE_OPERAND (arg1,0), 0)
4994 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4996 register tree tree01, tree11;
4997 register enum tree_code code01, code11;
4999 tree01 = TREE_OPERAND (arg0, 1);
5000 tree11 = TREE_OPERAND (arg1, 1);
5001 STRIP_NOPS (tree01);
5002 STRIP_NOPS (tree11);
5003 code01 = TREE_CODE (tree01);
5004 code11 = TREE_CODE (tree11);
5005 if (code01 == INTEGER_CST
5006 && code11 == INTEGER_CST
5007 && TREE_INT_CST_HIGH (tree01) == 0
5008 && TREE_INT_CST_HIGH (tree11) == 0
5009 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5010 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5011 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5012 code0 == LSHIFT_EXPR ? tree01 : tree11);
5013 else if (code11 == MINUS_EXPR)
5015 tree tree110, tree111;
5016 tree110 = TREE_OPERAND (tree11, 0);
5017 tree111 = TREE_OPERAND (tree11, 1);
5018 STRIP_NOPS (tree110);
5019 STRIP_NOPS (tree111);
5020 if (TREE_CODE (tree110) == INTEGER_CST
5021 && TREE_INT_CST_HIGH (tree110) == 0
5022 && (TREE_INT_CST_LOW (tree110)
5023 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5024 && operand_equal_p (tree01, tree111, 0))
5025 return build ((code0 == LSHIFT_EXPR
5026 ? LROTATE_EXPR
5027 : RROTATE_EXPR),
5028 type, TREE_OPERAND (arg0, 0), tree01);
5030 else if (code01 == MINUS_EXPR)
5032 tree tree010, tree011;
5033 tree010 = TREE_OPERAND (tree01, 0);
5034 tree011 = TREE_OPERAND (tree01, 1);
5035 STRIP_NOPS (tree010);
5036 STRIP_NOPS (tree011);
5037 if (TREE_CODE (tree010) == INTEGER_CST
5038 && TREE_INT_CST_HIGH (tree010) == 0
5039 && (TREE_INT_CST_LOW (tree010)
5040 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5041 && operand_equal_p (tree11, tree011, 0))
5042 return build ((code0 != LSHIFT_EXPR
5043 ? LROTATE_EXPR
5044 : RROTATE_EXPR),
5045 type, TREE_OPERAND (arg0, 0), tree11);
5050 associate:
5051 /* In most languages, can't associate operations on floats
5052 through parentheses. Rather than remember where the parentheses
5053 were, we don't associate floats at all. It shouldn't matter much.
5054 However, associating multiplications is only very slightly
5055 inaccurate, so do that if -ffast-math is specified. */
5056 if (FLOAT_TYPE_P (type)
5057 && ! (flag_fast_math && code == MULT_EXPR))
5058 goto binary;
5060 /* The varsign == -1 cases happen only for addition and subtraction.
5061 It says that the arg that was split was really CON minus VAR.
5062 The rest of the code applies to all associative operations. */
5063 if (!wins)
5065 tree var, con;
5066 int varsign;
5068 if (split_tree (arg0, code, &var, &con, &varsign))
5070 if (varsign == -1)
5072 /* EXPR is (CON-VAR) +- ARG1. */
5073 /* If it is + and VAR==ARG1, return just CONST. */
5074 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
5075 return convert (TREE_TYPE (t), con);
5077 /* If ARG0 is a constant, don't change things around;
5078 instead keep all the constant computations together. */
5080 if (TREE_CONSTANT (arg0))
5081 return t;
5083 /* Otherwise return (CON +- ARG1) - VAR. */
5084 t = build (MINUS_EXPR, type,
5085 fold (build (code, type, con, arg1)), var);
5087 else
5089 /* EXPR is (VAR+CON) +- ARG1. */
5090 /* If it is - and VAR==ARG1, return just CONST. */
5091 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
5092 return convert (TREE_TYPE (t), con);
5094 /* If ARG0 is a constant, don't change things around;
5095 instead keep all the constant computations together. */
5097 if (TREE_CONSTANT (arg0))
5098 return t;
5100 /* Otherwise return VAR +- (ARG1 +- CON). */
5101 tem = fold (build (code, type, arg1, con));
5102 t = build (code, type, var, tem);
5104 if (integer_zerop (tem)
5105 && (code == PLUS_EXPR || code == MINUS_EXPR))
5106 return convert (type, var);
5107 /* If we have x +/- (c - d) [c an explicit integer]
5108 change it to x -/+ (d - c) since if d is relocatable
5109 then the latter can be a single immediate insn
5110 and the former cannot. */
5111 if (TREE_CODE (tem) == MINUS_EXPR
5112 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
5114 tree tem1 = TREE_OPERAND (tem, 1);
5115 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
5116 TREE_OPERAND (tem, 0) = tem1;
5117 TREE_SET_CODE (t,
5118 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
5121 return t;
5124 if (split_tree (arg1, code, &var, &con, &varsign))
5126 if (TREE_CONSTANT (arg1))
5127 return t;
5129 if (varsign == -1)
5130 TREE_SET_CODE (t,
5131 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
5133 /* EXPR is ARG0 +- (CON +- VAR). */
5134 if (TREE_CODE (t) == MINUS_EXPR
5135 && operand_equal_p (var, arg0, 0))
5137 /* If VAR and ARG0 cancel, return just CON or -CON. */
5138 if (code == PLUS_EXPR)
5139 return convert (TREE_TYPE (t), con);
5140 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
5141 convert (TREE_TYPE (t), con)));
5144 t = build (TREE_CODE (t), type,
5145 fold (build (code, TREE_TYPE (t), arg0, con)), var);
5147 if (integer_zerop (TREE_OPERAND (t, 0))
5148 && TREE_CODE (t) == PLUS_EXPR)
5149 return convert (TREE_TYPE (t), var);
5150 return t;
5153 binary:
5154 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
5155 if (TREE_CODE (arg1) == REAL_CST)
5156 return t;
5157 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
5158 if (wins)
5159 t1 = const_binop (code, arg0, arg1, 0);
5160 if (t1 != NULL_TREE)
5162 /* The return value should always have
5163 the same type as the original expression. */
5164 if (TREE_TYPE (t1) != TREE_TYPE (t))
5165 t1 = convert (TREE_TYPE (t), t1);
5167 return t1;
5169 return t;
5171 case MINUS_EXPR:
5172 /* A - (-B) -> A + B */
5173 if (TREE_CODE (arg1) == NEGATE_EXPR)
5174 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5175 /* (-A) - CST -> (-CST) - A for floating point (what about ints ?) */
5176 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5177 return
5178 fold (build (MINUS_EXPR, type,
5179 build_real (TREE_TYPE (arg1),
5180 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5181 TREE_OPERAND (arg0, 0)));
5183 if (! FLOAT_TYPE_P (type))
5185 if (! wins && integer_zerop (arg0))
5186 return build1 (NEGATE_EXPR, type, arg1);
5187 if (integer_zerop (arg1))
5188 return non_lvalue (convert (type, arg0));
5190 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5191 about the case where C is a constant, just try one of the
5192 four possibilities. */
5194 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5195 && operand_equal_p (TREE_OPERAND (arg0, 1),
5196 TREE_OPERAND (arg1, 1), 0))
5197 return fold (build (MULT_EXPR, type,
5198 fold (build (MINUS_EXPR, type,
5199 TREE_OPERAND (arg0, 0),
5200 TREE_OPERAND (arg1, 0))),
5201 TREE_OPERAND (arg0, 1)));
5204 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5205 || flag_fast_math)
5207 /* Except with IEEE floating point, 0-x equals -x. */
5208 if (! wins && real_zerop (arg0))
5209 return build1 (NEGATE_EXPR, type, arg1);
5210 /* Except with IEEE floating point, x-0 equals x. */
5211 if (real_zerop (arg1))
5212 return non_lvalue (convert (type, arg0));
5215 /* Fold &x - &x. This can happen from &x.foo - &x.
5216 This is unsafe for certain floats even in non-IEEE formats.
5217 In IEEE, it is unsafe because it does wrong for NaNs.
5218 Also note that operand_equal_p is always false if an operand
5219 is volatile. */
5221 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
5222 && operand_equal_p (arg0, arg1, 0))
5223 return convert (type, integer_zero_node);
5225 goto associate;
5227 case MULT_EXPR:
5228 /* (-A) * (-B) -> A * B */
5229 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5230 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5231 TREE_OPERAND (arg1, 0)));
5233 if (! FLOAT_TYPE_P (type))
5235 if (integer_zerop (arg1))
5236 return omit_one_operand (type, arg1, arg0);
5237 if (integer_onep (arg1))
5238 return non_lvalue (convert (type, arg0));
5240 /* ((A / C) * C) is A if the division is an
5241 EXACT_DIV_EXPR. Since C is normally a constant,
5242 just check for one of the four possibilities. */
5244 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
5245 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
5246 return TREE_OPERAND (arg0, 0);
5248 /* (a * (1 << b)) is (a << b) */
5249 if (TREE_CODE (arg1) == LSHIFT_EXPR
5250 && integer_onep (TREE_OPERAND (arg1, 0)))
5251 return fold (build (LSHIFT_EXPR, type, arg0,
5252 TREE_OPERAND (arg1, 1)));
5253 if (TREE_CODE (arg0) == LSHIFT_EXPR
5254 && integer_onep (TREE_OPERAND (arg0, 0)))
5255 return fold (build (LSHIFT_EXPR, type, arg1,
5256 TREE_OPERAND (arg0, 1)));
5258 else
5260 /* x*0 is 0, except for IEEE floating point. */
5261 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5262 || flag_fast_math)
5263 && real_zerop (arg1))
5264 return omit_one_operand (type, arg1, arg0);
5265 /* In IEEE floating point, x*1 is not equivalent to x for snans.
5266 However, ANSI says we can drop signals,
5267 so we can do this anyway. */
5268 if (real_onep (arg1))
5269 return non_lvalue (convert (type, arg0));
5270 /* x*2 is x+x */
5271 if (! wins && real_twop (arg1) && global_bindings_p () == 0
5272 && ! contains_placeholder_p (arg0))
5274 tree arg = save_expr (arg0);
5275 return build (PLUS_EXPR, type, arg, arg);
5278 goto associate;
5280 case BIT_IOR_EXPR:
5281 bit_ior:
5282 if (integer_all_onesp (arg1))
5283 return omit_one_operand (type, arg1, arg0);
5284 if (integer_zerop (arg1))
5285 return non_lvalue (convert (type, arg0));
5286 t1 = distribute_bit_expr (code, type, arg0, arg1);
5287 if (t1 != NULL_TREE)
5288 return t1;
5290 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5292 This results in more efficient code for machines without a NAND
5293 instruction. Combine will canonicalize to the first form
5294 which will allow use of NAND instructions provided by the
5295 backend if they exist. */
5296 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5297 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5299 return fold (build1 (BIT_NOT_EXPR, type,
5300 build (BIT_AND_EXPR, type,
5301 TREE_OPERAND (arg0, 0),
5302 TREE_OPERAND (arg1, 0))));
5305 /* See if this can be simplified into a rotate first. If that
5306 is unsuccessful continue in the association code. */
5307 goto bit_rotate;
5309 case BIT_XOR_EXPR:
5310 if (integer_zerop (arg1))
5311 return non_lvalue (convert (type, arg0));
5312 if (integer_all_onesp (arg1))
5313 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5315 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5316 with a constant, and the two constants have no bits in common,
5317 we should treat this as a BIT_IOR_EXPR since this may produce more
5318 simplifications. */
5319 if (TREE_CODE (arg0) == BIT_AND_EXPR
5320 && TREE_CODE (arg1) == BIT_AND_EXPR
5321 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5322 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5323 && integer_zerop (const_binop (BIT_AND_EXPR,
5324 TREE_OPERAND (arg0, 1),
5325 TREE_OPERAND (arg1, 1), 0)))
5327 code = BIT_IOR_EXPR;
5328 goto bit_ior;
5331 /* See if this can be simplified into a rotate first. If that
5332 is unsuccessful continue in the association code. */
5333 goto bit_rotate;
5335 case BIT_AND_EXPR:
5336 bit_and:
5337 if (integer_all_onesp (arg1))
5338 return non_lvalue (convert (type, arg0));
5339 if (integer_zerop (arg1))
5340 return omit_one_operand (type, arg1, arg0);
5341 t1 = distribute_bit_expr (code, type, arg0, arg1);
5342 if (t1 != NULL_TREE)
5343 return t1;
5344 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
5345 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
5346 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5348 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
5349 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5350 && (~TREE_INT_CST_LOW (arg0)
5351 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5352 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
5354 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5355 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5357 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5358 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5359 && (~TREE_INT_CST_LOW (arg1)
5360 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5361 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5364 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5366 This results in more efficient code for machines without a NOR
5367 instruction. Combine will canonicalize to the first form
5368 which will allow use of NOR instructions provided by the
5369 backend if they exist. */
5370 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5371 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5373 return fold (build1 (BIT_NOT_EXPR, type,
5374 build (BIT_IOR_EXPR, type,
5375 TREE_OPERAND (arg0, 0),
5376 TREE_OPERAND (arg1, 0))));
5379 goto associate;
5381 case BIT_ANDTC_EXPR:
5382 if (integer_all_onesp (arg0))
5383 return non_lvalue (convert (type, arg1));
5384 if (integer_zerop (arg0))
5385 return omit_one_operand (type, arg0, arg1);
5386 if (TREE_CODE (arg1) == INTEGER_CST)
5388 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5389 code = BIT_AND_EXPR;
5390 goto bit_and;
5392 goto binary;
5394 case RDIV_EXPR:
5395 /* In most cases, do nothing with a divide by zero. */
5396 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
5397 #ifndef REAL_INFINITY
5398 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
5399 return t;
5400 #endif
5401 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
5403 /* (-A) / (-B) -> A / B */
5404 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5405 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5406 TREE_OPERAND (arg1, 0)));
5408 /* In IEEE floating point, x/1 is not equivalent to x for snans.
5409 However, ANSI says we can drop signals, so we can do this anyway. */
5410 if (real_onep (arg1))
5411 return non_lvalue (convert (type, arg0));
5413 /* If ARG1 is a constant, we can convert this to a multiply by the
5414 reciprocal. This does not have the same rounding properties,
5415 so only do this if -ffast-math. We can actually always safely
5416 do it if ARG1 is a power of two, but it's hard to tell if it is
5417 or not in a portable manner. */
5418 if (TREE_CODE (arg1) == REAL_CST)
5420 if (flag_fast_math
5421 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5422 arg1, 0)))
5423 return fold (build (MULT_EXPR, type, arg0, tem));
5424 /* Find the reciprocal if optimizing and the result is exact. */
5425 else if (optimize)
5427 REAL_VALUE_TYPE r;
5428 r = TREE_REAL_CST (arg1);
5429 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
5431 tem = build_real (type, r);
5432 return fold (build (MULT_EXPR, type, arg0, tem));
5436 goto binary;
5438 case TRUNC_DIV_EXPR:
5439 case ROUND_DIV_EXPR:
5440 case FLOOR_DIV_EXPR:
5441 case CEIL_DIV_EXPR:
5442 case EXACT_DIV_EXPR:
5443 if (integer_onep (arg1))
5444 return non_lvalue (convert (type, arg0));
5445 if (integer_zerop (arg1))
5446 return t;
5448 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5449 operation, EXACT_DIV_EXPR.
5451 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5452 At one time others generated faster code, it's not clear if they do
5453 after the last round to changes to the DIV code in expmed.c. */
5454 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5455 && multiple_of_p (type, arg0, arg1))
5456 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5458 /* If we have ((a / C1) / C2) where both division are the same type, try
5459 to simplify. First see if C1 * C2 overflows or not. */
5460 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
5461 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5463 tree new_divisor;
5465 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
5466 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
5468 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
5469 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
5471 /* If no overflow, divide by C1*C2. */
5472 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
5476 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
5477 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
5478 expressions, which often appear in the offsets or sizes of
5479 objects with a varying size. Only deal with positive divisors
5480 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
5482 Look for NOPs and SAVE_EXPRs inside. */
5484 if (TREE_CODE (arg1) == INTEGER_CST
5485 && tree_int_cst_sgn (arg1) >= 0)
5487 int have_save_expr = 0;
5488 tree c2 = integer_zero_node;
5489 tree xarg0 = arg0;
5491 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
5492 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
5494 STRIP_NOPS (xarg0);
5496 /* Look inside the dividend and simplify using EXACT_DIV_EXPR
5497 if possible. */
5498 if (TREE_CODE (xarg0) == MULT_EXPR
5499 && multiple_of_p (type, TREE_OPERAND (xarg0, 0), arg1))
5501 tree t;
5503 t = fold (build (MULT_EXPR, type,
5504 fold (build (EXACT_DIV_EXPR, type,
5505 TREE_OPERAND (xarg0, 0), arg1)),
5506 TREE_OPERAND (xarg0, 1)));
5507 if (have_save_expr)
5508 t = save_expr (t);
5509 return t;
5513 if (TREE_CODE (xarg0) == MULT_EXPR
5514 && multiple_of_p (type, TREE_OPERAND (xarg0, 1), arg1))
5516 tree t;
5518 t = fold (build (MULT_EXPR, type,
5519 fold (build (EXACT_DIV_EXPR, type,
5520 TREE_OPERAND (xarg0, 1), arg1)),
5521 TREE_OPERAND (xarg0, 0)));
5522 if (have_save_expr)
5523 t = save_expr (t);
5524 return t;
5527 if (TREE_CODE (xarg0) == PLUS_EXPR
5528 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
5529 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
5530 else if (TREE_CODE (xarg0) == MINUS_EXPR
5531 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
5532 /* If we are doing this computation unsigned, the negate
5533 is incorrect. */
5534 && ! TREE_UNSIGNED (type))
5536 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
5537 xarg0 = TREE_OPERAND (xarg0, 0);
5540 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
5541 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
5543 STRIP_NOPS (xarg0);
5545 if (TREE_CODE (xarg0) == MULT_EXPR
5546 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
5547 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
5548 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
5549 TREE_OPERAND (xarg0, 1), arg1, 1))
5550 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
5551 TREE_OPERAND (xarg0, 1), 1)))
5552 && (tree_int_cst_sgn (c2) >= 0
5553 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
5554 arg1, 1))))
5556 tree outer_div = integer_one_node;
5557 tree c1 = TREE_OPERAND (xarg0, 1);
5558 tree c3 = arg1;
5560 /* If C3 > C1, set them equal and do a divide by
5561 C3/C1 at the end of the operation. */
5562 if (tree_int_cst_lt (c1, c3))
5563 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
5565 /* The result is A * (C1/C3) + (C2/C3). */
5566 t = fold (build (PLUS_EXPR, type,
5567 fold (build (MULT_EXPR, type,
5568 TREE_OPERAND (xarg0, 0),
5569 const_binop (code, c1, c3, 1))),
5570 const_binop (code, c2, c3, 1)));
5572 if (! integer_onep (outer_div))
5573 t = fold (build (code, type, t, convert (type, outer_div)));
5575 if (have_save_expr)
5576 t = save_expr (t);
5578 return t;
5582 goto binary;
5584 case CEIL_MOD_EXPR:
5585 case FLOOR_MOD_EXPR:
5586 case ROUND_MOD_EXPR:
5587 case TRUNC_MOD_EXPR:
5588 if (integer_onep (arg1))
5589 return omit_one_operand (type, integer_zero_node, arg0);
5590 if (integer_zerop (arg1))
5591 return t;
5593 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
5594 where C1 % C3 == 0. Handle similarly to the division case,
5595 but don't bother with SAVE_EXPRs. */
5597 if (TREE_CODE (arg1) == INTEGER_CST
5598 && ! integer_zerop (arg1))
5600 tree c2 = integer_zero_node;
5601 tree xarg0 = arg0;
5603 if (TREE_CODE (xarg0) == PLUS_EXPR
5604 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
5605 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
5606 else if (TREE_CODE (xarg0) == MINUS_EXPR
5607 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
5608 && ! TREE_UNSIGNED (type))
5610 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
5611 xarg0 = TREE_OPERAND (xarg0, 0);
5614 STRIP_NOPS (xarg0);
5616 if (TREE_CODE (xarg0) == MULT_EXPR
5617 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
5618 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
5619 TREE_OPERAND (xarg0, 1),
5620 arg1, 1))
5621 && tree_int_cst_sgn (c2) >= 0)
5622 /* The result is (C2%C3). */
5623 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
5624 TREE_OPERAND (xarg0, 0));
5627 goto binary;
5629 case LSHIFT_EXPR:
5630 case RSHIFT_EXPR:
5631 case LROTATE_EXPR:
5632 case RROTATE_EXPR:
5633 if (integer_zerop (arg1))
5634 return non_lvalue (convert (type, arg0));
5635 /* Since negative shift count is not well-defined,
5636 don't try to compute it in the compiler. */
5637 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5638 return t;
5639 /* Rewrite an LROTATE_EXPR by a constant into an
5640 RROTATE_EXPR by a new constant. */
5641 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5643 TREE_SET_CODE (t, RROTATE_EXPR);
5644 code = RROTATE_EXPR;
5645 TREE_OPERAND (t, 1) = arg1
5646 = const_binop
5647 (MINUS_EXPR,
5648 convert (TREE_TYPE (arg1),
5649 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5650 arg1, 0);
5651 if (tree_int_cst_sgn (arg1) < 0)
5652 return t;
5655 /* If we have a rotate of a bit operation with the rotate count and
5656 the second operand of the bit operation both constant,
5657 permute the two operations. */
5658 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5659 && (TREE_CODE (arg0) == BIT_AND_EXPR
5660 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5661 || TREE_CODE (arg0) == BIT_IOR_EXPR
5662 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5663 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5664 return fold (build (TREE_CODE (arg0), type,
5665 fold (build (code, type,
5666 TREE_OPERAND (arg0, 0), arg1)),
5667 fold (build (code, type,
5668 TREE_OPERAND (arg0, 1), arg1))));
5670 /* Two consecutive rotates adding up to the width of the mode can
5671 be ignored. */
5672 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5673 && TREE_CODE (arg0) == RROTATE_EXPR
5674 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5675 && TREE_INT_CST_HIGH (arg1) == 0
5676 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5677 && ((TREE_INT_CST_LOW (arg1)
5678 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5679 == GET_MODE_BITSIZE (TYPE_MODE (type))))
5680 return TREE_OPERAND (arg0, 0);
5682 goto binary;
5684 case MIN_EXPR:
5685 if (operand_equal_p (arg0, arg1, 0))
5686 return arg0;
5687 if (INTEGRAL_TYPE_P (type)
5688 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5689 return omit_one_operand (type, arg1, arg0);
5690 goto associate;
5692 case MAX_EXPR:
5693 if (operand_equal_p (arg0, arg1, 0))
5694 return arg0;
5695 if (INTEGRAL_TYPE_P (type)
5696 && TYPE_MAX_VALUE (type)
5697 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5698 return omit_one_operand (type, arg1, arg0);
5699 goto associate;
5701 case TRUTH_NOT_EXPR:
5702 /* Note that the operand of this must be an int
5703 and its values must be 0 or 1.
5704 ("true" is a fixed value perhaps depending on the language,
5705 but we don't handle values other than 1 correctly yet.) */
5706 tem = invert_truthvalue (arg0);
5707 /* Avoid infinite recursion. */
5708 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5709 return t;
5710 return convert (type, tem);
5712 case TRUTH_ANDIF_EXPR:
5713 /* Note that the operands of this must be ints
5714 and their values must be 0 or 1.
5715 ("true" is a fixed value perhaps depending on the language.) */
5716 /* If first arg is constant zero, return it. */
5717 if (integer_zerop (arg0))
5718 return arg0;
5719 case TRUTH_AND_EXPR:
5720 /* If either arg is constant true, drop it. */
5721 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5722 return non_lvalue (arg1);
5723 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5724 return non_lvalue (arg0);
5725 /* If second arg is constant zero, result is zero, but first arg
5726 must be evaluated. */
5727 if (integer_zerop (arg1))
5728 return omit_one_operand (type, arg1, arg0);
5729 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5730 case will be handled here. */
5731 if (integer_zerop (arg0))
5732 return omit_one_operand (type, arg0, arg1);
5734 truth_andor:
5735 /* We only do these simplifications if we are optimizing. */
5736 if (!optimize)
5737 return t;
5739 /* Check for things like (A || B) && (A || C). We can convert this
5740 to A || (B && C). Note that either operator can be any of the four
5741 truth and/or operations and the transformation will still be
5742 valid. Also note that we only care about order for the
5743 ANDIF and ORIF operators. If B contains side effects, this
5744 might change the truth-value of A. */
5745 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5746 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5747 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5748 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5749 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5750 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5752 tree a00 = TREE_OPERAND (arg0, 0);
5753 tree a01 = TREE_OPERAND (arg0, 1);
5754 tree a10 = TREE_OPERAND (arg1, 0);
5755 tree a11 = TREE_OPERAND (arg1, 1);
5756 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5757 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5758 && (code == TRUTH_AND_EXPR
5759 || code == TRUTH_OR_EXPR));
5761 if (operand_equal_p (a00, a10, 0))
5762 return fold (build (TREE_CODE (arg0), type, a00,
5763 fold (build (code, type, a01, a11))));
5764 else if (commutative && operand_equal_p (a00, a11, 0))
5765 return fold (build (TREE_CODE (arg0), type, a00,
5766 fold (build (code, type, a01, a10))));
5767 else if (commutative && operand_equal_p (a01, a10, 0))
5768 return fold (build (TREE_CODE (arg0), type, a01,
5769 fold (build (code, type, a00, a11))));
5771 /* This case if tricky because we must either have commutative
5772 operators or else A10 must not have side-effects. */
5774 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5775 && operand_equal_p (a01, a11, 0))
5776 return fold (build (TREE_CODE (arg0), type,
5777 fold (build (code, type, a00, a10)),
5778 a01));
5781 /* See if we can build a range comparison. */
5782 if (0 != (tem = fold_range_test (t)))
5783 return tem;
5785 /* Check for the possibility of merging component references. If our
5786 lhs is another similar operation, try to merge its rhs with our
5787 rhs. Then try to merge our lhs and rhs. */
5788 if (TREE_CODE (arg0) == code
5789 && 0 != (tem = fold_truthop (code, type,
5790 TREE_OPERAND (arg0, 1), arg1)))
5791 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5793 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5794 return tem;
5796 return t;
5798 case TRUTH_ORIF_EXPR:
5799 /* Note that the operands of this must be ints
5800 and their values must be 0 or true.
5801 ("true" is a fixed value perhaps depending on the language.) */
5802 /* If first arg is constant true, return it. */
5803 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5804 return arg0;
5805 case TRUTH_OR_EXPR:
5806 /* If either arg is constant zero, drop it. */
5807 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5808 return non_lvalue (arg1);
5809 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
5810 return non_lvalue (arg0);
5811 /* If second arg is constant true, result is true, but we must
5812 evaluate first arg. */
5813 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5814 return omit_one_operand (type, arg1, arg0);
5815 /* Likewise for first arg, but note this only occurs here for
5816 TRUTH_OR_EXPR. */
5817 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5818 return omit_one_operand (type, arg0, arg1);
5819 goto truth_andor;
5821 case TRUTH_XOR_EXPR:
5822 /* If either arg is constant zero, drop it. */
5823 if (integer_zerop (arg0))
5824 return non_lvalue (arg1);
5825 if (integer_zerop (arg1))
5826 return non_lvalue (arg0);
5827 /* If either arg is constant true, this is a logical inversion. */
5828 if (integer_onep (arg0))
5829 return non_lvalue (invert_truthvalue (arg1));
5830 if (integer_onep (arg1))
5831 return non_lvalue (invert_truthvalue (arg0));
5832 return t;
5834 case EQ_EXPR:
5835 case NE_EXPR:
5836 case LT_EXPR:
5837 case GT_EXPR:
5838 case LE_EXPR:
5839 case GE_EXPR:
5840 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
5842 /* (-a) CMP (-b) -> b CMP a */
5843 if (TREE_CODE (arg0) == NEGATE_EXPR
5844 && TREE_CODE (arg1) == NEGATE_EXPR)
5845 return fold (build (code, type, TREE_OPERAND (arg1, 0),
5846 TREE_OPERAND (arg0, 0)));
5847 /* (-a) CMP CST -> a swap(CMP) (-CST) */
5848 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5849 return
5850 fold (build
5851 (swap_tree_comparison (code), type,
5852 TREE_OPERAND (arg0, 0),
5853 build_real (TREE_TYPE (arg1),
5854 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
5855 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
5856 /* a CMP (-0) -> a CMP 0 */
5857 if (TREE_CODE (arg1) == REAL_CST
5858 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5859 return fold (build (code, type, arg0,
5860 build_real (TREE_TYPE (arg1), dconst0)));
5864 /* If one arg is a constant integer, put it last. */
5865 if (TREE_CODE (arg0) == INTEGER_CST
5866 && TREE_CODE (arg1) != INTEGER_CST)
5868 TREE_OPERAND (t, 0) = arg1;
5869 TREE_OPERAND (t, 1) = arg0;
5870 arg0 = TREE_OPERAND (t, 0);
5871 arg1 = TREE_OPERAND (t, 1);
5872 code = swap_tree_comparison (code);
5873 TREE_SET_CODE (t, code);
5876 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5877 First, see if one arg is constant; find the constant arg
5878 and the other one. */
5880 tree constop = 0, varop = NULL_TREE;
5881 int constopnum = -1;
5883 if (TREE_CONSTANT (arg1))
5884 constopnum = 1, constop = arg1, varop = arg0;
5885 if (TREE_CONSTANT (arg0))
5886 constopnum = 0, constop = arg0, varop = arg1;
5888 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5890 /* This optimization is invalid for ordered comparisons
5891 if CONST+INCR overflows or if foo+incr might overflow.
5892 This optimization is invalid for floating point due to rounding.
5893 For pointer types we assume overflow doesn't happen. */
5894 if (POINTER_TYPE_P (TREE_TYPE (varop))
5895 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5896 && (code == EQ_EXPR || code == NE_EXPR)))
5898 tree newconst
5899 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5900 constop, TREE_OPERAND (varop, 1)));
5901 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
5903 /* If VAROP is a reference to a bitfield, we must mask
5904 the constant by the width of the field. */
5905 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5906 && DECL_BIT_FIELD(TREE_OPERAND
5907 (TREE_OPERAND (varop, 0), 1)))
5909 int size
5910 = TREE_INT_CST_LOW (DECL_SIZE
5911 (TREE_OPERAND
5912 (TREE_OPERAND (varop, 0), 1)));
5913 tree mask, unsigned_type;
5914 int precision;
5915 tree folded_compare;
5917 /* First check whether the comparison would come out
5918 always the same. If we don't do that we would
5919 change the meaning with the masking. */
5920 if (constopnum == 0)
5921 folded_compare = fold (build (code, type, constop,
5922 TREE_OPERAND (varop, 0)));
5923 else
5924 folded_compare = fold (build (code, type,
5925 TREE_OPERAND (varop, 0),
5926 constop));
5927 if (integer_zerop (folded_compare)
5928 || integer_onep (folded_compare))
5929 return omit_one_operand (type, folded_compare, varop);
5931 unsigned_type = type_for_size (size, 1);
5932 precision = TYPE_PRECISION (unsigned_type);
5933 mask = build_int_2 (~0, ~0);
5934 TREE_TYPE (mask) = unsigned_type;
5935 force_fit_type (mask, 0);
5936 mask = const_binop (RSHIFT_EXPR, mask,
5937 size_int (precision - size), 0);
5938 newconst = fold (build (BIT_AND_EXPR,
5939 TREE_TYPE (varop), newconst,
5940 convert (TREE_TYPE (varop),
5941 mask)));
5945 t = build (code, type, TREE_OPERAND (t, 0),
5946 TREE_OPERAND (t, 1));
5947 TREE_OPERAND (t, constopnum) = newconst;
5948 return t;
5951 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5953 if (POINTER_TYPE_P (TREE_TYPE (varop))
5954 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5955 && (code == EQ_EXPR || code == NE_EXPR)))
5957 tree newconst
5958 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5959 constop, TREE_OPERAND (varop, 1)));
5960 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
5962 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5963 && DECL_BIT_FIELD(TREE_OPERAND
5964 (TREE_OPERAND (varop, 0), 1)))
5966 int size
5967 = TREE_INT_CST_LOW (DECL_SIZE
5968 (TREE_OPERAND
5969 (TREE_OPERAND (varop, 0), 1)));
5970 tree mask, unsigned_type;
5971 int precision;
5972 tree folded_compare;
5974 if (constopnum == 0)
5975 folded_compare = fold (build (code, type, constop,
5976 TREE_OPERAND (varop, 0)));
5977 else
5978 folded_compare = fold (build (code, type,
5979 TREE_OPERAND (varop, 0),
5980 constop));
5981 if (integer_zerop (folded_compare)
5982 || integer_onep (folded_compare))
5983 return omit_one_operand (type, folded_compare, varop);
5985 unsigned_type = type_for_size (size, 1);
5986 precision = TYPE_PRECISION (unsigned_type);
5987 mask = build_int_2 (~0, ~0);
5988 TREE_TYPE (mask) = TREE_TYPE (varop);
5989 force_fit_type (mask, 0);
5990 mask = const_binop (RSHIFT_EXPR, mask,
5991 size_int (precision - size), 0);
5992 newconst = fold (build (BIT_AND_EXPR,
5993 TREE_TYPE (varop), newconst,
5994 convert (TREE_TYPE (varop),
5995 mask)));
5999 t = build (code, type, TREE_OPERAND (t, 0),
6000 TREE_OPERAND (t, 1));
6001 TREE_OPERAND (t, constopnum) = newconst;
6002 return t;
6007 /* Change X >= CST to X > (CST - 1) if CST is positive. */
6008 if (TREE_CODE (arg1) == INTEGER_CST
6009 && TREE_CODE (arg0) != INTEGER_CST
6010 && tree_int_cst_sgn (arg1) > 0)
6012 switch (TREE_CODE (t))
6014 case GE_EXPR:
6015 code = GT_EXPR;
6016 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6017 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6018 break;
6020 case LT_EXPR:
6021 code = LE_EXPR;
6022 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6023 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6024 break;
6026 default:
6027 break;
6031 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6032 a MINUS_EXPR of a constant, we can convert it into a comparison with
6033 a revised constant as long as no overflow occurs. */
6034 if ((code == EQ_EXPR || code == NE_EXPR)
6035 && TREE_CODE (arg1) == INTEGER_CST
6036 && (TREE_CODE (arg0) == PLUS_EXPR
6037 || TREE_CODE (arg0) == MINUS_EXPR)
6038 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6039 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6040 ? MINUS_EXPR : PLUS_EXPR,
6041 arg1, TREE_OPERAND (arg0, 1), 0))
6042 && ! TREE_CONSTANT_OVERFLOW (tem))
6043 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6045 /* Similarly for a NEGATE_EXPR. */
6046 else if ((code == EQ_EXPR || code == NE_EXPR)
6047 && TREE_CODE (arg0) == NEGATE_EXPR
6048 && TREE_CODE (arg1) == INTEGER_CST
6049 && 0 != (tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (arg1),
6050 arg1)))
6051 && TREE_CODE (tem) == INTEGER_CST
6052 && ! TREE_CONSTANT_OVERFLOW (tem))
6053 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6055 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6056 for !=. Don't do this for ordered comparisons due to overflow. */
6057 else if ((code == NE_EXPR || code == EQ_EXPR)
6058 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6059 return fold (build (code, type,
6060 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6062 /* If we are widening one operand of an integer comparison,
6063 see if the other operand is similarly being widened. Perhaps we
6064 can do the comparison in the narrower type. */
6065 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6066 && TREE_CODE (arg0) == NOP_EXPR
6067 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6068 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6069 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6070 || (TREE_CODE (t1) == INTEGER_CST
6071 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6072 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6074 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6075 constant, we can simplify it. */
6076 else if (TREE_CODE (arg1) == INTEGER_CST
6077 && (TREE_CODE (arg0) == MIN_EXPR
6078 || TREE_CODE (arg0) == MAX_EXPR)
6079 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6080 return optimize_minmax_comparison (t);
6082 /* If we are comparing an ABS_EXPR with a constant, we can
6083 convert all the cases into explicit comparisons, but they may
6084 well not be faster than doing the ABS and one comparison.
6085 But ABS (X) <= C is a range comparison, which becomes a subtraction
6086 and a comparison, and is probably faster. */
6087 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6088 && TREE_CODE (arg0) == ABS_EXPR
6089 && ! TREE_SIDE_EFFECTS (arg0))
6091 tree inner = TREE_OPERAND (arg0, 0);
6093 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (arg1), arg1));
6094 if (TREE_CODE (tem) == INTEGER_CST
6095 && ! TREE_CONSTANT_OVERFLOW (tem))
6096 return fold (build (TRUTH_ANDIF_EXPR, type,
6097 build (GE_EXPR, type, inner, tem),
6098 build (LE_EXPR, type, inner, arg1)));
6101 /* If this is an EQ or NE comparison with zero and ARG0 is
6102 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6103 two operations, but the latter can be done in one less insn
6104 on machines that have only two-operand insns or on which a
6105 constant cannot be the first operand. */
6106 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6107 && TREE_CODE (arg0) == BIT_AND_EXPR)
6109 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6110 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6111 return
6112 fold (build (code, type,
6113 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6114 build (RSHIFT_EXPR,
6115 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6116 TREE_OPERAND (arg0, 1),
6117 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6118 convert (TREE_TYPE (arg0),
6119 integer_one_node)),
6120 arg1));
6121 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6122 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6123 return
6124 fold (build (code, type,
6125 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6126 build (RSHIFT_EXPR,
6127 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6128 TREE_OPERAND (arg0, 0),
6129 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6130 convert (TREE_TYPE (arg0),
6131 integer_one_node)),
6132 arg1));
6135 /* If this is an NE or EQ comparison of zero against the result of a
6136 signed MOD operation whose second operand is a power of 2, make
6137 the MOD operation unsigned since it is simpler and equivalent. */
6138 if ((code == NE_EXPR || code == EQ_EXPR)
6139 && integer_zerop (arg1)
6140 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6141 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6142 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6143 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6144 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6145 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6147 tree newtype = unsigned_type (TREE_TYPE (arg0));
6148 tree newmod = build (TREE_CODE (arg0), newtype,
6149 convert (newtype, TREE_OPERAND (arg0, 0)),
6150 convert (newtype, TREE_OPERAND (arg0, 1)));
6152 return build (code, type, newmod, convert (newtype, arg1));
6155 /* If this is an NE comparison of zero with an AND of one, remove the
6156 comparison since the AND will give the correct value. */
6157 if (code == NE_EXPR && integer_zerop (arg1)
6158 && TREE_CODE (arg0) == BIT_AND_EXPR
6159 && integer_onep (TREE_OPERAND (arg0, 1)))
6160 return convert (type, arg0);
6162 /* If we have (A & C) == C where C is a power of 2, convert this into
6163 (A & C) != 0. Similarly for NE_EXPR. */
6164 if ((code == EQ_EXPR || code == NE_EXPR)
6165 && TREE_CODE (arg0) == BIT_AND_EXPR
6166 && integer_pow2p (TREE_OPERAND (arg0, 1))
6167 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6168 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6169 arg0, integer_zero_node);
6171 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6172 and similarly for >= into !=. */
6173 if ((code == LT_EXPR || code == GE_EXPR)
6174 && TREE_UNSIGNED (TREE_TYPE (arg0))
6175 && TREE_CODE (arg1) == LSHIFT_EXPR
6176 && integer_onep (TREE_OPERAND (arg1, 0)))
6177 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6178 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6179 TREE_OPERAND (arg1, 1)),
6180 convert (TREE_TYPE (arg0), integer_zero_node));
6182 else if ((code == LT_EXPR || code == GE_EXPR)
6183 && TREE_UNSIGNED (TREE_TYPE (arg0))
6184 && (TREE_CODE (arg1) == NOP_EXPR
6185 || TREE_CODE (arg1) == CONVERT_EXPR)
6186 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6187 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6188 return
6189 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6190 convert (TREE_TYPE (arg0),
6191 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6192 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6193 convert (TREE_TYPE (arg0), integer_zero_node));
6195 /* Simplify comparison of something with itself. (For IEEE
6196 floating-point, we can only do some of these simplifications.) */
6197 if (operand_equal_p (arg0, arg1, 0))
6199 switch (code)
6201 case EQ_EXPR:
6202 case GE_EXPR:
6203 case LE_EXPR:
6204 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6205 return constant_boolean_node (1, type);
6206 code = EQ_EXPR;
6207 TREE_SET_CODE (t, code);
6208 break;
6210 case NE_EXPR:
6211 /* For NE, we can only do this simplification if integer. */
6212 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6213 break;
6214 /* ... fall through ... */
6215 case GT_EXPR:
6216 case LT_EXPR:
6217 return constant_boolean_node (0, type);
6218 default:
6219 abort ();
6223 /* An unsigned comparison against 0 can be simplified. */
6224 if (integer_zerop (arg1)
6225 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6226 || POINTER_TYPE_P (TREE_TYPE (arg1)))
6227 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6229 switch (TREE_CODE (t))
6231 case GT_EXPR:
6232 code = NE_EXPR;
6233 TREE_SET_CODE (t, NE_EXPR);
6234 break;
6235 case LE_EXPR:
6236 code = EQ_EXPR;
6237 TREE_SET_CODE (t, EQ_EXPR);
6238 break;
6239 case GE_EXPR:
6240 return omit_one_operand (type,
6241 convert (type, integer_one_node),
6242 arg0);
6243 case LT_EXPR:
6244 return omit_one_operand (type,
6245 convert (type, integer_zero_node),
6246 arg0);
6247 default:
6248 break;
6252 /* Comparisons with the highest or lowest possible integer of
6253 the specified size will have known values and an unsigned
6254 <= 0x7fffffff can be simplified. */
6256 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6258 if (TREE_CODE (arg1) == INTEGER_CST
6259 && ! TREE_CONSTANT_OVERFLOW (arg1)
6260 && width <= HOST_BITS_PER_WIDE_INT
6261 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6262 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6264 if (TREE_INT_CST_HIGH (arg1) == 0
6265 && (TREE_INT_CST_LOW (arg1)
6266 == ((HOST_WIDE_INT) 1 << (width - 1)) - 1)
6267 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6268 switch (TREE_CODE (t))
6270 case GT_EXPR:
6271 return omit_one_operand (type,
6272 convert (type, integer_zero_node),
6273 arg0);
6274 case GE_EXPR:
6275 TREE_SET_CODE (t, EQ_EXPR);
6276 break;
6278 case LE_EXPR:
6279 return omit_one_operand (type,
6280 convert (type, integer_one_node),
6281 arg0);
6282 case LT_EXPR:
6283 TREE_SET_CODE (t, NE_EXPR);
6284 break;
6286 default:
6287 break;
6290 else if (TREE_INT_CST_HIGH (arg1) == -1
6291 && (- TREE_INT_CST_LOW (arg1)
6292 == ((HOST_WIDE_INT) 1 << (width - 1)))
6293 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6294 switch (TREE_CODE (t))
6296 case LT_EXPR:
6297 return omit_one_operand (type,
6298 convert (type, integer_zero_node),
6299 arg0);
6300 case LE_EXPR:
6301 TREE_SET_CODE (t, EQ_EXPR);
6302 break;
6304 case GE_EXPR:
6305 return omit_one_operand (type,
6306 convert (type, integer_one_node),
6307 arg0);
6308 case GT_EXPR:
6309 TREE_SET_CODE (t, NE_EXPR);
6310 break;
6312 default:
6313 break;
6316 else if (TREE_INT_CST_HIGH (arg1) == 0
6317 && (TREE_INT_CST_LOW (arg1)
6318 == ((HOST_WIDE_INT) 1 << (width - 1)) - 1)
6319 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6321 switch (TREE_CODE (t))
6323 case LE_EXPR:
6324 return fold (build (GE_EXPR, type,
6325 convert (signed_type (TREE_TYPE (arg0)),
6326 arg0),
6327 convert (signed_type (TREE_TYPE (arg1)),
6328 integer_zero_node)));
6329 case GT_EXPR:
6330 return fold (build (LT_EXPR, type,
6331 convert (signed_type (TREE_TYPE (arg0)),
6332 arg0),
6333 convert (signed_type (TREE_TYPE (arg1)),
6334 integer_zero_node)));
6336 default:
6337 break;
6342 /* If we are comparing an expression that just has comparisons
6343 of two integer values, arithmetic expressions of those comparisons,
6344 and constants, we can simplify it. There are only three cases
6345 to check: the two values can either be equal, the first can be
6346 greater, or the second can be greater. Fold the expression for
6347 those three values. Since each value must be 0 or 1, we have
6348 eight possibilities, each of which corresponds to the constant 0
6349 or 1 or one of the six possible comparisons.
6351 This handles common cases like (a > b) == 0 but also handles
6352 expressions like ((x > y) - (y > x)) > 0, which supposedly
6353 occur in macroized code. */
6355 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6357 tree cval1 = 0, cval2 = 0;
6358 int save_p = 0;
6360 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6361 /* Don't handle degenerate cases here; they should already
6362 have been handled anyway. */
6363 && cval1 != 0 && cval2 != 0
6364 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6365 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6366 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6367 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6368 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6369 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6370 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6372 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6373 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6375 /* We can't just pass T to eval_subst in case cval1 or cval2
6376 was the same as ARG1. */
6378 tree high_result
6379 = fold (build (code, type,
6380 eval_subst (arg0, cval1, maxval, cval2, minval),
6381 arg1));
6382 tree equal_result
6383 = fold (build (code, type,
6384 eval_subst (arg0, cval1, maxval, cval2, maxval),
6385 arg1));
6386 tree low_result
6387 = fold (build (code, type,
6388 eval_subst (arg0, cval1, minval, cval2, maxval),
6389 arg1));
6391 /* All three of these results should be 0 or 1. Confirm they
6392 are. Then use those values to select the proper code
6393 to use. */
6395 if ((integer_zerop (high_result)
6396 || integer_onep (high_result))
6397 && (integer_zerop (equal_result)
6398 || integer_onep (equal_result))
6399 && (integer_zerop (low_result)
6400 || integer_onep (low_result)))
6402 /* Make a 3-bit mask with the high-order bit being the
6403 value for `>', the next for '=', and the low for '<'. */
6404 switch ((integer_onep (high_result) * 4)
6405 + (integer_onep (equal_result) * 2)
6406 + integer_onep (low_result))
6408 case 0:
6409 /* Always false. */
6410 return omit_one_operand (type, integer_zero_node, arg0);
6411 case 1:
6412 code = LT_EXPR;
6413 break;
6414 case 2:
6415 code = EQ_EXPR;
6416 break;
6417 case 3:
6418 code = LE_EXPR;
6419 break;
6420 case 4:
6421 code = GT_EXPR;
6422 break;
6423 case 5:
6424 code = NE_EXPR;
6425 break;
6426 case 6:
6427 code = GE_EXPR;
6428 break;
6429 case 7:
6430 /* Always true. */
6431 return omit_one_operand (type, integer_one_node, arg0);
6434 t = build (code, type, cval1, cval2);
6435 if (save_p)
6436 return save_expr (t);
6437 else
6438 return fold (t);
6443 /* If this is a comparison of a field, we may be able to simplify it. */
6444 if ((TREE_CODE (arg0) == COMPONENT_REF
6445 || TREE_CODE (arg0) == BIT_FIELD_REF)
6446 && (code == EQ_EXPR || code == NE_EXPR)
6447 /* Handle the constant case even without -O
6448 to make sure the warnings are given. */
6449 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6451 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6452 return t1 ? t1 : t;
6455 /* If this is a comparison of complex values and either or both sides
6456 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6457 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6458 This may prevent needless evaluations. */
6459 if ((code == EQ_EXPR || code == NE_EXPR)
6460 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6461 && (TREE_CODE (arg0) == COMPLEX_EXPR
6462 || TREE_CODE (arg1) == COMPLEX_EXPR
6463 || TREE_CODE (arg0) == COMPLEX_CST
6464 || TREE_CODE (arg1) == COMPLEX_CST))
6466 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6467 tree real0, imag0, real1, imag1;
6469 arg0 = save_expr (arg0);
6470 arg1 = save_expr (arg1);
6471 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6472 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6473 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6474 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6476 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6477 : TRUTH_ORIF_EXPR),
6478 type,
6479 fold (build (code, type, real0, real1)),
6480 fold (build (code, type, imag0, imag1))));
6483 /* From here on, the only cases we handle are when the result is
6484 known to be a constant.
6486 To compute GT, swap the arguments and do LT.
6487 To compute GE, do LT and invert the result.
6488 To compute LE, swap the arguments, do LT and invert the result.
6489 To compute NE, do EQ and invert the result.
6491 Therefore, the code below must handle only EQ and LT. */
6493 if (code == LE_EXPR || code == GT_EXPR)
6495 tem = arg0, arg0 = arg1, arg1 = tem;
6496 code = swap_tree_comparison (code);
6499 /* Note that it is safe to invert for real values here because we
6500 will check below in the one case that it matters. */
6502 t1 = NULL_TREE;
6503 invert = 0;
6504 if (code == NE_EXPR || code == GE_EXPR)
6506 invert = 1;
6507 code = invert_tree_comparison (code);
6510 /* Compute a result for LT or EQ if args permit;
6511 otherwise return T. */
6512 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6514 if (code == EQ_EXPR)
6515 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
6516 == TREE_INT_CST_LOW (arg1))
6517 && (TREE_INT_CST_HIGH (arg0)
6518 == TREE_INT_CST_HIGH (arg1)),
6520 else
6521 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6522 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6523 : INT_CST_LT (arg0, arg1)),
6527 #if 0 /* This is no longer useful, but breaks some real code. */
6528 /* Assume a nonexplicit constant cannot equal an explicit one,
6529 since such code would be undefined anyway.
6530 Exception: on sysvr4, using #pragma weak,
6531 a label can come out as 0. */
6532 else if (TREE_CODE (arg1) == INTEGER_CST
6533 && !integer_zerop (arg1)
6534 && TREE_CONSTANT (arg0)
6535 && TREE_CODE (arg0) == ADDR_EXPR
6536 && code == EQ_EXPR)
6537 t1 = build_int_2 (0, 0);
6538 #endif
6539 /* Two real constants can be compared explicitly. */
6540 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6542 /* If either operand is a NaN, the result is false with two
6543 exceptions: First, an NE_EXPR is true on NaNs, but that case
6544 is already handled correctly since we will be inverting the
6545 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6546 or a GE_EXPR into a LT_EXPR, we must return true so that it
6547 will be inverted into false. */
6549 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6550 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6551 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6553 else if (code == EQ_EXPR)
6554 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6555 TREE_REAL_CST (arg1)),
6557 else
6558 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6559 TREE_REAL_CST (arg1)),
6563 if (t1 == NULL_TREE)
6564 return t;
6566 if (invert)
6567 TREE_INT_CST_LOW (t1) ^= 1;
6569 TREE_TYPE (t1) = type;
6570 if (TREE_CODE (type) == BOOLEAN_TYPE)
6571 return truthvalue_conversion (t1);
6572 return t1;
6574 case COND_EXPR:
6575 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6576 so all simple results must be passed through pedantic_non_lvalue. */
6577 if (TREE_CODE (arg0) == INTEGER_CST)
6578 return pedantic_non_lvalue
6579 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6580 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6581 return pedantic_omit_one_operand (type, arg1, arg0);
6583 /* If the second operand is zero, invert the comparison and swap
6584 the second and third operands. Likewise if the second operand
6585 is constant and the third is not or if the third operand is
6586 equivalent to the first operand of the comparison. */
6588 if (integer_zerop (arg1)
6589 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6590 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6591 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6592 TREE_OPERAND (t, 2),
6593 TREE_OPERAND (arg0, 1))))
6595 /* See if this can be inverted. If it can't, possibly because
6596 it was a floating-point inequality comparison, don't do
6597 anything. */
6598 tem = invert_truthvalue (arg0);
6600 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6602 t = build (code, type, tem,
6603 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6604 arg0 = tem;
6605 /* arg1 should be the first argument of the new T. */
6606 arg1 = TREE_OPERAND (t, 1);
6607 STRIP_NOPS (arg1);
6611 /* If we have A op B ? A : C, we may be able to convert this to a
6612 simpler expression, depending on the operation and the values
6613 of B and C. IEEE floating point prevents this though,
6614 because A or B might be -0.0 or a NaN. */
6616 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6617 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
6618 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
6619 || flag_fast_math)
6620 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6621 arg1, TREE_OPERAND (arg0, 1)))
6623 tree arg2 = TREE_OPERAND (t, 2);
6624 enum tree_code comp_code = TREE_CODE (arg0);
6626 STRIP_NOPS (arg2);
6628 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
6629 depending on the comparison operation. */
6630 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6631 ? real_zerop (TREE_OPERAND (arg0, 1))
6632 : integer_zerop (TREE_OPERAND (arg0, 1)))
6633 && TREE_CODE (arg2) == NEGATE_EXPR
6634 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6635 switch (comp_code)
6637 case EQ_EXPR:
6638 return pedantic_non_lvalue
6639 (fold (build1 (NEGATE_EXPR, type, arg1)));
6640 case NE_EXPR:
6641 return pedantic_non_lvalue (convert (type, arg1));
6642 case GE_EXPR:
6643 case GT_EXPR:
6644 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6645 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6646 return pedantic_non_lvalue
6647 (convert (type, fold (build1 (ABS_EXPR,
6648 TREE_TYPE (arg1), arg1))));
6649 case LE_EXPR:
6650 case LT_EXPR:
6651 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6652 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6653 return pedantic_non_lvalue
6654 (fold (build1 (NEGATE_EXPR, type,
6655 convert (type,
6656 fold (build1 (ABS_EXPR,
6657 TREE_TYPE (arg1),
6658 arg1))))));
6659 default:
6660 abort ();
6663 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
6664 always zero. */
6666 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6668 if (comp_code == NE_EXPR)
6669 return pedantic_non_lvalue (convert (type, arg1));
6670 else if (comp_code == EQ_EXPR)
6671 return pedantic_non_lvalue (convert (type, integer_zero_node));
6674 /* If this is A op B ? A : B, this is either A, B, min (A, B),
6675 or max (A, B), depending on the operation. */
6677 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6678 arg2, TREE_OPERAND (arg0, 0)))
6680 tree comp_op0 = TREE_OPERAND (arg0, 0);
6681 tree comp_op1 = TREE_OPERAND (arg0, 1);
6682 tree comp_type = TREE_TYPE (comp_op0);
6684 switch (comp_code)
6686 case EQ_EXPR:
6687 return pedantic_non_lvalue (convert (type, arg2));
6688 case NE_EXPR:
6689 return pedantic_non_lvalue (convert (type, arg1));
6690 case LE_EXPR:
6691 case LT_EXPR:
6692 /* In C++ a ?: expression can be an lvalue, so put the
6693 operand which will be used if they are equal first
6694 so that we can convert this back to the
6695 corresponding COND_EXPR. */
6696 return pedantic_non_lvalue
6697 (convert (type, (fold (build (MIN_EXPR, comp_type,
6698 (comp_code == LE_EXPR
6699 ? comp_op0 : comp_op1),
6700 (comp_code == LE_EXPR
6701 ? comp_op1 : comp_op0))))));
6702 break;
6703 case GE_EXPR:
6704 case GT_EXPR:
6705 return pedantic_non_lvalue
6706 (convert (type, fold (build (MAX_EXPR, comp_type,
6707 (comp_code == GE_EXPR
6708 ? comp_op0 : comp_op1),
6709 (comp_code == GE_EXPR
6710 ? comp_op1 : comp_op0)))));
6711 break;
6712 default:
6713 abort ();
6717 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6718 we might still be able to simplify this. For example,
6719 if C1 is one less or one more than C2, this might have started
6720 out as a MIN or MAX and been transformed by this function.
6721 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6723 if (INTEGRAL_TYPE_P (type)
6724 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6725 && TREE_CODE (arg2) == INTEGER_CST)
6726 switch (comp_code)
6728 case EQ_EXPR:
6729 /* We can replace A with C1 in this case. */
6730 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6731 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6732 TREE_OPERAND (t, 2));
6733 break;
6735 case LT_EXPR:
6736 /* If C1 is C2 + 1, this is min(A, C2). */
6737 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6738 && operand_equal_p (TREE_OPERAND (arg0, 1),
6739 const_binop (PLUS_EXPR, arg2,
6740 integer_one_node, 0), 1))
6741 return pedantic_non_lvalue
6742 (fold (build (MIN_EXPR, type, arg1, arg2)));
6743 break;
6745 case LE_EXPR:
6746 /* If C1 is C2 - 1, this is min(A, C2). */
6747 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6748 && operand_equal_p (TREE_OPERAND (arg0, 1),
6749 const_binop (MINUS_EXPR, arg2,
6750 integer_one_node, 0), 1))
6751 return pedantic_non_lvalue
6752 (fold (build (MIN_EXPR, type, arg1, arg2)));
6753 break;
6755 case GT_EXPR:
6756 /* If C1 is C2 - 1, this is max(A, C2). */
6757 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6758 && operand_equal_p (TREE_OPERAND (arg0, 1),
6759 const_binop (MINUS_EXPR, arg2,
6760 integer_one_node, 0), 1))
6761 return pedantic_non_lvalue
6762 (fold (build (MAX_EXPR, type, arg1, arg2)));
6763 break;
6765 case GE_EXPR:
6766 /* If C1 is C2 + 1, this is max(A, C2). */
6767 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6768 && operand_equal_p (TREE_OPERAND (arg0, 1),
6769 const_binop (PLUS_EXPR, arg2,
6770 integer_one_node, 0), 1))
6771 return pedantic_non_lvalue
6772 (fold (build (MAX_EXPR, type, arg1, arg2)));
6773 break;
6774 case NE_EXPR:
6775 break;
6776 default:
6777 abort ();
6781 /* If the second operand is simpler than the third, swap them
6782 since that produces better jump optimization results. */
6783 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
6784 || TREE_CODE (arg1) == SAVE_EXPR)
6785 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6786 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
6787 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6789 /* See if this can be inverted. If it can't, possibly because
6790 it was a floating-point inequality comparison, don't do
6791 anything. */
6792 tem = invert_truthvalue (arg0);
6794 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6796 t = build (code, type, tem,
6797 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6798 arg0 = tem;
6799 /* arg1 should be the first argument of the new T. */
6800 arg1 = TREE_OPERAND (t, 1);
6801 STRIP_NOPS (arg1);
6805 /* Convert A ? 1 : 0 to simply A. */
6806 if (integer_onep (TREE_OPERAND (t, 1))
6807 && integer_zerop (TREE_OPERAND (t, 2))
6808 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
6809 call to fold will try to move the conversion inside
6810 a COND, which will recurse. In that case, the COND_EXPR
6811 is probably the best choice, so leave it alone. */
6812 && type == TREE_TYPE (arg0))
6813 return pedantic_non_lvalue (arg0);
6815 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
6816 operation is simply A & 2. */
6818 if (integer_zerop (TREE_OPERAND (t, 2))
6819 && TREE_CODE (arg0) == NE_EXPR
6820 && integer_zerop (TREE_OPERAND (arg0, 1))
6821 && integer_pow2p (arg1)
6822 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
6823 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
6824 arg1, 1))
6825 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
6827 return t;
6829 case COMPOUND_EXPR:
6830 /* When pedantic, a compound expression can be neither an lvalue
6831 nor an integer constant expression. */
6832 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
6833 return t;
6834 /* Don't let (0, 0) be null pointer constant. */
6835 if (integer_zerop (arg1))
6836 return build1 (NOP_EXPR, TREE_TYPE (arg1), arg1);
6837 return arg1;
6839 case COMPLEX_EXPR:
6840 if (wins)
6841 return build_complex (type, arg0, arg1);
6842 return t;
6844 case REALPART_EXPR:
6845 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6846 return t;
6847 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6848 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6849 TREE_OPERAND (arg0, 1));
6850 else if (TREE_CODE (arg0) == COMPLEX_CST)
6851 return TREE_REALPART (arg0);
6852 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6853 return fold (build (TREE_CODE (arg0), type,
6854 fold (build1 (REALPART_EXPR, type,
6855 TREE_OPERAND (arg0, 0))),
6856 fold (build1 (REALPART_EXPR,
6857 type, TREE_OPERAND (arg0, 1)))));
6858 return t;
6860 case IMAGPART_EXPR:
6861 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6862 return convert (type, integer_zero_node);
6863 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6864 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6865 TREE_OPERAND (arg0, 0));
6866 else if (TREE_CODE (arg0) == COMPLEX_CST)
6867 return TREE_IMAGPART (arg0);
6868 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6869 return fold (build (TREE_CODE (arg0), type,
6870 fold (build1 (IMAGPART_EXPR, type,
6871 TREE_OPERAND (arg0, 0))),
6872 fold (build1 (IMAGPART_EXPR, type,
6873 TREE_OPERAND (arg0, 1)))));
6874 return t;
6876 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
6877 appropriate. */
6878 case CLEANUP_POINT_EXPR:
6879 if (! has_cleanups (arg0))
6880 return TREE_OPERAND (t, 0);
6883 enum tree_code code0 = TREE_CODE (arg0);
6884 int kind0 = TREE_CODE_CLASS (code0);
6885 tree arg00 = TREE_OPERAND (arg0, 0);
6886 tree arg01;
6888 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
6889 return fold (build1 (code0, type,
6890 fold (build1 (CLEANUP_POINT_EXPR,
6891 TREE_TYPE (arg00), arg00))));
6893 if (kind0 == '<' || kind0 == '2'
6894 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
6895 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
6896 || code0 == TRUTH_XOR_EXPR)
6898 arg01 = TREE_OPERAND (arg0, 1);
6900 if (TREE_CONSTANT (arg00)
6901 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
6902 && ! has_cleanups (arg00)))
6903 return fold (build (code0, type, arg00,
6904 fold (build1 (CLEANUP_POINT_EXPR,
6905 TREE_TYPE (arg01), arg01))));
6907 if (TREE_CONSTANT (arg01))
6908 return fold (build (code0, type,
6909 fold (build1 (CLEANUP_POINT_EXPR,
6910 TREE_TYPE (arg00), arg00)),
6911 arg01));
6914 return t;
6917 default:
6918 return t;
6919 } /* switch (code) */
6922 /* Determine if first argument is a multiple of second argument. Return 0 if
 6923    it is not, or we cannot easily determine it to be.
6925 An example of the sort of thing we care about (at this point; this routine
6926 could surely be made more general, and expanded to do what the *_DIV_EXPR's
6927 fold cases do now) is discovering that
6929 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6931 is a multiple of
6933 SAVE_EXPR (J * 8)
6935 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
6937 This code also handles discovering that
6939 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6941 is a multiple of 8 so we don't have to worry about dealing with a
6942 possible remainder.
6944 Note that we *look* inside a SAVE_EXPR only to determine how it was
6945 calculated; it is not safe for fold to do much of anything else with the
6946 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
6947 at run time. For example, the latter example above *cannot* be implemented
6948 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
6949 evaluation time of the original SAVE_EXPR is not necessarily the same at
6950 the time the new expression is evaluated. The only optimization of this
6951 sort that would be valid is changing
6953 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
6955 divided by 8 to
6957 SAVE_EXPR (I) * SAVE_EXPR (J)
6959 (where the same SAVE_EXPR (J) is used in the original and the
6960 transformed version). */
6962 static int
6963 multiple_of_p (type, top, bottom)
6964 tree type;
6965 tree top;
6966 tree bottom;
6968 if (operand_equal_p (top, bottom, 0))
6969 return 1;
6971 if (TREE_CODE (type) != INTEGER_TYPE)
6972 return 0;
6974 switch (TREE_CODE (top))
6976 case MULT_EXPR:
6977 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6978 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6980 case PLUS_EXPR:
6981 case MINUS_EXPR:
6982 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6983 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6985 case NOP_EXPR:
6986 /* Can't handle conversions from non-integral or wider integral type. */
6987 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
6988 || (TYPE_PRECISION (type)
6989 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
6990 return 0;
6992 /* .. fall through ... */
6994 case SAVE_EXPR:
6995 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
6997 case INTEGER_CST:
6998 if ((TREE_CODE (bottom) != INTEGER_CST)
6999 || (tree_int_cst_sgn (top) < 0)
7000 || (tree_int_cst_sgn (bottom) < 0))
7001 return 0;
7002 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7003 top, bottom, 0));
7005 default:
7006 return 0;