gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
 24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
29 /* The entry points in this file are fold, size_int and size_binop.
31 fold takes a tree as argument and returns a simplified tree.
33 size_binop takes a tree code for an arithmetic operation
34 and two operands that are trees, and produces a tree for the
35 result, assuming the type comes from `sizetype'.
37 size_int takes an integer value, and creates a tree constant
38 with type from `sizetype'. */
40 #include <stdio.h>
41 #include <setjmp.h>
42 #include "config.h"
43 #include "flags.h"
44 #include "tree.h"
46 /* Handle floating overflow for `const_binop'. */
47 static jmp_buf float_error;
49 static void encode PROTO((HOST_WIDE_INT *, HOST_WIDE_INT, HOST_WIDE_INT));
50 static void decode PROTO((HOST_WIDE_INT *, HOST_WIDE_INT *, HOST_WIDE_INT *));
51 int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
52 HOST_WIDE_INT, HOST_WIDE_INT,
53 HOST_WIDE_INT, HOST_WIDE_INT *,
54 HOST_WIDE_INT *, HOST_WIDE_INT *,
55 HOST_WIDE_INT *));
56 static int split_tree PROTO((tree, enum tree_code, tree *, tree *, int *));
57 static tree const_binop PROTO((enum tree_code, tree, tree, int));
58 static tree fold_convert PROTO((tree, tree));
59 static enum tree_code invert_tree_comparison PROTO((enum tree_code));
60 static enum tree_code swap_tree_comparison PROTO((enum tree_code));
61 static int truth_value_p PROTO((enum tree_code));
62 static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
63 static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
64 static tree eval_subst PROTO((tree, tree, tree, tree, tree));
65 static tree omit_one_operand PROTO((tree, tree, tree));
66 static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
67 static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
68 static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
69 tree, tree));
70 static tree decode_field_reference PROTO((tree, int *, int *,
71 enum machine_mode *, int *,
72 int *, tree *));
73 static int all_ones_mask_p PROTO((tree, int));
74 static int simple_operand_p PROTO((tree));
75 static tree range_test PROTO((enum tree_code, tree, enum tree_code,
76 enum tree_code, tree, tree, tree));
77 static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
78 static tree strip_compound_expr PROTO((tree, tree));
80 #ifndef BRANCH_COST
81 #define BRANCH_COST 1
82 #endif
84 /* Yield nonzero if a signed left shift of A by B bits overflows. */
85 #define left_shift_overflows(a, b) ((a) != ((a) << (b)) >> (b))
87 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
88 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
89 Then this yields nonzero if overflow occurred during the addition.
90 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
91 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
92 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
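/* A minimal standalone sketch, not part of this file, showing the
   overflow_sum_sign test above in action on a two's complement host:
   adding two operands of equal sign overflowed exactly when the
   wrapped sum's sign differs from theirs.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  int a = INT_MAX, b = 1;
  /* Do the wrap in unsigned arithmetic so the addition itself is
     well defined, then reinterpret the bits.  */
  int sum = (int) ((unsigned int) a + (unsigned int) b);
  assert (overflow_sum_sign (a, b, sum));   /* INT_MAX + 1 overflows */
  assert (! overflow_sum_sign (1, 2, 3));   /* an ordinary sum does not */
  return 0;
}
#endif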
94 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
95 We do that by representing the two-word integer in 4 words, with only
96 HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number. */
98 #define LOWPART(x) \
99 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
100 #define HIGHPART(x) \
101 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
102 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
104 /* Unpack a two-word integer into 4 words.
105 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
106 WORDS points to the array of HOST_WIDE_INTs. */
108 static void
109 encode (words, low, hi)
110 HOST_WIDE_INT *words;
111 HOST_WIDE_INT low, hi;
113 words[0] = LOWPART (low);
114 words[1] = HIGHPART (low);
115 words[2] = LOWPART (hi);
116 words[3] = HIGHPART (hi);
119 /* Pack an array of 4 words into a two-word integer.
120 WORDS points to the array of words.
121 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
123 static void
124 decode (words, low, hi)
125 HOST_WIDE_INT *words;
126 HOST_WIDE_INT *low, *hi;
128 *low = words[0] | words[1] * BASE;
129 *hi = words[2] | words[3] * BASE;
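/* A standalone sketch of the representation used by encode/decode,
   with `long long' standing in for HOST_WIDE_INT (an assumption, not
   GCC's configuration): four halfword digits round-trip a signed
   doubleword losslessly, and each digit is small enough that the digit
   products in mul_double below cannot overflow a word.  */
#if 0
#include <assert.h>

#define HBITS 64                                   /* bits per word */
#define LOW(x)  ((x) & (((unsigned long long) 1 << (HBITS/2)) - 1))
#define HIGH(x) ((unsigned long long) (x) >> (HBITS/2))
#define DIGIT_BASE ((unsigned long long) 1 << (HBITS/2))

int
main (void)
{
  long long low = 0x123456789abcdef0LL, hi = -42;
  long long w[4], low2, hi2;
  w[0] = LOW (low); w[1] = HIGH (low);             /* encode */
  w[2] = LOW (hi);  w[3] = HIGH (hi);
  low2 = w[0] | w[1] * DIGIT_BASE;                 /* decode */
  hi2  = w[2] | w[3] * DIGIT_BASE;
  assert (low2 == low && hi2 == hi);               /* lossless */
  return 0;
}
#endif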
132 /* Make the integer constant T valid for its type
133 by setting to 0 or 1 all the bits in the constant
134 that don't belong in the type.
135 Yield 1 if a signed overflow occurs, 0 otherwise.
136 If OVERFLOW is nonzero, a signed overflow has already occurred
137 in calculating T, so propagate it.
139 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
140 if it exists. */
143 force_fit_type (t, overflow)
144 tree t;
145 int overflow;
147 HOST_WIDE_INT low, high;
148 register int prec;
150 if (TREE_CODE (t) == REAL_CST)
152 #ifdef CHECK_FLOAT_VALUE
153 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
154 overflow);
155 #endif
156 return overflow;
159 else if (TREE_CODE (t) != INTEGER_CST)
160 return overflow;
162 low = TREE_INT_CST_LOW (t);
163 high = TREE_INT_CST_HIGH (t);
165 if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
166 prec = POINTER_SIZE;
167 else
168 prec = TYPE_PRECISION (TREE_TYPE (t));
170 /* First clear all bits that are beyond the type's precision. */
172 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
174 else if (prec > HOST_BITS_PER_WIDE_INT)
176 TREE_INT_CST_HIGH (t)
177 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
179 else
181 TREE_INT_CST_HIGH (t) = 0;
182 if (prec < HOST_BITS_PER_WIDE_INT)
183 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
186 /* Unsigned types do not suffer sign extension or overflow. */
187 if (TREE_UNSIGNED (TREE_TYPE (t)))
188 return 0;
190 /* If the value's sign bit is set, extend the sign. */
191 if (prec != 2 * HOST_BITS_PER_WIDE_INT
192 && (prec > HOST_BITS_PER_WIDE_INT
193 ? (TREE_INT_CST_HIGH (t)
194 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
195 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
197 /* Value is negative:
198 set to 1 all the bits that are outside this type's precision. */
199 if (prec > HOST_BITS_PER_WIDE_INT)
201 TREE_INT_CST_HIGH (t)
202 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
204 else
206 TREE_INT_CST_HIGH (t) = -1;
207 if (prec < HOST_BITS_PER_WIDE_INT)
208 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
212 /* Yield nonzero if signed overflow occurred. */
213 return
214 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
215 != 0);
218 /* Add two doubleword integers with doubleword result.
219 Each argument is given as two `HOST_WIDE_INT' pieces.
220 One argument is L1 and H1; the other, L2 and H2.
221 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
224 add_double (l1, h1, l2, h2, lv, hv)
225 HOST_WIDE_INT l1, h1, l2, h2;
226 HOST_WIDE_INT *lv, *hv;
228 HOST_WIDE_INT l, h;
230 l = l1 + l2;
231 h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
233 *lv = l;
234 *hv = h;
235 return overflow_sum_sign (h1, h2, h);
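/* A standalone sketch (64-bit unsigned words assumed) of the carry
   test used in add_double above: after l = l1 + l2 wraps, a carry out
   of the low word occurred exactly when the wrapped sum is smaller
   than an addend.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned long long l1 = ~0ULL, l2 = 5ULL;
  unsigned long long l = l1 + l2;                /* wraps to 4 */
  unsigned long long h = 0 + 0 + (l < l1);       /* carry into high word */
  assert (l == 4 && h == 1);
  return 0;
}
#endif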
238 /* Negate a doubleword integer with doubleword result.
239 Return nonzero if the operation overflows, assuming it's signed.
240 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
241 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
244 neg_double (l1, h1, lv, hv)
245 HOST_WIDE_INT l1, h1;
246 HOST_WIDE_INT *lv, *hv;
248 if (l1 == 0)
250 *lv = 0;
251 *hv = - h1;
252 return (*hv & h1) < 0;
254 else
256 *lv = - l1;
257 *hv = ~ h1;
258 return 0;
262 /* Multiply two doubleword integers with doubleword result.
263 Return nonzero if the operation overflows, assuming it's signed.
264 Each argument is given as two `HOST_WIDE_INT' pieces.
265 One argument is L1 and H1; the other, L2 and H2.
266 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
269 mul_double (l1, h1, l2, h2, lv, hv)
270 HOST_WIDE_INT l1, h1, l2, h2;
271 HOST_WIDE_INT *lv, *hv;
273 HOST_WIDE_INT arg1[4];
274 HOST_WIDE_INT arg2[4];
275 HOST_WIDE_INT prod[4 * 2];
276 register unsigned HOST_WIDE_INT carry;
277 register int i, j, k;
278 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
280 encode (arg1, l1, h1);
281 encode (arg2, l2, h2);
283 bzero ((char *) prod, sizeof prod);
285 for (i = 0; i < 4; i++)
287 carry = 0;
288 for (j = 0; j < 4; j++)
290 k = i + j;
291 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
292 carry += arg1[i] * arg2[j];
293 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
294 carry += prod[k];
295 prod[k] = LOWPART (carry);
296 carry = HIGHPART (carry);
298 prod[i + 4] = carry;
301 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
303 /* Check for overflow by calculating the top half of the answer in full;
304 it should agree with the low half's sign bit. */
305 decode (prod+4, &toplow, &tophigh);
306 if (h1 < 0)
308 neg_double (l2, h2, &neglow, &neghigh);
309 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
311 if (h2 < 0)
313 neg_double (l1, h1, &neglow, &neghigh);
314 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
316 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
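/* A standalone sketch of the schoolbook digit loop in mul_double,
   shrunk to 16-bit digits multiplying two 32-bit words; the four-digit
   product is checked against the host's native 64-bit multiply.  Not
   GCC code, just an illustration of the carry bounds noted above.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t a = 0xdeadbeef, b = 0x12345678;
  uint32_t ad[2] = { a & 0xffff, a >> 16 };   /* encode */
  uint32_t bd[2] = { b & 0xffff, b >> 16 };
  uint32_t prod[4] = { 0, 0, 0, 0 };
  for (int i = 0; i < 2; i++)
    {
      uint32_t carry = 0;
      for (int j = 0; j < 2; j++)
        {
          carry += ad[i] * bd[j];   /* digit product <= 0xFFFE0001 */
          carry += prod[i + j];     /* still fits in 32 bits */
          prod[i + j] = carry & 0xffff;
          carry >>= 16;
        }
      prod[i + 2] = carry;
    }
  uint64_t got = 0;
  for (int k = 3; k >= 0; k--)
    got = (got << 16) | prod[k];    /* decode all four digits */
  assert (got == (uint64_t) a * b);
  return 0;
}
#endif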
319 /* Shift the doubleword integer in L1, H1 left by COUNT places
320 keeping only PREC bits of result.
321 Shift right if COUNT is negative.
322 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
323 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
325 void
326 lshift_double (l1, h1, count, prec, lv, hv, arith)
327 HOST_WIDE_INT l1, h1, count;
328 int prec;
329 HOST_WIDE_INT *lv, *hv;
330 int arith;
332 if (count < 0)
334 rshift_double (l1, h1, - count, prec, lv, hv, arith);
335 return;
338 if (count >= prec)
 339 count = (unsigned HOST_WIDE_INT) count % prec;
341 if (count >= HOST_BITS_PER_WIDE_INT)
343 *hv = (unsigned HOST_WIDE_INT) l1 << count - HOST_BITS_PER_WIDE_INT;
344 *lv = 0;
346 else
348 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
349 | ((unsigned HOST_WIDE_INT) l1 >> HOST_BITS_PER_WIDE_INT - count - 1 >> 1));
350 *lv = (unsigned HOST_WIDE_INT) l1 << count;
354 /* Shift the doubleword integer in L1, H1 right by COUNT places
355 keeping only PREC bits of result. COUNT must be positive.
356 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
357 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
359 void
360 rshift_double (l1, h1, count, prec, lv, hv, arith)
361 HOST_WIDE_INT l1, h1, count;
362 int prec;
363 HOST_WIDE_INT *lv, *hv;
364 int arith;
366 unsigned HOST_WIDE_INT signmask;
367 signmask = (arith
368 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
369 : 0);
371 if (count >= prec)
372 count = (unsigned HOST_WIDE_INT) count % prec;
374 if (count >= HOST_BITS_PER_WIDE_INT)
376 *hv = signmask;
377 *lv = ((signmask << 2 * HOST_BITS_PER_WIDE_INT - count - 1 << 1)
378 | ((unsigned HOST_WIDE_INT) h1 >> count - HOST_BITS_PER_WIDE_INT));
380 else
382 *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
383 | ((unsigned HOST_WIDE_INT) h1 << HOST_BITS_PER_WIDE_INT - count - 1 << 1));
384 *hv = ((signmask << HOST_BITS_PER_WIDE_INT - count)
385 | ((unsigned HOST_WIDE_INT) h1 >> count));
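/* A standalone sketch (uint64_t assumed for the word type) of the
   double-shift trick used in the shift routines above: shifting by
   (bits - count - 1) and then by 1 more behaves like shifting by
   (bits - count), without ever shifting a word by its full width,
   which C leaves undefined when count == 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint64_t
low_of_rshift (uint64_t l1, uint64_t h1, int count) /* 0 <= count < 64 */
{
  return (l1 >> count) | (h1 << (64 - count - 1) << 1);
}

int
main (void)
{
  /* count == 0: the naive (h1 << 64) would be undefined; this is not. */
  assert (low_of_rshift (5, 7, 0) == 5);
  assert (low_of_rshift (0, 1, 4) == (uint64_t) 1 << 60);
  return 0;
}
#endif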
389 /* Rotate the doubleword integer in L1, H1 left by COUNT places
390 keeping only PREC bits of result.
391 Rotate right if COUNT is negative.
392 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
394 void
395 lrotate_double (l1, h1, count, prec, lv, hv)
396 HOST_WIDE_INT l1, h1, count;
397 int prec;
398 HOST_WIDE_INT *lv, *hv;
400 HOST_WIDE_INT arg1[4];
401 register int i;
402 register int carry;
404 if (count < 0)
406 rrotate_double (l1, h1, - count, prec, lv, hv);
407 return;
410 encode (arg1, l1, h1);
412 if (count > prec)
413 count = prec;
 415 carry = arg1[4 - 1] >> (HOST_BITS_PER_WIDE_INT / 2 - 1);
416 while (count > 0)
418 for (i = 0; i < 4; i++)
420 carry += arg1[i] << 1;
421 arg1[i] = LOWPART (carry);
422 carry = HIGHPART (carry);
424 count--;
427 decode (arg1, lv, hv);
 430 /* Rotate the doubleword integer in L1, H1 right by COUNT places
431 keeping only PREC bits of result. COUNT must be positive.
432 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
434 void
435 rrotate_double (l1, h1, count, prec, lv, hv)
436 HOST_WIDE_INT l1, h1, count;
437 int prec;
438 HOST_WIDE_INT *lv, *hv;
440 HOST_WIDE_INT arg1[4];
441 register int i;
442 register int carry;
444 encode (arg1, l1, h1);
446 if (count > prec)
447 count = prec;
449 carry = arg1[0] & 1;
450 while (count > 0)
452 for (i = 4 - 1; i >= 0; i--)
454 carry *= BASE;
455 carry += arg1[i];
456 arg1[i] = LOWPART (carry >> 1);
458 count--;
461 decode (arg1, lv, hv);
464 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
465 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
466 CODE is a tree code for a kind of division, one of
467 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
 468 or EXACT_DIV_EXPR.
 469 It controls how the quotient is rounded to an integer.
470 Return nonzero if the operation overflows.
471 UNS nonzero says do unsigned division. */
474 div_and_round_double (code, uns,
475 lnum_orig, hnum_orig, lden_orig, hden_orig,
476 lquo, hquo, lrem, hrem)
477 enum tree_code code;
478 int uns;
479 HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
480 HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
481 HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
483 int quo_neg = 0;
484 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
485 HOST_WIDE_INT den[4], quo[4];
486 register int i, j;
487 unsigned HOST_WIDE_INT work;
488 register int carry = 0;
489 HOST_WIDE_INT lnum = lnum_orig;
490 HOST_WIDE_INT hnum = hnum_orig;
491 HOST_WIDE_INT lden = lden_orig;
492 HOST_WIDE_INT hden = hden_orig;
493 int overflow = 0;
495 if ((hden == 0) && (lden == 0))
496 abort ();
498 /* calculate quotient sign and convert operands to unsigned. */
499 if (!uns)
501 if (hnum < 0)
503 quo_neg = ~ quo_neg;
504 /* (minimum integer) / (-1) is the only overflow case. */
505 if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
506 overflow = 1;
508 if (hden < 0)
510 quo_neg = ~ quo_neg;
511 neg_double (lden, hden, &lden, &hden);
515 if (hnum == 0 && hden == 0)
516 { /* single precision */
517 *hquo = *hrem = 0;
518 /* This unsigned division rounds toward zero. */
519 *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
520 goto finish_up;
523 if (hnum == 0)
524 { /* trivial case: dividend < divisor */
525 /* hden != 0 already checked. */
526 *hquo = *lquo = 0;
527 *hrem = hnum;
528 *lrem = lnum;
529 goto finish_up;
532 bzero ((char *) quo, sizeof quo);
534 bzero ((char *) num, sizeof num); /* to zero 9th element */
535 bzero ((char *) den, sizeof den);
537 encode (num, lnum, hnum);
538 encode (den, lden, hden);
540 /* Special code for when the divisor < BASE. */
541 if (hden == 0 && lden < BASE)
543 /* hnum != 0 already checked. */
544 for (i = 4 - 1; i >= 0; i--)
546 work = num[i] + carry * BASE;
547 quo[i] = work / (unsigned HOST_WIDE_INT) lden;
548 carry = work % (unsigned HOST_WIDE_INT) lden;
551 else
553 /* Full double precision division,
554 with thanks to Don Knuth's "Seminumerical Algorithms". */
555 int quo_est, scale, num_hi_sig, den_hi_sig;
557 /* Find the highest non-zero divisor digit. */
558 for (i = 4 - 1; ; i--)
559 if (den[i] != 0) {
560 den_hi_sig = i;
561 break;
 564 /* Ensure that the first digit of the divisor is at least BASE/2.
565 This is required by the quotient digit estimation algorithm. */
567 scale = BASE / (den[den_hi_sig] + 1);
568 if (scale > 1) { /* scale divisor and dividend */
569 carry = 0;
570 for (i = 0; i <= 4 - 1; i++) {
571 work = (num[i] * scale) + carry;
572 num[i] = LOWPART (work);
573 carry = HIGHPART (work);
574 } num[4] = carry;
575 carry = 0;
576 for (i = 0; i <= 4 - 1; i++) {
577 work = (den[i] * scale) + carry;
578 den[i] = LOWPART (work);
579 carry = HIGHPART (work);
580 if (den[i] != 0) den_hi_sig = i;
584 num_hi_sig = 4;
586 /* Main loop */
587 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
588 /* guess the next quotient digit, quo_est, by dividing the first
589 two remaining dividend digits by the high order quotient digit.
590 quo_est is never low and is at most 2 high. */
591 unsigned HOST_WIDE_INT tmp;
593 num_hi_sig = i + den_hi_sig + 1;
594 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
595 if (num[num_hi_sig] != den[den_hi_sig])
596 quo_est = work / den[den_hi_sig];
597 else
598 quo_est = BASE - 1;
600 /* refine quo_est so it's usually correct, and at most one high. */
601 tmp = work - quo_est * den[den_hi_sig];
602 if (tmp < BASE
603 && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
604 quo_est--;
606 /* Try QUO_EST as the quotient digit, by multiplying the
607 divisor by QUO_EST and subtracting from the remaining dividend.
608 Keep in mind that QUO_EST is the I - 1st digit. */
610 carry = 0;
611 for (j = 0; j <= den_hi_sig; j++)
613 work = quo_est * den[j] + carry;
614 carry = HIGHPART (work);
615 work = num[i + j] - LOWPART (work);
616 num[i + j] = LOWPART (work);
617 carry += HIGHPART (work) != 0;
620 /* if quo_est was high by one, then num[i] went negative and
621 we need to correct things. */
623 if (num[num_hi_sig] < carry)
625 quo_est--;
626 carry = 0; /* add divisor back in */
627 for (j = 0; j <= den_hi_sig; j++)
629 work = num[i + j] + den[j] + carry;
630 carry = HIGHPART (work);
631 num[i + j] = LOWPART (work);
633 num [num_hi_sig] += carry;
636 /* store the quotient digit. */
637 quo[i] = quo_est;
641 decode (quo, lquo, hquo);
643 finish_up:
644 /* if result is negative, make it so. */
645 if (quo_neg)
646 neg_double (*lquo, *hquo, lquo, hquo);
648 /* compute trial remainder: rem = num - (quo * den) */
649 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
650 neg_double (*lrem, *hrem, lrem, hrem);
651 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
653 switch (code)
655 case TRUNC_DIV_EXPR:
656 case TRUNC_MOD_EXPR: /* round toward zero */
657 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
658 return overflow;
660 case FLOOR_DIV_EXPR:
661 case FLOOR_MOD_EXPR: /* round toward negative infinity */
662 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
664 /* quo = quo - 1; */
665 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
666 lquo, hquo);
668 else return overflow;
669 break;
671 case CEIL_DIV_EXPR:
672 case CEIL_MOD_EXPR: /* round toward positive infinity */
673 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
675 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
676 lquo, hquo);
678 else return overflow;
679 break;
681 case ROUND_DIV_EXPR:
682 case ROUND_MOD_EXPR: /* round to closest integer */
684 HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
685 HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
687 /* get absolute values */
688 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
689 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
691 /* if (2 * abs (lrem) >= abs (lden)) */
692 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
693 labs_rem, habs_rem, &ltwice, &htwice);
694 if (((unsigned HOST_WIDE_INT) habs_den
695 < (unsigned HOST_WIDE_INT) htwice)
696 || (((unsigned HOST_WIDE_INT) habs_den
697 == (unsigned HOST_WIDE_INT) htwice)
 698 && ((unsigned HOST_WIDE_INT) labs_den
699 < (unsigned HOST_WIDE_INT) ltwice)))
701 if (*hquo < 0)
702 /* quo = quo - 1; */
703 add_double (*lquo, *hquo,
704 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
705 else
706 /* quo = quo + 1; */
707 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
708 lquo, hquo);
710 else return overflow;
712 break;
714 default:
715 abort ();
718 /* compute true remainder: rem = num - (quo * den) */
719 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
720 neg_double (*lrem, *hrem, lrem, hrem);
721 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
722 return overflow;
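/* A standalone sketch of the rounding adjustments above, on
   single-word signed division: C's `/' truncates toward zero, and the
   floor/ceil/round variants are derived by nudging the truncated
   quotient whenever a remainder is left over.  */
#if 0
#include <assert.h>
#include <stdlib.h>

int
main (void)
{
  int num = -7, den = 2;
  int quo = num / den;                      /* trunc: -3 */
  int rem = num - quo * den;                /* -1 */
  int floor_q = quo - (rem != 0 && (num < 0) != (den < 0)); /* -4 */
  int ceil_q  = quo + (rem != 0 && (num < 0) == (den < 0)); /* -3 */
  int round_q = quo;
  if (2 * abs (rem) >= abs (den))           /* halfway or beyond */
    round_q += (num < 0) != (den < 0) ? -1 : 1;
  assert (quo == -3 && floor_q == -4 && ceil_q == -3 && round_q == -4);
  return 0;
}
#endif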
725 #ifndef REAL_ARITHMETIC
726 /* Effectively truncate a real value to represent the nearest possible value
727 in a narrower mode. The result is actually represented in the same data
728 type as the argument, but its value is usually different.
730 A trap may occur during the FP operations and it is the responsibility
731 of the calling function to have a handler established. */
733 REAL_VALUE_TYPE
734 real_value_truncate (mode, arg)
735 enum machine_mode mode;
736 REAL_VALUE_TYPE arg;
738 return REAL_VALUE_TRUNCATE (mode, arg);
741 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
743 /* Check for infinity in an IEEE double precision number. */
746 target_isinf (x)
747 REAL_VALUE_TYPE x;
749 /* The IEEE 64-bit double format. */
750 union {
751 REAL_VALUE_TYPE d;
752 struct {
753 unsigned sign : 1;
754 unsigned exponent : 11;
755 unsigned mantissa1 : 20;
756 unsigned mantissa2;
757 } little_endian;
758 struct {
759 unsigned mantissa2;
760 unsigned mantissa1 : 20;
761 unsigned exponent : 11;
762 unsigned sign : 1;
763 } big_endian;
764 } u;
766 u.d = dconstm1;
767 if (u.big_endian.sign == 1)
769 u.d = x;
770 return (u.big_endian.exponent == 2047
771 && u.big_endian.mantissa1 == 0
772 && u.big_endian.mantissa2 == 0);
774 else
776 u.d = x;
777 return (u.little_endian.exponent == 2047
778 && u.little_endian.mantissa1 == 0
779 && u.little_endian.mantissa2 == 0);
783 /* Check whether an IEEE double precision number is a NaN. */
786 target_isnan (x)
787 REAL_VALUE_TYPE x;
789 /* The IEEE 64-bit double format. */
790 union {
791 REAL_VALUE_TYPE d;
792 struct {
793 unsigned sign : 1;
794 unsigned exponent : 11;
795 unsigned mantissa1 : 20;
796 unsigned mantissa2;
797 } little_endian;
798 struct {
799 unsigned mantissa2;
800 unsigned mantissa1 : 20;
801 unsigned exponent : 11;
802 unsigned sign : 1;
803 } big_endian;
804 } u;
806 u.d = dconstm1;
807 if (u.big_endian.sign == 1)
809 u.d = x;
810 return (u.big_endian.exponent == 2047
811 && (u.big_endian.mantissa1 != 0
812 || u.big_endian.mantissa2 != 0));
814 else
816 u.d = x;
817 return (u.little_endian.exponent == 2047
818 && (u.little_endian.mantissa1 != 0
819 || u.little_endian.mantissa2 != 0));
823 /* Check for a negative IEEE double precision number. */
826 target_negative (x)
827 REAL_VALUE_TYPE x;
829 /* The IEEE 64-bit double format. */
830 union {
831 REAL_VALUE_TYPE d;
832 struct {
833 unsigned sign : 1;
834 unsigned exponent : 11;
835 unsigned mantissa1 : 20;
836 unsigned mantissa2;
837 } little_endian;
838 struct {
839 unsigned mantissa2;
840 unsigned mantissa1 : 20;
841 unsigned exponent : 11;
842 unsigned sign : 1;
843 } big_endian;
844 } u;
846 u.d = dconstm1;
847 if (u.big_endian.sign == 1)
849 u.d = x;
850 return u.big_endian.sign;
852 else
854 u.d = x;
855 return u.little_endian.sign;
858 #else /* Target not IEEE */
860 /* Let's assume other float formats don't have infinity.
861 (This can be overridden by redefining REAL_VALUE_ISINF.) */
863 target_isinf (x)
864 REAL_VALUE_TYPE x;
866 return 0;
869 /* Let's assume other float formats don't have NaNs.
870 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
872 target_isnan (x)
873 REAL_VALUE_TYPE x;
875 return 0;
878 /* Let's assume other float formats don't have minus zero.
879 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
881 target_negative (x)
882 REAL_VALUE_TYPE x;
884 return x < 0;
886 #endif /* Target not IEEE */
887 #endif /* no REAL_ARITHMETIC */
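/* A standalone sketch of the classification done by the unions above,
   written with memcpy so host byte order does not matter (an
   alternative to the bit-field approach; it assumes the host itself
   uses IEEE 754 doubles): an exponent field of all ones (2047) means
   infinity when the mantissa is zero and NaN when it is not.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

static uint64_t
bits_of (double d)
{
  uint64_t u;
  memcpy (&u, &d, sizeof u);      /* well-defined type pun */
  return u;
}

int
main (void)
{
  double z = 0.0;
  double inf = 1.0 / z, nan = z / z;
  assert (((bits_of (inf) >> 52) & 0x7ff) == 2047);
  assert ((bits_of (inf) & ((1ULL << 52) - 1)) == 0);   /* infinity */
  assert (((bits_of (nan) >> 52) & 0x7ff) == 2047);
  assert ((bits_of (nan) & ((1ULL << 52) - 1)) != 0);   /* NaN */
  assert ((bits_of (-1.0) >> 63) == 1);                 /* sign bit */
  return 0;
}
#endif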
889 /* Split a tree IN into a constant and a variable part
890 that could be combined with CODE to make IN.
891 CODE must be a commutative arithmetic operation.
 892 Store the constant part into *CONP and the variable in *VARP.
893 Return 1 if this was done; zero means the tree IN did not decompose
894 this way.
896 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
897 Therefore, we must tell the caller whether the variable part
898 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
899 The value stored is the coefficient for the variable term.
900 The constant term we return should always be added;
901 we negate it if necessary. */
903 static int
904 split_tree (in, code, varp, conp, varsignp)
905 tree in;
906 enum tree_code code;
907 tree *varp, *conp;
908 int *varsignp;
910 register tree outtype = TREE_TYPE (in);
911 *varp = 0;
912 *conp = 0;
914 /* Strip any conversions that don't change the machine mode. */
915 while ((TREE_CODE (in) == NOP_EXPR
916 || TREE_CODE (in) == CONVERT_EXPR)
917 && (TYPE_MODE (TREE_TYPE (in))
918 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
919 in = TREE_OPERAND (in, 0);
921 if (TREE_CODE (in) == code
922 || (! FLOAT_TYPE_P (TREE_TYPE (in))
923 /* We can associate addition and subtraction together
924 (even though the C standard doesn't say so)
925 for integers because the value is not affected.
926 For reals, the value might be affected, so we can't. */
927 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
928 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
930 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
931 if (code == INTEGER_CST)
933 *conp = TREE_OPERAND (in, 0);
934 *varp = TREE_OPERAND (in, 1);
935 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
936 && TREE_TYPE (*varp) != outtype)
937 *varp = convert (outtype, *varp);
938 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
939 return 1;
941 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
943 *conp = TREE_OPERAND (in, 1);
944 *varp = TREE_OPERAND (in, 0);
945 *varsignp = 1;
946 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
947 && TREE_TYPE (*varp) != outtype)
948 *varp = convert (outtype, *varp);
949 if (TREE_CODE (in) == MINUS_EXPR)
951 /* If operation is subtraction and constant is second,
952 must negate it to get an additive constant.
953 And this cannot be done unless it is a manifest constant.
954 It could also be the address of a static variable.
955 We cannot negate that, so give up. */
956 if (TREE_CODE (*conp) == INTEGER_CST)
957 /* Subtracting from integer_zero_node loses for long long. */
958 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
959 else
960 return 0;
962 return 1;
964 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
966 *conp = TREE_OPERAND (in, 0);
967 *varp = TREE_OPERAND (in, 1);
968 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
969 && TREE_TYPE (*varp) != outtype)
970 *varp = convert (outtype, *varp);
971 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
972 return 1;
975 return 0;
 978 /* Combine two constants ARG1 and ARG2 under operation CODE
979 to produce a new constant.
980 We assume ARG1 and ARG2 have the same data type,
981 or at least are the same kind of constant and the same machine mode.
983 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
985 static tree
986 const_binop (code, arg1, arg2, notrunc)
987 enum tree_code code;
988 register tree arg1, arg2;
989 int notrunc;
991 if (TREE_CODE (arg1) == INTEGER_CST)
993 register HOST_WIDE_INT int1l = TREE_INT_CST_LOW (arg1);
994 register HOST_WIDE_INT int1h = TREE_INT_CST_HIGH (arg1);
995 HOST_WIDE_INT int2l = TREE_INT_CST_LOW (arg2);
996 HOST_WIDE_INT int2h = TREE_INT_CST_HIGH (arg2);
997 HOST_WIDE_INT low, hi;
998 HOST_WIDE_INT garbagel, garbageh;
999 register tree t;
1000 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
1001 int overflow = 0;
1003 switch (code)
1005 case BIT_IOR_EXPR:
1006 t = build_int_2 (int1l | int2l, int1h | int2h);
1007 break;
1009 case BIT_XOR_EXPR:
1010 t = build_int_2 (int1l ^ int2l, int1h ^ int2h);
1011 break;
1013 case BIT_AND_EXPR:
1014 t = build_int_2 (int1l & int2l, int1h & int2h);
1015 break;
1017 case BIT_ANDTC_EXPR:
1018 t = build_int_2 (int1l & ~int2l, int1h & ~int2h);
1019 break;
1021 case RSHIFT_EXPR:
1022 int2l = - int2l;
1023 case LSHIFT_EXPR:
1024 /* It's unclear from the C standard whether shifts can overflow.
1025 The following code ignores overflow; perhaps a C standard
1026 interpretation ruling is needed. */
1027 lshift_double (int1l, int1h, int2l,
1028 TYPE_PRECISION (TREE_TYPE (arg1)),
1029 &low, &hi,
1030 !uns);
1031 t = build_int_2 (low, hi);
1032 TREE_TYPE (t) = TREE_TYPE (arg1);
1033 if (!notrunc)
1034 force_fit_type (t, 0);
1035 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1036 TREE_CONSTANT_OVERFLOW (t)
1037 = TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2);
1038 return t;
1040 case RROTATE_EXPR:
1041 int2l = - int2l;
1042 case LROTATE_EXPR:
1043 lrotate_double (int1l, int1h, int2l,
1044 TYPE_PRECISION (TREE_TYPE (arg1)),
1045 &low, &hi);
1046 t = build_int_2 (low, hi);
1047 break;
1049 case PLUS_EXPR:
1050 if (int1h == 0)
1052 int2l += int1l;
1053 if ((unsigned HOST_WIDE_INT) int2l < int1l)
1055 hi = int2h++;
1056 overflow = int2h < hi;
1058 t = build_int_2 (int2l, int2h);
1059 break;
1061 if (int2h == 0)
1063 int1l += int2l;
1064 if ((unsigned HOST_WIDE_INT) int1l < int2l)
1066 hi = int1h++;
1067 overflow = int1h < hi;
1069 t = build_int_2 (int1l, int1h);
1070 break;
1072 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1073 t = build_int_2 (low, hi);
1074 break;
1076 case MINUS_EXPR:
1077 if (int2h == 0 && int2l == 0)
1079 t = build_int_2 (int1l, int1h);
1080 break;
1082 neg_double (int2l, int2h, &low, &hi);
1083 add_double (int1l, int1h, low, hi, &low, &hi);
1084 overflow = overflow_sum_sign (hi, int2h, int1h);
1085 t = build_int_2 (low, hi);
1086 break;
1088 case MULT_EXPR:
1089 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1090 t = build_int_2 (low, hi);
1091 break;
1093 case TRUNC_DIV_EXPR:
1094 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1095 case EXACT_DIV_EXPR:
1096 /* This is a shortcut for a common special case.
1097 It reduces the number of tree nodes generated
1098 and saves time. */
1099 if (int2h == 0 && int2l > 0
1100 && TREE_TYPE (arg1) == sizetype
1101 && int1h == 0 && int1l >= 0)
1103 if (code == CEIL_DIV_EXPR)
1104 int1l += int2l-1;
1105 return size_int (int1l / int2l);
1107 case ROUND_DIV_EXPR:
1108 if (int2h == 0 && int2l == 1)
1110 t = build_int_2 (int1l, int1h);
1111 break;
1113 if (int1l == int2l && int1h == int2h)
1115 if ((int1l | int1h) == 0)
1116 abort ();
1117 t = build_int_2 (1, 0);
1118 break;
1120 overflow = div_and_round_double (code, uns,
1121 int1l, int1h, int2l, int2h,
1122 &low, &hi, &garbagel, &garbageh);
1123 t = build_int_2 (low, hi);
1124 break;
1126 case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
1127 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1128 overflow = div_and_round_double (code, uns,
1129 int1l, int1h, int2l, int2h,
1130 &garbagel, &garbageh, &low, &hi);
1131 t = build_int_2 (low, hi);
1132 break;
1134 case MIN_EXPR:
1135 case MAX_EXPR:
1136 if (uns)
1138 low = (((unsigned HOST_WIDE_INT) int1h
1139 < (unsigned HOST_WIDE_INT) int2h)
1140 || (((unsigned HOST_WIDE_INT) int1h
1141 == (unsigned HOST_WIDE_INT) int2h)
1142 && ((unsigned HOST_WIDE_INT) int1l
1143 < (unsigned HOST_WIDE_INT) int2l)));
1145 else
1147 low = ((int1h < int2h)
1148 || ((int1h == int2h)
1149 && ((unsigned HOST_WIDE_INT) int1l
1150 < (unsigned HOST_WIDE_INT) int2l)));
1152 if (low == (code == MIN_EXPR))
1153 t = build_int_2 (int1l, int1h);
1154 else
1155 t = build_int_2 (int2l, int2h);
1156 break;
1158 default:
1159 abort ();
1161 got_it:
1162 TREE_TYPE (t) = TREE_TYPE (arg1);
1163 TREE_OVERFLOW (t)
1164 = ((notrunc ? !uns && overflow : force_fit_type (t, overflow))
1165 | TREE_OVERFLOW (arg1)
1166 | TREE_OVERFLOW (arg2));
1167 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1168 | TREE_CONSTANT_OVERFLOW (arg1)
1169 | TREE_CONSTANT_OVERFLOW (arg2));
1170 return t;
1172 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1173 if (TREE_CODE (arg1) == REAL_CST)
1175 REAL_VALUE_TYPE d1;
1176 REAL_VALUE_TYPE d2;
1177 int overflow = 0;
1178 REAL_VALUE_TYPE value;
1179 tree t;
1181 d1 = TREE_REAL_CST (arg1);
1182 d2 = TREE_REAL_CST (arg2);
1184 /* If either operand is a NaN, just return it. Otherwise, set up
1185 for floating-point trap; we return an overflow. */
1186 if (REAL_VALUE_ISNAN (d1))
1187 return arg1;
1188 else if (REAL_VALUE_ISNAN (d2))
1189 return arg2;
1190 else if (setjmp (float_error))
1192 t = copy_node (arg1);
1193 overflow = 1;
1194 goto got_float;
1197 set_float_handler (float_error);
1199 #ifdef REAL_ARITHMETIC
1200 REAL_ARITHMETIC (value, code, d1, d2);
1201 #else
1202 switch (code)
1204 case PLUS_EXPR:
1205 value = d1 + d2;
1206 break;
1208 case MINUS_EXPR:
1209 value = d1 - d2;
1210 break;
1212 case MULT_EXPR:
1213 value = d1 * d2;
1214 break;
1216 case RDIV_EXPR:
1217 #ifndef REAL_INFINITY
1218 if (d2 == 0)
1219 abort ();
1220 #endif
1222 value = d1 / d2;
1223 break;
1225 case MIN_EXPR:
1226 value = MIN (d1, d2);
1227 break;
1229 case MAX_EXPR:
1230 value = MAX (d1, d2);
1231 break;
1233 default:
1234 abort ();
1236 #endif /* no REAL_ARITHMETIC */
1237 t = build_real (TREE_TYPE (arg1),
1238 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
1239 got_float:
1240 set_float_handler (NULL_PTR);
1242 TREE_OVERFLOW (t)
1243 = (force_fit_type (t, overflow)
1244 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1245 TREE_CONSTANT_OVERFLOW (t)
1246 = TREE_OVERFLOW (t)
1247 | TREE_CONSTANT_OVERFLOW (arg1)
1248 | TREE_CONSTANT_OVERFLOW (arg2);
1249 return t;
1251 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1252 if (TREE_CODE (arg1) == COMPLEX_CST)
1254 register tree r1 = TREE_REALPART (arg1);
1255 register tree i1 = TREE_IMAGPART (arg1);
1256 register tree r2 = TREE_REALPART (arg2);
1257 register tree i2 = TREE_IMAGPART (arg2);
1258 register tree t;
1260 switch (code)
1262 case PLUS_EXPR:
1263 t = build_complex (const_binop (PLUS_EXPR, r1, r2, notrunc),
1264 const_binop (PLUS_EXPR, i1, i2, notrunc));
1265 break;
1267 case MINUS_EXPR:
1268 t = build_complex (const_binop (MINUS_EXPR, r1, r2, notrunc),
1269 const_binop (MINUS_EXPR, i1, i2, notrunc));
1270 break;
1272 case MULT_EXPR:
1273 t = build_complex (const_binop (MINUS_EXPR,
1274 const_binop (MULT_EXPR,
1275 r1, r2, notrunc),
1276 const_binop (MULT_EXPR,
1277 i1, i2, notrunc),
1278 notrunc),
1279 const_binop (PLUS_EXPR,
1280 const_binop (MULT_EXPR,
1281 r1, i2, notrunc),
1282 const_binop (MULT_EXPR,
1283 i1, r2, notrunc),
1284 notrunc));
1285 break;
1287 case RDIV_EXPR:
1289 register tree magsquared
1290 = const_binop (PLUS_EXPR,
1291 const_binop (MULT_EXPR, r2, r2, notrunc),
1292 const_binop (MULT_EXPR, i2, i2, notrunc),
1293 notrunc);
1295 t = build_complex
1296 (const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1297 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1298 const_binop (PLUS_EXPR,
1299 const_binop (MULT_EXPR, r1, r2,
1300 notrunc),
1301 const_binop (MULT_EXPR, i1, i2,
1302 notrunc),
1303 notrunc),
1304 magsquared, notrunc),
1305 const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1306 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1307 const_binop (MINUS_EXPR,
1308 const_binop (MULT_EXPR, i1, r2,
1309 notrunc),
1310 const_binop (MULT_EXPR, r1, i2,
1311 notrunc),
1312 notrunc),
1313 magsquared, notrunc));
1315 break;
1317 default:
1318 abort ();
1320 TREE_TYPE (t) = TREE_TYPE (arg1);
1321 return t;
1323 return 0;
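/* A standalone sketch of the complex arithmetic folded above:
   (r1 + i1*i) * (r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i,
   and division multiplies through by the conjugate, dividing each
   part by |z2|^2 = r2*r2 + i2*i2 (the `magsquared' above).  */
#if 0
#include <assert.h>

int
main (void)
{
  double r1 = 1, i1 = 2, r2 = 3, i2 = -4;
  double pr = r1 * r2 - i1 * i2;          /* 3 + 8  = 11 */
  double pi = r1 * i2 + i1 * r2;          /* -4 + 6 = 2  */
  double mag = r2 * r2 + i2 * i2;         /* 25 */
  double qr = (r1 * r2 + i1 * i2) / mag;  /* (3 - 8)/25 = -0.2 */
  double qi = (i1 * r2 - r1 * i2) / mag;  /* (6 + 4)/25 = 0.4  */
  assert (pr == 11 && pi == 2 && qr == -0.2 && qi == 0.4);
  return 0;
}
#endif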
1326 /* Return an INTEGER_CST with value V and type from `sizetype'. */
1328 tree
1329 size_int (number)
1330 unsigned int number;
1332 register tree t;
1333 /* Type-size nodes already made for small sizes. */
1334 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];
1336 if (number < 2*HOST_BITS_PER_WIDE_INT + 1
1337 && size_table[number] != 0)
1338 return size_table[number];
1339 if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
1341 push_obstacks_nochange ();
1342 /* Make this a permanent node. */
1343 end_temporary_allocation ();
1344 t = build_int_2 (number, 0);
1345 TREE_TYPE (t) = sizetype;
1346 size_table[number] = t;
1347 pop_obstacks ();
1349 else
1351 t = build_int_2 (number, 0);
1352 TREE_TYPE (t) = sizetype;
1354 return t;
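/* A standalone toy analogue (not GCC's allocator) of the memoization
   in size_int above: small values are interned in a static table so
   repeated calls for a common size return one shared node.  The table
   bound 129 assumes HOST_BITS_PER_WIDE_INT == 64, i.e. 2*64 + 1.  */
#if 0
#include <assert.h>
#include <stdlib.h>

#define SMALL 129

static int *
toy_size_int (unsigned n)
{
  static int *table[SMALL];
  if (n < SMALL && table[n])
    return table[n];                  /* cache hit: shared node */
  int *t = malloc (sizeof *t);
  *t = n;
  if (n < SMALL)
    table[n] = t;                     /* intern small values */
  return t;
}

int
main (void)
{
  assert (toy_size_int (8) == toy_size_int (8));     /* same node */
  assert (toy_size_int (999) != toy_size_int (999)); /* not cached */
  return 0;
}
#endif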
1357 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1358 CODE is a tree code. Data type is taken from `sizetype',
1359 If the operands are constant, so is the result. */
1361 tree
1362 size_binop (code, arg0, arg1)
1363 enum tree_code code;
1364 tree arg0, arg1;
1366 /* Handle the special case of two integer constants faster. */
1367 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1369 /* And some specific cases even faster than that. */
1370 if (code == PLUS_EXPR
1371 && TREE_INT_CST_LOW (arg0) == 0
1372 && TREE_INT_CST_HIGH (arg0) == 0)
1373 return arg1;
1374 if (code == MINUS_EXPR
1375 && TREE_INT_CST_LOW (arg1) == 0
1376 && TREE_INT_CST_HIGH (arg1) == 0)
1377 return arg0;
1378 if (code == MULT_EXPR
1379 && TREE_INT_CST_LOW (arg0) == 1
1380 && TREE_INT_CST_HIGH (arg0) == 0)
1381 return arg1;
1382 /* Handle general case of two integer constants. */
1383 return const_binop (code, arg0, arg1, 1);
1386 if (arg0 == error_mark_node || arg1 == error_mark_node)
1387 return error_mark_node;
1389 return fold (build (code, sizetype, arg0, arg1));
1392 /* Given T, a tree representing type conversion of ARG1, a constant,
1393 return a constant tree representing the result of conversion. */
1395 static tree
1396 fold_convert (t, arg1)
1397 register tree t;
1398 register tree arg1;
1400 register tree type = TREE_TYPE (t);
1401 int overflow = 0;
1403 if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
1405 if (TREE_CODE (arg1) == INTEGER_CST)
1407 /* Given an integer constant, make new constant with new type,
1408 appropriately sign-extended or truncated. */
1409 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1410 TREE_INT_CST_HIGH (arg1));
1411 TREE_TYPE (t) = type;
1412 /* Indicate an overflow if (1) ARG1 already overflowed,
1413 or (2) force_fit_type indicates an overflow.
1414 Tell force_fit_type that an overflow has already occurred
1415 if ARG1 is a too-large unsigned value and T is signed. */
1416 TREE_OVERFLOW (t)
1417 = (TREE_OVERFLOW (arg1)
1418 | force_fit_type (t,
1419 (TREE_INT_CST_HIGH (arg1) < 0
1420 & (TREE_UNSIGNED (type)
1421 < TREE_UNSIGNED (TREE_TYPE (arg1))))));
1422 TREE_CONSTANT_OVERFLOW (t)
1423 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1425 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1426 else if (TREE_CODE (arg1) == REAL_CST)
1428 /* Don't initialize these, use assignments.
1429 Initialized local aggregates don't work on old compilers. */
1430 REAL_VALUE_TYPE x;
1431 REAL_VALUE_TYPE l;
1432 REAL_VALUE_TYPE u;
1434 x = TREE_REAL_CST (arg1);
1435 l = real_value_from_int_cst (TYPE_MIN_VALUE (type));
1436 u = real_value_from_int_cst (TYPE_MAX_VALUE (type));
1437 /* See if X will be in range after truncation towards 0.
1438 To compensate for truncation, move the bounds away from 0,
1439 but reject if X exactly equals the adjusted bounds. */
1440 #ifdef REAL_ARITHMETIC
1441 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1442 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1443 #else
1444 l--;
1445 u++;
1446 #endif
1447 /* If X is a NaN, use zero instead and show we have an overflow.
1448 Otherwise, range check. */
1449 if (REAL_VALUE_ISNAN (x))
1450 overflow = 1, x = dconst0;
1451 else if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
1452 overflow = 1;
1454 #ifndef REAL_ARITHMETIC
1456 HOST_WIDE_INT low, high;
1457 HOST_WIDE_INT half_word
1458 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1460 if (x < 0)
1461 x = -x;
1463 high = (HOST_WIDE_INT) (x / half_word / half_word);
1464 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
1465 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1467 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
1468 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1470 else
1471 low = (HOST_WIDE_INT) x;
1472 if (TREE_REAL_CST (arg1) < 0)
1473 neg_double (low, high, &low, &high);
1474 t = build_int_2 (low, high);
1476 #else
1478 HOST_WIDE_INT low, high;
1479 REAL_VALUE_TO_INT (&low, &high, x);
1480 t = build_int_2 (low, high);
1482 #endif
1483 TREE_TYPE (t) = type;
1484 TREE_OVERFLOW (t)
1485 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1486 TREE_CONSTANT_OVERFLOW (t)
1487 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1489 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1490 TREE_TYPE (t) = type;
1492 else if (TREE_CODE (type) == REAL_TYPE)
1494 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1495 if (TREE_CODE (arg1) == INTEGER_CST)
1496 return build_real_from_int_cst (type, arg1);
1497 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1498 if (TREE_CODE (arg1) == REAL_CST)
1500 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1501 return arg1;
1502 else if (setjmp (float_error))
1504 overflow = 1;
1505 t = copy_node (arg1);
1506 goto got_it;
1508 set_float_handler (float_error);
1510 t = build_real (type, real_value_truncate (TYPE_MODE (type),
1511 TREE_REAL_CST (arg1)));
1512 set_float_handler (NULL_PTR);
1514 got_it:
1515 TREE_OVERFLOW (t)
1516 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1517 TREE_CONSTANT_OVERFLOW (t)
1518 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1519 return t;
1522 TREE_CONSTANT (t) = 1;
1523 return t;
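/* A standalone sketch of the range check above: to decide whether a
   real X survives truncation toward zero into a type with bounds
   [MIN, MAX], the bounds are moved one away from zero and X must lie
   strictly between them; e.g. 127.9 truncates into signed char range
   but 128.0 does not.  Illustration only, with hard-coded bounds.  */
#if 0
#include <assert.h>

static int
fits_signed_char (double x)
{
  double l = -128.0 - 1.0, u = 127.0 + 1.0;  /* adjusted bounds */
  return l < x && x < u;                     /* strict: reject +/-128.0 */
}

int
main (void)
{
  assert (fits_signed_char (127.9));   /* truncates to 127 */
  assert (! fits_signed_char (128.0));
  assert (fits_signed_char (-128.5));  /* truncates to -128 */
  assert (! fits_signed_char (-129.0));
  return 0;
}
#endif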
1526 /* Return an expr equal to X but certainly not valid as an lvalue.
 1527 Also make sure it is not valid as a null pointer constant. */
1529 tree
1530 non_lvalue (x)
1531 tree x;
1533 tree result;
1535 /* These things are certainly not lvalues. */
1536 if (TREE_CODE (x) == NON_LVALUE_EXPR
1537 || TREE_CODE (x) == INTEGER_CST
1538 || TREE_CODE (x) == REAL_CST
1539 || TREE_CODE (x) == STRING_CST
1540 || TREE_CODE (x) == ADDR_EXPR)
1542 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1544 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1545 so convert_for_assignment won't strip it.
1546 This is so this 0 won't be treated as a null pointer constant. */
1547 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1548 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1549 return result;
1551 return x;
1554 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1555 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1556 return result;
1559 /* When pedantic, return an expr equal to X but certainly not valid as a
1560 pedantic lvalue. Otherwise, return X. */
1562 tree
1563 pedantic_non_lvalue (x)
1564 tree x;
1566 if (pedantic)
1567 return non_lvalue (x);
1568 else
1569 return x;
1572 /* Given a tree comparison code, return the code that is the logical inverse
1573 of the given code. It is not safe to do this for floating-point
1574 comparisons, except for NE_EXPR and EQ_EXPR. */
1576 static enum tree_code
1577 invert_tree_comparison (code)
1578 enum tree_code code;
1580 switch (code)
1582 case EQ_EXPR:
1583 return NE_EXPR;
1584 case NE_EXPR:
1585 return EQ_EXPR;
1586 case GT_EXPR:
1587 return LE_EXPR;
1588 case GE_EXPR:
1589 return LT_EXPR;
1590 case LT_EXPR:
1591 return GE_EXPR;
1592 case LE_EXPR:
1593 return GT_EXPR;
1594 default:
1595 abort ();
1599 /* Similar, but return the comparison that results if the operands are
1600 swapped. This is safe for floating-point. */
1602 static enum tree_code
1603 swap_tree_comparison (code)
1604 enum tree_code code;
1606 switch (code)
1608 case EQ_EXPR:
1609 case NE_EXPR:
1610 return code;
1611 case GT_EXPR:
1612 return LT_EXPR;
1613 case GE_EXPR:
1614 return LE_EXPR;
1615 case LT_EXPR:
1616 return GT_EXPR;
1617 case LE_EXPR:
1618 return GE_EXPR;
1619 default:
1620 abort ();
1624 /* Return nonzero if CODE is a tree code that represents a truth value. */
1626 static int
1627 truth_value_p (code)
1628 enum tree_code code;
1630 return (TREE_CODE_CLASS (code) == '<'
1631 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1632 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1633 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1636 /* Return nonzero if two operands are necessarily equal.
1637 If ONLY_CONST is non-zero, only return non-zero for constants.
1638 This function tests whether the operands are indistinguishable;
1639 it does not test whether they are equal using C's == operation.
1640 The distinction is important for IEEE floating point, because
1641 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1642 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1645 operand_equal_p (arg0, arg1, only_const)
1646 tree arg0, arg1;
1647 int only_const;
1649 /* If both types don't have the same signedness, then we can't consider
1650 them equal. We must check this before the STRIP_NOPS calls
1651 because they may change the signedness of the arguments. */
1652 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1653 return 0;
1655 STRIP_NOPS (arg0);
1656 STRIP_NOPS (arg1);
1658 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1659 We don't care about side effects in that case because the SAVE_EXPR
1660 takes care of that for us. */
1661 if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
1662 return ! only_const;
1664 if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
1665 return 0;
1667 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1668 && TREE_CODE (arg0) == ADDR_EXPR
1669 && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
1670 return 1;
1672 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1673 && TREE_CODE (arg0) == INTEGER_CST
1674 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1675 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
1676 return 1;
1678 /* Detect when real constants are equal. */
1679 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1680 && TREE_CODE (arg0) == REAL_CST)
1681 return !bcmp ((char *) &TREE_REAL_CST (arg0),
1682 (char *) &TREE_REAL_CST (arg1),
1683 sizeof (REAL_VALUE_TYPE));
1685 if (only_const)
1686 return 0;
1688 if (arg0 == arg1)
1689 return 1;
1691 if (TREE_CODE (arg0) != TREE_CODE (arg1))
1692 return 0;
1693 /* This is needed for conversions and for COMPONENT_REF.
1694 Might as well play it safe and always test this. */
1695 if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1696 return 0;
1698 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1700 case '1':
1701 /* Two conversions are equal only if signedness and modes match. */
1702 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1703 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1704 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1705 return 0;
1707 return operand_equal_p (TREE_OPERAND (arg0, 0),
1708 TREE_OPERAND (arg1, 0), 0);
1710 case '<':
1711 case '2':
1712 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1713 TREE_OPERAND (arg1, 0), 0)
1714 && operand_equal_p (TREE_OPERAND (arg0, 1),
1715 TREE_OPERAND (arg1, 1), 0));
1717 case 'r':
1718 switch (TREE_CODE (arg0))
1720 case INDIRECT_REF:
1721 return operand_equal_p (TREE_OPERAND (arg0, 0),
1722 TREE_OPERAND (arg1, 0), 0);
1724 case COMPONENT_REF:
1725 case ARRAY_REF:
1726 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1727 TREE_OPERAND (arg1, 0), 0)
1728 && operand_equal_p (TREE_OPERAND (arg0, 1),
1729 TREE_OPERAND (arg1, 1), 0));
1731 case BIT_FIELD_REF:
1732 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1733 TREE_OPERAND (arg1, 0), 0)
1734 && operand_equal_p (TREE_OPERAND (arg0, 1),
1735 TREE_OPERAND (arg1, 1), 0)
1736 && operand_equal_p (TREE_OPERAND (arg0, 2),
1737 TREE_OPERAND (arg1, 2), 0));
1739 break;
1742 return 0;
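/* A standalone sketch (IEEE doubles assumed) of why the REAL_CST case
   above compares representations rather than using C's `==': -0.0 and
   0.0 compare equal yet are distinguishable constants, and a NaN
   compares unequal even to an identical copy of itself.  memcmp plays
   the role of the original's bcmp.  */
#if 0
#include <assert.h>
#include <string.h>

static int
same_bits (double a, double b)
{
  return memcmp (&a, &b, sizeof a) == 0;
}

int
main (void)
{
  double pz = 0.0, nz = -0.0, z = 0.0, nan;
  assert (pz == nz && ! same_bits (pz, nz));   /* == is too coarse */
  nan = z / z;
  assert (nan != nan && same_bits (nan, nan)); /* == is too strict */
  return 0;
}
#endif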
1745 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1746 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1748 When in doubt, return 0. */
1750 static int
1751 operand_equal_for_comparison_p (arg0, arg1, other)
1752 tree arg0, arg1;
1753 tree other;
1755 int unsignedp1, unsignedpo;
1756 tree primarg1, primother;
1757 unsigned correct_width;
1759 if (operand_equal_p (arg0, arg1, 0))
1760 return 1;
1762 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
1763 return 0;
1765 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1766 actual comparison operand, ARG0.
1768 First throw away any conversions to wider types
1769 already present in the operands. */
1771 primarg1 = get_narrower (arg1, &unsignedp1);
1772 primother = get_narrower (other, &unsignedpo);
1774 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1775 if (unsignedp1 == unsignedpo
1776 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1777 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1779 tree type = TREE_TYPE (arg0);
1781 /* Make sure shorter operand is extended the right way
1782 to match the longer operand. */
1783 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1784 TREE_TYPE (primarg1)),
1785 primarg1);
1787 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1788 return 1;
1791 return 0;
1794 /* See if ARG is an expression that is either a comparison or is performing
1795 arithmetic on comparisons. The comparisons must only be comparing
1796 two different values, which will be stored in *CVAL1 and *CVAL2; if
1797 they are non-zero it means that some operands have already been found.
1798 No variables may be used anywhere else in the expression except in the
1799 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1800 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1802 If this is true, return 1. Otherwise, return zero. */
1804 static int
1805 twoval_comparison_p (arg, cval1, cval2, save_p)
1806 tree arg;
1807 tree *cval1, *cval2;
1808 int *save_p;
1810 enum tree_code code = TREE_CODE (arg);
1811 char class = TREE_CODE_CLASS (code);
1813 /* We can handle some of the 'e' cases here. */
1814 if (class == 'e' && code == TRUTH_NOT_EXPR)
1815 class = '1';
1816 else if (class == 'e'
1817 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1818 || code == COMPOUND_EXPR))
1819 class = '2';
1821 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1822 the expression. There may be no way to make this work, but it needs
1823 to be looked at again for 2.6. */
1824 #if 0
1825 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1827 /* If we've already found a CVAL1 or CVAL2, this expression is
 1828 too complex to handle. */
1829 if (*cval1 || *cval2)
1830 return 0;
1832 class = '1';
1833 *save_p = 1;
1835 #endif
1837 switch (class)
1839 case '1':
1840 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
1842 case '2':
1843 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1844 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1845 cval1, cval2, save_p));
1847 case 'c':
1848 return 1;
1850 case 'e':
1851 if (code == COND_EXPR)
1852 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
1853 cval1, cval2, save_p)
1854 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1855 cval1, cval2, save_p)
1856 && twoval_comparison_p (TREE_OPERAND (arg, 2),
1857 cval1, cval2, save_p));
1858 return 0;
1860 case '<':
1861 /* First see if we can handle the first operand, then the second. For
1862 the second operand, we know *CVAL1 can't be zero. It must be that
1863 one side of the comparison is each of the values; test for the
1864 case where this isn't true by failing if the two operands
1865 are the same. */
1867 if (operand_equal_p (TREE_OPERAND (arg, 0),
1868 TREE_OPERAND (arg, 1), 0))
1869 return 0;
1871 if (*cval1 == 0)
1872 *cval1 = TREE_OPERAND (arg, 0);
1873 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
1875 else if (*cval2 == 0)
1876 *cval2 = TREE_OPERAND (arg, 0);
1877 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
1879 else
1880 return 0;
1882 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
1884 else if (*cval2 == 0)
1885 *cval2 = TREE_OPERAND (arg, 1);
1886 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
1888 else
1889 return 0;
1891 return 1;
1894 return 0;
1897 /* ARG is a tree that is known to contain just arithmetic operations and
1898 comparisons. Evaluate the operations in the tree substituting NEW0 for
1899 any occurrence of OLD0 as an operand of a comparison and likewise for
1900 NEW1 and OLD1. */
1902 static tree
1903 eval_subst (arg, old0, new0, old1, new1)
1904 tree arg;
1905 tree old0, new0, old1, new1;
1907 tree type = TREE_TYPE (arg);
1908 enum tree_code code = TREE_CODE (arg);
1909 char class = TREE_CODE_CLASS (code);
1911 /* We can handle some of the 'e' cases here. */
1912 if (class == 'e' && code == TRUTH_NOT_EXPR)
1913 class = '1';
1914 else if (class == 'e'
1915 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
1916 class = '2';
1918 switch (class)
1920 case '1':
1921 return fold (build1 (code, type,
1922 eval_subst (TREE_OPERAND (arg, 0),
1923 old0, new0, old1, new1)));
1925 case '2':
1926 return fold (build (code, type,
1927 eval_subst (TREE_OPERAND (arg, 0),
1928 old0, new0, old1, new1),
1929 eval_subst (TREE_OPERAND (arg, 1),
1930 old0, new0, old1, new1)));
1932 case 'e':
1933 switch (code)
1935 case SAVE_EXPR:
1936 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
1938 case COMPOUND_EXPR:
1939 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
1941 case COND_EXPR:
1942 return fold (build (code, type,
1943 eval_subst (TREE_OPERAND (arg, 0),
1944 old0, new0, old1, new1),
1945 eval_subst (TREE_OPERAND (arg, 1),
1946 old0, new0, old1, new1),
1947 eval_subst (TREE_OPERAND (arg, 2),
1948 old0, new0, old1, new1)));
1951 case '<':
1953 tree arg0 = TREE_OPERAND (arg, 0);
1954 tree arg1 = TREE_OPERAND (arg, 1);
1956 /* We need to check both for exact equality and tree equality. The
1957 former will be true if the operand has a side-effect. In that
1958 case, we know the operand occurred exactly once. */
1960 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
1961 arg0 = new0;
1962 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
1963 arg0 = new1;
1965 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
1966 arg1 = new0;
1967 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
1968 arg1 = new1;
1970 return fold (build (code, type, arg0, arg1));
1974 return arg;
1977 /* Return a tree for the case when the result of an expression is RESULT
1978 converted to TYPE and OMITTED was previously an operand of the expression
1979 but is now not needed (e.g., we folded OMITTED * 0).
1981 If OMITTED has side effects, we must evaluate it. Otherwise, just do
1982 the conversion of RESULT to TYPE. */
1984 static tree
1985 omit_one_operand (type, result, omitted)
1986 tree type, result, omitted;
1988 tree t = convert (type, result);
1990 if (TREE_SIDE_EFFECTS (omitted))
1991 return build (COMPOUND_EXPR, type, omitted, t);
1993 return non_lvalue (t);
1996 /* Return a simplified tree node for the truth-negation of ARG. This
1997 never alters ARG itself. We assume that ARG is an operation that
1998 returns a truth value (0 or 1). */
2000 tree
2001 invert_truthvalue (arg)
2002 tree arg;
2004 tree type = TREE_TYPE (arg);
2005 enum tree_code code = TREE_CODE (arg);
2007 if (code == ERROR_MARK)
2008 return arg;
2010 /* If this is a comparison, we can simply invert it, except for
2011 floating-point non-equality comparisons, in which case we just
2012 enclose a TRUTH_NOT_EXPR around what we have. */
2014 if (TREE_CODE_CLASS (code) == '<')
2016 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2017 && code != NE_EXPR && code != EQ_EXPR)
2018 return build1 (TRUTH_NOT_EXPR, type, arg);
2019 else
2020 return build (invert_tree_comparison (code), type,
2021 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2024 switch (code)
2026 case INTEGER_CST:
2027 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2028 && TREE_INT_CST_HIGH (arg) == 0, 0));
2030 case TRUTH_AND_EXPR:
2031 return build (TRUTH_OR_EXPR, type,
2032 invert_truthvalue (TREE_OPERAND (arg, 0)),
2033 invert_truthvalue (TREE_OPERAND (arg, 1)));
2035 case TRUTH_OR_EXPR:
2036 return build (TRUTH_AND_EXPR, type,
2037 invert_truthvalue (TREE_OPERAND (arg, 0)),
2038 invert_truthvalue (TREE_OPERAND (arg, 1)));
2040 case TRUTH_XOR_EXPR:
2041 /* Here we can invert either operand. We invert the first operand
2042 unless the second operand is a TRUTH_NOT_EXPR in which case our
2043 result is the XOR of the first operand with the inside of the
2044 negation of the second operand. */
2046 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2047 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2048 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2049 else
2050 return build (TRUTH_XOR_EXPR, type,
2051 invert_truthvalue (TREE_OPERAND (arg, 0)),
2052 TREE_OPERAND (arg, 1));
2054 case TRUTH_ANDIF_EXPR:
2055 return build (TRUTH_ORIF_EXPR, type,
2056 invert_truthvalue (TREE_OPERAND (arg, 0)),
2057 invert_truthvalue (TREE_OPERAND (arg, 1)));
2059 case TRUTH_ORIF_EXPR:
2060 return build (TRUTH_ANDIF_EXPR, type,
2061 invert_truthvalue (TREE_OPERAND (arg, 0)),
2062 invert_truthvalue (TREE_OPERAND (arg, 1)));
2064 case TRUTH_NOT_EXPR:
2065 return TREE_OPERAND (arg, 0);
2067 case COND_EXPR:
2068 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2069 invert_truthvalue (TREE_OPERAND (arg, 1)),
2070 invert_truthvalue (TREE_OPERAND (arg, 2)));
2072 case COMPOUND_EXPR:
2073 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2074 invert_truthvalue (TREE_OPERAND (arg, 1)));
2076 case NON_LVALUE_EXPR:
2077 return invert_truthvalue (TREE_OPERAND (arg, 0));
2079 case NOP_EXPR:
2080 case CONVERT_EXPR:
2081 case FLOAT_EXPR:
2082 return build1 (TREE_CODE (arg), type,
2083 invert_truthvalue (TREE_OPERAND (arg, 0)));
2085 case BIT_AND_EXPR:
2086 if (!integer_onep (TREE_OPERAND (arg, 1)))
2087 break;
2088 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2090 case SAVE_EXPR:
2091 return build1 (TRUTH_NOT_EXPR, type, arg);
2093 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2094 abort ();
2095 return build1 (TRUTH_NOT_EXPR, type, arg);
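/* Editorial illustration, not from the original source: at the source
   level, invert_truthvalue applies the usual De Morgan rules and
   comparison inversions, e.g.

       !(a && b)   becomes   !a || !b
       !(x < y)    becomes   x >= y

   the latter only for non-floating operands, since with IEEE NaNs
   "x >= y" is not the negation of "x < y".  */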
2098 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2099 operands are another bit-wise operation with a common input. If so,
2100 distribute the bit operations to save an operation and possibly two if
2101 constants are involved. For example, convert
2102 (A | B) & (A | C) into A | (B & C)
2103 Further simplification will occur if B and C are constants.
2105 If this optimization cannot be done, 0 will be returned. */
2107 static tree
2108 distribute_bit_expr (code, type, arg0, arg1)
2109 enum tree_code code;
2110 tree type;
2111 tree arg0, arg1;
2113 tree common;
2114 tree left, right;
2116 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2117 || TREE_CODE (arg0) == code
2118 || (TREE_CODE (arg0) != BIT_AND_EXPR
2119 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2120 return 0;
2122 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2124 common = TREE_OPERAND (arg0, 0);
2125 left = TREE_OPERAND (arg0, 1);
2126 right = TREE_OPERAND (arg1, 1);
2128 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2130 common = TREE_OPERAND (arg0, 0);
2131 left = TREE_OPERAND (arg0, 1);
2132 right = TREE_OPERAND (arg1, 0);
2134 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2136 common = TREE_OPERAND (arg0, 1);
2137 left = TREE_OPERAND (arg0, 0);
2138 right = TREE_OPERAND (arg1, 1);
2140 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2142 common = TREE_OPERAND (arg0, 1);
2143 left = TREE_OPERAND (arg0, 0);
2144 right = TREE_OPERAND (arg1, 0);
2146 else
2147 return 0;
2149 return fold (build (TREE_CODE (arg0), type, common,
2150 fold (build (code, type, left, right))));
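/* Editorial illustration, not from the original source: at the source
   level this performs

       (a | b) & (a | c)   =>   a | (b & c)
       (a & b) | (a & c)   =>   a & (b | c)

   so, e.g., (x & 0xf0) | (x & 0x0f) becomes x & (0xf0 | 0x0f), which
   then folds to x & 0xff.  */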
2153 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2154 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2156 static tree
2157 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2158 tree inner;
2159 tree type;
2160 int bitsize, bitpos;
2161 int unsignedp;
2163 tree result = build (BIT_FIELD_REF, type, inner,
2164 size_int (bitsize), size_int (bitpos));
2166 TREE_UNSIGNED (result) = unsignedp;
2168 return result;
2171 /* Optimize a bit-field compare.
2173 There are two cases: First is a compare against a constant and the
2174 second is a comparison of two items where the fields are at the same
2175 bit position relative to the start of a chunk (byte, halfword, word)
2176 large enough to contain it. In these cases we can avoid the shift
2177 implicit in bitfield extractions.
2179 For constants, we emit a compare of the shifted constant with the
2180 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2181 compared. For two fields at the same position, we do the ANDs with the
2182 similar mask and compare the result of the ANDs.
2184 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2185 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2186 are the left and right operands of the comparison, respectively.
2188 If the optimization described above can be done, we return the resulting
2189 tree. Otherwise we return zero. */
2191 static tree
2192 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2193 enum tree_code code;
2194 tree compare_type;
2195 tree lhs, rhs;
2197 int lbitpos, lbitsize, rbitpos, rbitsize;
2198 int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
2199 tree type = TREE_TYPE (lhs);
2200 tree signed_type, unsigned_type;
2201 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2202 enum machine_mode lmode, rmode, lnmode, rnmode;
2203 int lunsignedp, runsignedp;
2204 int lvolatilep = 0, rvolatilep = 0;
2205 tree linner, rinner;
2206 tree mask;
2207 tree offset;
2209 /* Get all the information about the extractions being done. If the bit size
2210 is the same as the size of the underlying object, we aren't doing an
2211 extraction at all and so can do nothing. */
2212 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2213 &lunsignedp, &lvolatilep);
2214 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2215 || offset != 0)
2216 return 0;
2218 if (!const_p)
2220 /* If this is not a constant, we can only do something if bit positions,
2221 sizes, and signedness are the same. */
2222 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset,
2223 &rmode, &runsignedp, &rvolatilep);
2225 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2226 || lunsignedp != runsignedp || offset != 0)
2227 return 0;
2230 /* See if we can find a mode to refer to this field. We should be able to,
2231 but fail if we can't. */
2232 lnmode = get_best_mode (lbitsize, lbitpos,
2233 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2234 lvolatilep);
2235 if (lnmode == VOIDmode)
2236 return 0;
2238 /* Set signed and unsigned types of the precision of this mode for the
2239 shifts below. */
2240 signed_type = type_for_mode (lnmode, 0);
2241 unsigned_type = type_for_mode (lnmode, 1);
2243 if (! const_p)
2245 rnmode = get_best_mode (rbitsize, rbitpos,
2246 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2247 rvolatilep);
2248 if (rnmode == VOIDmode)
2249 return 0;
2252 /* Compute the bit position and size for the new reference and our offset
2253 within it. If the new reference is the same size as the original, we
2254 won't optimize anything, so return zero. */
2255 lnbitsize = GET_MODE_BITSIZE (lnmode);
2256 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2257 lbitpos -= lnbitpos;
2258 if (lnbitsize == lbitsize)
2259 return 0;
2261 if (! const_p)
2263 rnbitsize = GET_MODE_BITSIZE (rnmode);
2264 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2265 rbitpos -= rnbitpos;
2266 if (rnbitsize == rbitsize)
2267 return 0;
2270 #if BYTES_BIG_ENDIAN
2271 lbitpos = lnbitsize - lbitsize - lbitpos;
2272 #endif
2274 /* Make the mask to be used against the extracted field. */
2275 mask = build_int_2 (~0, ~0);
2276 TREE_TYPE (mask) = unsigned_type;
2277 force_fit_type (mask, 0);
2278 mask = convert (unsigned_type, mask);
2279 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2280 mask = const_binop (RSHIFT_EXPR, mask,
2281 size_int (lnbitsize - lbitsize - lbitpos), 0);
2283 if (! const_p)
2284 /* If not comparing with a constant, just rework the comparison
2285 and return. */
2286 return build (code, compare_type,
2287 build (BIT_AND_EXPR, unsigned_type,
2288 make_bit_field_ref (linner, unsigned_type,
2289 lnbitsize, lnbitpos, 1),
2290 mask),
2291 build (BIT_AND_EXPR, unsigned_type,
2292 make_bit_field_ref (rinner, unsigned_type,
2293 rnbitsize, rnbitpos, 1),
2294 mask));
2296 /* Otherwise, we are handling the constant case. See if the constant is too
2297 big for the field. Warn and return a tree for 0 (false) if so. We do
2298 this not only for its own sake, but to avoid having to test for this
2299 error case below. If we didn't, we might generate wrong code.
2301 For unsigned fields, the constant shifted right by the field length should
2302 be all zero. For signed fields, the high-order bits should agree with
2303 the sign bit. */
2305 if (lunsignedp)
2307 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2308 convert (unsigned_type, rhs),
2309 size_int (lbitsize), 0)))
2311 warning ("comparison is always %s due to width of bitfield",
2312 code == NE_EXPR ? "one" : "zero");
2313 return convert (compare_type,
2314 (code == NE_EXPR
2315 ? integer_one_node : integer_zero_node));
2318 else
2320 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2321 size_int (lbitsize - 1), 0);
2322 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2324 warning ("comparison is always %s due to width of bitfield",
2325 code == NE_EXPR ? "one" : "zero");
2326 return convert (compare_type,
2327 (code == NE_EXPR
2328 ? integer_one_node : integer_zero_node));
2332 /* Single-bit compares should always be against zero. */
2333 if (lbitsize == 1 && ! integer_zerop (rhs))
2335 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2336 rhs = convert (type, integer_zero_node);
2339 /* Make a new bitfield reference, shift the constant over the
2340 appropriate number of bits and mask it with the computed mask
2341 (in case this was a signed field). If we changed it, make a new one. */
2342 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
2343 if (lvolatilep)
2345 TREE_SIDE_EFFECTS (lhs) = 1;
2346 TREE_THIS_VOLATILE (lhs) = 1;
2349 rhs = fold (const_binop (BIT_AND_EXPR,
2350 const_binop (LSHIFT_EXPR,
2351 convert (unsigned_type, rhs),
2352 size_int (lbitpos), 0),
2353 mask, 0));
2355 return build (code, compare_type,
2356 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2357 rhs);
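/* Editorial illustration, not from the original source; the bit layout
   shown is hypothetical.  Given

       struct s { unsigned f : 4; } x;

   with F placed at bit 8 of its containing word, "x.f == 3" needs no
   shift to extract the field:

       (word_containing_f & 0xf00) == 0x300

   one AND of the containing word against the mask, compared with the
   constant shifted into the field's position.  */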
2360 /* Subroutine for fold_truthop: decode a field reference.
2362 If EXP is a comparison reference, we return the innermost reference.
2364 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2365 set to the starting bit number.
2367 If the innermost field can be completely contained in a mode-sized
2368 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2370 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2371 otherwise it is not changed.
2373 *PUNSIGNEDP is set to the signedness of the field.
2375 *PMASK is set to the mask used. This is either contained in a
2376 BIT_AND_EXPR or derived from the width of the field.
2378 Return 0 if this is not a component reference or is one that we can't
2379 do anything with. */
2381 static tree
2382 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2383 pvolatilep, pmask)
2384 tree exp;
2385 int *pbitsize, *pbitpos;
2386 enum machine_mode *pmode;
2387 int *punsignedp, *pvolatilep;
2388 tree *pmask;
2390 tree and_mask = 0;
2391 tree mask, inner, offset;
2392 tree unsigned_type;
2393 int precision;
2395 /* All the optimizations using this function assume integer fields.
2396 There are problems with FP fields since the type_for_size call
2397 below can fail for, e.g., XFmode. */
2398 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2399 return 0;
2401 STRIP_NOPS (exp);
2403 if (TREE_CODE (exp) == BIT_AND_EXPR)
2405 and_mask = TREE_OPERAND (exp, 1);
2406 exp = TREE_OPERAND (exp, 0);
2407 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2408 if (TREE_CODE (and_mask) != INTEGER_CST)
2409 return 0;
2412 if (TREE_CODE (exp) != COMPONENT_REF && TREE_CODE (exp) != ARRAY_REF
2413 && TREE_CODE (exp) != BIT_FIELD_REF)
2414 return 0;
2416 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2417 punsignedp, pvolatilep);
2418 if (inner == exp || *pbitsize < 0 || offset != 0)
2419 return 0;
2421 /* Compute the mask to access the bitfield. */
2422 unsigned_type = type_for_size (*pbitsize, 1);
2423 precision = TYPE_PRECISION (unsigned_type);
2425 mask = build_int_2 (~0, ~0);
2426 TREE_TYPE (mask) = unsigned_type;
2427 force_fit_type (mask, 0);
2428 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2429 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2431 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2432 if (and_mask != 0)
2433 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2434 convert (unsigned_type, and_mask), mask));
2436 *pmask = mask;
2437 return inner;
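/* Editorial illustration, not from the original source: for a 3-bit
   field, the two shifts above leave just the low-order ones.  If the
   unsigned type chosen is 8 bits wide,

       0xff << 5 >> 5  ==  0x07

   and that mask is then ANDed with any mask taken from an enclosing
   BIT_AND_EXPR.  */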
2440 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2441 bit positions. */
2443 static int
2444 all_ones_mask_p (mask, size)
2445 tree mask;
2446 int size;
2448 tree type = TREE_TYPE (mask);
2449 int precision = TYPE_PRECISION (type);
2450 tree tmask;
2452 tmask = build_int_2 (~0, ~0);
2453 TREE_TYPE (tmask) = signed_type (type);
2454 force_fit_type (tmask, 0);
2455 return
2456 operand_equal_p (mask,
2457 const_binop (RSHIFT_EXPR,
2458 const_binop (LSHIFT_EXPR, tmask,
2459 size_int (precision - size), 0),
2460 size_int (precision - size), 0),
2461 0);
2464 /* Subroutine for fold_truthop: determine if an operand is simple enough
2465 to be evaluated unconditionally. */
2467 static int
2468 simple_operand_p (exp)
2469 tree exp;
2471 /* Strip any conversions that don't change the machine mode. */
2472 while ((TREE_CODE (exp) == NOP_EXPR
2473 || TREE_CODE (exp) == CONVERT_EXPR)
2474 && (TYPE_MODE (TREE_TYPE (exp))
2475 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2476 exp = TREE_OPERAND (exp, 0);
2478 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2479 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2480 && ! TREE_ADDRESSABLE (exp)
2481 && ! TREE_THIS_VOLATILE (exp)
2482 && ! DECL_NONLOCAL (exp)
2483 /* Don't regard global variables as simple. They may be
2484 allocated in ways unknown to the compiler (shared memory,
2485 #pragma weak, etc). */
2486 && ! TREE_PUBLIC (exp)
2487 && ! DECL_EXTERNAL (exp)
2488 /* Loading a static variable is unduly expensive, but global
2489 registers aren't expensive. */
2490 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2493 /* Subroutine for fold_truthop: try to optimize a range test.
2495 For example, "i >= 2 && i <= 9" can be done as "(unsigned) (i - 2) <= 7".
2497 JCODE is the logical combination of the two terms. It is TRUTH_AND_EXPR
2498 (representing TRUTH_ANDIF_EXPR and TRUTH_AND_EXPR) or TRUTH_OR_EXPR
2499 (representing TRUTH_ORIF_EXPR and TRUTH_OR_EXPR). TYPE is the type of
2500 the result.
2502 VAR is the value being tested. LO_CODE and HI_CODE are the comparison
2503 operators comparing VAR to LO_CST and HI_CST. LO_CST is known to be no
2504 larger than HI_CST (they may be equal).
2506 We return the simplified tree or 0 if no optimization is possible. */
2508 static tree
2509 range_test (jcode, type, lo_code, hi_code, var, lo_cst, hi_cst)
2510 enum tree_code jcode, lo_code, hi_code;
2511 tree type, var, lo_cst, hi_cst;
2513 tree utype;
2514 enum tree_code rcode;
2516 /* See if this is a range test and normalize the constant terms. */
2518 if (jcode == TRUTH_AND_EXPR)
2520 switch (lo_code)
2522 case NE_EXPR:
2523 /* See if we have VAR != CST && VAR != CST+1. */
2524 if (! (hi_code == NE_EXPR
2525 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2526 && tree_int_cst_equal (integer_one_node,
2527 const_binop (MINUS_EXPR,
2528 hi_cst, lo_cst, 0))))
2529 return 0;
2531 rcode = GT_EXPR;
2532 break;
2534 case GT_EXPR:
2535 case GE_EXPR:
2536 if (hi_code == LT_EXPR)
2537 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2538 else if (hi_code != LE_EXPR)
2539 return 0;
2541 if (lo_code == GT_EXPR)
2542 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2544 /* We now have VAR >= LO_CST && VAR <= HI_CST. */
2545 rcode = LE_EXPR;
2546 break;
2548 default:
2549 return 0;
2552 else
2554 switch (lo_code)
2556 case EQ_EXPR:
2557 /* See if we have VAR == CST || VAR == CST+1. */
2558 if (! (hi_code == EQ_EXPR
2559 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2560 && tree_int_cst_equal (integer_one_node,
2561 const_binop (MINUS_EXPR,
2562 hi_cst, lo_cst, 0))))
2563 return 0;
2565 rcode = LE_EXPR;
2566 break;
2568 case LE_EXPR:
2569 case LT_EXPR:
2570 if (hi_code == GE_EXPR)
2571 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2572 else if (hi_code != GT_EXPR)
2573 return 0;
2575 if (lo_code == LE_EXPR)
2576 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2578 /* We now have VAR < LO_CST || VAR > HI_CST. */
2579 rcode = GT_EXPR;
2580 break;
2582 default:
2583 return 0;
2587 /* When normalizing, it is possible to both increment the smaller constant
2588 and decrement the larger constant. See if they are still ordered. */
2589 if (tree_int_cst_lt (hi_cst, lo_cst))
2590 return 0;
2592 /* Fail if VAR isn't an integer. */
2593 utype = TREE_TYPE (var);
2594 if (! INTEGRAL_TYPE_P (utype))
2595 return 0;
2597 /* The range test is invalid if subtracting the two constants results
2598 in overflow. This can happen in traditional mode. */
2599 if (! int_fits_type_p (hi_cst, TREE_TYPE (var))
2600 || ! int_fits_type_p (lo_cst, TREE_TYPE (var)))
2601 return 0;
2603 if (! TREE_UNSIGNED (utype))
2605 utype = unsigned_type (utype);
2606 var = convert (utype, var);
2607 lo_cst = convert (utype, lo_cst);
2608 hi_cst = convert (utype, hi_cst);
2611 return fold (convert (type,
2612 build (rcode, utype,
2613 build (MINUS_EXPR, utype, var, lo_cst),
2614 const_binop (MINUS_EXPR, hi_cst, lo_cst, 0))));
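/* Editorial illustration, not from the original source: the "or"
   direction of the test above rewrites, e.g.,

       ch < '0' || ch > '9'   as   (unsigned) (ch - '0') > 9

   one subtraction and one unsigned comparison in place of two
   comparisons and a branch.  */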
2617 /* Find ways of folding logical expressions of LHS and RHS:
2618 Try to merge two comparisons to the same innermost item.
2619 Look for range tests like "ch >= '0' && ch <= '9'".
2620 Look for combinations of simple terms on machines with expensive branches
2621 and evaluate the RHS unconditionally.
2623 For example, if we have p->a == 2 && p->b == 4 and we can make an
2624 object large enough to span both A and B, we can do this with a comparison
2625 against the object ANDed with a mask.
2627 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2628 operations to do this with one comparison.
2630 We check for both normal comparisons and the BIT_AND_EXPRs made by this
2631 function and the one above.
2633 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2634 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2636 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2637 two operands.
2639 We return the simplified tree or 0 if no optimization is possible. */
2641 static tree
2642 fold_truthop (code, truth_type, lhs, rhs)
2643 enum tree_code code;
2644 tree truth_type, lhs, rhs;
2646 /* If this is the "or" of two comparisons, we can do something if
2647 the comparisons are NE_EXPR. If this is the "and", we can do something
2648 if the comparisons are EQ_EXPR. I.e.,
2649 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2651 WANTED_CODE is this operation code. For single bit fields, we can
2652 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2653 comparison for one-bit fields. */
2655 enum tree_code wanted_code;
2656 enum tree_code lcode, rcode;
2657 tree ll_arg, lr_arg, rl_arg, rr_arg;
2658 tree ll_inner, lr_inner, rl_inner, rr_inner;
2659 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
2660 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
2661 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
2662 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
2663 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
2664 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
2665 enum machine_mode lnmode, rnmode;
2666 tree ll_mask, lr_mask, rl_mask, rr_mask;
2667 tree l_const, r_const;
2668 tree type, result;
2669 int first_bit, end_bit;
2670 int volatilep;
2672 /* Start by getting the comparison codes and seeing if this looks like
2673 a range test. Fail if anything is volatile. If one operand is a
2674 BIT_AND_EXPR with the constant one, treat it as if it were surrounded
2675 with a NE_EXPR. */
2677 if (TREE_SIDE_EFFECTS (lhs)
2678 || TREE_SIDE_EFFECTS (rhs))
2679 return 0;
2681 lcode = TREE_CODE (lhs);
2682 rcode = TREE_CODE (rhs);
2684 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
2685 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
2687 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
2688 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
2690 if (TREE_CODE_CLASS (lcode) != '<'
2691 || TREE_CODE_CLASS (rcode) != '<')
2692 return 0;
2694 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
2695 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
2697 ll_arg = TREE_OPERAND (lhs, 0);
2698 lr_arg = TREE_OPERAND (lhs, 1);
2699 rl_arg = TREE_OPERAND (rhs, 0);
2700 rr_arg = TREE_OPERAND (rhs, 1);
2702 if (TREE_CODE (lr_arg) == INTEGER_CST
2703 && TREE_CODE (rr_arg) == INTEGER_CST
2704 && operand_equal_p (ll_arg, rl_arg, 0))
2706 if (tree_int_cst_lt (lr_arg, rr_arg))
2707 result = range_test (code, truth_type, lcode, rcode,
2708 ll_arg, lr_arg, rr_arg);
2709 else
2710 result = range_test (code, truth_type, rcode, lcode,
2711 ll_arg, rr_arg, lr_arg);
2713 /* If this isn't a range test, it also isn't a comparison that
2714 can be merged. However, it wins to evaluate the RHS unconditionally
2715 on machines with expensive branches. */
2717 if (result == 0 && BRANCH_COST >= 2)
2719 if (TREE_CODE (ll_arg) != VAR_DECL
2720 && TREE_CODE (ll_arg) != PARM_DECL)
2722 /* Avoid evaluating the variable part twice. */
2723 ll_arg = save_expr (ll_arg);
2724 lhs = build (lcode, TREE_TYPE (lhs), ll_arg, lr_arg);
2725 rhs = build (rcode, TREE_TYPE (rhs), ll_arg, rr_arg);
2727 return build (code, truth_type, lhs, rhs);
2729 return result;
2732 /* If the RHS can be evaluated unconditionally and its operands are
2733 simple, it wins to evaluate the RHS unconditionally on machines
2734 with expensive branches. In this case, this isn't a comparison
2735 that can be merged. */
2737 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
2738 are with zero (tmw). */
2740 if (BRANCH_COST >= 2
2741 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
2742 && simple_operand_p (rl_arg)
2743 && simple_operand_p (rr_arg))
2744 return build (code, truth_type, lhs, rhs);
2746 /* See if the comparisons can be merged. Then get all the parameters for
2747 each side. */
2749 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
2750 || (rcode != EQ_EXPR && rcode != NE_EXPR))
2751 return 0;
2753 volatilep = 0;
2754 ll_inner = decode_field_reference (ll_arg,
2755 &ll_bitsize, &ll_bitpos, &ll_mode,
2756 &ll_unsignedp, &volatilep, &ll_mask);
2757 lr_inner = decode_field_reference (lr_arg,
2758 &lr_bitsize, &lr_bitpos, &lr_mode,
2759 &lr_unsignedp, &volatilep, &lr_mask);
2760 rl_inner = decode_field_reference (rl_arg,
2761 &rl_bitsize, &rl_bitpos, &rl_mode,
2762 &rl_unsignedp, &volatilep, &rl_mask);
2763 rr_inner = decode_field_reference (rr_arg,
2764 &rr_bitsize, &rr_bitpos, &rr_mode,
2765 &rr_unsignedp, &volatilep, &rr_mask);
2767 /* The inner operation on the lhs of each comparison must be the same
2768 if we are to be able to do anything.
2769 Then see if we have constants. If not, the same must be true for
2770 the rhs's. */
2771 if (volatilep || ll_inner == 0 || rl_inner == 0
2772 || ! operand_equal_p (ll_inner, rl_inner, 0))
2773 return 0;
2775 if (TREE_CODE (lr_arg) == INTEGER_CST
2776 && TREE_CODE (rr_arg) == INTEGER_CST)
2777 l_const = lr_arg, r_const = rr_arg;
2778 else if (lr_inner == 0 || rr_inner == 0
2779 || ! operand_equal_p (lr_inner, rr_inner, 0))
2780 return 0;
2781 else
2782 l_const = r_const = 0;
2784 /* If either comparison code is not correct for our logical operation,
2785 fail. However, we can convert a one-bit comparison against zero into
2786 the opposite comparison against that bit being set in the field. */
2788 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
2789 if (lcode != wanted_code)
2791 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
2792 l_const = ll_mask;
2793 else
2794 return 0;
2797 if (rcode != wanted_code)
2799 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
2800 r_const = rl_mask;
2801 else
2802 return 0;
2805 /* See if we can find a mode that contains both fields being compared on
2806 the left. If we can't, fail. Otherwise, update all constants and masks
2807 to be relative to a field of that size. */
2808 first_bit = MIN (ll_bitpos, rl_bitpos);
2809 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
2810 lnmode = get_best_mode (end_bit - first_bit, first_bit,
2811 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
2812 volatilep);
2813 if (lnmode == VOIDmode)
2814 return 0;
2816 lnbitsize = GET_MODE_BITSIZE (lnmode);
2817 lnbitpos = first_bit & ~ (lnbitsize - 1);
2818 type = type_for_size (lnbitsize, 1);
2819 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
2821 #if BYTES_BIG_ENDIAN
2822 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
2823 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
2824 #endif
2826 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
2827 size_int (xll_bitpos), 0);
2828 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
2829 size_int (xrl_bitpos), 0);
2831 /* Make sure the constants are interpreted as unsigned, so we
2832 don't have sign bits outside the range of their type. */
2834 if (l_const)
2836 l_const = convert (unsigned_type (TREE_TYPE (l_const)), l_const);
2837 l_const = const_binop (LSHIFT_EXPR, convert (type, l_const),
2838 size_int (xll_bitpos), 0);
2839 l_const = const_binop (BIT_AND_EXPR, l_const, ll_mask, 0);
2841 if (r_const)
2843 r_const = convert (unsigned_type (TREE_TYPE (r_const)), r_const);
2844 r_const = const_binop (LSHIFT_EXPR, convert (type, r_const),
2845 size_int (xrl_bitpos), 0);
2846 r_const = const_binop (BIT_AND_EXPR, r_const, rl_mask, 0);
2849 /* If the right sides are not constant, do the same for them. Also,
2850 disallow this optimization if a size or signedness mismatch occurs
2851 between the left and right sides. */
2852 if (l_const == 0)
2854 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
2855 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
2856 /* Make sure the two fields on the right
2857 correspond to the left without being swapped. */
2858 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
2859 return 0;
2861 first_bit = MIN (lr_bitpos, rr_bitpos);
2862 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
2863 rnmode = get_best_mode (end_bit - first_bit, first_bit,
2864 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
2865 volatilep);
2866 if (rnmode == VOIDmode)
2867 return 0;
2869 rnbitsize = GET_MODE_BITSIZE (rnmode);
2870 rnbitpos = first_bit & ~ (rnbitsize - 1);
2871 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
2873 #if BYTES_BIG_ENDIAN
2874 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
2875 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
2876 #endif
2878 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
2879 size_int (xlr_bitpos), 0);
2880 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
2881 size_int (xrr_bitpos), 0);
2883 /* Make a mask that corresponds to both fields being compared.
2884 Do this for both items being compared. If the masks agree,
2885 we can do this by masking both and comparing the masked
2886 results. */
2887 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
2888 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
2889 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
2891 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
2892 ll_unsignedp || rl_unsignedp);
2893 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
2894 lr_unsignedp || rr_unsignedp);
2895 if (! all_ones_mask_p (ll_mask, lnbitsize))
2897 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
2898 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
2900 return build (wanted_code, truth_type, lhs, rhs);
2903 /* There is still another way we can do something: If both pairs of
2904 fields being compared are adjacent, we may be able to make a wider
2905 field containing them both. */
2906 if ((ll_bitsize + ll_bitpos == rl_bitpos
2907 && lr_bitsize + lr_bitpos == rr_bitpos)
2908 || (ll_bitpos == rl_bitpos + rl_bitsize
2909 && lr_bitpos == rr_bitpos + rr_bitsize))
2910 return build (wanted_code, truth_type,
2911 make_bit_field_ref (ll_inner, type,
2912 ll_bitsize + rl_bitsize,
2913 MIN (ll_bitpos, rl_bitpos),
2914 ll_unsignedp),
2915 make_bit_field_ref (lr_inner, type,
2916 lr_bitsize + rr_bitsize,
2917 MIN (lr_bitpos, rr_bitpos),
2918 lr_unsignedp));
2920 return 0;
2923 /* Handle the case of comparisons with constants. If there is something in
2924 common between the masks, those bits of the constants must be the same.
2925 If not, the condition is always false. Test for this to avoid generating
2926 incorrect code below. */
2927 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
2928 if (! integer_zerop (result)
2929 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
2930 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
2932 if (wanted_code == NE_EXPR)
2934 warning ("`or' of unmatched not-equal tests is always 1");
2935 return convert (truth_type, integer_one_node);
2937 else
2939 warning ("`and' of mutually exclusive equal-tests is always zero");
2940 return convert (truth_type, integer_zero_node);
2944 /* Construct the expression we will return. First get the component
2945 reference we will make. Unless the mask is all ones the width of
2946 that field, perform the mask operation. Then compare with the
2947 merged constant. */
2948 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
2949 ll_unsignedp || rl_unsignedp);
2951 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
2952 if (! all_ones_mask_p (ll_mask, lnbitsize))
2953 result = build (BIT_AND_EXPR, type, result, ll_mask);
2955 return build (wanted_code, truth_type, result,
2956 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
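/* Editorial illustration, not from the original source; the bit
   placement shown is hypothetical.  For

       struct s { unsigned a : 4; unsigned b : 4; } *p;

   with A in the low four bits of the containing word, the test
   "p->a == 2 && p->b == 4" can become one masked compare:

       (word & 0xff) == 0x42

   since the merged mask is 0x0f | 0xf0 and the merged constant is
   (4 << 4) | 2.  */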
2959 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
2960 S, a SAVE_EXPR, return the expression actually being evaluated. Note
2961 that we may sometimes modify the tree. */
2963 static tree
2964 strip_compound_expr (t, s)
2965 tree t;
2966 tree s;
2968 tree type = TREE_TYPE (t);
2969 enum tree_code code = TREE_CODE (t);
2971 /* See if this is the COMPOUND_EXPR we want to eliminate. */
2972 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
2973 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
2974 return TREE_OPERAND (t, 1);
2976 /* See if this is a COND_EXPR or a simple arithmetic operator. We
2977 don't bother handling any other types. */
2978 else if (code == COND_EXPR)
2980 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
2981 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
2982 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
2984 else if (TREE_CODE_CLASS (code) == '1')
2985 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
2986 else if (TREE_CODE_CLASS (code) == '<'
2987 || TREE_CODE_CLASS (code) == '2')
2989 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
2990 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
2993 return t;
2996 /* Perform constant folding and related simplification of EXPR.
2997 The related simplifications include x*1 => x, x*0 => 0, etc.,
2998 and application of the associative law.
2999 NOP_EXPR conversions may be removed freely (as long as we
3000 are careful not to change the C type of the overall expression).
3001 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3002 but we can constant-fold them if they have constant operands. */
3004 tree
3005 fold (expr)
3006 tree expr;
3008 register tree t = expr;
3009 tree t1 = NULL_TREE;
3010 tree tem;
3011 tree type = TREE_TYPE (expr);
3012 register tree arg0, arg1;
3013 register enum tree_code code = TREE_CODE (t);
3014 register int kind;
3015 int invert;
3017 /* WINS will be nonzero when the switch is done
3018 if all operands are constant. */
3020 int wins = 1;
3022 /* Don't try to process an RTL_EXPR since its operands aren't trees. */
3023 if (code == RTL_EXPR)
3024 return t;
3026 /* Return right away if already constant. */
3027 if (TREE_CONSTANT (t))
3029 if (code == CONST_DECL)
3030 return DECL_INITIAL (t);
3031 return t;
3034 kind = TREE_CODE_CLASS (code);
3035 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3037 tree subop;
3039 /* Special case for conversion ops that can have fixed point args. */
3040 arg0 = TREE_OPERAND (t, 0);
3042 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3043 if (arg0 != 0)
3044 STRIP_TYPE_NOPS (arg0);
3046 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3047 subop = TREE_REALPART (arg0);
3048 else
3049 subop = arg0;
3051 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3052 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3053 && TREE_CODE (subop) != REAL_CST
3054 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3055 )
3056 /* Note that TREE_CONSTANT isn't enough:
3057 static var addresses are constant but we can't
3058 do arithmetic on them. */
3059 wins = 0;
3061 else if (kind == 'e' || kind == '<'
3062 || kind == '1' || kind == '2' || kind == 'r')
3064 register int len = tree_code_length[(int) code];
3065 register int i;
3066 for (i = 0; i < len; i++)
3068 tree op = TREE_OPERAND (t, i);
3069 tree subop;
3071 if (op == 0)
3072 continue; /* Valid for CALL_EXPR, at least. */
3074 if (kind == '<' || code == RSHIFT_EXPR)
3076 /* Signedness matters here. Perhaps we can refine this
3077 later. */
3078 STRIP_TYPE_NOPS (op);
3080 else
3082 /* Strip any conversions that don't change the mode. */
3083 STRIP_NOPS (op);
3086 if (TREE_CODE (op) == COMPLEX_CST)
3087 subop = TREE_REALPART (op);
3088 else
3089 subop = op;
3091 if (TREE_CODE (subop) != INTEGER_CST
3092 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3093 && TREE_CODE (subop) != REAL_CST
3094 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3095 )
3096 /* Note that TREE_CONSTANT isn't enough:
3097 static var addresses are constant but we can't
3098 do arithmetic on them. */
3099 wins = 0;
3101 if (i == 0)
3102 arg0 = op;
3103 else if (i == 1)
3104 arg1 = op;
3108 /* If this is a commutative operation, and ARG0 is a constant, move it
3109 to ARG1 to reduce the number of tests below. */
3110 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3111 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3112 || code == BIT_AND_EXPR)
3113 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3115 tem = arg0; arg0 = arg1; arg1 = tem;
3117 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3118 TREE_OPERAND (t, 1) = tem;
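/* Editorial illustration, not from the original source: canonicalizing
   constants into the second operand means "2 + x" is folded exactly
   like "x + 2", so the cases below only test arg1 for a constant.  */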
3121 /* Now WINS is set as described above,
3122 ARG0 is the first operand of EXPR,
3123 and ARG1 is the second operand (if it has more than one operand).
3125 First check for cases where an arithmetic operation is applied to a
3126 compound, conditional, or comparison operation. Push the arithmetic
3127 operation inside the compound or conditional to see if any folding
3128 can then be done. Convert comparison to conditional for this purpose.
3129 This also optimizes non-constant cases that used to be done in
3130 expand_expr.
3132 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3133 one of the operands is a comparison and the other is a comparison, a
3134 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3135 code below would make the expression more complex. Change it to a
3136 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3137 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3139 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3140 || code == EQ_EXPR || code == NE_EXPR)
3141 && ((truth_value_p (TREE_CODE (arg0))
3142 && (truth_value_p (TREE_CODE (arg1))
3143 || (TREE_CODE (arg1) == BIT_AND_EXPR
3144 && integer_onep (TREE_OPERAND (arg1, 1)))))
3145 || (truth_value_p (TREE_CODE (arg1))
3146 && (truth_value_p (TREE_CODE (arg0))
3147 || (TREE_CODE (arg0) == BIT_AND_EXPR
3148 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3150 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3151 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3152 : TRUTH_XOR_EXPR,
3153 type, arg0, arg1));
3155 if (code == EQ_EXPR)
3156 t = invert_truthvalue (t);
3158 return t;
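/* Editorial illustration, not from the original source: the rewrite
   above turns, e.g.,

       (a < b) & (c < d)    into a TRUTH_AND_EXPR of the comparisons,
       (a < b) != (c < d)   into a TRUTH_XOR_EXPR,

   and an EQ_EXPR of two truth values becomes the inverted
   TRUTH_XOR_EXPR.  These are forms the truth-value folders above can
   simplify further.  */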
3161 if (TREE_CODE_CLASS (code) == '1')
3163 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3164 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3165 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3166 else if (TREE_CODE (arg0) == COND_EXPR)
3168 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3169 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3170 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3172 /* If this was a conversion, and all we did was to move it
3173 inside the COND_EXPR, bring it back out. Then return so we
3174 don't get into an infinite recursion loop taking the conversion
3175 out and then back in. */
3177 if ((code == NOP_EXPR || code == CONVERT_EXPR
3178 || code == NON_LVALUE_EXPR)
3179 && TREE_CODE (t) == COND_EXPR
3180 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3181 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3182 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3183 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0))))
3184 t = build1 (code, type,
3185 build (COND_EXPR,
3186 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3187 TREE_OPERAND (t, 0),
3188 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3189 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3190 return t;
3192 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3193 return fold (build (COND_EXPR, type, arg0,
3194 fold (build1 (code, type, integer_one_node)),
3195 fold (build1 (code, type, integer_zero_node))));
3197 else if (TREE_CODE_CLASS (code) == '2'
3198 || TREE_CODE_CLASS (code) == '<')
3200 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3201 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3202 fold (build (code, type,
3203 arg0, TREE_OPERAND (arg1, 1))));
3204 else if (TREE_CODE (arg1) == COND_EXPR
3205 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
3207 tree test, true_value, false_value;
3209 if (TREE_CODE (arg1) == COND_EXPR)
3211 test = TREE_OPERAND (arg1, 0);
3212 true_value = TREE_OPERAND (arg1, 1);
3213 false_value = TREE_OPERAND (arg1, 2);
3215 else
3217 test = arg1;
3218 true_value = integer_one_node;
3219 false_value = integer_zero_node;
3222 /* If ARG0 is complex we want to make sure we only evaluate
3223 it once. Though this is only required if it is volatile, it
3224 might be more efficient even if it is not. However, if we
3225 succeed in folding one part to a constant, we do not need
3226 to make this SAVE_EXPR. Since we do this optimization
3227 primarily to see if we do end up with a constant and this
3228 SAVE_EXPR interferes with later optimizations, suppressing
3229 it when we can is important. */
3231 if (TREE_CODE (arg0) != SAVE_EXPR
3232 && ((TREE_CODE (arg0) != VAR_DECL
3233 && TREE_CODE (arg0) != PARM_DECL)
3234 || TREE_SIDE_EFFECTS (arg0)))
3236 tree lhs = fold (build (code, type, arg0, true_value));
3237 tree rhs = fold (build (code, type, arg0, false_value));
3239 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3240 return fold (build (COND_EXPR, type, test, lhs, rhs));
3242 arg0 = save_expr (arg0);
3245 test = fold (build (COND_EXPR, type, test,
3246 fold (build (code, type, arg0, true_value)),
3247 fold (build (code, type, arg0, false_value))));
3248 if (TREE_CODE (arg0) == SAVE_EXPR)
3249 return build (COMPOUND_EXPR, type,
3250 convert (void_type_node, arg0),
3251 strip_compound_expr (test, arg0));
3252 else
3253 return convert (type, test);
3256 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3257 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3258 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3259 else if (TREE_CODE (arg0) == COND_EXPR
3260 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3262 tree test, true_value, false_value;
3264 if (TREE_CODE (arg0) == COND_EXPR)
3266 test = TREE_OPERAND (arg0, 0);
3267 true_value = TREE_OPERAND (arg0, 1);
3268 false_value = TREE_OPERAND (arg0, 2);
3270 else
3272 test = arg0;
3273 true_value = integer_one_node;
3274 false_value = integer_zero_node;
3277 if (TREE_CODE (arg1) != SAVE_EXPR
3278 && ((TREE_CODE (arg1) != VAR_DECL
3279 && TREE_CODE (arg1) != PARM_DECL)
3280 || TREE_SIDE_EFFECTS (arg1)))
3282 tree lhs = fold (build (code, type, true_value, arg1));
3283 tree rhs = fold (build (code, type, false_value, arg1));
3285 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs)
3286 || TREE_CONSTANT (arg1))
3287 return fold (build (COND_EXPR, type, test, lhs, rhs));
3289 arg1 = save_expr (arg1);
3292 test = fold (build (COND_EXPR, type, test,
3293 fold (build (code, type, true_value, arg1)),
3294 fold (build (code, type, false_value, arg1))));
3295 if (TREE_CODE (arg1) == SAVE_EXPR)
3296 return build (COMPOUND_EXPR, type,
3297 convert (void_type_node, arg1),
3298 strip_compound_expr (test, arg1));
3299 else
3300 return convert (type, test);
3303 else if (TREE_CODE_CLASS (code) == '<'
3304 && TREE_CODE (arg0) == COMPOUND_EXPR)
3305 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3306 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3307 else if (TREE_CODE_CLASS (code) == '<'
3308 && TREE_CODE (arg1) == COMPOUND_EXPR)
3309 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3310 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
3312 switch (code)
3314 case INTEGER_CST:
3315 case REAL_CST:
3316 case STRING_CST:
3317 case COMPLEX_CST:
3318 case CONSTRUCTOR:
3319 return t;
3321 case CONST_DECL:
3322 return fold (DECL_INITIAL (t));
3324 case NOP_EXPR:
3325 case FLOAT_EXPR:
3326 case CONVERT_EXPR:
3327 case FIX_TRUNC_EXPR:
3328 /* Other kinds of FIX are not handled properly by fold_convert. */
3330 /* In addition to the cases of two conversions in a row
3331 handled below, if we are converting something to its own
3332 type via an object of identical or wider precision, neither
3333 conversion is needed. */
3334 if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3335 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3336 && TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == TREE_TYPE (t)
3337 && ((INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3338 && INTEGRAL_TYPE_P (TREE_TYPE (t)))
3339 || (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3340 && FLOAT_TYPE_P (TREE_TYPE (t))))
3341 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3342 >= TYPE_PRECISION (TREE_TYPE (t))))
3343 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3345 /* Two conversions in a row are not needed unless:
3346 - the intermediate type is narrower than both initial and final, or
3347 - the intermediate type and innermost type differ in signedness,
3348 and the outermost type is wider than the intermediate, or
3349 - the initial type is a pointer type and the precisions of the
3350 intermediate and final types differ, or
3351 - the final type is a pointer type and the precisions of the
3352 initial and intermediate types differ. */
3353 if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3354 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3355 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3356 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3357 ||
3358 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3359 > TYPE_PRECISION (TREE_TYPE (t)))
3360 && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3361 == INTEGER_TYPE)
3362 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3363 == INTEGER_TYPE)
3364 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
3365 != TREE_UNSIGNED (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3366 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3367 < TYPE_PRECISION (TREE_TYPE (t))))
3368 && ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
3369 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3370 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))))
3371 ||
3372 (TREE_UNSIGNED (TREE_TYPE (t))
3373 && (TYPE_PRECISION (TREE_TYPE (t))
3374 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
3375 && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3376 == POINTER_TYPE)
3377 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3378 != TYPE_PRECISION (TREE_TYPE (t))))
3379 && ! (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
3380 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3381 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
3382 return convert (TREE_TYPE (t), TREE_OPERAND (TREE_OPERAND (t, 0), 0));
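/* Editorial illustration, not from the original source: by the rules
   above, a pair of conversions such as

       (unsigned int) (int) c       with c of type signed char

   collapses to (unsigned int) c: the intermediate type is not narrower
   than both ends, it does not differ in signedness from the innermost
   type, and no pointer types are involved.  */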
3384 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
3385 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
3386 /* Detect assigning a bitfield. */
3387 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
3388 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
3390 /* Don't leave an assignment inside a conversion
3391 unless assigning a bitfield. */
3392 tree prev = TREE_OPERAND (t, 0);
3393 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
3394 /* First do the assignment, then return converted constant. */
3395 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
3396 TREE_USED (t) = 1;
3397 return t;
3399 if (!wins)
3401 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
3402 return t;
3404 return fold_convert (t, arg0);
3406 #if 0 /* This loses on &"foo"[0]. */
3407 case ARRAY_REF:
3409 int i;
3411 /* Fold an expression like: "foo"[2] */
3412 if (TREE_CODE (arg0) == STRING_CST
3413 && TREE_CODE (arg1) == INTEGER_CST
3414 && !TREE_INT_CST_HIGH (arg1)
3415 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3417 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3418 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
3419 force_fit_type (t, 0);
3422 return t;
3423 #endif /* 0 */
3425 case COMPONENT_REF:
3426 if (TREE_CODE (arg0) == CONSTRUCTOR)
3427 t = TREE_VALUE (purpose_member (arg1, CONSTRUCTOR_ELTS (arg0)));
3428 return t;
3430 case RANGE_EXPR:
3431 TREE_CONSTANT (t) = wins;
3432 return t;
3434 case NEGATE_EXPR:
3435 if (wins)
3437 if (TREE_CODE (arg0) == INTEGER_CST)
3439 HOST_WIDE_INT low, high;
3440 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3441 TREE_INT_CST_HIGH (arg0),
3442 &low, &high);
3443 t = build_int_2 (low, high);
3444 TREE_TYPE (t) = type;
3445 TREE_OVERFLOW (t)
3446 = (TREE_OVERFLOW (arg0)
3447 | force_fit_type (t, overflow));
3448 TREE_CONSTANT_OVERFLOW (t)
3449 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3451 else if (TREE_CODE (arg0) == REAL_CST)
3452 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3453 TREE_TYPE (t) = type;
3455 else if (TREE_CODE (arg0) == NEGATE_EXPR)
3456 return TREE_OPERAND (arg0, 0);
3458 /* Convert - (a - b) to (b - a) for non-floating-point. */
3459 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
3460 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
3461 TREE_OPERAND (arg0, 0));
3463 return t;
3465 case ABS_EXPR:
3466 if (wins)
3468 if (TREE_CODE (arg0) == INTEGER_CST)
3470 if (! TREE_UNSIGNED (type)
3471 && TREE_INT_CST_HIGH (arg0) < 0)
3473 HOST_WIDE_INT low, high;
3474 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3475 TREE_INT_CST_HIGH (arg0),
3476 &low, &high);
3477 t = build_int_2 (low, high);
3478 TREE_TYPE (t) = type;
3479 TREE_OVERFLOW (t)
3480 = (TREE_OVERFLOW (arg0)
3481 | force_fit_type (t, overflow));
3482 TREE_CONSTANT_OVERFLOW (t)
3483 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3486 else if (TREE_CODE (arg0) == REAL_CST)
3488 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
3489 t = build_real (type,
3490 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3492 TREE_TYPE (t) = type;
3494 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
3495 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
3496 return t;
3498 case CONJ_EXPR:
3499 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
3500 return arg0;
3501 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
3502 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
3503 TREE_OPERAND (arg0, 0),
3504 fold (build1 (NEGATE_EXPR,
3505 TREE_TYPE (TREE_TYPE (arg0)),
3506 TREE_OPERAND (arg0, 1))));
3507 else if (TREE_CODE (arg0) == COMPLEX_CST)
3508 return build_complex (TREE_OPERAND (arg0, 0),
3509 fold (build1 (NEGATE_EXPR,
3510 TREE_TYPE (TREE_TYPE (arg0)),
3511 TREE_OPERAND (arg0, 1))));
3512 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
3513 return fold (build (TREE_CODE (arg0), type,
3514 fold (build1 (CONJ_EXPR, type,
3515 TREE_OPERAND (arg0, 0))),
3516 fold (build1 (CONJ_EXPR,
3517 type, TREE_OPERAND (arg0, 1)))));
3518 else if (TREE_CODE (arg0) == CONJ_EXPR)
3519 return TREE_OPERAND (arg0, 0);
3520 return t;
3522 case BIT_NOT_EXPR:
3523 if (wins)
3525 if (TREE_CODE (arg0) == INTEGER_CST)
3526 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
3527 ~ TREE_INT_CST_HIGH (arg0));
3528 TREE_TYPE (t) = type;
3529 force_fit_type (t, 0);
3530 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
3531 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
3533 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
3534 return TREE_OPERAND (arg0, 0);
3535 return t;
3537 case PLUS_EXPR:
3538 /* A + (-B) -> A - B */
3539 if (TREE_CODE (arg1) == NEGATE_EXPR)
3540 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3541 else if (! FLOAT_TYPE_P (type))
3543 if (integer_zerop (arg1))
3544 return non_lvalue (convert (type, arg0));
3546 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
3547 with a constant, and the two constants have no bits in common,
3548 we should treat this as a BIT_IOR_EXPR since this may produce more
3549 simplifications. */
3550 if (TREE_CODE (arg0) == BIT_AND_EXPR
3551 && TREE_CODE (arg1) == BIT_AND_EXPR
3552 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3553 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3554 && integer_zerop (const_binop (BIT_AND_EXPR,
3555 TREE_OPERAND (arg0, 1),
3556 TREE_OPERAND (arg1, 1), 0)))
3558 code = BIT_IOR_EXPR;
3559 goto bit_ior;
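/* Editorial illustration, not from the original source: addition of
   values with disjoint bits cannot carry, so, e.g.,

       (x & 0xf0) + (x & 0x0f)   ==   (x & 0xf0) | (x & 0x0f)

   and the BIT_IOR_EXPR form may fold further, here to x & 0xff.  */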
3562 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
3563 about the case where C is a constant, just try one of the
3564 four possibilities. */
3566 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3567 && operand_equal_p (TREE_OPERAND (arg0, 1),
3568 TREE_OPERAND (arg1, 1), 0))
3569 return fold (build (MULT_EXPR, type,
3570 fold (build (PLUS_EXPR, type,
3571 TREE_OPERAND (arg0, 0),
3572 TREE_OPERAND (arg1, 0))),
3573 TREE_OPERAND (arg0, 1)));
3575 /* In IEEE floating point, x+0 may not equal x. */
3576 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3577 || flag_fast_math)
3578 && real_zerop (arg1))
3579 return non_lvalue (convert (type, arg0));
3580 associate:
3581 /* In most languages, we can't associate operations on floats
3582 through parentheses. Rather than remember where the parentheses
3583 were, we don't associate floats at all. It shouldn't matter much.
3584 However, associating multiplications is only very slightly
3585 inaccurate, so do that if -ffast-math is specified. */
3586 if (FLOAT_TYPE_P (type)
3587 && ! (flag_fast_math && code == MULT_EXPR))
3588 goto binary;
3590 /* The varsign == -1 cases happen only for addition and subtraction.
3591 A varsign of -1 says the arg that was split was really CON minus VAR.
3592 The rest of the code applies to all associative operations. */
3593 if (!wins)
3595 tree var, con;
3596 int varsign;
3598 if (split_tree (arg0, code, &var, &con, &varsign))
3600 if (varsign == -1)
3602 /* EXPR is (CON-VAR) +- ARG1. */
3603 /* If it is + and VAR==ARG1, return just CONST. */
3604 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
3605 return convert (TREE_TYPE (t), con);
3607 /* If ARG0 is a constant, don't change things around;
3608 instead keep all the constant computations together. */
3610 if (TREE_CONSTANT (arg0))
3611 return t;
3613 /* Otherwise return (CON +- ARG1) - VAR. */
3614 TREE_SET_CODE (t, MINUS_EXPR);
3615 TREE_OPERAND (t, 1) = var;
3616 TREE_OPERAND (t, 0)
3617 = fold (build (code, TREE_TYPE (t), con, arg1));
3619 else
3621 /* EXPR is (VAR+CON) +- ARG1. */
3622 /* If it is - and VAR==ARG1, return just CONST. */
3623 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
3624 return convert (TREE_TYPE (t), con);
3626 /* If ARG0 is a constant, don't change things around;
3627 instead keep all the constant computations together. */
3629 if (TREE_CONSTANT (arg0))
3630 return t;
3632 /* Otherwise return VAR +- (ARG1 +- CON). */
3633 TREE_OPERAND (t, 1) = tem
3634 = fold (build (code, TREE_TYPE (t), arg1, con));
3635 TREE_OPERAND (t, 0) = var;
3636 if (integer_zerop (tem)
3637 && (code == PLUS_EXPR || code == MINUS_EXPR))
3638 return convert (type, var);
3639 /* If we have x +/- (c - d) [c an explicit integer]
3640 change it to x -/+ (d - c) since if d is relocatable
3641 then the latter can be a single immediate insn
3642 and the former cannot. */
3643 if (TREE_CODE (tem) == MINUS_EXPR
3644 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
3646 tree tem1 = TREE_OPERAND (tem, 1);
3647 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
3648 TREE_OPERAND (tem, 0) = tem1;
3649 TREE_SET_CODE (t,
3650 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3653 return t;
3656 if (split_tree (arg1, code, &var, &con, &varsign))
3658 if (TREE_CONSTANT (arg1))
3659 return t;
3661 if (varsign == -1)
3662 TREE_SET_CODE (t,
3663 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3665 /* EXPR is ARG0 +- (CON +- VAR). */
3666 if (TREE_CODE (t) == MINUS_EXPR
3667 && operand_equal_p (var, arg0, 0))
3669 /* If VAR and ARG0 cancel, return just CON or -CON. */
3670 if (code == PLUS_EXPR)
3671 return convert (TREE_TYPE (t), con);
3672 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
3673 convert (TREE_TYPE (t), con)));
3676 TREE_OPERAND (t, 0)
3677 = fold (build (code, TREE_TYPE (t), arg0, con));
3678 TREE_OPERAND (t, 1) = var;
3679 if (integer_zerop (TREE_OPERAND (t, 0))
3680 && TREE_CODE (t) == PLUS_EXPR)
3681 return convert (TREE_TYPE (t), var);
3682 return t;
3685 binary:
3686 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
3687 if (TREE_CODE (arg1) == REAL_CST)
3688 return t;
3689 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
3690 if (wins)
3691 t1 = const_binop (code, arg0, arg1, 0);
3692 if (t1 != NULL_TREE)
3694 /* The return value should always have
3695 the same type as the original expression. */
3696 TREE_TYPE (t1) = TREE_TYPE (t);
3697 return t1;
3699 return t;
3701 case MINUS_EXPR:
3702 if (! FLOAT_TYPE_P (type))
3704 if (! wins && integer_zerop (arg0))
3705 return build1 (NEGATE_EXPR, type, arg1);
3706 if (integer_zerop (arg1))
3707 return non_lvalue (convert (type, arg0));
3709 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
3710 about the case where C is a constant, just try one of the
3711 four possibilities. */
3713 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3714 && operand_equal_p (TREE_OPERAND (arg0, 1),
3715 TREE_OPERAND (arg1, 1), 0))
3716 return fold (build (MULT_EXPR, type,
3717 fold (build (MINUS_EXPR, type,
3718 TREE_OPERAND (arg0, 0),
3719 TREE_OPERAND (arg1, 0))),
3720 TREE_OPERAND (arg0, 1)));
3722 /* Convert A - (-B) to A + B. */
3723 else if (TREE_CODE (arg1) == NEGATE_EXPR)
3724 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3726 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3727 || flag_fast_math)
3729 /* Except with IEEE floating point, 0-x equals -x. */
3730 if (! wins && real_zerop (arg0))
3731 return build1 (NEGATE_EXPR, type, arg1);
3732 /* Except with IEEE floating point, x-0 equals x. */
3733 if (real_zerop (arg1))
3734 return non_lvalue (convert (type, arg0));
3737 /* Fold &x - &x. This can happen from &x.foo - &x.
3738 This is unsafe for certain floats even in non-IEEE formats.
3739 In IEEE, it is unsafe because it gives the wrong result for NaNs.
3740 Also note that operand_equal_p is always false if an operand
3741 is volatile. */
3743 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
3744 && operand_equal_p (arg0, arg1, 0))
3745 return convert (type, integer_zero_node);
3747 goto associate;
3749 case MULT_EXPR:
3750 if (! FLOAT_TYPE_P (type))
3751 {
3752 if (integer_zerop (arg1))
3753 return omit_one_operand (type, arg1, arg0);
3754 if (integer_onep (arg1))
3755 return non_lvalue (convert (type, arg0));
3757 /* ((A / C) * C) is A if the division is an
3758 EXACT_DIV_EXPR. Since C is normally a constant,
3759 just check for one of the four possibilities. */
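/* E.g. if `a / 4' is an EXACT_DIV_EXPR (known to have no
   remainder), `(a / 4) * 4' is just `a'.  For a plain
   TRUNC_DIV_EXPR this would be wrong: a == 5 gives (5/4)*4 == 4.  */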
3761 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
3762 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
3763 return TREE_OPERAND (arg0, 0);
3765 /* (a * (1 << b)) is (a << b) */
3766 if (TREE_CODE (arg1) == LSHIFT_EXPR
3767 && integer_onep (TREE_OPERAND (arg1, 0)))
3768 return fold (build (LSHIFT_EXPR, type, arg0,
3769 TREE_OPERAND (arg1, 1)));
3770 if (TREE_CODE (arg0) == LSHIFT_EXPR
3771 && integer_onep (TREE_OPERAND (arg0, 0)))
3772 return fold (build (LSHIFT_EXPR, type, arg1,
3773 TREE_OPERAND (arg0, 1)));
3774 }
3775 else
3776 {
3777 /* x*0 is 0, except for IEEE floating point. */
3778 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3779 || flag_fast_math)
3780 && real_zerop (arg1))
3781 return omit_one_operand (type, arg1, arg0);
3782 /* In IEEE floating point, x*1 is not equivalent to x for signaling NaNs.
3783 However, ANSI says we can drop signals,
3784 so we can do this anyway. */
3785 if (real_onep (arg1))
3786 return non_lvalue (convert (type, arg0));
3787 /* x*2 is x+x */
3788 if (! wins && real_twop (arg1))
3789 {
3790 tree arg = save_expr (arg0);
3791 return build (PLUS_EXPR, type, arg, arg);
3792 }
3793 }
3794 goto associate;
3796 case BIT_IOR_EXPR:
3797 bit_ior:
3798 if (integer_all_onesp (arg1))
3799 return omit_one_operand (type, arg1, arg0);
3800 if (integer_zerop (arg1))
3801 return non_lvalue (convert (type, arg0));
3802 t1 = distribute_bit_expr (code, type, arg0, arg1);
3803 if (t1 != NULL_TREE)
3804 return t1;
3806 /* (A << C1) | (A >> C2), where A is unsigned and C1+C2 is the size of A,
3807 is a rotate of A by C1 bits. */
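/* Illustrative instance: with a 32-bit unsigned A,
   `(A << 3) | (A >> 29)' satisfies 3 + 29 == 32 and folds to a
   left-rotate of A by 3; the count is taken from the LSHIFT side.  */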
3809 if ((TREE_CODE (arg0) == RSHIFT_EXPR
3810 || TREE_CODE (arg0) == LSHIFT_EXPR)
3811 && (TREE_CODE (arg1) == RSHIFT_EXPR
3812 || TREE_CODE (arg1) == LSHIFT_EXPR)
3813 && TREE_CODE (arg0) != TREE_CODE (arg1)
3814 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
3815 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))
3816 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3817 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3818 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
3819 && TREE_INT_CST_HIGH (TREE_OPERAND (arg1, 1)) == 0
3820 && ((TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
3821 + TREE_INT_CST_LOW (TREE_OPERAND (arg1, 1)))
3822 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
3823 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
3824 TREE_CODE (arg0) == LSHIFT_EXPR
3825 ? TREE_OPERAND (arg0, 1) : TREE_OPERAND (arg1, 1));
3827 goto associate;
3829 case BIT_XOR_EXPR:
3830 if (integer_zerop (arg1))
3831 return non_lvalue (convert (type, arg0));
3832 if (integer_all_onesp (arg1))
3833 return fold (build1 (BIT_NOT_EXPR, type, arg0));
3834 goto associate;
3836 case BIT_AND_EXPR:
3837 bit_and:
3838 if (integer_all_onesp (arg1))
3839 return non_lvalue (convert (type, arg0));
3840 if (integer_zerop (arg1))
3841 return omit_one_operand (type, arg1, arg0);
3842 t1 = distribute_bit_expr (code, type, arg0, arg1);
3843 if (t1 != NULL_TREE)
3844 return t1;
3845 /* Simplify ((int)c & 0x3ff) into (int)c, if c is unsigned char. */
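/* E.g. for an 8-bit unsigned char c, the value of `(int) c' occupies
   only the low 8 bits; a mask with all of those bits set cannot clear
   anything, so the AND is dropped.  */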
3846 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
3847 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
3848 {
3849 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
3850 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3851 && (~TREE_INT_CST_LOW (arg0)
3852 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3853 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
3854 }
3855 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
3856 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
3857 {
3858 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
3859 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3860 && (~TREE_INT_CST_LOW (arg1)
3861 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3862 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
3863 }
3864 goto associate;
3866 case BIT_ANDTC_EXPR:
3867 if (integer_all_onesp (arg0))
3868 return non_lvalue (convert (type, arg1));
3869 if (integer_zerop (arg0))
3870 return omit_one_operand (type, arg0, arg1);
3871 if (TREE_CODE (arg1) == INTEGER_CST)
3872 {
3873 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
3874 code = BIT_AND_EXPR;
3875 goto bit_and;
3876 }
3877 goto binary;
3879 case RDIV_EXPR:
3880 /* In most cases, do nothing with a divide by zero. */
3881 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3882 #ifndef REAL_INFINITY
3883 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
3884 return t;
3885 #endif
3886 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3888 /* In IEEE floating point, x/1 is not equivalent to x for signaling NaNs.
3889 However, ANSI says we can drop signals, so we can do this anyway. */
3890 if (real_onep (arg1))
3891 return non_lvalue (convert (type, arg0));
3893 /* If ARG1 is a constant, we can convert this to a multiply by the
3894 reciprocal. This does not have the same rounding properties,
3895 so only do this if -ffast-math. We can actually always safely
3896 do it if ARG1 is a power of two, but it's hard to tell if it is
3897 or not in a portable manner. */
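/* Illustrative instance: under -ffast-math, `x / 5.0' becomes
   `x * 0.2'; const_binop computes the reciprocal at compile time,
   which is why exact rounding is not preserved.  */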
3898 if (TREE_CODE (arg1) == REAL_CST && flag_fast_math
3899 && 0 != (tem = const_binop (code, build_real (type, dconst1),
3900 arg1, 0)))
3901 return fold (build (MULT_EXPR, type, arg0, tem));
3903 goto binary;
3905 case TRUNC_DIV_EXPR:
3906 case ROUND_DIV_EXPR:
3907 case FLOOR_DIV_EXPR:
3908 case CEIL_DIV_EXPR:
3909 case EXACT_DIV_EXPR:
3910 if (integer_onep (arg1))
3911 return non_lvalue (convert (type, arg0));
3912 if (integer_zerop (arg1))
3913 return t;
3915 /* If we have ((a / C1) / C2) where both divisions are the same type, try
3916 to simplify. First see if C1 * C2 overflows or not. */
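/* E.g. `(i / 4) / 10' folds to `i / 40', since 4*10 does not
   overflow; the divide-back test below rejects the transformation
   when C1*C2 wraps around.  */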
3917 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
3918 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
3919 {
3920 tree new_divisor;
3922 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
3923 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
3925 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
3926 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
3927 {
3928 /* If no overflow, divide by C1*C2. */
3929 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
3930 }
3931 }
3933 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
3934 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
3935 expressions, which often appear in the offsets or sizes of
3936 objects with a varying size. Only deal with positive divisors
3937 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
3939 Look for NOPs and SAVE_EXPRs inside. */
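/* Worked instance: `(i * 8 + 4) / 4' folds to `i * 2 + 1';
   here C1 == 8, C2 == 4, C3 == 4, giving A*(C1/C3) + C2/C3.  */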
3941 if (TREE_CODE (arg1) == INTEGER_CST
3942 && tree_int_cst_sgn (arg1) >= 0)
3943 {
3944 int have_save_expr = 0;
3945 tree c2 = integer_zero_node;
3946 tree xarg0 = arg0;
3948 if (TREE_CODE (xarg0) == SAVE_EXPR)
3949 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
3951 STRIP_NOPS (xarg0);
3953 if (TREE_CODE (xarg0) == PLUS_EXPR
3954 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
3955 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
3956 else if (TREE_CODE (xarg0) == MINUS_EXPR
3957 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
3958 /* If we are doing this computation unsigned, the negate
3959 is incorrect. */
3960 && ! TREE_UNSIGNED (type))
3961 {
3962 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
3963 xarg0 = TREE_OPERAND (xarg0, 0);
3964 }
3966 if (TREE_CODE (xarg0) == SAVE_EXPR)
3967 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
3969 STRIP_NOPS (xarg0);
3971 if (TREE_CODE (xarg0) == MULT_EXPR
3972 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
3973 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
3974 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
3975 TREE_OPERAND (xarg0, 1), arg1, 1))
3976 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
3977 TREE_OPERAND (xarg0, 1), 1)))
3978 && (tree_int_cst_sgn (c2) >= 0
3979 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
3980 arg1, 1))))
3981 {
3982 tree outer_div = integer_one_node;
3983 tree c1 = TREE_OPERAND (xarg0, 1);
3984 tree c3 = arg1;
3986 /* If C3 > C1, set them equal and do a divide by
3987 C3/C1 at the end of the operation. */
3988 if (tree_int_cst_lt (c1, c3))
3989 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
3991 /* The result is A * (C1/C3) + (C2/C3). */
3992 t = fold (build (PLUS_EXPR, type,
3993 fold (build (MULT_EXPR, type,
3994 TREE_OPERAND (xarg0, 0),
3995 const_binop (code, c1, c3, 1))),
3996 const_binop (code, c2, c3, 1)));
3998 if (! integer_onep (outer_div))
3999 t = fold (build (code, type, t, convert (type, outer_div)));
4001 if (have_save_expr)
4002 t = save_expr (t);
4004 return t;
4005 }
4006 }
4008 goto binary;
4010 case CEIL_MOD_EXPR:
4011 case FLOOR_MOD_EXPR:
4012 case ROUND_MOD_EXPR:
4013 case TRUNC_MOD_EXPR:
4014 if (integer_onep (arg1))
4015 return omit_one_operand (type, integer_zero_node, arg0);
4016 if (integer_zerop (arg1))
4017 return t;
4019 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4020 where C1 % C3 == 0. Handle similarly to the division case,
4021 but don't bother with SAVE_EXPRs. */
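/* Worked instance: `(i * 12 + 3) % 4' folds to 3, because
   12 % 4 == 0 leaves only the C2 % C3 contribution.  */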
4023 if (TREE_CODE (arg1) == INTEGER_CST
4024 && ! integer_zerop (arg1))
4025 {
4026 tree c2 = integer_zero_node;
4027 tree xarg0 = arg0;
4029 if (TREE_CODE (xarg0) == PLUS_EXPR
4030 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4031 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4032 else if (TREE_CODE (xarg0) == MINUS_EXPR
4033 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4034 && ! TREE_UNSIGNED (type))
4035 {
4036 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4037 xarg0 = TREE_OPERAND (xarg0, 0);
4038 }
4040 STRIP_NOPS (xarg0);
4042 if (TREE_CODE (xarg0) == MULT_EXPR
4043 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4044 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4045 TREE_OPERAND (xarg0, 1),
4046 arg1, 1))
4047 && tree_int_cst_sgn (c2) >= 0)
4048 /* The result is (C2%C3). */
4049 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4050 TREE_OPERAND (xarg0, 0));
4051 }
4053 goto binary;
4055 case LSHIFT_EXPR:
4056 case RSHIFT_EXPR:
4057 case LROTATE_EXPR:
4058 case RROTATE_EXPR:
4059 if (integer_zerop (arg1))
4060 return non_lvalue (convert (type, arg0));
4061 /* Since a negative shift count is not well-defined,
4062 don't try to compute it in the compiler. */
4063 if (tree_int_cst_sgn (arg1) < 0)
4064 return t;
4065 goto binary;
4067 case MIN_EXPR:
4068 if (operand_equal_p (arg0, arg1, 0))
4069 return arg0;
4070 if (INTEGRAL_TYPE_P (type)
4071 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4072 return omit_one_operand (type, arg1, arg0);
4073 goto associate;
4075 case MAX_EXPR:
4076 if (operand_equal_p (arg0, arg1, 0))
4077 return arg0;
4078 if (INTEGRAL_TYPE_P (type)
4079 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4080 return omit_one_operand (type, arg1, arg0);
4081 goto associate;
4083 case TRUTH_NOT_EXPR:
4084 /* Note that the operand of this must be an int
4085 and its values must be 0 or 1.
4086 ("true" is a fixed value perhaps depending on the language,
4087 but we don't handle values other than 1 correctly yet.) */
4088 return invert_truthvalue (arg0);
4090 case TRUTH_ANDIF_EXPR:
4091 /* Note that the operands of this must be ints
4092 and their values must be 0 or 1.
4093 ("true" is a fixed value perhaps depending on the language.) */
4094 /* If first arg is constant zero, return it. */
4095 if (integer_zerop (arg0))
4096 return arg0;
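/* ... fall through ... */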
4097 case TRUTH_AND_EXPR:
4098 /* If either arg is constant true, drop it. */
4099 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4100 return non_lvalue (arg1);
4101 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4102 return non_lvalue (arg0);
4103 /* If second arg is constant zero, result is zero, but first arg
4104 must be evaluated. */
4105 if (integer_zerop (arg1))
4106 return omit_one_operand (type, arg1, arg0);
4108 truth_andor:
4109 /* We only do these simplifications if we are optimizing. */
4110 if (!optimize)
4111 return t;
4113 /* Check for things like (A || B) && (A || C). We can convert this
4114 to A || (B && C). Note that either operator can be any of the four
4115 truth and/or operations and the transformation will still be
4116 valid. Also note that we only care about order for the
4117 ANDIF and ORIF operators. */
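/* E.g. `(a || b) && (a || c)' folds to `a || (b && c)',
   so A is tested only once.  */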
4118 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4119 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4120 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4121 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4122 || TREE_CODE (arg0) == TRUTH_OR_EXPR))
4123 {
4124 tree a00 = TREE_OPERAND (arg0, 0);
4125 tree a01 = TREE_OPERAND (arg0, 1);
4126 tree a10 = TREE_OPERAND (arg1, 0);
4127 tree a11 = TREE_OPERAND (arg1, 1);
4128 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4129 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4130 && (code == TRUTH_AND_EXPR
4131 || code == TRUTH_OR_EXPR));
4133 if (operand_equal_p (a00, a10, 0))
4134 return fold (build (TREE_CODE (arg0), type, a00,
4135 fold (build (code, type, a01, a11))));
4136 else if (commutative && operand_equal_p (a00, a11, 0))
4137 return fold (build (TREE_CODE (arg0), type, a00,
4138 fold (build (code, type, a01, a10))));
4139 else if (commutative && operand_equal_p (a01, a10, 0))
4140 return fold (build (TREE_CODE (arg0), type, a01,
4141 fold (build (code, type, a00, a11))));
4143 /* This case is tricky because we must either have commutative
4144 operators or else A10 must not have side-effects. */
4146 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4147 && operand_equal_p (a01, a11, 0))
4148 return fold (build (TREE_CODE (arg0), type,
4149 fold (build (code, type, a00, a10)),
4150 a01));
4151 }
4153 /* Check for the possibility of merging component references. If our
4154 lhs is another similar operation, try to merge its rhs with our
4155 rhs. Then try to merge our lhs and rhs. */
4156 if (TREE_CODE (arg0) == code
4157 && 0 != (tem = fold_truthop (code, type,
4158 TREE_OPERAND (arg0, 1), arg1)))
4159 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4161 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
4162 return tem;
4164 return t;
4166 case TRUTH_ORIF_EXPR:
4167 /* Note that the operands of this must be ints
4168 and their values must be 0 or true.
4169 ("true" is a fixed value perhaps depending on the language.) */
4170 /* If first arg is constant true, return it. */
4171 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4172 return arg0;
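/* ... fall through ... */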
4173 case TRUTH_OR_EXPR:
4174 /* If either arg is constant zero, drop it. */
4175 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
4176 return non_lvalue (arg1);
4177 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
4178 return non_lvalue (arg0);
4179 /* If second arg is constant true, result is true, but we must
4180 evaluate first arg. */
4181 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4182 return omit_one_operand (type, arg1, arg0);
4183 goto truth_andor;
4185 case TRUTH_XOR_EXPR:
4186 /* If either arg is constant zero, drop it. */
4187 if (integer_zerop (arg0))
4188 return non_lvalue (arg1);
4189 if (integer_zerop (arg1))
4190 return non_lvalue (arg0);
4191 /* If either arg is constant true, this is a logical inversion. */
4192 if (integer_onep (arg0))
4193 return non_lvalue (invert_truthvalue (arg1));
4194 if (integer_onep (arg1))
4195 return non_lvalue (invert_truthvalue (arg0));
4196 return t;
4198 case EQ_EXPR:
4199 case NE_EXPR:
4200 case LT_EXPR:
4201 case GT_EXPR:
4202 case LE_EXPR:
4203 case GE_EXPR:
4204 /* If one arg is a constant integer, put it last. */
4205 if (TREE_CODE (arg0) == INTEGER_CST
4206 && TREE_CODE (arg1) != INTEGER_CST)
4207 {
4208 TREE_OPERAND (t, 0) = arg1;
4209 TREE_OPERAND (t, 1) = arg0;
4210 arg0 = TREE_OPERAND (t, 0);
4211 arg1 = TREE_OPERAND (t, 1);
4212 code = swap_tree_comparison (code);
4213 TREE_SET_CODE (t, code);
4214 }
4216 /* Convert foo++ == CONST into ++foo == CONST + INCR.
4217 First, see if one arg is constant; find the constant arg
4218 and the other one. */
4219 {
4220 tree constop = 0, varop;
4221 tree *constoploc;
4223 if (TREE_CONSTANT (arg1))
4224 constoploc = &TREE_OPERAND (t, 1), constop = arg1, varop = arg0;
4225 if (TREE_CONSTANT (arg0))
4226 constoploc = &TREE_OPERAND (t, 0), constop = arg0, varop = arg1;
4228 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
4229 {
4230 /* This optimization is invalid for ordered comparisons
4231 if CONST+INCR overflows or if foo+incr might overflow.
4232 This optimization is invalid for floating point due to rounding.
4233 For pointer types we assume overflow doesn't happen. */
4234 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4235 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4236 && (code == EQ_EXPR || code == NE_EXPR)))
4237 {
4238 tree newconst
4239 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
4240 constop, TREE_OPERAND (varop, 1)));
4241 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
4242 *constoploc = newconst;
4243 return t;
4244 }
4245 }
4246 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
4247 {
4248 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4249 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4250 && (code == EQ_EXPR || code == NE_EXPR)))
4251 {
4252 tree newconst
4253 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
4254 constop, TREE_OPERAND (varop, 1)));
4255 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
4256 *constoploc = newconst;
4257 return t;
4258 }
4259 }
4260 }
4262 /* Change X >= CST to X > (CST - 1) if CST is positive. */
4263 if (TREE_CODE (arg1) == INTEGER_CST
4264 && TREE_CODE (arg0) != INTEGER_CST
4265 && tree_int_cst_sgn (arg1) > 0)
4266 {
4267 switch (TREE_CODE (t))
4268 {
4269 case GE_EXPR:
4270 code = GT_EXPR;
4271 TREE_SET_CODE (t, code);
4272 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4273 TREE_OPERAND (t, 1) = arg1;
4274 break;
4276 case LT_EXPR:
4277 code = LE_EXPR;
4278 TREE_SET_CODE (t, code);
4279 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4280 TREE_OPERAND (t, 1) = arg1;
4281 }
4282 }
4284 /* If this is an EQ or NE comparison with zero and ARG0 is
4285 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
4286 two operations, but the latter can be done in one less insn
4287 on machines that have only two-operand insns or on which a
4288 constant cannot be the first operand. */
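/* E.g. `((1 << foo) & bar) != 0' becomes `((bar >> foo) & 1) != 0',
   which avoids putting the constant 1 first in the shift.  */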
4289 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
4290 && TREE_CODE (arg0) == BIT_AND_EXPR)
4291 {
4292 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
4293 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
4294 return
4295 fold (build (code, type,
4296 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4297 build (RSHIFT_EXPR,
4298 TREE_TYPE (TREE_OPERAND (arg0, 0)),
4299 TREE_OPERAND (arg0, 1),
4300 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
4301 convert (TREE_TYPE (arg0),
4302 integer_one_node)),
4303 arg1));
4304 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
4305 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
4306 return
4307 fold (build (code, type,
4308 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4309 build (RSHIFT_EXPR,
4310 TREE_TYPE (TREE_OPERAND (arg0, 1)),
4311 TREE_OPERAND (arg0, 0),
4312 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
4313 convert (TREE_TYPE (arg0),
4314 integer_one_node)),
4315 arg1));
4316 }
4318 /* If this is an NE or EQ comparison of zero against the result of a
4319 signed MOD operation whose second operand is a power of 2, make
4320 the MOD operation unsigned since it is simpler and equivalent. */
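/* Illustrative instance: for signed i, `i % 8 == 0' is rewritten as
   `(unsigned) i % 8 == 0'; both test the low three bits, and the
   unsigned MOD by a power of 2 is a simple mask.  */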
4321 if ((code == NE_EXPR || code == EQ_EXPR)
4322 && integer_zerop (arg1)
4323 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
4324 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
4325 || TREE_CODE (arg0) == CEIL_MOD_EXPR
4326 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
4327 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
4328 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4329 {
4330 tree newtype = unsigned_type (TREE_TYPE (arg0));
4331 tree newmod = build (TREE_CODE (arg0), newtype,
4332 convert (newtype, TREE_OPERAND (arg0, 0)),
4333 convert (newtype, TREE_OPERAND (arg0, 1)));
4335 return build (code, type, newmod, convert (newtype, arg1));
4336 }
4338 /* If this is an NE comparison of zero with an AND of one, remove the
4339 comparison since the AND will give the correct value. */
4340 if (code == NE_EXPR && integer_zerop (arg1)
4341 && TREE_CODE (arg0) == BIT_AND_EXPR
4342 && integer_onep (TREE_OPERAND (arg0, 1)))
4343 return convert (type, arg0);
4345 /* If we have (A & C) == C where C is a power of 2, convert this into
4346 (A & C) != 0. Similarly for NE_EXPR. */
4347 if ((code == EQ_EXPR || code == NE_EXPR)
4348 && TREE_CODE (arg0) == BIT_AND_EXPR
4349 && integer_pow2p (TREE_OPERAND (arg0, 1))
4350 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4351 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
4352 arg0, integer_zero_node);
4354 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
4355 and similarly for >= into !=. */
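/* E.g. for unsigned x, `x < (1 << y)' holds exactly when every bit
   of x at or above position y is zero, i.e. `(x >> y) == 0'.  */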
4356 if ((code == LT_EXPR || code == GE_EXPR)
4357 && TREE_UNSIGNED (TREE_TYPE (arg0))
4358 && TREE_CODE (arg1) == LSHIFT_EXPR
4359 && integer_onep (TREE_OPERAND (arg1, 0)))
4360 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4361 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4362 TREE_OPERAND (arg1, 1)),
4363 convert (TREE_TYPE (arg0), integer_zero_node));
4365 else if ((code == LT_EXPR || code == GE_EXPR)
4366 && TREE_UNSIGNED (TREE_TYPE (arg0))
4367 && (TREE_CODE (arg1) == NOP_EXPR
4368 || TREE_CODE (arg1) == CONVERT_EXPR)
4369 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
4370 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
4371 return
4372 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4373 convert (TREE_TYPE (arg0),
4374 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4375 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
4376 convert (TREE_TYPE (arg0), integer_zero_node));
4378 /* Simplify comparison of something with itself. (For IEEE
4379 floating-point, we can only do some of these simplifications.) */
4380 if (operand_equal_p (arg0, arg1, 0))
4381 {
4382 switch (code)
4383 {
4384 case EQ_EXPR:
4385 case GE_EXPR:
4386 case LE_EXPR:
4387 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4388 {
4389 t = build_int_2 (1, 0);
4390 TREE_TYPE (t) = type;
4391 return t;
4392 }
4393 code = EQ_EXPR;
4394 TREE_SET_CODE (t, code);
4395 break;
4397 case NE_EXPR:
4398 /* For NE, we can only do this simplification if integer. */
4399 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4400 break;
4401 /* ... fall through ... */
4402 case GT_EXPR:
4403 case LT_EXPR:
4404 t = build_int_2 (0, 0);
4405 TREE_TYPE (t) = type;
4406 return t;
4407 }
4408 }
4410 /* An unsigned comparison against 0 can be simplified. */
4411 if (integer_zerop (arg1)
4412 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
4413 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
4414 && TREE_UNSIGNED (TREE_TYPE (arg1)))
4415 {
4416 switch (TREE_CODE (t))
4417 {
4418 case GT_EXPR:
4419 code = NE_EXPR;
4420 TREE_SET_CODE (t, NE_EXPR);
4421 break;
4422 case LE_EXPR:
4423 code = EQ_EXPR;
4424 TREE_SET_CODE (t, EQ_EXPR);
4425 break;
4426 case GE_EXPR:
4427 return omit_one_operand (type,
4428 convert (type, integer_one_node),
4429 arg0);
4430 case LT_EXPR:
4431 return omit_one_operand (type,
4432 convert (type, integer_zero_node),
4433 arg0);
4434 }
4435 }
4437 /* If we are comparing an expression that just has comparisons
4438 of two integer values, arithmetic expressions of those comparisons,
4439 and constants, we can simplify it. There are only three cases
4440 to check: the two values can either be equal, the first can be
4441 greater, or the second can be greater. Fold the expression for
4442 those three values. Since each value must be 0 or 1, we have
4443 eight possibilities, each of which corresponds to the constant 0
4444 or 1 or one of the six possible comparisons.
4446 This handles common cases like (a > b) == 0 but also handles
4447 expressions like ((x > y) - (y > x)) > 0, which supposedly
4448 occur in macroized code. */
4450 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
4451 {
4452 tree cval1 = 0, cval2 = 0;
4453 int save_p = 0;
4455 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
4456 /* Don't handle degenerate cases here; they should already
4457 have been handled anyway. */
4458 && cval1 != 0 && cval2 != 0
4459 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
4460 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
4461 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
4462 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
4463 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
4464 {
4465 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
4466 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
4468 /* We can't just pass T to eval_subst in case cval1 or cval2
4469 was the same as ARG1. */
4471 tree high_result
4472 = fold (build (code, type,
4473 eval_subst (arg0, cval1, maxval, cval2, minval),
4474 arg1));
4475 tree equal_result
4476 = fold (build (code, type,
4477 eval_subst (arg0, cval1, maxval, cval2, maxval),
4478 arg1));
4479 tree low_result
4480 = fold (build (code, type,
4481 eval_subst (arg0, cval1, minval, cval2, maxval),
4482 arg1));
4484 /* All three of these results should be 0 or 1. Confirm they
4485 are. Then use those values to select the proper code
4486 to use. */
4488 if ((integer_zerop (high_result)
4489 || integer_onep (high_result))
4490 && (integer_zerop (equal_result)
4491 || integer_onep (equal_result))
4492 && (integer_zerop (low_result)
4493 || integer_onep (low_result)))
4494 {
4495 /* Make a 3-bit mask with the high-order bit being the
4496 value for `>', the next for '=', and the low for '<'. */
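/* E.g. mask 5 (binary 101) means true for `>' and `<' but false
   for `==', so case 5 below selects NE_EXPR.  */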
4497 switch ((integer_onep (high_result) * 4)
4498 + (integer_onep (equal_result) * 2)
4499 + integer_onep (low_result))
4500 {
4501 case 0:
4502 /* Always false. */
4503 return omit_one_operand (type, integer_zero_node, arg0);
4504 case 1:
4505 code = LT_EXPR;
4506 break;
4507 case 2:
4508 code = EQ_EXPR;
4509 break;
4510 case 3:
4511 code = LE_EXPR;
4512 break;
4513 case 4:
4514 code = GT_EXPR;
4515 break;
4516 case 5:
4517 code = NE_EXPR;
4518 break;
4519 case 6:
4520 code = GE_EXPR;
4521 break;
4522 case 7:
4523 /* Always true. */
4524 return omit_one_operand (type, integer_one_node, arg0);
4525 }
4527 t = build (code, type, cval1, cval2);
4528 if (save_p)
4529 return save_expr (t);
4530 else
4531 return fold (t);
4532 }
4533 }
4534 }
4536 /* If this is a comparison of a field, we may be able to simplify it. */
4537 if ((TREE_CODE (arg0) == COMPONENT_REF
4538 || TREE_CODE (arg0) == BIT_FIELD_REF)
4539 && (code == EQ_EXPR || code == NE_EXPR)
4540 /* Handle the constant case even without -O
4541 to make sure the warnings are given. */
4542 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
4543 {
4544 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
4545 return t1 ? t1 : t;
4546 }
4548 /* If this is a comparison of complex values and either or both
4549 sides are a COMPLEX_EXPR, it is best to split up the comparisons
4550 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
4551 may prevent needless evaluations. */
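/* Rough illustration (assuming z is a complex operand):
   `COMPLEX_EXPR (a, b) == z' becomes approximately
   `a == REALPART (z) && b == IMAGPART (z)'.  */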
4552 if ((code == EQ_EXPR || code == NE_EXPR)
4553 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
4554 && (TREE_CODE (arg0) == COMPLEX_EXPR
4555 || TREE_CODE (arg1) == COMPLEX_EXPR))
4556 {
4557 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
4558 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
4559 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
4560 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
4561 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
4563 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
4564 : TRUTH_ORIF_EXPR),
4565 type,
4566 fold (build (code, type, real0, real1)),
4567 fold (build (code, type, imag0, imag1))));
4568 }
4570 /* From here on, the only cases we handle are when the result is
4571 known to be a constant.
4573 To compute GT, swap the arguments and do LT.
4574 To compute GE, do LT and invert the result.
4575 To compute LE, swap the arguments, do LT and invert the result.
4576 To compute NE, do EQ and invert the result.
4578 Therefore, the code below must handle only EQ and LT. */
4580 if (code == LE_EXPR || code == GT_EXPR)
4581 {
4582 tem = arg0, arg0 = arg1, arg1 = tem;
4583 code = swap_tree_comparison (code);
4584 }
4586 /* Note that it is safe to invert for real values here because we
4587 will check below in the one case that it matters. */
4589 invert = 0;
4590 if (code == NE_EXPR || code == GE_EXPR)
4591 {
4592 invert = 1;
4593 code = invert_tree_comparison (code);
4594 }
4596 /* Compute a result for LT or EQ if args permit;
4597 otherwise return T. */
4598 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
4599 {
4600 if (code == EQ_EXPR)
4601 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
4602 == TREE_INT_CST_LOW (arg1))
4603 && (TREE_INT_CST_HIGH (arg0)
4604 == TREE_INT_CST_HIGH (arg1)),
4605 0);
4606 else
4607 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
4608 ? INT_CST_LT_UNSIGNED (arg0, arg1)
4609 : INT_CST_LT (arg0, arg1)),
4610 0);
4611 }
4613 /* Assume a nonexplicit constant cannot equal an explicit one,
4614 since such code would be undefined anyway.
4615 Exception: on sysvr4, using #pragma weak,
4616 a label can come out as 0. */
4617 else if (TREE_CODE (arg1) == INTEGER_CST
4618 && !integer_zerop (arg1)
4619 && TREE_CONSTANT (arg0)
4620 && TREE_CODE (arg0) == ADDR_EXPR
4621 && code == EQ_EXPR)
4622 t1 = build_int_2 (0, 0);
4624 /* Two real constants can be compared explicitly. */
4625 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
4626 {
4627 /* If either operand is a NaN, the result is false with two
4628 exceptions: First, an NE_EXPR is true on NaNs, but that case
4629 is already handled correctly since we will be inverting the
4630 result for NE_EXPR. Second, if we had inverted a LE_EXPR
4631 or a GE_EXPR into a LT_EXPR, we must return true so that it
4632 will be inverted into false. */
4634 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
4635 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
4636 t1 = build_int_2 (invert && code == LT_EXPR, 0);
4638 else if (code == EQ_EXPR)
4639 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
4640 TREE_REAL_CST (arg1)),
4641 0);
4642 else
4643 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
4644 TREE_REAL_CST (arg1)),
4645 0);
4646 }
4648 if (t1 == NULL_TREE)
4649 return t;
4651 if (invert)
4652 TREE_INT_CST_LOW (t1) ^= 1;
4654 TREE_TYPE (t1) = type;
4655 return t1;
4657 case COND_EXPR:
4658 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
4659 so all simple results must be passed through pedantic_non_lvalue. */
4660 if (TREE_CODE (arg0) == INTEGER_CST)
4661 return pedantic_non_lvalue
4662 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
4663 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
4664 return pedantic_non_lvalue (omit_one_operand (type, arg1, arg0));
4666 /* If the second operand is zero, invert the comparison and swap
4667 the second and third operands. Likewise if the second operand
4668 is constant and the third is not or if the third operand is
4669 equivalent to the first operand of the comparison. */
4671 if (integer_zerop (arg1)
4672 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
4673 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4674 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4675 TREE_OPERAND (t, 2),
4676 TREE_OPERAND (arg0, 1))))
4677 {
4678 /* See if this can be inverted. If it can't, possibly because
4679 it was a floating-point inequality comparison, don't do
4680 anything. */
4681 tem = invert_truthvalue (arg0);
4683 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
4684 {
4685 arg0 = TREE_OPERAND (t, 0) = tem;
4686 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 2);
4687 TREE_OPERAND (t, 2) = arg1;
4688 arg1 = TREE_OPERAND (t, 1);
4689 }
4690 }
4692 /* If we have A op B ? A : C, we may be able to convert this to a
4693 simpler expression, depending on the operation and the values
4694 of B and C. IEEE floating point prevents this though,
4695 because A or B might be -0.0 or a NaN. */
4697 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4698 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4699 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4700 || flag_fast_math)
4701 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4702 arg1, TREE_OPERAND (arg0, 1)))
4703 {
4704 tree arg2 = TREE_OPERAND (t, 2);
4705 enum tree_code comp_code = TREE_CODE (arg0);
4707 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
4708 depending on the comparison operation. */
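/* E.g. `x >= 0 ? x : -x' folds to ABS_EXPR (x), and
   `x <= 0 ? x : -x' to its negation, per the cases below.  */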
4709 if (integer_zerop (TREE_OPERAND (arg0, 1))
4710 && TREE_CODE (arg2) == NEGATE_EXPR
4711 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4712 switch (comp_code)
4713 {
4714 case EQ_EXPR:
4715 return pedantic_non_lvalue
4716 (fold (build1 (NEGATE_EXPR, type, arg1)));
4717 case NE_EXPR:
4718 return pedantic_non_lvalue (convert (type, arg1));
4719 case GE_EXPR:
4720 case GT_EXPR:
4721 return pedantic_non_lvalue
4722 (fold (build1 (ABS_EXPR, type, arg1)));
4723 case LE_EXPR:
4724 case LT_EXPR:
4725 return pedantic_non_lvalue
4726 (fold (build1 (NEGATE_EXPR, type,
4727 fold (build1 (ABS_EXPR, type, arg1)))));
4728 }
4730 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
4731 always zero. */
4733 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
4734 {
4735 if (comp_code == NE_EXPR)
4736 return pedantic_non_lvalue (convert (type, arg1));
4737 else if (comp_code == EQ_EXPR)
4738 return pedantic_non_lvalue (convert (type, integer_zero_node));
4739 }
4741 /* If this is A op B ? A : B, this is either A, B, min (A, B),
4742 or max (A, B), depending on the operation. */
4744 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
4745 arg2, TREE_OPERAND (arg0, 0)))
4746 switch (comp_code)
4747 {
4748 case EQ_EXPR:
4749 return pedantic_non_lvalue (convert (type, arg2));
4750 case NE_EXPR:
4751 return pedantic_non_lvalue (convert (type, arg1));
4752 case LE_EXPR:
4753 case LT_EXPR:
4754 return pedantic_non_lvalue
4755 (fold (build (MIN_EXPR, type, arg1, arg2)));
4756 case GE_EXPR:
4757 case GT_EXPR:
4758 return pedantic_non_lvalue
4759 (fold (build (MAX_EXPR, type, arg1, arg2)));
4760 }
4762 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4763 we might still be able to simplify this. For example,
4764 if C1 is one less or one more than C2, this might have started
4765 out as a MIN or MAX and been transformed by this function.
4766 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
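/* Worked instance: `a < 5 ? a : 4' is min (a, 4), since C1 == 5
   is C2 + 1; the LT_EXPR case below catches it.  */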
4768 if (INTEGRAL_TYPE_P (type)
4769 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4770 && TREE_CODE (arg2) == INTEGER_CST)
4771 switch (comp_code)
4772 {
4773 case EQ_EXPR:
4774 /* We can replace A with C1 in this case. */
4775 arg1 = TREE_OPERAND (t, 1)
4776 = convert (type, TREE_OPERAND (arg0, 1));
4777 break;
4779 case LT_EXPR:
4780 /* If C1 is C2 + 1, this is min(A, C2). */
4781 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4782 && operand_equal_p (TREE_OPERAND (arg0, 1),
4783 const_binop (PLUS_EXPR, arg2,
4784 integer_one_node, 0), 1))
4785 return pedantic_non_lvalue
4786 (fold (build (MIN_EXPR, type, arg1, arg2)));
4787 break;
4789 case LE_EXPR:
4790 /* If C1 is C2 - 1, this is min(A, C2). */
4791 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4792 && operand_equal_p (TREE_OPERAND (arg0, 1),
4793 const_binop (MINUS_EXPR, arg2,
4794 integer_one_node, 0), 1))
4795 return pedantic_non_lvalue
4796 (fold (build (MIN_EXPR, type, arg1, arg2)));
4797 break;
4799 case GT_EXPR:
4800 /* If C1 is C2 - 1, this is max(A, C2). */
4801 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4802 && operand_equal_p (TREE_OPERAND (arg0, 1),
4803 const_binop (MINUS_EXPR, arg2,
4804 integer_one_node, 0), 1))
4805 return pedantic_non_lvalue
4806 (fold (build (MAX_EXPR, type, arg1, arg2)));
4807 break;
4809 case GE_EXPR:
4810 /* If C1 is C2 + 1, this is max(A, C2). */
4811 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4812 && operand_equal_p (TREE_OPERAND (arg0, 1),
4813 const_binop (PLUS_EXPR, arg2,
4814 integer_one_node, 0), 1))
4815 return pedantic_non_lvalue
4816 (fold (build (MAX_EXPR, type, arg1, arg2)));
4817 break;
4818 }
4819 }
4821 /* Convert A ? 1 : 0 to simply A. */
4822 if (integer_onep (TREE_OPERAND (t, 1))
4823 && integer_zerop (TREE_OPERAND (t, 2))
4824 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
4825 call to fold will try to move the conversion inside
4826 a COND, which will recurse. In that case, the COND_EXPR
4827 is probably the best choice, so leave it alone. */
4828 && type == TREE_TYPE (arg0))
4829 return pedantic_non_lvalue (arg0);
4832 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
4833 operation is simply A & 2. */
4835 if (integer_zerop (TREE_OPERAND (t, 2))
4836 && TREE_CODE (arg0) == NE_EXPR
4837 && integer_zerop (TREE_OPERAND (arg0, 1))
4838 && integer_pow2p (arg1)
4839 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
4840 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
4841 arg1, 1))
4842 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
4844 return t;
4846 case COMPOUND_EXPR:
4847 /* When pedantic, a compound expression can be neither an lvalue
4848 nor an integer constant expression. */
4849 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
4850 return t;
4851 /* Don't let (0, 0) be null pointer constant. */
4852 if (integer_zerop (arg1))
4853 return non_lvalue (arg1);
4854 return arg1;
4856 case COMPLEX_EXPR:
4857 if (wins)
4858 return build_complex (arg0, arg1);
4859 return t;
4861 case REALPART_EXPR:
4862 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4863 return t;
4864 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4865 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
4866 TREE_OPERAND (arg0, 1));
4867 else if (TREE_CODE (arg0) == COMPLEX_CST)
4868 return TREE_REALPART (arg0);
4869 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4870 return fold (build (TREE_CODE (arg0), type,
4871 fold (build1 (REALPART_EXPR, type,
4872 TREE_OPERAND (arg0, 0))),
4873 fold (build1 (REALPART_EXPR,
4874 type, TREE_OPERAND (arg0, 1)))));
4875 return t;
4877 case IMAGPART_EXPR:
4878 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4879 return convert (type, integer_zero_node);
4880 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4881 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
4882 TREE_OPERAND (arg0, 0));
4883 else if (TREE_CODE (arg0) == COMPLEX_CST)
4884 return TREE_IMAGPART (arg0);
4885 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4886 return fold (build (TREE_CODE (arg0), type,
4887 fold (build1 (IMAGPART_EXPR, type,
4888 TREE_OPERAND (arg0, 0))),
4889 fold (build1 (IMAGPART_EXPR, type,
4890 TREE_OPERAND (arg0, 1)))));
4891 return t;
4893 default:
4894 return t;
4895 } /* switch (code) */