Import gcc-2.8.1.tar.bz2
[official-gcc.git] / gcc / fold-const.c
blob3feee8ad3d2b42513dff8e0b70907102519d49d5
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
25 @@ warn if precision et. al. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
45 #include "config.h"
46 #include <stdio.h>
47 #include <setjmp.h>
48 #include "flags.h"
49 #include "tree.h"
/* Handle floating overflow for `const_binop'.  */
static jmp_buf float_error;

/* Forward declarations for the helpers defined later in this file.
   PROTO expands to a full prototype on ANSI hosts and to an empty
   parameter list on traditional-C hosts.  */
static void encode	PROTO((HOST_WIDE_INT *,
			       HOST_WIDE_INT, HOST_WIDE_INT));
static void decode	PROTO((HOST_WIDE_INT *,
			       HOST_WIDE_INT *, HOST_WIDE_INT *));
int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
				HOST_WIDE_INT, HOST_WIDE_INT,
				HOST_WIDE_INT, HOST_WIDE_INT *,
				HOST_WIDE_INT *, HOST_WIDE_INT *,
				HOST_WIDE_INT *));
static int split_tree	PROTO((tree, enum tree_code, tree *,
			       tree *, int *));
static tree int_const_binop PROTO((enum tree_code, tree, tree, int, int));
static tree const_binop PROTO((enum tree_code, tree, tree, int));
static tree fold_convert PROTO((tree, tree));
static enum tree_code invert_tree_comparison PROTO((enum tree_code));
static enum tree_code swap_tree_comparison PROTO((enum tree_code));
static int truth_value_p PROTO((enum tree_code));
static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
static tree eval_subst	PROTO((tree, tree, tree, tree, tree));
static tree omit_one_operand PROTO((tree, tree, tree));
static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
					      tree, tree));
static tree decode_field_reference PROTO((tree, int *, int *,
					  enum machine_mode *, int *,
					  int *, tree *, tree *));
static int all_ones_mask_p PROTO((tree, int));
static int simple_operand_p PROTO((tree));
static tree range_binop PROTO((enum tree_code, tree, tree, int,
			       tree, int));
static tree make_range	PROTO((tree, int *, tree *, tree *));
static tree build_range_check PROTO((tree, tree, int, tree, tree));
static int merge_ranges PROTO((int *, tree *, tree *, int, tree, tree,
			       int, tree, tree));
static tree fold_range_test PROTO((tree));
static tree unextend	PROTO((tree, int, int, tree));
static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
static tree strip_compound_expr PROTO((tree, tree));
/* Default branch cost used by the logical-operator folding code below
   when the target does not provide one.  */
#ifndef BRANCH_COST
#define BRANCH_COST 1
#endif

/* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
   Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
   Then this yields nonzero if overflow occurred during the addition.
   Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
   Use `^' to test whether signs differ, and `< 0' to isolate the sign.  */
#define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number.  */

/* LOWPART/HIGHPART extract the low/high half-word of X; BASE is the
   value of one unit in the second half-word position.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
117 /* Unpack a two-word integer into 4 words.
118 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
119 WORDS points to the array of HOST_WIDE_INTs. */
121 static void
122 encode (words, low, hi)
123 HOST_WIDE_INT *words;
124 HOST_WIDE_INT low, hi;
126 words[0] = LOWPART (low);
127 words[1] = HIGHPART (low);
128 words[2] = LOWPART (hi);
129 words[3] = HIGHPART (hi);
132 /* Pack an array of 4 words into a two-word integer.
133 WORDS points to the array of words.
134 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
136 static void
137 decode (words, low, hi)
138 HOST_WIDE_INT *words;
139 HOST_WIDE_INT *low, *hi;
141 *low = words[0] | words[1] * BASE;
142 *hi = words[2] | words[3] * BASE;
145 /* Make the integer constant T valid for its type
146 by setting to 0 or 1 all the bits in the constant
147 that don't belong in the type.
148 Yield 1 if a signed overflow occurs, 0 otherwise.
149 If OVERFLOW is nonzero, a signed overflow has already occurred
150 in calculating T, so propagate it.
152 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
153 if it exists. */
156 force_fit_type (t, overflow)
157 tree t;
158 int overflow;
160 HOST_WIDE_INT low, high;
161 register int prec;
163 if (TREE_CODE (t) == REAL_CST)
165 #ifdef CHECK_FLOAT_VALUE
166 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
167 overflow);
168 #endif
169 return overflow;
172 else if (TREE_CODE (t) != INTEGER_CST)
173 return overflow;
175 low = TREE_INT_CST_LOW (t);
176 high = TREE_INT_CST_HIGH (t);
178 if (POINTER_TYPE_P (TREE_TYPE (t)))
179 prec = POINTER_SIZE;
180 else
181 prec = TYPE_PRECISION (TREE_TYPE (t));
183 /* First clear all bits that are beyond the type's precision. */
185 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
187 else if (prec > HOST_BITS_PER_WIDE_INT)
189 TREE_INT_CST_HIGH (t)
190 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
192 else
194 TREE_INT_CST_HIGH (t) = 0;
195 if (prec < HOST_BITS_PER_WIDE_INT)
196 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
199 /* Unsigned types do not suffer sign extension or overflow. */
200 if (TREE_UNSIGNED (TREE_TYPE (t)))
201 return overflow;
203 /* If the value's sign bit is set, extend the sign. */
204 if (prec != 2 * HOST_BITS_PER_WIDE_INT
205 && (prec > HOST_BITS_PER_WIDE_INT
206 ? (TREE_INT_CST_HIGH (t)
207 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
208 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
210 /* Value is negative:
211 set to 1 all the bits that are outside this type's precision. */
212 if (prec > HOST_BITS_PER_WIDE_INT)
214 TREE_INT_CST_HIGH (t)
215 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
217 else
219 TREE_INT_CST_HIGH (t) = -1;
220 if (prec < HOST_BITS_PER_WIDE_INT)
221 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
225 /* Yield nonzero if signed overflow occurred. */
226 return
227 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
228 != 0);
231 /* Add two doubleword integers with doubleword result.
232 Each argument is given as two `HOST_WIDE_INT' pieces.
233 One argument is L1 and H1; the other, L2 and H2.
234 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
237 add_double (l1, h1, l2, h2, lv, hv)
238 HOST_WIDE_INT l1, h1, l2, h2;
239 HOST_WIDE_INT *lv, *hv;
241 HOST_WIDE_INT l, h;
243 l = l1 + l2;
244 h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
246 *lv = l;
247 *hv = h;
248 return overflow_sum_sign (h1, h2, h);
251 /* Negate a doubleword integer with doubleword result.
252 Return nonzero if the operation overflows, assuming it's signed.
253 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
254 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
257 neg_double (l1, h1, lv, hv)
258 HOST_WIDE_INT l1, h1;
259 HOST_WIDE_INT *lv, *hv;
261 if (l1 == 0)
263 *lv = 0;
264 *hv = - h1;
265 return (*hv & h1) < 0;
267 else
269 *lv = - l1;
270 *hv = ~ h1;
271 return 0;
275 /* Multiply two doubleword integers with doubleword result.
276 Return nonzero if the operation overflows, assuming it's signed.
277 Each argument is given as two `HOST_WIDE_INT' pieces.
278 One argument is L1 and H1; the other, L2 and H2.
279 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
282 mul_double (l1, h1, l2, h2, lv, hv)
283 HOST_WIDE_INT l1, h1, l2, h2;
284 HOST_WIDE_INT *lv, *hv;
286 HOST_WIDE_INT arg1[4];
287 HOST_WIDE_INT arg2[4];
288 HOST_WIDE_INT prod[4 * 2];
289 register unsigned HOST_WIDE_INT carry;
290 register int i, j, k;
291 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
293 encode (arg1, l1, h1);
294 encode (arg2, l2, h2);
296 bzero ((char *) prod, sizeof prod);
298 for (i = 0; i < 4; i++)
300 carry = 0;
301 for (j = 0; j < 4; j++)
303 k = i + j;
304 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
305 carry += arg1[i] * arg2[j];
306 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
307 carry += prod[k];
308 prod[k] = LOWPART (carry);
309 carry = HIGHPART (carry);
311 prod[i + 4] = carry;
314 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
316 /* Check for overflow by calculating the top half of the answer in full;
317 it should agree with the low half's sign bit. */
318 decode (prod+4, &toplow, &tophigh);
319 if (h1 < 0)
321 neg_double (l2, h2, &neglow, &neghigh);
322 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
324 if (h2 < 0)
326 neg_double (l1, h1, &neglow, &neghigh);
327 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
329 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
332 /* Shift the doubleword integer in L1, H1 left by COUNT places
333 keeping only PREC bits of result.
334 Shift right if COUNT is negative.
335 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
336 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
338 void
339 lshift_double (l1, h1, count, prec, lv, hv, arith)
340 HOST_WIDE_INT l1, h1, count;
341 int prec;
342 HOST_WIDE_INT *lv, *hv;
343 int arith;
345 if (count < 0)
347 rshift_double (l1, h1, - count, prec, lv, hv, arith);
348 return;
351 #ifdef SHIFT_COUNT_TRUNCATED
352 if (SHIFT_COUNT_TRUNCATED)
353 count %= prec;
354 #endif
356 if (count >= HOST_BITS_PER_WIDE_INT)
358 *hv = (unsigned HOST_WIDE_INT) l1 << count - HOST_BITS_PER_WIDE_INT;
359 *lv = 0;
361 else
363 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
364 | ((unsigned HOST_WIDE_INT) l1 >> HOST_BITS_PER_WIDE_INT - count - 1 >> 1));
365 *lv = (unsigned HOST_WIDE_INT) l1 << count;
369 /* Shift the doubleword integer in L1, H1 right by COUNT places
370 keeping only PREC bits of result. COUNT must be positive.
371 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
372 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
374 void
375 rshift_double (l1, h1, count, prec, lv, hv, arith)
376 HOST_WIDE_INT l1, h1, count;
377 int prec;
378 HOST_WIDE_INT *lv, *hv;
379 int arith;
381 unsigned HOST_WIDE_INT signmask;
382 signmask = (arith
383 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
384 : 0);
386 #ifdef SHIFT_COUNT_TRUNCATED
387 if (SHIFT_COUNT_TRUNCATED)
388 count %= prec;
389 #endif
391 if (count >= HOST_BITS_PER_WIDE_INT)
393 *hv = signmask;
394 *lv = ((signmask << 2 * HOST_BITS_PER_WIDE_INT - count - 1 << 1)
395 | ((unsigned HOST_WIDE_INT) h1 >> count - HOST_BITS_PER_WIDE_INT));
397 else
399 *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
400 | ((unsigned HOST_WIDE_INT) h1 << HOST_BITS_PER_WIDE_INT - count - 1 << 1));
401 *hv = ((signmask << HOST_BITS_PER_WIDE_INT - count)
402 | ((unsigned HOST_WIDE_INT) h1 >> count));
406 /* Rotate the doubleword integer in L1, H1 left by COUNT places
407 keeping only PREC bits of result.
408 Rotate right if COUNT is negative.
409 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
411 void
412 lrotate_double (l1, h1, count, prec, lv, hv)
413 HOST_WIDE_INT l1, h1, count;
414 int prec;
415 HOST_WIDE_INT *lv, *hv;
417 HOST_WIDE_INT s1l, s1h, s2l, s2h;
419 count %= prec;
420 if (count < 0)
421 count += prec;
423 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
424 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
425 *lv = s1l | s2l;
426 *hv = s1h | s2h;
429 /* Rotate the doubleword integer in L1, H1 left by COUNT places
430 keeping only PREC bits of result. COUNT must be positive.
431 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
433 void
434 rrotate_double (l1, h1, count, prec, lv, hv)
435 HOST_WIDE_INT l1, h1, count;
436 int prec;
437 HOST_WIDE_INT *lv, *hv;
439 HOST_WIDE_INT s1l, s1h, s2l, s2h;
441 count %= prec;
442 if (count < 0)
443 count += prec;
445 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
446 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
447 *lv = s1l | s2l;
448 *hv = s1h | s2h;
451 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
452 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
453 CODE is a tree code for a kind of division, one of
454 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
455 or EXACT_DIV_EXPR
456 It controls how the quotient is rounded to a integer.
457 Return nonzero if the operation overflows.
458 UNS nonzero says do unsigned division. */
461 div_and_round_double (code, uns,
462 lnum_orig, hnum_orig, lden_orig, hden_orig,
463 lquo, hquo, lrem, hrem)
464 enum tree_code code;
465 int uns;
466 HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
467 HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
468 HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
470 int quo_neg = 0;
471 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
472 HOST_WIDE_INT den[4], quo[4];
473 register int i, j;
474 unsigned HOST_WIDE_INT work;
475 register unsigned HOST_WIDE_INT carry = 0;
476 HOST_WIDE_INT lnum = lnum_orig;
477 HOST_WIDE_INT hnum = hnum_orig;
478 HOST_WIDE_INT lden = lden_orig;
479 HOST_WIDE_INT hden = hden_orig;
480 int overflow = 0;
482 if ((hden == 0) && (lden == 0))
483 overflow = 1, lden = 1;
485 /* calculate quotient sign and convert operands to unsigned. */
486 if (!uns)
488 if (hnum < 0)
490 quo_neg = ~ quo_neg;
491 /* (minimum integer) / (-1) is the only overflow case. */
492 if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
493 overflow = 1;
495 if (hden < 0)
497 quo_neg = ~ quo_neg;
498 neg_double (lden, hden, &lden, &hden);
502 if (hnum == 0 && hden == 0)
503 { /* single precision */
504 *hquo = *hrem = 0;
505 /* This unsigned division rounds toward zero. */
506 *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
507 goto finish_up;
510 if (hnum == 0)
511 { /* trivial case: dividend < divisor */
512 /* hden != 0 already checked. */
513 *hquo = *lquo = 0;
514 *hrem = hnum;
515 *lrem = lnum;
516 goto finish_up;
519 bzero ((char *) quo, sizeof quo);
521 bzero ((char *) num, sizeof num); /* to zero 9th element */
522 bzero ((char *) den, sizeof den);
524 encode (num, lnum, hnum);
525 encode (den, lden, hden);
527 /* Special code for when the divisor < BASE. */
528 if (hden == 0 && lden < BASE)
530 /* hnum != 0 already checked. */
531 for (i = 4 - 1; i >= 0; i--)
533 work = num[i] + carry * BASE;
534 quo[i] = work / (unsigned HOST_WIDE_INT) lden;
535 carry = work % (unsigned HOST_WIDE_INT) lden;
538 else
540 /* Full double precision division,
541 with thanks to Don Knuth's "Seminumerical Algorithms". */
542 int num_hi_sig, den_hi_sig;
543 unsigned HOST_WIDE_INT quo_est, scale;
545 /* Find the highest non-zero divisor digit. */
546 for (i = 4 - 1; ; i--)
547 if (den[i] != 0) {
548 den_hi_sig = i;
549 break;
552 /* Insure that the first digit of the divisor is at least BASE/2.
553 This is required by the quotient digit estimation algorithm. */
555 scale = BASE / (den[den_hi_sig] + 1);
556 if (scale > 1) { /* scale divisor and dividend */
557 carry = 0;
558 for (i = 0; i <= 4 - 1; i++) {
559 work = (num[i] * scale) + carry;
560 num[i] = LOWPART (work);
561 carry = HIGHPART (work);
562 } num[4] = carry;
563 carry = 0;
564 for (i = 0; i <= 4 - 1; i++) {
565 work = (den[i] * scale) + carry;
566 den[i] = LOWPART (work);
567 carry = HIGHPART (work);
568 if (den[i] != 0) den_hi_sig = i;
572 num_hi_sig = 4;
574 /* Main loop */
575 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
576 /* guess the next quotient digit, quo_est, by dividing the first
577 two remaining dividend digits by the high order quotient digit.
578 quo_est is never low and is at most 2 high. */
579 unsigned HOST_WIDE_INT tmp;
581 num_hi_sig = i + den_hi_sig + 1;
582 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
583 if (num[num_hi_sig] != den[den_hi_sig])
584 quo_est = work / den[den_hi_sig];
585 else
586 quo_est = BASE - 1;
588 /* refine quo_est so it's usually correct, and at most one high. */
589 tmp = work - quo_est * den[den_hi_sig];
590 if (tmp < BASE
591 && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
592 quo_est--;
594 /* Try QUO_EST as the quotient digit, by multiplying the
595 divisor by QUO_EST and subtracting from the remaining dividend.
596 Keep in mind that QUO_EST is the I - 1st digit. */
598 carry = 0;
599 for (j = 0; j <= den_hi_sig; j++)
601 work = quo_est * den[j] + carry;
602 carry = HIGHPART (work);
603 work = num[i + j] - LOWPART (work);
604 num[i + j] = LOWPART (work);
605 carry += HIGHPART (work) != 0;
608 /* if quo_est was high by one, then num[i] went negative and
609 we need to correct things. */
611 if (num[num_hi_sig] < carry)
613 quo_est--;
614 carry = 0; /* add divisor back in */
615 for (j = 0; j <= den_hi_sig; j++)
617 work = num[i + j] + den[j] + carry;
618 carry = HIGHPART (work);
619 num[i + j] = LOWPART (work);
621 num [num_hi_sig] += carry;
624 /* store the quotient digit. */
625 quo[i] = quo_est;
629 decode (quo, lquo, hquo);
631 finish_up:
632 /* if result is negative, make it so. */
633 if (quo_neg)
634 neg_double (*lquo, *hquo, lquo, hquo);
636 /* compute trial remainder: rem = num - (quo * den) */
637 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
638 neg_double (*lrem, *hrem, lrem, hrem);
639 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
641 switch (code)
643 case TRUNC_DIV_EXPR:
644 case TRUNC_MOD_EXPR: /* round toward zero */
645 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
646 return overflow;
648 case FLOOR_DIV_EXPR:
649 case FLOOR_MOD_EXPR: /* round toward negative infinity */
650 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
652 /* quo = quo - 1; */
653 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
654 lquo, hquo);
656 else return overflow;
657 break;
659 case CEIL_DIV_EXPR:
660 case CEIL_MOD_EXPR: /* round toward positive infinity */
661 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
663 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
664 lquo, hquo);
666 else return overflow;
667 break;
669 case ROUND_DIV_EXPR:
670 case ROUND_MOD_EXPR: /* round to closest integer */
672 HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
673 HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
675 /* get absolute values */
676 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
677 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
679 /* if (2 * abs (lrem) >= abs (lden)) */
680 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
681 labs_rem, habs_rem, &ltwice, &htwice);
682 if (((unsigned HOST_WIDE_INT) habs_den
683 < (unsigned HOST_WIDE_INT) htwice)
684 || (((unsigned HOST_WIDE_INT) habs_den
685 == (unsigned HOST_WIDE_INT) htwice)
686 && ((HOST_WIDE_INT unsigned) labs_den
687 < (unsigned HOST_WIDE_INT) ltwice)))
689 if (*hquo < 0)
690 /* quo = quo - 1; */
691 add_double (*lquo, *hquo,
692 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
693 else
694 /* quo = quo + 1; */
695 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
696 lquo, hquo);
698 else return overflow;
700 break;
702 default:
703 abort ();
706 /* compute true remainder: rem = num - (quo * den) */
707 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
708 neg_double (*lrem, *hrem, lrem, hrem);
709 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
710 return overflow;
713 #ifndef REAL_ARITHMETIC
714 /* Effectively truncate a real value to represent the nearest possible value
715 in a narrower mode. The result is actually represented in the same data
716 type as the argument, but its value is usually different.
718 A trap may occur during the FP operations and it is the responsibility
719 of the calling function to have a handler established. */
721 REAL_VALUE_TYPE
722 real_value_truncate (mode, arg)
723 enum machine_mode mode;
724 REAL_VALUE_TYPE arg;
726 return REAL_VALUE_TRUNCATE (mode, arg);
729 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
731 /* Check for infinity in an IEEE double precision number. */
734 target_isinf (x)
735 REAL_VALUE_TYPE x;
737 /* The IEEE 64-bit double format. */
738 union {
739 REAL_VALUE_TYPE d;
740 struct {
741 unsigned sign : 1;
742 unsigned exponent : 11;
743 unsigned mantissa1 : 20;
744 unsigned mantissa2;
745 } little_endian;
746 struct {
747 unsigned mantissa2;
748 unsigned mantissa1 : 20;
749 unsigned exponent : 11;
750 unsigned sign : 1;
751 } big_endian;
752 } u;
754 u.d = dconstm1;
755 if (u.big_endian.sign == 1)
757 u.d = x;
758 return (u.big_endian.exponent == 2047
759 && u.big_endian.mantissa1 == 0
760 && u.big_endian.mantissa2 == 0);
762 else
764 u.d = x;
765 return (u.little_endian.exponent == 2047
766 && u.little_endian.mantissa1 == 0
767 && u.little_endian.mantissa2 == 0);
771 /* Check whether an IEEE double precision number is a NaN. */
774 target_isnan (x)
775 REAL_VALUE_TYPE x;
777 /* The IEEE 64-bit double format. */
778 union {
779 REAL_VALUE_TYPE d;
780 struct {
781 unsigned sign : 1;
782 unsigned exponent : 11;
783 unsigned mantissa1 : 20;
784 unsigned mantissa2;
785 } little_endian;
786 struct {
787 unsigned mantissa2;
788 unsigned mantissa1 : 20;
789 unsigned exponent : 11;
790 unsigned sign : 1;
791 } big_endian;
792 } u;
794 u.d = dconstm1;
795 if (u.big_endian.sign == 1)
797 u.d = x;
798 return (u.big_endian.exponent == 2047
799 && (u.big_endian.mantissa1 != 0
800 || u.big_endian.mantissa2 != 0));
802 else
804 u.d = x;
805 return (u.little_endian.exponent == 2047
806 && (u.little_endian.mantissa1 != 0
807 || u.little_endian.mantissa2 != 0));
811 /* Check for a negative IEEE double precision number. */
814 target_negative (x)
815 REAL_VALUE_TYPE x;
817 /* The IEEE 64-bit double format. */
818 union {
819 REAL_VALUE_TYPE d;
820 struct {
821 unsigned sign : 1;
822 unsigned exponent : 11;
823 unsigned mantissa1 : 20;
824 unsigned mantissa2;
825 } little_endian;
826 struct {
827 unsigned mantissa2;
828 unsigned mantissa1 : 20;
829 unsigned exponent : 11;
830 unsigned sign : 1;
831 } big_endian;
832 } u;
834 u.d = dconstm1;
835 if (u.big_endian.sign == 1)
837 u.d = x;
838 return u.big_endian.sign;
840 else
842 u.d = x;
843 return u.little_endian.sign;
846 #else /* Target not IEEE */
848 /* Let's assume other float formats don't have infinity.
849 (This can be overridden by redefining REAL_VALUE_ISINF.) */
851 target_isinf (x)
852 REAL_VALUE_TYPE x;
854 return 0;
857 /* Let's assume other float formats don't have NaNs.
858 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
860 target_isnan (x)
861 REAL_VALUE_TYPE x;
863 return 0;
866 /* Let's assume other float formats don't have minus zero.
867 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
869 target_negative (x)
870 REAL_VALUE_TYPE x;
872 return x < 0;
874 #endif /* Target not IEEE */
876 /* Try to change R into its exact multiplicative inverse in machine mode
877 MODE. Return nonzero function value if successful. */
880 exact_real_inverse (mode, r)
881 enum machine_mode mode;
882 REAL_VALUE_TYPE *r;
884 union
886 double d;
887 unsigned short i[4];
888 }x, t, y;
889 int i;
891 /* Usually disable if bounds checks are not reliable. */
892 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
893 return 0;
895 /* Set array index to the less significant bits in the unions, depending
896 on the endian-ness of the host doubles.
897 Disable if insufficient information on the data structure. */
898 #if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
899 return 0;
900 #else
901 #if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
902 #define K 2
903 #else
904 #if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
905 #define K 2
906 #else
907 #define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
908 #endif
909 #endif
910 #endif
912 if (setjmp (float_error))
914 /* Don't do the optimization if there was an arithmetic error. */
915 fail:
916 set_float_handler (NULL_PTR);
917 return 0;
919 set_float_handler (float_error);
921 /* Domain check the argument. */
922 x.d = *r;
923 if (x.d == 0.0)
924 goto fail;
926 #ifdef REAL_INFINITY
927 if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
928 goto fail;
929 #endif
931 /* Compute the reciprocal and check for numerical exactness.
932 It is unnecessary to check all the significand bits to determine
933 whether X is a power of 2. If X is not, then it is impossible for
934 the bottom half significand of both X and 1/X to be all zero bits.
935 Hence we ignore the data structure of the top half and examine only
936 the low order bits of the two significands. */
937 t.d = 1.0 / x.d;
938 if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
939 goto fail;
941 /* Truncate to the required mode and range-check the result. */
942 y.d = REAL_VALUE_TRUNCATE (mode, t.d);
943 #ifdef CHECK_FLOAT_VALUE
944 i = 0;
945 if (CHECK_FLOAT_VALUE (mode, y.d, i))
946 goto fail;
947 #endif
949 /* Fail if truncation changed the value. */
950 if (y.d != t.d || y.d == 0.0)
951 goto fail;
953 #ifdef REAL_INFINITY
954 if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
955 goto fail;
956 #endif
958 /* Output the reciprocal and return success flag. */
959 set_float_handler (NULL_PTR);
960 *r = y.d;
961 return 1;
963 #endif /* no REAL_ARITHMETIC */
965 /* Split a tree IN into a constant and a variable part
966 that could be combined with CODE to make IN.
967 CODE must be a commutative arithmetic operation.
968 Store the constant part into *CONP and the variable in &VARP.
969 Return 1 if this was done; zero means the tree IN did not decompose
970 this way.
972 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
973 Therefore, we must tell the caller whether the variable part
974 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
975 The value stored is the coefficient for the variable term.
976 The constant term we return should always be added;
977 we negate it if necessary. */
979 static int
980 split_tree (in, code, varp, conp, varsignp)
981 tree in;
982 enum tree_code code;
983 tree *varp, *conp;
984 int *varsignp;
986 register tree outtype = TREE_TYPE (in);
987 *varp = 0;
988 *conp = 0;
990 /* Strip any conversions that don't change the machine mode. */
991 while ((TREE_CODE (in) == NOP_EXPR
992 || TREE_CODE (in) == CONVERT_EXPR)
993 && (TYPE_MODE (TREE_TYPE (in))
994 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
995 in = TREE_OPERAND (in, 0);
997 if (TREE_CODE (in) == code
998 || (! FLOAT_TYPE_P (TREE_TYPE (in))
999 /* We can associate addition and subtraction together
1000 (even though the C standard doesn't say so)
1001 for integers because the value is not affected.
1002 For reals, the value might be affected, so we can't. */
1003 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1004 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1006 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
1007 if (code == INTEGER_CST)
1009 *conp = TREE_OPERAND (in, 0);
1010 *varp = TREE_OPERAND (in, 1);
1011 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1012 && TREE_TYPE (*varp) != outtype)
1013 *varp = convert (outtype, *varp);
1014 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1015 return 1;
1017 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
1019 *conp = TREE_OPERAND (in, 1);
1020 *varp = TREE_OPERAND (in, 0);
1021 *varsignp = 1;
1022 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1023 && TREE_TYPE (*varp) != outtype)
1024 *varp = convert (outtype, *varp);
1025 if (TREE_CODE (in) == MINUS_EXPR)
1027 /* If operation is subtraction and constant is second,
1028 must negate it to get an additive constant.
1029 And this cannot be done unless it is a manifest constant.
1030 It could also be the address of a static variable.
1031 We cannot negate that, so give up. */
1032 if (TREE_CODE (*conp) == INTEGER_CST)
1033 /* Subtracting from integer_zero_node loses for long long. */
1034 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
1035 else
1036 return 0;
1038 return 1;
1040 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
1042 *conp = TREE_OPERAND (in, 0);
1043 *varp = TREE_OPERAND (in, 1);
1044 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1045 && TREE_TYPE (*varp) != outtype)
1046 *varp = convert (outtype, *varp);
1047 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1048 return 1;
1051 return 0;
1054 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1055 to produce a new constant.
1057 If NOTRUNC is nonzero, do not truncate the result to fit the data type.
1058 If FORSIZE is nonzero, compute overflow for unsigned types. */
1060 static tree
1061 int_const_binop (code, arg1, arg2, notrunc, forsize)
1062 enum tree_code code;
1063 register tree arg1, arg2;
1064 int notrunc, forsize;
1066 HOST_WIDE_INT int1l, int1h, int2l, int2h;
1067 HOST_WIDE_INT low, hi;
1068 HOST_WIDE_INT garbagel, garbageh;
1069 register tree t;
1070 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
1071 int overflow = 0;
1072 int no_overflow = 0;
1074 int1l = TREE_INT_CST_LOW (arg1);
1075 int1h = TREE_INT_CST_HIGH (arg1);
1076 int2l = TREE_INT_CST_LOW (arg2);
1077 int2h = TREE_INT_CST_HIGH (arg2);
1079 switch (code)
1081 case BIT_IOR_EXPR:
1082 low = int1l | int2l, hi = int1h | int2h;
1083 break;
1085 case BIT_XOR_EXPR:
1086 low = int1l ^ int2l, hi = int1h ^ int2h;
1087 break;
1089 case BIT_AND_EXPR:
1090 low = int1l & int2l, hi = int1h & int2h;
1091 break;
1093 case BIT_ANDTC_EXPR:
1094 low = int1l & ~int2l, hi = int1h & ~int2h;
1095 break;
1097 case RSHIFT_EXPR:
1098 int2l = - int2l;
1099 case LSHIFT_EXPR:
1100 /* It's unclear from the C standard whether shifts can overflow.
1101 The following code ignores overflow; perhaps a C standard
1102 interpretation ruling is needed. */
1103 lshift_double (int1l, int1h, int2l,
1104 TYPE_PRECISION (TREE_TYPE (arg1)),
1105 &low, &hi,
1106 !uns);
1107 no_overflow = 1;
1108 break;
1110 case RROTATE_EXPR:
1111 int2l = - int2l;
1112 case LROTATE_EXPR:
1113 lrotate_double (int1l, int1h, int2l,
1114 TYPE_PRECISION (TREE_TYPE (arg1)),
1115 &low, &hi);
1116 break;
1118 case PLUS_EXPR:
1119 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1120 break;
1122 case MINUS_EXPR:
1123 neg_double (int2l, int2h, &low, &hi);
1124 add_double (int1l, int1h, low, hi, &low, &hi);
1125 overflow = overflow_sum_sign (hi, int2h, int1h);
1126 break;
1128 case MULT_EXPR:
1129 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1130 break;
1132 case TRUNC_DIV_EXPR:
1133 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1134 case EXACT_DIV_EXPR:
1135 /* This is a shortcut for a common special case. */
1136 if (int2h == 0 && int2l > 0
1137 && ! TREE_CONSTANT_OVERFLOW (arg1)
1138 && ! TREE_CONSTANT_OVERFLOW (arg2)
1139 && int1h == 0 && int1l >= 0)
1141 if (code == CEIL_DIV_EXPR)
1142 int1l += int2l - 1;
1143 low = int1l / int2l, hi = 0;
1144 break;
1147 /* ... fall through ... */
1149 case ROUND_DIV_EXPR:
1150 if (int2h == 0 && int2l == 1)
1152 low = int1l, hi = int1h;
1153 break;
1155 if (int1l == int2l && int1h == int2h
1156 && ! (int1l == 0 && int1h == 0))
1158 low = 1, hi = 0;
1159 break;
1161 overflow = div_and_round_double (code, uns,
1162 int1l, int1h, int2l, int2h,
1163 &low, &hi, &garbagel, &garbageh);
1164 break;
1166 case TRUNC_MOD_EXPR:
1167 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1168 /* This is a shortcut for a common special case. */
1169 if (int2h == 0 && int2l > 0
1170 && ! TREE_CONSTANT_OVERFLOW (arg1)
1171 && ! TREE_CONSTANT_OVERFLOW (arg2)
1172 && int1h == 0 && int1l >= 0)
1174 if (code == CEIL_MOD_EXPR)
1175 int1l += int2l - 1;
1176 low = int1l % int2l, hi = 0;
1177 break;
1180 /* ... fall through ... */
1182 case ROUND_MOD_EXPR:
1183 overflow = div_and_round_double (code, uns,
1184 int1l, int1h, int2l, int2h,
1185 &garbagel, &garbageh, &low, &hi);
1186 break;
1188 case MIN_EXPR:
1189 case MAX_EXPR:
1190 if (uns)
1192 low = (((unsigned HOST_WIDE_INT) int1h
1193 < (unsigned HOST_WIDE_INT) int2h)
1194 || (((unsigned HOST_WIDE_INT) int1h
1195 == (unsigned HOST_WIDE_INT) int2h)
1196 && ((unsigned HOST_WIDE_INT) int1l
1197 < (unsigned HOST_WIDE_INT) int2l)));
1199 else
1201 low = ((int1h < int2h)
1202 || ((int1h == int2h)
1203 && ((unsigned HOST_WIDE_INT) int1l
1204 < (unsigned HOST_WIDE_INT) int2l)));
1206 if (low == (code == MIN_EXPR))
1207 low = int1l, hi = int1h;
1208 else
1209 low = int2l, hi = int2h;
1210 break;
1212 default:
1213 abort ();
1216 if (TREE_TYPE (arg1) == sizetype && hi == 0
1217 && low >= 0 && low <= TREE_INT_CST_LOW (TYPE_MAX_VALUE (sizetype))
1218 && ! overflow
1219 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1220 t = size_int (low);
1221 else
1223 t = build_int_2 (low, hi);
1224 TREE_TYPE (t) = TREE_TYPE (arg1);
1227 TREE_OVERFLOW (t)
1228 = ((notrunc ? (!uns || forsize) && overflow
1229 : force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
1230 | TREE_OVERFLOW (arg1)
1231 | TREE_OVERFLOW (arg2));
1232 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1233 So check if force_fit_type truncated the value. */
1234 if (forsize
1235 && ! TREE_OVERFLOW (t)
1236 && (TREE_INT_CST_HIGH (t) != hi
1237 || TREE_INT_CST_LOW (t) != low))
1238 TREE_OVERFLOW (t) = 1;
1239 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1240 | TREE_CONSTANT_OVERFLOW (arg1)
1241 | TREE_CONSTANT_OVERFLOW (arg2));
1242 return t;
1245 /* Combine two constants ARG1 and ARG2 under operation CODE
1246 to produce a new constant.
1247 We assume ARG1 and ARG2 have the same data type,
1248 or at least are the same kind of constant and the same machine mode.
1250 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1252 static tree
1253 const_binop (code, arg1, arg2, notrunc)
1254 enum tree_code code;
1255 register tree arg1, arg2;
1256 int notrunc;
1258 STRIP_NOPS (arg1); STRIP_NOPS (arg2);
1260 if (TREE_CODE (arg1) == INTEGER_CST)
1261 return int_const_binop (code, arg1, arg2, notrunc, 0);
1263 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1264 if (TREE_CODE (arg1) == REAL_CST)
1266 REAL_VALUE_TYPE d1;
1267 REAL_VALUE_TYPE d2;
1268 int overflow = 0;
1269 REAL_VALUE_TYPE value;
1270 tree t;
1272 d1 = TREE_REAL_CST (arg1);
1273 d2 = TREE_REAL_CST (arg2);
1275 /* If either operand is a NaN, just return it. Otherwise, set up
1276 for floating-point trap; we return an overflow. */
1277 if (REAL_VALUE_ISNAN (d1))
1278 return arg1;
1279 else if (REAL_VALUE_ISNAN (d2))
1280 return arg2;
1281 else if (setjmp (float_error))
1283 t = copy_node (arg1);
1284 overflow = 1;
1285 goto got_float;
1288 set_float_handler (float_error);
1290 #ifdef REAL_ARITHMETIC
1291 REAL_ARITHMETIC (value, code, d1, d2);
1292 #else
1293 switch (code)
1295 case PLUS_EXPR:
1296 value = d1 + d2;
1297 break;
1299 case MINUS_EXPR:
1300 value = d1 - d2;
1301 break;
1303 case MULT_EXPR:
1304 value = d1 * d2;
1305 break;
1307 case RDIV_EXPR:
1308 #ifndef REAL_INFINITY
1309 if (d2 == 0)
1310 abort ();
1311 #endif
1313 value = d1 / d2;
1314 break;
1316 case MIN_EXPR:
1317 value = MIN (d1, d2);
1318 break;
1320 case MAX_EXPR:
1321 value = MAX (d1, d2);
1322 break;
1324 default:
1325 abort ();
1327 #endif /* no REAL_ARITHMETIC */
1328 t = build_real (TREE_TYPE (arg1),
1329 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
1330 got_float:
1331 set_float_handler (NULL_PTR);
1333 TREE_OVERFLOW (t)
1334 = (force_fit_type (t, overflow)
1335 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1336 TREE_CONSTANT_OVERFLOW (t)
1337 = TREE_OVERFLOW (t)
1338 | TREE_CONSTANT_OVERFLOW (arg1)
1339 | TREE_CONSTANT_OVERFLOW (arg2);
1340 return t;
1342 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1343 if (TREE_CODE (arg1) == COMPLEX_CST)
1345 register tree type = TREE_TYPE (arg1);
1346 register tree r1 = TREE_REALPART (arg1);
1347 register tree i1 = TREE_IMAGPART (arg1);
1348 register tree r2 = TREE_REALPART (arg2);
1349 register tree i2 = TREE_IMAGPART (arg2);
1350 register tree t;
1352 switch (code)
1354 case PLUS_EXPR:
1355 t = build_complex (type,
1356 const_binop (PLUS_EXPR, r1, r2, notrunc),
1357 const_binop (PLUS_EXPR, i1, i2, notrunc));
1358 break;
1360 case MINUS_EXPR:
1361 t = build_complex (type,
1362 const_binop (MINUS_EXPR, r1, r2, notrunc),
1363 const_binop (MINUS_EXPR, i1, i2, notrunc));
1364 break;
1366 case MULT_EXPR:
1367 t = build_complex (type,
1368 const_binop (MINUS_EXPR,
1369 const_binop (MULT_EXPR,
1370 r1, r2, notrunc),
1371 const_binop (MULT_EXPR,
1372 i1, i2, notrunc),
1373 notrunc),
1374 const_binop (PLUS_EXPR,
1375 const_binop (MULT_EXPR,
1376 r1, i2, notrunc),
1377 const_binop (MULT_EXPR,
1378 i1, r2, notrunc),
1379 notrunc));
1380 break;
1382 case RDIV_EXPR:
1384 register tree magsquared
1385 = const_binop (PLUS_EXPR,
1386 const_binop (MULT_EXPR, r2, r2, notrunc),
1387 const_binop (MULT_EXPR, i2, i2, notrunc),
1388 notrunc);
1390 t = build_complex (type,
1391 const_binop
1392 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1393 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1394 const_binop (PLUS_EXPR,
1395 const_binop (MULT_EXPR, r1, r2,
1396 notrunc),
1397 const_binop (MULT_EXPR, i1, i2,
1398 notrunc),
1399 notrunc),
1400 magsquared, notrunc),
1401 const_binop
1402 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1403 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1404 const_binop (MINUS_EXPR,
1405 const_binop (MULT_EXPR, i1, r2,
1406 notrunc),
1407 const_binop (MULT_EXPR, r1, i2,
1408 notrunc),
1409 notrunc),
1410 magsquared, notrunc));
1412 break;
1414 default:
1415 abort ();
1417 return t;
1419 return 0;
1422 /* Return an INTEGER_CST with value V and type from `sizetype'. */
1424 tree
1425 size_int (number)
1426 unsigned HOST_WIDE_INT number;
1428 register tree t;
1429 /* Type-size nodes already made for small sizes. */
1430 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];
1432 if (number < 2*HOST_BITS_PER_WIDE_INT + 1
1433 && size_table[number] != 0)
1434 return size_table[number];
1435 if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
1437 push_obstacks_nochange ();
1438 /* Make this a permanent node. */
1439 end_temporary_allocation ();
1440 t = build_int_2 (number, 0);
1441 TREE_TYPE (t) = sizetype;
1442 size_table[number] = t;
1443 pop_obstacks ();
1445 else
1447 t = build_int_2 (number, 0);
1448 TREE_TYPE (t) = sizetype;
1449 TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
1451 return t;
1454 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1455 CODE is a tree code. Data type is taken from `sizetype',
1456 If the operands are constant, so is the result. */
1458 tree
1459 size_binop (code, arg0, arg1)
1460 enum tree_code code;
1461 tree arg0, arg1;
1463 /* Handle the special case of two integer constants faster. */
1464 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1466 /* And some specific cases even faster than that. */
1467 if (code == PLUS_EXPR && integer_zerop (arg0))
1468 return arg1;
1469 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1470 && integer_zerop (arg1))
1471 return arg0;
1472 else if (code == MULT_EXPR && integer_onep (arg0))
1473 return arg1;
1475 /* Handle general case of two integer constants. */
1476 return int_const_binop (code, arg0, arg1, 0, 1);
1479 if (arg0 == error_mark_node || arg1 == error_mark_node)
1480 return error_mark_node;
1482 return fold (build (code, sizetype, arg0, arg1));
1485 /* Given T, a tree representing type conversion of ARG1, a constant,
1486 return a constant tree representing the result of conversion. */
1488 static tree
1489 fold_convert (t, arg1)
1490 register tree t;
1491 register tree arg1;
1493 register tree type = TREE_TYPE (t);
1494 int overflow = 0;
1496 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1498 if (TREE_CODE (arg1) == INTEGER_CST)
1500 /* If we would build a constant wider than GCC supports,
1501 leave the conversion unfolded. */
1502 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1503 return t;
1505 /* Given an integer constant, make new constant with new type,
1506 appropriately sign-extended or truncated. */
1507 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1508 TREE_INT_CST_HIGH (arg1));
1509 TREE_TYPE (t) = type;
1510 /* Indicate an overflow if (1) ARG1 already overflowed,
1511 or (2) force_fit_type indicates an overflow.
1512 Tell force_fit_type that an overflow has already occurred
1513 if ARG1 is a too-large unsigned value and T is signed.
1514 But don't indicate an overflow if converting a pointer. */
1515 TREE_OVERFLOW (t)
1516 = ((force_fit_type (t,
1517 (TREE_INT_CST_HIGH (arg1) < 0
1518 & (TREE_UNSIGNED (type)
1519 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1520 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1521 || TREE_OVERFLOW (arg1));
1522 TREE_CONSTANT_OVERFLOW (t)
1523 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1525 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1526 else if (TREE_CODE (arg1) == REAL_CST)
1528 /* Don't initialize these, use assignments.
1529 Initialized local aggregates don't work on old compilers. */
1530 REAL_VALUE_TYPE x;
1531 REAL_VALUE_TYPE l;
1532 REAL_VALUE_TYPE u;
1533 tree type1 = TREE_TYPE (arg1);
1535 x = TREE_REAL_CST (arg1);
1536 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1537 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1538 /* See if X will be in range after truncation towards 0.
1539 To compensate for truncation, move the bounds away from 0,
1540 but reject if X exactly equals the adjusted bounds. */
1541 #ifdef REAL_ARITHMETIC
1542 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1543 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1544 #else
1545 l--;
1546 u++;
1547 #endif
1548 /* If X is a NaN, use zero instead and show we have an overflow.
1549 Otherwise, range check. */
1550 if (REAL_VALUE_ISNAN (x))
1551 overflow = 1, x = dconst0;
1552 else if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
1553 overflow = 1;
1555 #ifndef REAL_ARITHMETIC
1557 HOST_WIDE_INT low, high;
1558 HOST_WIDE_INT half_word
1559 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1561 if (x < 0)
1562 x = -x;
1564 high = (HOST_WIDE_INT) (x / half_word / half_word);
1565 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
1566 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1568 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
1569 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1571 else
1572 low = (HOST_WIDE_INT) x;
1573 if (TREE_REAL_CST (arg1) < 0)
1574 neg_double (low, high, &low, &high);
1575 t = build_int_2 (low, high);
1577 #else
1579 HOST_WIDE_INT low, high;
1580 REAL_VALUE_TO_INT (&low, &high, x);
1581 t = build_int_2 (low, high);
1583 #endif
1584 TREE_TYPE (t) = type;
1585 TREE_OVERFLOW (t)
1586 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1587 TREE_CONSTANT_OVERFLOW (t)
1588 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1590 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1591 TREE_TYPE (t) = type;
1593 else if (TREE_CODE (type) == REAL_TYPE)
1595 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1596 if (TREE_CODE (arg1) == INTEGER_CST)
1597 return build_real_from_int_cst (type, arg1);
1598 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1599 if (TREE_CODE (arg1) == REAL_CST)
1601 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1603 t = arg1;
1604 TREE_TYPE (arg1) = type;
1605 return t;
1607 else if (setjmp (float_error))
1609 overflow = 1;
1610 t = copy_node (arg1);
1611 goto got_it;
1613 set_float_handler (float_error);
1615 t = build_real (type, real_value_truncate (TYPE_MODE (type),
1616 TREE_REAL_CST (arg1)));
1617 set_float_handler (NULL_PTR);
1619 got_it:
1620 TREE_OVERFLOW (t)
1621 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1622 TREE_CONSTANT_OVERFLOW (t)
1623 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1624 return t;
1627 TREE_CONSTANT (t) = 1;
1628 return t;
1631 /* Return an expr equal to X but certainly not valid as an lvalue.
1632 Also make sure it is not valid as an null pointer constant. */
1634 tree
1635 non_lvalue (x)
1636 tree x;
1638 tree result;
1640 /* These things are certainly not lvalues. */
1641 if (TREE_CODE (x) == NON_LVALUE_EXPR
1642 || TREE_CODE (x) == INTEGER_CST
1643 || TREE_CODE (x) == REAL_CST
1644 || TREE_CODE (x) == STRING_CST
1645 || TREE_CODE (x) == ADDR_EXPR)
1647 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1649 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1650 so convert_for_assignment won't strip it.
1651 This is so this 0 won't be treated as a null pointer constant. */
1652 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1653 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1654 return result;
1656 return x;
1659 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1660 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1661 return result;
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;
1669 /* When pedantic, return an expr equal to X but certainly not valid as a
1670 pedantic lvalue. Otherwise, return X. */
1672 tree
1673 pedantic_non_lvalue (x)
1674 tree x;
1676 if (pedantic_lvalues)
1677 return non_lvalue (x);
1678 else
1679 return x;
1682 /* Given a tree comparison code, return the code that is the logical inverse
1683 of the given code. It is not safe to do this for floating-point
1684 comparisons, except for NE_EXPR and EQ_EXPR. */
1686 static enum tree_code
1687 invert_tree_comparison (code)
1688 enum tree_code code;
1690 switch (code)
1692 case EQ_EXPR:
1693 return NE_EXPR;
1694 case NE_EXPR:
1695 return EQ_EXPR;
1696 case GT_EXPR:
1697 return LE_EXPR;
1698 case GE_EXPR:
1699 return LT_EXPR;
1700 case LT_EXPR:
1701 return GE_EXPR;
1702 case LE_EXPR:
1703 return GT_EXPR;
1704 default:
1705 abort ();
1709 /* Similar, but return the comparison that results if the operands are
1710 swapped. This is safe for floating-point. */
1712 static enum tree_code
1713 swap_tree_comparison (code)
1714 enum tree_code code;
1716 switch (code)
1718 case EQ_EXPR:
1719 case NE_EXPR:
1720 return code;
1721 case GT_EXPR:
1722 return LT_EXPR;
1723 case GE_EXPR:
1724 return LE_EXPR;
1725 case LT_EXPR:
1726 return GT_EXPR;
1727 case LE_EXPR:
1728 return GE_EXPR;
1729 default:
1730 abort ();
1734 /* Return nonzero if CODE is a tree code that represents a truth value. */
1736 static int
1737 truth_value_p (code)
1738 enum tree_code code;
1740 return (TREE_CODE_CLASS (code) == '<'
1741 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1742 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1743 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1746 /* Return nonzero if two operands are necessarily equal.
1747 If ONLY_CONST is non-zero, only return non-zero for constants.
1748 This function tests whether the operands are indistinguishable;
1749 it does not test whether they are equal using C's == operation.
1750 The distinction is important for IEEE floating point, because
1751 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1752 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1755 operand_equal_p (arg0, arg1, only_const)
1756 tree arg0, arg1;
1757 int only_const;
1759 /* If both types don't have the same signedness, then we can't consider
1760 them equal. We must check this before the STRIP_NOPS calls
1761 because they may change the signedness of the arguments. */
1762 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1763 return 0;
1765 STRIP_NOPS (arg0);
1766 STRIP_NOPS (arg1);
1768 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1769 /* This is needed for conversions and for COMPONENT_REF.
1770 Might as well play it safe and always test this. */
1771 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1772 return 0;
1774 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1775 We don't care about side effects in that case because the SAVE_EXPR
1776 takes care of that for us. In all other cases, two expressions are
1777 equal if they have no side effects. If we have two identical
1778 expressions with side effects that should be treated the same due
1779 to the only side effects being identical SAVE_EXPR's, that will
1780 be detected in the recursive calls below. */
1781 if (arg0 == arg1 && ! only_const
1782 && (TREE_CODE (arg0) == SAVE_EXPR
1783 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1784 return 1;
1786 /* Next handle constant cases, those for which we can return 1 even
1787 if ONLY_CONST is set. */
1788 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1789 switch (TREE_CODE (arg0))
1791 case INTEGER_CST:
1792 return (! TREE_CONSTANT_OVERFLOW (arg0)
1793 && ! TREE_CONSTANT_OVERFLOW (arg1)
1794 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1795 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1));
1797 case REAL_CST:
1798 return (! TREE_CONSTANT_OVERFLOW (arg0)
1799 && ! TREE_CONSTANT_OVERFLOW (arg1)
1800 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1801 TREE_REAL_CST (arg1)));
1803 case COMPLEX_CST:
1804 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1805 only_const)
1806 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1807 only_const));
1809 case STRING_CST:
1810 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1811 && ! strncmp (TREE_STRING_POINTER (arg0),
1812 TREE_STRING_POINTER (arg1),
1813 TREE_STRING_LENGTH (arg0)));
1815 case ADDR_EXPR:
1816 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1818 default:
1819 break;
1822 if (only_const)
1823 return 0;
1825 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1827 case '1':
1828 /* Two conversions are equal only if signedness and modes match. */
1829 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1830 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1831 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1832 return 0;
1834 return operand_equal_p (TREE_OPERAND (arg0, 0),
1835 TREE_OPERAND (arg1, 0), 0);
1837 case '<':
1838 case '2':
1839 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1840 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1842 return 1;
1844 /* For commutative ops, allow the other order. */
1845 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1846 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1847 || TREE_CODE (arg0) == BIT_IOR_EXPR
1848 || TREE_CODE (arg0) == BIT_XOR_EXPR
1849 || TREE_CODE (arg0) == BIT_AND_EXPR
1850 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1851 && operand_equal_p (TREE_OPERAND (arg0, 0),
1852 TREE_OPERAND (arg1, 1), 0)
1853 && operand_equal_p (TREE_OPERAND (arg0, 1),
1854 TREE_OPERAND (arg1, 0), 0));
1856 case 'r':
1857 switch (TREE_CODE (arg0))
1859 case INDIRECT_REF:
1860 return operand_equal_p (TREE_OPERAND (arg0, 0),
1861 TREE_OPERAND (arg1, 0), 0);
1863 case COMPONENT_REF:
1864 case ARRAY_REF:
1865 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1866 TREE_OPERAND (arg1, 0), 0)
1867 && operand_equal_p (TREE_OPERAND (arg0, 1),
1868 TREE_OPERAND (arg1, 1), 0));
1870 case BIT_FIELD_REF:
1871 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1872 TREE_OPERAND (arg1, 0), 0)
1873 && operand_equal_p (TREE_OPERAND (arg0, 1),
1874 TREE_OPERAND (arg1, 1), 0)
1875 && operand_equal_p (TREE_OPERAND (arg0, 2),
1876 TREE_OPERAND (arg1, 2), 0));
1877 default:
1878 return 0;
1881 default:
1882 return 0;
1886 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1887 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1889 When in doubt, return 0. */
1891 static int
1892 operand_equal_for_comparison_p (arg0, arg1, other)
1893 tree arg0, arg1;
1894 tree other;
1896 int unsignedp1, unsignedpo;
1897 tree primarg1, primother;
1898 unsigned correct_width;
1900 if (operand_equal_p (arg0, arg1, 0))
1901 return 1;
1903 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1904 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1905 return 0;
1907 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1908 actual comparison operand, ARG0.
1910 First throw away any conversions to wider types
1911 already present in the operands. */
1913 primarg1 = get_narrower (arg1, &unsignedp1);
1914 primother = get_narrower (other, &unsignedpo);
1916 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1917 if (unsignedp1 == unsignedpo
1918 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1919 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1921 tree type = TREE_TYPE (arg0);
1923 /* Make sure shorter operand is extended the right way
1924 to match the longer operand. */
1925 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1926 TREE_TYPE (primarg1)),
1927 primarg1);
1929 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1930 return 1;
1933 return 0;
1936 /* See if ARG is an expression that is either a comparison or is performing
1937 arithmetic on comparisons. The comparisons must only be comparing
1938 two different values, which will be stored in *CVAL1 and *CVAL2; if
1939 they are non-zero it means that some operands have already been found.
1940 No variables may be used anywhere else in the expression except in the
1941 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1942 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1944 If this is true, return 1. Otherwise, return zero. */
1946 static int
1947 twoval_comparison_p (arg, cval1, cval2, save_p)
1948 tree arg;
1949 tree *cval1, *cval2;
1950 int *save_p;
1952 enum tree_code code = TREE_CODE (arg);
1953 char class = TREE_CODE_CLASS (code);
1955 /* We can handle some of the 'e' cases here. */
1956 if (class == 'e' && code == TRUTH_NOT_EXPR)
1957 class = '1';
1958 else if (class == 'e'
1959 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1960 || code == COMPOUND_EXPR))
1961 class = '2';
1963 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1964 the expression. There may be no way to make this work, but it needs
1965 to be looked at again for 2.6. */
1966 #if 0
1967 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1969 /* If we've already found a CVAL1 or CVAL2, this expression is
1970 two complex to handle. */
1971 if (*cval1 || *cval2)
1972 return 0;
1974 class = '1';
1975 *save_p = 1;
1977 #endif
1979 switch (class)
1981 case '1':
1982 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
1984 case '2':
1985 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1986 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1987 cval1, cval2, save_p));
1989 case 'c':
1990 return 1;
1992 case 'e':
1993 if (code == COND_EXPR)
1994 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
1995 cval1, cval2, save_p)
1996 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1997 cval1, cval2, save_p)
1998 && twoval_comparison_p (TREE_OPERAND (arg, 2),
1999 cval1, cval2, save_p));
2000 return 0;
2002 case '<':
2003 /* First see if we can handle the first operand, then the second. For
2004 the second operand, we know *CVAL1 can't be zero. It must be that
2005 one side of the comparison is each of the values; test for the
2006 case where this isn't true by failing if the two operands
2007 are the same. */
2009 if (operand_equal_p (TREE_OPERAND (arg, 0),
2010 TREE_OPERAND (arg, 1), 0))
2011 return 0;
2013 if (*cval1 == 0)
2014 *cval1 = TREE_OPERAND (arg, 0);
2015 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2017 else if (*cval2 == 0)
2018 *cval2 = TREE_OPERAND (arg, 0);
2019 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2021 else
2022 return 0;
2024 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2026 else if (*cval2 == 0)
2027 *cval2 = TREE_OPERAND (arg, 1);
2028 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2030 else
2031 return 0;
2033 return 1;
2035 default:
2036 return 0;
2040 /* ARG is a tree that is known to contain just arithmetic operations and
2041 comparisons. Evaluate the operations in the tree substituting NEW0 for
2042 any occurrence of OLD0 as an operand of a comparison and likewise for
2043 NEW1 and OLD1. */
2045 static tree
2046 eval_subst (arg, old0, new0, old1, new1)
2047 tree arg;
2048 tree old0, new0, old1, new1;
2050 tree type = TREE_TYPE (arg);
2051 enum tree_code code = TREE_CODE (arg);
2052 char class = TREE_CODE_CLASS (code);
2054 /* We can handle some of the 'e' cases here. */
2055 if (class == 'e' && code == TRUTH_NOT_EXPR)
2056 class = '1';
2057 else if (class == 'e'
2058 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2059 class = '2';
2061 switch (class)
2063 case '1':
2064 return fold (build1 (code, type,
2065 eval_subst (TREE_OPERAND (arg, 0),
2066 old0, new0, old1, new1)));
2068 case '2':
2069 return fold (build (code, type,
2070 eval_subst (TREE_OPERAND (arg, 0),
2071 old0, new0, old1, new1),
2072 eval_subst (TREE_OPERAND (arg, 1),
2073 old0, new0, old1, new1)));
2075 case 'e':
2076 switch (code)
2078 case SAVE_EXPR:
2079 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2081 case COMPOUND_EXPR:
2082 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2084 case COND_EXPR:
2085 return fold (build (code, type,
2086 eval_subst (TREE_OPERAND (arg, 0),
2087 old0, new0, old1, new1),
2088 eval_subst (TREE_OPERAND (arg, 1),
2089 old0, new0, old1, new1),
2090 eval_subst (TREE_OPERAND (arg, 2),
2091 old0, new0, old1, new1)));
2092 default:
2093 break;
2095 /* fall through (???) */
2097 case '<':
2099 tree arg0 = TREE_OPERAND (arg, 0);
2100 tree arg1 = TREE_OPERAND (arg, 1);
2102 /* We need to check both for exact equality and tree equality. The
2103 former will be true if the operand has a side-effect. In that
2104 case, we know the operand occurred exactly once. */
2106 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2107 arg0 = new0;
2108 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2109 arg0 = new1;
2111 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2112 arg1 = new0;
2113 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2114 arg1 = new1;
2116 return fold (build (code, type, arg0, arg1));
2119 default:
2120 return arg;
2124 /* Return a tree for the case when the result of an expression is RESULT
2125 converted to TYPE and OMITTED was previously an operand of the expression
2126 but is now not needed (e.g., we folded OMITTED * 0).
2128 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2129 the conversion of RESULT to TYPE. */
2131 static tree
2132 omit_one_operand (type, result, omitted)
2133 tree type, result, omitted;
2135 tree t = convert (type, result);
2137 if (TREE_SIDE_EFFECTS (omitted))
2138 return build (COMPOUND_EXPR, type, omitted, t);
2140 return non_lvalue (t);
2143 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2145 static tree
2146 pedantic_omit_one_operand (type, result, omitted)
2147 tree type, result, omitted;
2149 tree t = convert (type, result);
2151 if (TREE_SIDE_EFFECTS (omitted))
2152 return build (COMPOUND_EXPR, type, omitted, t);
2154 return pedantic_non_lvalue (t);
2159 /* Return a simplified tree node for the truth-negation of ARG. This
2160 never alters ARG itself. We assume that ARG is an operation that
2161 returns a truth value (0 or 1). */
2163 tree
2164 invert_truthvalue (arg)
2165 tree arg;
2167 tree type = TREE_TYPE (arg);
2168 enum tree_code code = TREE_CODE (arg);
2170 if (code == ERROR_MARK)
2171 return arg;
2173 /* If this is a comparison, we can simply invert it, except for
2174 floating-point non-equality comparisons, in which case we just
2175 enclose a TRUTH_NOT_EXPR around what we have. */
2177 if (TREE_CODE_CLASS (code) == '<')
2179 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2180 && code != NE_EXPR && code != EQ_EXPR)
2181 return build1 (TRUTH_NOT_EXPR, type, arg);
2182 else
2183 return build (invert_tree_comparison (code), type,
2184 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2187 switch (code)
2189 case INTEGER_CST:
2190 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2191 && TREE_INT_CST_HIGH (arg) == 0, 0));
2193 case TRUTH_AND_EXPR:
2194 return build (TRUTH_OR_EXPR, type,
2195 invert_truthvalue (TREE_OPERAND (arg, 0)),
2196 invert_truthvalue (TREE_OPERAND (arg, 1)));
2198 case TRUTH_OR_EXPR:
2199 return build (TRUTH_AND_EXPR, type,
2200 invert_truthvalue (TREE_OPERAND (arg, 0)),
2201 invert_truthvalue (TREE_OPERAND (arg, 1)));
2203 case TRUTH_XOR_EXPR:
2204 /* Here we can invert either operand. We invert the first operand
2205 unless the second operand is a TRUTH_NOT_EXPR in which case our
2206 result is the XOR of the first operand with the inside of the
2207 negation of the second operand. */
2209 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2210 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2211 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2212 else
2213 return build (TRUTH_XOR_EXPR, type,
2214 invert_truthvalue (TREE_OPERAND (arg, 0)),
2215 TREE_OPERAND (arg, 1));
2217 case TRUTH_ANDIF_EXPR:
2218 return build (TRUTH_ORIF_EXPR, type,
2219 invert_truthvalue (TREE_OPERAND (arg, 0)),
2220 invert_truthvalue (TREE_OPERAND (arg, 1)));
2222 case TRUTH_ORIF_EXPR:
2223 return build (TRUTH_ANDIF_EXPR, type,
2224 invert_truthvalue (TREE_OPERAND (arg, 0)),
2225 invert_truthvalue (TREE_OPERAND (arg, 1)));
2227 case TRUTH_NOT_EXPR:
2228 return TREE_OPERAND (arg, 0);
2230 case COND_EXPR:
2231 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2232 invert_truthvalue (TREE_OPERAND (arg, 1)),
2233 invert_truthvalue (TREE_OPERAND (arg, 2)));
2235 case COMPOUND_EXPR:
2236 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2237 invert_truthvalue (TREE_OPERAND (arg, 1)));
2239 case NON_LVALUE_EXPR:
2240 return invert_truthvalue (TREE_OPERAND (arg, 0));
2242 case NOP_EXPR:
2243 case CONVERT_EXPR:
2244 case FLOAT_EXPR:
2245 return build1 (TREE_CODE (arg), type,
2246 invert_truthvalue (TREE_OPERAND (arg, 0)));
2248 case BIT_AND_EXPR:
2249 if (!integer_onep (TREE_OPERAND (arg, 1)))
2250 break;
2251 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2253 case SAVE_EXPR:
2254 return build1 (TRUTH_NOT_EXPR, type, arg);
2256 case CLEANUP_POINT_EXPR:
2257 return build1 (CLEANUP_POINT_EXPR, type,
2258 invert_truthvalue (TREE_OPERAND (arg, 0)));
2260 default:
2261 break;
2263 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2264 abort ();
2265 return build1 (TRUTH_NOT_EXPR, type, arg);
2268 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2269 operands are another bit-wise operation with a common input. If so,
2270 distribute the bit operations to save an operation and possibly two if
2271 constants are involved. For example, convert
2272 (A | B) & (A | C) into A | (B & C)
2273 Further simplification will occur if B and C are constants.
2275 If this optimization cannot be done, 0 will be returned. */
2277 static tree
2278 distribute_bit_expr (code, type, arg0, arg1)
2279 enum tree_code code;
2280 tree type;
2281 tree arg0, arg1;
2283 tree common;
2284 tree left, right;
2286 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2287 || TREE_CODE (arg0) == code
2288 || (TREE_CODE (arg0) != BIT_AND_EXPR
2289 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2290 return 0;
2292 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2294 common = TREE_OPERAND (arg0, 0);
2295 left = TREE_OPERAND (arg0, 1);
2296 right = TREE_OPERAND (arg1, 1);
2298 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2300 common = TREE_OPERAND (arg0, 0);
2301 left = TREE_OPERAND (arg0, 1);
2302 right = TREE_OPERAND (arg1, 0);
2304 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2306 common = TREE_OPERAND (arg0, 1);
2307 left = TREE_OPERAND (arg0, 0);
2308 right = TREE_OPERAND (arg1, 1);
2310 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2312 common = TREE_OPERAND (arg0, 1);
2313 left = TREE_OPERAND (arg0, 0);
2314 right = TREE_OPERAND (arg1, 0);
2316 else
2317 return 0;
2319 return fold (build (TREE_CODE (arg0), type, common,
2320 fold (build (code, type, left, right))));
2323 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2324 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2326 static tree
2327 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2328 tree inner;
2329 tree type;
2330 int bitsize, bitpos;
2331 int unsignedp;
2333 tree result = build (BIT_FIELD_REF, type, inner,
2334 size_int (bitsize), size_int (bitpos));
2336 TREE_UNSIGNED (result) = unsignedp;
2338 return result;
2341 /* Optimize a bit-field compare.
2343 There are two cases: First is a compare against a constant and the
2344 second is a comparison of two items where the fields are at the same
2345 bit position relative to the start of a chunk (byte, halfword, word)
2346 large enough to contain it. In these cases we can avoid the shift
2347 implicit in bitfield extractions.
2349 For constants, we emit a compare of the shifted constant with the
2350 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2351 compared. For two fields at the same position, we do the ANDs with the
2352 similar mask and compare the result of the ANDs.
2354 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2355 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2356 are the left and right operands of the comparison, respectively.
2358 If the optimization described above can be done, we return the resulting
2359 tree. Otherwise we return zero. */
static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  /* Bit position/size of the fields as extracted, and within the new
     mode-sized references we build below.  */
  int lbitpos, lbitsize, rbitpos, rbitsize;
  int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  /* Non-zero when RHS is a constant (first case in the header comment).  */
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, lnmode, rnmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  int alignment;
  tree linner, rinner;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, &alignment);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep, &alignment);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  lnmode = get_best_mode (lbitsize, lbitpos,
			  TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
			  lvolatilep);
  if (lnmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = type_for_mode (lnmode, 0);
  unsigned_type = type_for_mode (lnmode, 1);

  if (! const_p)
    {
      rnmode = get_best_mode (rbitsize, rbitpos,
			      TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
			      rvolatilep);
      if (rnmode == VOIDmode)
	return 0;
    }

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = lbitpos & ~ (lnbitsize - 1);
  lbitpos -= lnbitpos;
  if (lnbitsize == lbitsize)
    return 0;

  if (! const_p)
    {
      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = rbitpos & ~ (rnbitsize - 1);
      rbitpos -= rnbitpos;
      if (rnbitsize == rbitsize)
	return 0;
    }

  /* Bit positions are numbered from the other end on big-endian targets.  */
  if (BYTES_BIG_ENDIAN)
    lbitpos = lnbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: LBITSIZE one
     bits positioned at LBITPOS within the LNBITSIZE-bit chunk.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (lnbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     lnbitsize, lnbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     rnbitsize, rnbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %s due to width of bitfield",
		   code == NE_EXPR ? "one" : "zero");
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      /* TEM is the constant with the low LBITSIZE - 1 bits shifted out;
	 for an in-range signed value it is all zeros or all ones.  */
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %s due to width of bitfield",
		   code == NE_EXPR ? "one" : "zero");
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
2530 /* Subroutine for fold_truthop: decode a field reference.
2532 If EXP is a comparison reference, we return the innermost reference.
2534 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2535 set to the starting bit number.
2537 If the innermost field can be completely contained in a mode-sized
2538 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2540 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2541 otherwise it is not changed.
2543 *PUNSIGNEDP is set to the signedness of the field.
2545 *PMASK is set to the mask used. This is either contained in a
2546 BIT_AND_EXPR or derived from the width of the field.
   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2550 Return 0 if this is not a component reference or is one that we can't
2551 do anything with. */
static tree
decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
			pvolatilep, pmask, pand_mask)
     tree exp;
     int *pbitsize, *pbitpos;
     enum machine_mode *pmode;
     int *punsignedp, *pvolatilep;
     tree *pmask;
     tree *pand_mask;
{
  tree and_mask = 0;		/* Constant from a wrapping BIT_AND_EXPR, if any.  */
  tree mask, inner, offset;
  tree unsigned_type;
  int precision;
  int alignment;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  STRIP_NOPS (exp);

  /* Peel off an outer BIT_AND_EXPR with a constant mask; the mask is
     folded into the one we compute below.  NOTE: STRIP_NOPS modifies
     its argument in place.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  /* Decompose the reference; reject it if nothing was stripped (unless a
     mask made it interesting), or if it has a negative size or a variable
     offset.  */
  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, &alignment);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0)
    return 0;

  /* Compute the mask to access the bitfield: *PBITSIZE low-order one
     bits, built by shifting an all-ones constant up and back down.  */
  unsigned_type = type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
2613 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2614 bit positions. */
2616 static int
2617 all_ones_mask_p (mask, size)
2618 tree mask;
2619 int size;
2621 tree type = TREE_TYPE (mask);
2622 int precision = TYPE_PRECISION (type);
2623 tree tmask;
2625 tmask = build_int_2 (~0, ~0);
2626 TREE_TYPE (tmask) = signed_type (type);
2627 force_fit_type (tmask, 0);
2628 return
2629 tree_int_cst_equal (mask,
2630 const_binop (RSHIFT_EXPR,
2631 const_binop (LSHIFT_EXPR, tmask,
2632 size_int (precision - size),
2634 size_int (precision - size), 0));
2637 /* Subroutine for fold_truthop: determine if an operand is simple enough
2638 to be evaluated unconditionally. */
2640 static int
2641 simple_operand_p (exp)
2642 tree exp;
2644 /* Strip any conversions that don't change the machine mode. */
2645 while ((TREE_CODE (exp) == NOP_EXPR
2646 || TREE_CODE (exp) == CONVERT_EXPR)
2647 && (TYPE_MODE (TREE_TYPE (exp))
2648 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2649 exp = TREE_OPERAND (exp, 0);
2651 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2652 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2653 && ! TREE_ADDRESSABLE (exp)
2654 && ! TREE_THIS_VOLATILE (exp)
2655 && ! DECL_NONLOCAL (exp)
2656 /* Don't regard global variables as simple. They may be
2657 allocated in ways unknown to the compiler (shared memory,
2658 #pragma weak, etc). */
2659 && ! TREE_PUBLIC (exp)
2660 && ! DECL_EXTERNAL (exp)
2661 /* Loading a static variable is unduly expensive, but global
2662 registers aren't expensive. */
2663 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2666 /* The following functions are subroutines to fold_range_test and allow it to
2667 try to change a logical combination of comparisons into a range test.
   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3
2676 We describe each set of comparisons as being either inside or outside
2677 a range, using a variable named like IN_P, and then describe the
2678 range with a lower and upper bound. If one of the bounds is omitted,
2679 it represents either the highest or lowest value of the type.
2681 In the comments below, we represent a range by two numbers in brackets
2682 preceded by a "+" to designate being inside that range, or a "-" to
2683 designate being outside that range, so the condition can be inverted by
2684 flipping the prefix. An omitted bound is represented by a "-". For
2685 example, "- [-, 10]" means being outside the range starting at the lowest
2686 possible value and ending at 10, in other words, being greater than 10.
2687 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2688 always false.
2690 We set up things so that the missing bounds are handled in a consistent
2691 manner so neither a missing bound nor "true" and "false" need to be
2692 handled using a special case. */
2694 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2695 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2696 and UPPER1_P are nonzero if the respective argument is an upper bound
2697 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2698 must be specified for a comparison. ARG1 will be converted to ARG0's
2699 type if both are specified. */
2701 static tree
2702 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2703 enum tree_code code;
2704 tree type;
2705 tree arg0, arg1;
2706 int upper0_p, upper1_p;
2708 tree tem;
2709 int result;
2710 int sgn0, sgn1;
2712 /* If neither arg represents infinity, do the normal operation.
2713 Else, if not a comparison, return infinity. Else handle the special
2714 comparison rules. Note that most of the cases below won't occur, but
2715 are handled for consistency. */
2717 if (arg0 != 0 && arg1 != 0)
2719 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2720 arg0, convert (TREE_TYPE (arg0), arg1)));
2721 STRIP_NOPS (tem);
2722 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2725 if (TREE_CODE_CLASS (code) != '<')
2726 return 0;
2728 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2729 for neither. Then compute our result treating them as never equal
2730 and comparing bounds to non-bounds as above. */
2731 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2732 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2733 switch (code)
2735 case EQ_EXPR: case NE_EXPR:
2736 result = (code == NE_EXPR);
2737 break;
2738 case LT_EXPR: case LE_EXPR:
2739 result = sgn0 < sgn1;
2740 break;
2741 case GT_EXPR: case GE_EXPR:
2742 result = sgn0 > sgn1;
2743 break;
2744 default:
2745 abort ();
2748 return convert (type, result ? integer_one_node : integer_zero_node);
2751 /* Given EXP, a logical expression, set the range it is testing into
2752 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2753 actually being tested. *PLOW and *PHIGH will have be made the same type
2754 as the returned expression. If EXP is not a comparison, we will most
2755 likely not be returning a useful value and range. */
static tree
make_range (exp, pin_p, plow, phigh)
     tree exp;
     int *pin_p;
     tree *plow, *phigh;
{
  enum tree_code code;
  tree arg0, arg1, type;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      arg0 = TREE_OPERAND (exp, 0), arg1 = TREE_OPERAND (exp, 1);
      /* TYPE is only meaningful (and only used) for the expression
	 classes listed here.  */
      if (TREE_CODE_CLASS (code) == '<' || TREE_CODE_CLASS (code) == '1'
	  || TREE_CODE_CLASS (code) == '2')
	type = TREE_TYPE (arg0);

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* Negation just flips in/out and descends into the operand.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      abort ();
	    }

	  exp = arg0;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, convert (type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, reverse the range so it
		 goes from zero to the low bound minus 1.  */
	      if (high == 0)
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = convert (type, integer_zero_node);
		}
	    }
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       convert (type, integer_zero_node), 0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				convert (type, integer_zero_node), 0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1; re-expressing lets the PLUS/MINUS and NEGATE
	     cases above handle it on later iterations.  */
	  exp = build (MINUS_EXPR, type, build1 (NEGATE_EXPR, type, arg0),
		       convert (type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);
	      in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = convert (type, n_low);

	  if (n_high != 0)
	    n_high = convert (type, n_high);

	  /* If we're converting from an unsigned to a signed type,
	     we will be doing the comparison as unsigned.  The tests above
	     have already verified that LOW and HIGH are both positive.

	     So we have to make sure that the original unsigned value will
	     be interpreted as positive.  */
	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
	    {
	      tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
	      /* HIGH_POSITIVE is the largest value of TYPE that is still
		 non-negative when reinterpreted as signed (max >> 1).  */
	      tree high_positive
		= fold (build (RSHIFT_EXPR, type,
			       convert (type,
					TYPE_MAX_VALUE (equiv_type)),
			       convert (type, integer_one_node)));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
2983 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
2984 type, TYPE, return an expression to test if EXP is in (or out of, depending
2985 on IN_P) the range. */
2987 static tree
2988 build_range_check (type, exp, in_p, low, high)
2989 tree type;
2990 tree exp;
2991 int in_p;
2992 tree low, high;
2994 tree etype = TREE_TYPE (exp);
2995 tree utype, value;
2997 if (! in_p
2998 && (0 != (value = build_range_check (type, exp, 1, low, high))))
2999 return invert_truthvalue (value);
3001 else if (low == 0 && high == 0)
3002 return convert (type, integer_one_node);
3004 else if (low == 0)
3005 return fold (build (LE_EXPR, type, exp, high));
3007 else if (high == 0)
3008 return fold (build (GE_EXPR, type, exp, low));
3010 else if (operand_equal_p (low, high, 0))
3011 return fold (build (EQ_EXPR, type, exp, low));
3013 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3014 return build_range_check (type, exp, 1, 0, high);
3016 else if (integer_zerop (low))
3018 utype = unsigned_type (etype);
3019 return build_range_check (type, convert (utype, exp), 1, 0,
3020 convert (utype, high));
3023 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3024 && ! TREE_OVERFLOW (value))
3025 return build_range_check (type,
3026 fold (build (MINUS_EXPR, etype, exp, low)),
3027 1, convert (etype, integer_zero_node), value);
3028 else
3029 return 0;
3032 /* Given two ranges, see if we can merge them into one. Return 1 if we
3033 can, 0 if we can't. Set the output range into the specified parameters. */
static int
merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
     int *pin_p;
     tree *plow, *phigh;
     int in0_p, in1_p;
     tree low0, high0, low1, high1;
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* A null bound stands for the extreme value of the type; range_binop
     handles the null cases, so these comparisons treat missing bounds
     consistently.  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the first range to the end of the second.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  in_p = 1, high = high0;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
			     integer_one_node, 0);
	}
      else if (! subset || highequal)
	{
	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset)
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,
						      high0, 1,
						      integer_one_node, 1),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    return 0;
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
3168 /* EXP is some logical combination of boolean tests. See if we can
3169 merge it into some range test. Return the new tree if so. */
3171 static tree
3172 fold_range_test (exp)
3173 tree exp;
3175 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3176 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3177 int in0_p, in1_p, in_p;
3178 tree low0, low1, low, high0, high1, high;
3179 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3180 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3181 tree tem;
3183 /* If this is an OR operation, invert both sides; we will invert
3184 again at the end. */
3185 if (or_op)
3186 in0_p = ! in0_p, in1_p = ! in1_p;
3188 /* If both expressions are the same, if we can merge the ranges, and we
3189 can build the range test, return it or it inverted. If one of the
3190 ranges is always true or always false, consider it to be the same
3191 expression as the other. */
3192 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3193 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3194 in1_p, low1, high1)
3195 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3196 lhs != 0 ? lhs
3197 : rhs != 0 ? rhs : integer_zero_node,
3198 in_p, low, high))))
3199 return or_op ? invert_truthvalue (tem) : tem;
3201 /* On machines where the branch cost is expensive, if this is a
3202 short-circuited branch and the underlying object on both sides
3203 is the same, make a non-short-circuit operation. */
3204 else if (BRANCH_COST >= 2
3205 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3206 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3207 && operand_equal_p (lhs, rhs, 0))
3209 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3210 unless we are at top level, in which case we can't do this. */
3211 if (simple_operand_p (lhs))
3212 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3213 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3214 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3215 TREE_OPERAND (exp, 1));
3217 else if (current_function_decl != 0)
3219 tree common = save_expr (lhs);
3221 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3222 or_op ? ! in0_p : in0_p,
3223 low0, high0))
3224 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3225 or_op ? ! in1_p : in1_p,
3226 low1, high1))))
3227 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3228 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3229 TREE_TYPE (exp), lhs, rhs);
3232 else
3233 return 0;
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (c, p, unsignedp, mask)
     tree c;			/* INTEGER_CST holding a P-bit field value */
     int p;			/* width in bits of the field C came from */
     int unsignedp;		/* nonzero if the field is unsigned */
     tree mask;			/* optional INTEGER_CST ANDed into the extra bits */
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  /* If the constant already fills its mode, or the field is unsigned
     (so the extra bits are zero anyway), nothing needs to change.  */
  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = convert (signed_type (type), temp);

  /* Move the (0 or 1) sign bit into the high-order position, then the
     arithmetic right shift replicates it through bits P..modesize-1.  */
  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = convert (type, temp);

  /* XORing with the replicated sign clears the extra bits exactly when
     C was sign-extended, as required by the contract above.  */
  return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made this by
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (code, truth_type, lhs, rhs)
     enum tree_code code;
     tree truth_type, lhs, rhs;
{
  /* If this is the "or" of two comparisons, we can do something if we
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  /* Local-variable naming scheme: the first letter (l or r) says which
     operand of the logical operation (LHS or RHS) the datum belongs to,
     the second letter says which operand of that comparison.  So e.g.
     "lr_arg" is the right operand of the left comparison, and "rl_mask"
     is the mask for the left operand of the right comparison.  An "x"
     prefix marks a bit position re-expressed relative to the wider
     field we build; "ln"/"rn" describe that new wider field itself.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree type, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);

  if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
    return 0;

  /* Canonicalize the short-circuit codes to their unconditional forms;
     the distinction no longer matters for the merged test we build.  */
  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
     are with zero (tmw).  */

  if (BRANCH_COST >= 2
      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    return build (code, truth_type, lhs, rhs);

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	l_const = ll_mask;
      else
	return 0;
    }

  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	r_const = rl_mask;
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  type = type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      /* On big-endian targets bit 0 of the wider field is at the other
	 end, so mirror the positions within the new field.  */
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = convert (type, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      /* If the constant has bits outside the field's mask, the comparison
	 can never vary; warn and fold to the constant truth value.  */
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold (build1 (BIT_NOT_EXPR,
						      type, ll_mask)),
					0)))
	{
	  warning ("comparison is always %s",
		   wanted_code == NE_EXPR ? "one" : "zero");

	  return convert (truth_type,
			  wanted_code == NE_EXPR
			  ? integer_one_node : integer_zero_node);
	}
    }

  if (r_const)
    {
      r_const = convert (type, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      /* Same degenerate-comparison check as for l_const above.  */
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold (build1 (BIT_NOT_EXPR,
						      type, rl_mask)),
					0)))
	{
	  warning ("comparison is always %s",
		   wanted_code == NE_EXPR ? "one" : "zero");

	  return convert (truth_type,
			  wanted_code == NE_EXPR
			  ? integer_one_node : integer_zero_node);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the masks agree,
	 we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
	{
	  lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    {
	      lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
	      rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
	    }
	  return build (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	return build (wanted_code, truth_type,
		      make_bit_field_ref (ll_inner, type,
					  ll_bitsize + rl_bitsize,
					  MIN (ll_bitpos, rl_bitpos),
					  ll_unsignedp),
		      make_bit_field_ref (lr_inner, type,
					  lr_bitsize + rr_bitsize,
					  MIN (lr_bitpos, rr_bitpos),
					  lr_unsignedp));

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("`or' of unmatched not-equal tests is always 1");
	  return convert (truth_type, integer_one_node);
	}
      else
	{
	  warning ("`and' of mutually exclusive equal-tests is always zero");
	  return convert (truth_type, integer_zero_node);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build (BIT_AND_EXPR, type, result, ll_mask);

  return build (wanted_code, truth_type, result,
		const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
3616 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3617 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3618 that we may sometimes modify the tree. */
3620 static tree
3621 strip_compound_expr (t, s)
3622 tree t;
3623 tree s;
3625 tree type = TREE_TYPE (t);
3626 enum tree_code code = TREE_CODE (t);
3628 /* See if this is the COMPOUND_EXPR we want to eliminate. */
3629 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
3630 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
3631 return TREE_OPERAND (t, 1);
3633 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3634 don't bother handling any other types. */
3635 else if (code == COND_EXPR)
3637 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3638 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3639 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
3641 else if (TREE_CODE_CLASS (code) == '1')
3642 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3643 else if (TREE_CODE_CLASS (code) == '<'
3644 || TREE_CODE_CLASS (code) == '2')
3646 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3647 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3650 return t;
3653 /* Perform constant folding and related simplification of EXPR.
3654 The related simplifications include x*1 => x, x*0 => 0, etc.,
3655 and application of the associative law.
3656 NOP_EXPR conversions may be removed freely (as long as we
3657 are careful not to change the C type of the overall expression)
3658 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3659 but we can constant-fold them if they have constant operands. */
3661 tree
3662 fold (expr)
3663 tree expr;
3665 register tree t = expr;
3666 tree t1 = NULL_TREE;
3667 tree tem;
3668 tree type = TREE_TYPE (expr);
3669 register tree arg0, arg1;
3670 register enum tree_code code = TREE_CODE (t);
3671 register int kind;
3672 int invert;
3674 /* WINS will be nonzero when the switch is done
3675 if all operands are constant. */
3677 int wins = 1;
3679 /* Don't try to process an RTL_EXPR since its operands aren't trees.
3680 Likewise for a SAVE_EXPR that's already been evaluated. */
3681 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t)) != 0)
3682 return t;
3684 /* Return right away if already constant. */
3685 if (TREE_CONSTANT (t))
3687 if (code == CONST_DECL)
3688 return DECL_INITIAL (t);
3689 return t;
3692 kind = TREE_CODE_CLASS (code);
3693 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3695 tree subop;
3697 /* Special case for conversion ops that can have fixed point args. */
3698 arg0 = TREE_OPERAND (t, 0);
3700 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3701 if (arg0 != 0)
3702 STRIP_TYPE_NOPS (arg0);
3704 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3705 subop = TREE_REALPART (arg0);
3706 else
3707 subop = arg0;
3709 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3710 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3711 && TREE_CODE (subop) != REAL_CST
3712 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3714 /* Note that TREE_CONSTANT isn't enough:
3715 static var addresses are constant but we can't
3716 do arithmetic on them. */
3717 wins = 0;
3719 else if (kind == 'e' || kind == '<'
3720 || kind == '1' || kind == '2' || kind == 'r')
3722 register int len = tree_code_length[(int) code];
3723 register int i;
3724 for (i = 0; i < len; i++)
3726 tree op = TREE_OPERAND (t, i);
3727 tree subop;
3729 if (op == 0)
3730 continue; /* Valid for CALL_EXPR, at least. */
3732 if (kind == '<' || code == RSHIFT_EXPR)
3734 /* Signedness matters here. Perhaps we can refine this
3735 later. */
3736 STRIP_TYPE_NOPS (op);
3738 else
3740 /* Strip any conversions that don't change the mode. */
3741 STRIP_NOPS (op);
3744 if (TREE_CODE (op) == COMPLEX_CST)
3745 subop = TREE_REALPART (op);
3746 else
3747 subop = op;
3749 if (TREE_CODE (subop) != INTEGER_CST
3750 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3751 && TREE_CODE (subop) != REAL_CST
3752 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3754 /* Note that TREE_CONSTANT isn't enough:
3755 static var addresses are constant but we can't
3756 do arithmetic on them. */
3757 wins = 0;
3759 if (i == 0)
3760 arg0 = op;
3761 else if (i == 1)
3762 arg1 = op;
3766 /* If this is a commutative operation, and ARG0 is a constant, move it
3767 to ARG1 to reduce the number of tests below. */
3768 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3769 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3770 || code == BIT_AND_EXPR)
3771 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3773 tem = arg0; arg0 = arg1; arg1 = tem;
3775 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3776 TREE_OPERAND (t, 1) = tem;
3779 /* Now WINS is set as described above,
3780 ARG0 is the first operand of EXPR,
3781 and ARG1 is the second operand (if it has more than one operand).
3783 First check for cases where an arithmetic operation is applied to a
3784 compound, conditional, or comparison operation. Push the arithmetic
3785 operation inside the compound or conditional to see if any folding
3786 can then be done. Convert comparison to conditional for this purpose.
3787 The also optimizes non-constant cases that used to be done in
3788 expand_expr.
3790 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3791 one of the operands is a comparison and the other is a comparison, a
3792 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3793 code below would make the expression more complex. Change it to a
3794 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3795 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3797 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3798 || code == EQ_EXPR || code == NE_EXPR)
3799 && ((truth_value_p (TREE_CODE (arg0))
3800 && (truth_value_p (TREE_CODE (arg1))
3801 || (TREE_CODE (arg1) == BIT_AND_EXPR
3802 && integer_onep (TREE_OPERAND (arg1, 1)))))
3803 || (truth_value_p (TREE_CODE (arg1))
3804 && (truth_value_p (TREE_CODE (arg0))
3805 || (TREE_CODE (arg0) == BIT_AND_EXPR
3806 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3808 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3809 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3810 : TRUTH_XOR_EXPR,
3811 type, arg0, arg1));
3813 if (code == EQ_EXPR)
3814 t = invert_truthvalue (t);
3816 return t;
3819 if (TREE_CODE_CLASS (code) == '1')
3821 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3822 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3823 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3824 else if (TREE_CODE (arg0) == COND_EXPR)
3826 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3827 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3828 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3830 /* If this was a conversion, and all we did was to move into
3831 inside the COND_EXPR, bring it back out. But leave it if
3832 it is a conversion from integer to integer and the
3833 result precision is no wider than a word since such a
3834 conversion is cheap and may be optimized away by combine,
3835 while it couldn't if it were outside the COND_EXPR. Then return
3836 so we don't get into an infinite recursion loop taking the
3837 conversion out and then back in. */
3839 if ((code == NOP_EXPR || code == CONVERT_EXPR
3840 || code == NON_LVALUE_EXPR)
3841 && TREE_CODE (t) == COND_EXPR
3842 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3843 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3844 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3845 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
3846 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
3847 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
3848 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
3849 t = build1 (code, type,
3850 build (COND_EXPR,
3851 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3852 TREE_OPERAND (t, 0),
3853 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3854 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3855 return t;
3857 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3858 return fold (build (COND_EXPR, type, arg0,
3859 fold (build1 (code, type, integer_one_node)),
3860 fold (build1 (code, type, integer_zero_node))));
3862 else if (TREE_CODE_CLASS (code) == '2'
3863 || TREE_CODE_CLASS (code) == '<')
3865 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3866 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3867 fold (build (code, type,
3868 arg0, TREE_OPERAND (arg1, 1))));
3869 else if ((TREE_CODE (arg1) == COND_EXPR
3870 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
3871 && TREE_CODE_CLASS (code) != '<'))
3872 && (! TREE_SIDE_EFFECTS (arg0) || current_function_decl != 0))
3874 tree test, true_value, false_value;
3876 if (TREE_CODE (arg1) == COND_EXPR)
3878 test = TREE_OPERAND (arg1, 0);
3879 true_value = TREE_OPERAND (arg1, 1);
3880 false_value = TREE_OPERAND (arg1, 2);
3882 else
3884 tree testtype = TREE_TYPE (arg1);
3885 test = arg1;
3886 true_value = convert (testtype, integer_one_node);
3887 false_value = convert (testtype, integer_zero_node);
3890 /* If ARG0 is complex we want to make sure we only evaluate
3891 it once. Though this is only required if it is volatile, it
3892 might be more efficient even if it is not. However, if we
3893 succeed in folding one part to a constant, we do not need
3894 to make this SAVE_EXPR. Since we do this optimization
3895 primarily to see if we do end up with constant and this
3896 SAVE_EXPR interferes with later optimizations, suppressing
3897 it when we can is important. */
3899 if (TREE_CODE (arg0) != SAVE_EXPR
3900 && ((TREE_CODE (arg0) != VAR_DECL
3901 && TREE_CODE (arg0) != PARM_DECL)
3902 || TREE_SIDE_EFFECTS (arg0)))
3904 tree lhs = fold (build (code, type, arg0, true_value));
3905 tree rhs = fold (build (code, type, arg0, false_value));
3907 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3908 return fold (build (COND_EXPR, type, test, lhs, rhs));
3910 if (current_function_decl != 0)
3911 arg0 = save_expr (arg0);
3914 test = fold (build (COND_EXPR, type, test,
3915 fold (build (code, type, arg0, true_value)),
3916 fold (build (code, type, arg0, false_value))));
3917 if (TREE_CODE (arg0) == SAVE_EXPR)
3918 return build (COMPOUND_EXPR, type,
3919 convert (void_type_node, arg0),
3920 strip_compound_expr (test, arg0));
3921 else
3922 return convert (type, test);
3925 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3926 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3927 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3928 else if ((TREE_CODE (arg0) == COND_EXPR
3929 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
3930 && TREE_CODE_CLASS (code) != '<'))
3931 && (! TREE_SIDE_EFFECTS (arg1) || current_function_decl != 0))
3933 tree test, true_value, false_value;
3935 if (TREE_CODE (arg0) == COND_EXPR)
3937 test = TREE_OPERAND (arg0, 0);
3938 true_value = TREE_OPERAND (arg0, 1);
3939 false_value = TREE_OPERAND (arg0, 2);
3941 else
3943 tree testtype = TREE_TYPE (arg0);
3944 test = arg0;
3945 true_value = convert (testtype, integer_one_node);
3946 false_value = convert (testtype, integer_zero_node);
3949 if (TREE_CODE (arg1) != SAVE_EXPR
3950 && ((TREE_CODE (arg1) != VAR_DECL
3951 && TREE_CODE (arg1) != PARM_DECL)
3952 || TREE_SIDE_EFFECTS (arg1)))
3954 tree lhs = fold (build (code, type, true_value, arg1));
3955 tree rhs = fold (build (code, type, false_value, arg1));
3957 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs)
3958 || TREE_CONSTANT (arg1))
3959 return fold (build (COND_EXPR, type, test, lhs, rhs));
3961 if (current_function_decl != 0)
3962 arg1 = save_expr (arg1);
3965 test = fold (build (COND_EXPR, type, test,
3966 fold (build (code, type, true_value, arg1)),
3967 fold (build (code, type, false_value, arg1))));
3968 if (TREE_CODE (arg1) == SAVE_EXPR)
3969 return build (COMPOUND_EXPR, type,
3970 convert (void_type_node, arg1),
3971 strip_compound_expr (test, arg1));
3972 else
3973 return convert (type, test);
3976 else if (TREE_CODE_CLASS (code) == '<'
3977 && TREE_CODE (arg0) == COMPOUND_EXPR)
3978 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3979 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3980 else if (TREE_CODE_CLASS (code) == '<'
3981 && TREE_CODE (arg1) == COMPOUND_EXPR)
3982 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3983 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
3985 switch (code)
3987 case INTEGER_CST:
3988 case REAL_CST:
3989 case STRING_CST:
3990 case COMPLEX_CST:
3991 case CONSTRUCTOR:
3992 return t;
3994 case CONST_DECL:
3995 return fold (DECL_INITIAL (t));
3997 case NOP_EXPR:
3998 case FLOAT_EXPR:
3999 case CONVERT_EXPR:
4000 case FIX_TRUNC_EXPR:
4001 /* Other kinds of FIX are not handled properly by fold_convert. */
4003 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4004 return TREE_OPERAND (t, 0);
4006 /* Handle cases of two conversions in a row. */
4007 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4008 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4010 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4011 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4012 tree final_type = TREE_TYPE (t);
4013 int inside_int = INTEGRAL_TYPE_P (inside_type);
4014 int inside_ptr = POINTER_TYPE_P (inside_type);
4015 int inside_float = FLOAT_TYPE_P (inside_type);
4016 int inside_prec = TYPE_PRECISION (inside_type);
4017 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4018 int inter_int = INTEGRAL_TYPE_P (inter_type);
4019 int inter_ptr = POINTER_TYPE_P (inter_type);
4020 int inter_float = FLOAT_TYPE_P (inter_type);
4021 int inter_prec = TYPE_PRECISION (inter_type);
4022 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4023 int final_int = INTEGRAL_TYPE_P (final_type);
4024 int final_ptr = POINTER_TYPE_P (final_type);
4025 int final_float = FLOAT_TYPE_P (final_type);
4026 int final_prec = TYPE_PRECISION (final_type);
4027 int final_unsignedp = TREE_UNSIGNED (final_type);
4029 /* In addition to the cases of two conversions in a row
4030 handled below, if we are converting something to its own
4031 type via an object of identical or wider precision, neither
4032 conversion is needed. */
4033 if (inside_type == final_type
4034 && ((inter_int && final_int) || (inter_float && final_float))
4035 && inter_prec >= final_prec)
4036 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4038 /* Likewise, if the intermediate and final types are either both
4039 float or both integer, we don't need the middle conversion if
4040 it is wider than the final type and doesn't change the signedness
4041 (for integers). Avoid this if the final type is a pointer
4042 since then we sometimes need the inner conversion. Likewise if
4043 the outer has a precision not equal to the size of its mode. */
4044 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4045 || (inter_float && inside_float))
4046 && inter_prec >= inside_prec
4047 && (inter_float || inter_unsignedp == inside_unsignedp)
4048 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4049 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4050 && ! final_ptr)
4051 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4053 /* Two conversions in a row are not needed unless:
4054 - some conversion is floating-point (overstrict for now), or
4055 - the intermediate type is narrower than both initial and
4056 final, or
4057 - the intermediate type and innermost type differ in signedness,
4058 and the outermost type is wider than the intermediate, or
4059 - the initial type is a pointer type and the precisions of the
4060 intermediate and final types differ, or
4061 - the final type is a pointer type and the precisions of the
4062 initial and intermediate types differ. */
4063 if (! inside_float && ! inter_float && ! final_float
4064 && (inter_prec > inside_prec || inter_prec > final_prec)
4065 && ! (inside_int && inter_int
4066 && inter_unsignedp != inside_unsignedp
4067 && inter_prec < final_prec)
4068 && ((inter_unsignedp && inter_prec > inside_prec)
4069 == (final_unsignedp && final_prec > inter_prec))
4070 && ! (inside_ptr && inter_prec != final_prec)
4071 && ! (final_ptr && inside_prec != inter_prec)
4072 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4073 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4074 && ! final_ptr)
4075 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4078 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4079 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4080 /* Detect assigning a bitfield. */
4081 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4082 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4084 /* Don't leave an assignment inside a conversion
4085 unless assigning a bitfield. */
4086 tree prev = TREE_OPERAND (t, 0);
4087 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4088 /* First do the assignment, then return converted constant. */
4089 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4090 TREE_USED (t) = 1;
4091 return t;
4093 if (!wins)
4095 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4096 return t;
4098 return fold_convert (t, arg0);
4100 #if 0 /* This loses on &"foo"[0]. */
4101 case ARRAY_REF:
4103 int i;
4105 /* Fold an expression like: "foo"[2] */
4106 if (TREE_CODE (arg0) == STRING_CST
4107 && TREE_CODE (arg1) == INTEGER_CST
4108 && !TREE_INT_CST_HIGH (arg1)
4109 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4111 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4112 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
4113 force_fit_type (t, 0);
4116 return t;
4117 #endif /* 0 */
4119 case COMPONENT_REF:
4120 if (TREE_CODE (arg0) == CONSTRUCTOR)
4122 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4123 if (m)
4124 t = TREE_VALUE (m);
4126 return t;
4128 case RANGE_EXPR:
4129 TREE_CONSTANT (t) = wins;
4130 return t;
4132 case NEGATE_EXPR:
4133 if (wins)
4135 if (TREE_CODE (arg0) == INTEGER_CST)
4137 HOST_WIDE_INT low, high;
4138 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4139 TREE_INT_CST_HIGH (arg0),
4140 &low, &high);
4141 t = build_int_2 (low, high);
4142 TREE_TYPE (t) = type;
4143 TREE_OVERFLOW (t)
4144 = (TREE_OVERFLOW (arg0)
4145 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4146 TREE_CONSTANT_OVERFLOW (t)
4147 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4149 else if (TREE_CODE (arg0) == REAL_CST)
4150 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4152 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4153 return TREE_OPERAND (arg0, 0);
4155 /* Convert - (a - b) to (b - a) for non-floating-point. */
4156 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
4157 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4158 TREE_OPERAND (arg0, 0));
4160 return t;
4162 case ABS_EXPR:
4163 if (wins)
4165 if (TREE_CODE (arg0) == INTEGER_CST)
4167 if (! TREE_UNSIGNED (type)
4168 && TREE_INT_CST_HIGH (arg0) < 0)
4170 HOST_WIDE_INT low, high;
4171 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4172 TREE_INT_CST_HIGH (arg0),
4173 &low, &high);
4174 t = build_int_2 (low, high);
4175 TREE_TYPE (t) = type;
4176 TREE_OVERFLOW (t)
4177 = (TREE_OVERFLOW (arg0)
4178 | force_fit_type (t, overflow));
4179 TREE_CONSTANT_OVERFLOW (t)
4180 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4183 else if (TREE_CODE (arg0) == REAL_CST)
4185 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4186 t = build_real (type,
4187 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4190 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4191 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4192 return t;
4194 case CONJ_EXPR:
4195 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4196 return arg0;
4197 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4198 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
4199 TREE_OPERAND (arg0, 0),
4200 fold (build1 (NEGATE_EXPR,
4201 TREE_TYPE (TREE_TYPE (arg0)),
4202 TREE_OPERAND (arg0, 1))));
4203 else if (TREE_CODE (arg0) == COMPLEX_CST)
4204 return build_complex (type, TREE_OPERAND (arg0, 0),
4205 fold (build1 (NEGATE_EXPR,
4206 TREE_TYPE (TREE_TYPE (arg0)),
4207 TREE_OPERAND (arg0, 1))));
4208 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4209 return fold (build (TREE_CODE (arg0), type,
4210 fold (build1 (CONJ_EXPR, type,
4211 TREE_OPERAND (arg0, 0))),
4212 fold (build1 (CONJ_EXPR,
4213 type, TREE_OPERAND (arg0, 1)))));
4214 else if (TREE_CODE (arg0) == CONJ_EXPR)
4215 return TREE_OPERAND (arg0, 0);
4216 return t;
4218 case BIT_NOT_EXPR:
4219 if (wins)
4221 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4222 ~ TREE_INT_CST_HIGH (arg0));
4223 TREE_TYPE (t) = type;
4224 force_fit_type (t, 0);
4225 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4226 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4228 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4229 return TREE_OPERAND (arg0, 0);
4230 return t;
4232 case PLUS_EXPR:
4233 /* A + (-B) -> A - B */
4234 if (TREE_CODE (arg1) == NEGATE_EXPR)
4235 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4236 else if (! FLOAT_TYPE_P (type))
4238 if (integer_zerop (arg1))
4239 return non_lvalue (convert (type, arg0));
4241 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4242 with a constant, and the two constants have no bits in common,
4243 we should treat this as a BIT_IOR_EXPR since this may produce more
4244 simplifications. */
4245 if (TREE_CODE (arg0) == BIT_AND_EXPR
4246 && TREE_CODE (arg1) == BIT_AND_EXPR
4247 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4248 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
4249 && integer_zerop (const_binop (BIT_AND_EXPR,
4250 TREE_OPERAND (arg0, 1),
4251 TREE_OPERAND (arg1, 1), 0)))
4253 code = BIT_IOR_EXPR;
4254 goto bit_ior;
4257 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
4258 about the case where C is a constant, just try one of the
4259 four possibilities. */
4261 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4262 && operand_equal_p (TREE_OPERAND (arg0, 1),
4263 TREE_OPERAND (arg1, 1), 0))
4264 return fold (build (MULT_EXPR, type,
4265 fold (build (PLUS_EXPR, type,
4266 TREE_OPERAND (arg0, 0),
4267 TREE_OPERAND (arg1, 0))),
4268 TREE_OPERAND (arg0, 1)));
4270 /* In IEEE floating point, x+0 may not equal x. */
4271 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4272 || flag_fast_math)
4273 && real_zerop (arg1))
4274 return non_lvalue (convert (type, arg0));
4275 associate:
4276 /* In most languages, can't associate operations on floats
4277 through parentheses. Rather than remember where the parentheses
4278 were, we don't associate floats at all. It shouldn't matter much.
4279 However, associating multiplications is only very slightly
4280 inaccurate, so do that if -ffast-math is specified. */
4281 if (FLOAT_TYPE_P (type)
4282 && ! (flag_fast_math && code == MULT_EXPR))
4283 goto binary;
4285 /* The varsign == -1 cases happen only for addition and subtraction.
4286 It says that the arg that was split was really CON minus VAR.
4287 The rest of the code applies to all associative operations. */
4288 if (!wins)
4290 tree var, con;
4291 int varsign;
4293 if (split_tree (arg0, code, &var, &con, &varsign))
4295 if (varsign == -1)
4297 /* EXPR is (CON-VAR) +- ARG1. */
4298 /* If it is + and VAR==ARG1, return just CONST. */
4299 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
4300 return convert (TREE_TYPE (t), con);
4302 /* If ARG0 is a constant, don't change things around;
4303 instead keep all the constant computations together. */
4305 if (TREE_CONSTANT (arg0))
4306 return t;
4308 /* Otherwise return (CON +- ARG1) - VAR. */
4309 t = build (MINUS_EXPR, type,
4310 fold (build (code, type, con, arg1)), var);
4312 else
4314 /* EXPR is (VAR+CON) +- ARG1. */
4315 /* If it is - and VAR==ARG1, return just CONST. */
4316 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
4317 return convert (TREE_TYPE (t), con);
4319 /* If ARG0 is a constant, don't change things around;
4320 instead keep all the constant computations together. */
4322 if (TREE_CONSTANT (arg0))
4323 return t;
4325 /* Otherwise return VAR +- (ARG1 +- CON). */
4326 tem = fold (build (code, type, arg1, con));
4327 t = build (code, type, var, tem);
4329 if (integer_zerop (tem)
4330 && (code == PLUS_EXPR || code == MINUS_EXPR))
4331 return convert (type, var);
4332 /* If we have x +/- (c - d) [c an explicit integer]
4333 change it to x -/+ (d - c) since if d is relocatable
4334 then the latter can be a single immediate insn
4335 and the former cannot. */
4336 if (TREE_CODE (tem) == MINUS_EXPR
4337 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
4339 tree tem1 = TREE_OPERAND (tem, 1);
4340 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
4341 TREE_OPERAND (tem, 0) = tem1;
4342 TREE_SET_CODE (t,
4343 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4346 return t;
4349 if (split_tree (arg1, code, &var, &con, &varsign))
4351 if (TREE_CONSTANT (arg1))
4352 return t;
4354 if (varsign == -1)
4355 TREE_SET_CODE (t,
4356 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4358 /* EXPR is ARG0 +- (CON +- VAR). */
4359 if (TREE_CODE (t) == MINUS_EXPR
4360 && operand_equal_p (var, arg0, 0))
4362 /* If VAR and ARG0 cancel, return just CON or -CON. */
4363 if (code == PLUS_EXPR)
4364 return convert (TREE_TYPE (t), con);
4365 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
4366 convert (TREE_TYPE (t), con)));
4369 t = build (TREE_CODE (t), type,
4370 fold (build (code, TREE_TYPE (t), arg0, con)), var);
4372 if (integer_zerop (TREE_OPERAND (t, 0))
4373 && TREE_CODE (t) == PLUS_EXPR)
4374 return convert (TREE_TYPE (t), var);
4375 return t;
4378 binary:
4379 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
4380 if (TREE_CODE (arg1) == REAL_CST)
4381 return t;
4382 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
4383 if (wins)
4384 t1 = const_binop (code, arg0, arg1, 0);
4385 if (t1 != NULL_TREE)
4387 /* The return value should always have
4388 the same type as the original expression. */
4389 if (TREE_TYPE (t1) != TREE_TYPE (t))
4390 t1 = convert (TREE_TYPE (t), t1);
4392 return t1;
4394 return t;
4396 case MINUS_EXPR:
4397 if (! FLOAT_TYPE_P (type))
4399 if (! wins && integer_zerop (arg0))
4400 return build1 (NEGATE_EXPR, type, arg1);
4401 if (integer_zerop (arg1))
4402 return non_lvalue (convert (type, arg0));
4404 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
4405 about the case where C is a constant, just try one of the
4406 four possibilities. */
4408 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4409 && operand_equal_p (TREE_OPERAND (arg0, 1),
4410 TREE_OPERAND (arg1, 1), 0))
4411 return fold (build (MULT_EXPR, type,
4412 fold (build (MINUS_EXPR, type,
4413 TREE_OPERAND (arg0, 0),
4414 TREE_OPERAND (arg1, 0))),
4415 TREE_OPERAND (arg0, 1)));
4417 /* Convert A - (-B) to A + B. */
4418 else if (TREE_CODE (arg1) == NEGATE_EXPR)
4419 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4421 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4422 || flag_fast_math)
4424 /* Except with IEEE floating point, 0-x equals -x. */
4425 if (! wins && real_zerop (arg0))
4426 return build1 (NEGATE_EXPR, type, arg1);
4427 /* Except with IEEE floating point, x-0 equals x. */
4428 if (real_zerop (arg1))
4429 return non_lvalue (convert (type, arg0));
4432 /* Fold &x - &x. This can happen from &x.foo - &x.
4433 This is unsafe for certain floats even in non-IEEE formats.
4434 In IEEE, it is unsafe because it does wrong for NaNs.
4435 Also note that operand_equal_p is always false if an operand
4436 is volatile. */
4438 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
4439 && operand_equal_p (arg0, arg1, 0))
4440 return convert (type, integer_zero_node);
4442 goto associate;
4444 case MULT_EXPR:
4445 if (! FLOAT_TYPE_P (type))
4447 if (integer_zerop (arg1))
4448 return omit_one_operand (type, arg1, arg0);
4449 if (integer_onep (arg1))
4450 return non_lvalue (convert (type, arg0));
4452 /* ((A / C) * C) is A if the division is an
4453 EXACT_DIV_EXPR. Since C is normally a constant,
4454 just check for one of the four possibilities. */
4456 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
4457 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4458 return TREE_OPERAND (arg0, 0);
4460 /* (a * (1 << b)) is (a << b) */
4461 if (TREE_CODE (arg1) == LSHIFT_EXPR
4462 && integer_onep (TREE_OPERAND (arg1, 0)))
4463 return fold (build (LSHIFT_EXPR, type, arg0,
4464 TREE_OPERAND (arg1, 1)));
4465 if (TREE_CODE (arg0) == LSHIFT_EXPR
4466 && integer_onep (TREE_OPERAND (arg0, 0)))
4467 return fold (build (LSHIFT_EXPR, type, arg1,
4468 TREE_OPERAND (arg0, 1)));
4470 else
4472 /* x*0 is 0, except for IEEE floating point. */
4473 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4474 || flag_fast_math)
4475 && real_zerop (arg1))
4476 return omit_one_operand (type, arg1, arg0);
4477 /* In IEEE floating point, x*1 is not equivalent to x for snans.
4478 However, ANSI says we can drop signals,
4479 so we can do this anyway. */
4480 if (real_onep (arg1))
4481 return non_lvalue (convert (type, arg0));
4482 /* x*2 is x+x */
4483 if (! wins && real_twop (arg1) && current_function_decl != 0)
4485 tree arg = save_expr (arg0);
4486 return build (PLUS_EXPR, type, arg, arg);
4489 goto associate;
4491 case BIT_IOR_EXPR:
4492 bit_ior:
4494 register enum tree_code code0, code1;
4496 if (integer_all_onesp (arg1))
4497 return omit_one_operand (type, arg1, arg0);
4498 if (integer_zerop (arg1))
4499 return non_lvalue (convert (type, arg0));
4500 t1 = distribute_bit_expr (code, type, arg0, arg1);
4501 if (t1 != NULL_TREE)
4502 return t1;
4504 /* (A << C1) | (A >> C2) if A is unsigned and C1+C2 is the size of A
4505 is a rotate of A by C1 bits. */
4506 /* (A << B) | (A >> (Z - B)) if A is unsigned and Z is the size of A
4507 is a rotate of A by B bits. */
4509 code0 = TREE_CODE (arg0);
4510 code1 = TREE_CODE (arg1);
4511 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
4512 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
4513 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
4514 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4516 register tree tree01, tree11;
4517 register enum tree_code code01, code11;
4519 tree01 = TREE_OPERAND (arg0, 1);
4520 tree11 = TREE_OPERAND (arg1, 1);
4521 code01 = TREE_CODE (tree01);
4522 code11 = TREE_CODE (tree11);
4523 if (code01 == INTEGER_CST
4524 && code11 == INTEGER_CST
4525 && TREE_INT_CST_HIGH (tree01) == 0
4526 && TREE_INT_CST_HIGH (tree11) == 0
4527 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
4528 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
4529 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
4530 code0 == LSHIFT_EXPR ? tree01 : tree11);
4531 else if (code11 == MINUS_EXPR
4532 && TREE_CODE (TREE_OPERAND (tree11, 0)) == INTEGER_CST
4533 && TREE_INT_CST_HIGH (TREE_OPERAND (tree11, 0)) == 0
4534 && TREE_INT_CST_LOW (TREE_OPERAND (tree11, 0))
4535 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4536 && operand_equal_p (tree01, TREE_OPERAND (tree11, 1), 0))
4537 return build (code0 == LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4538 type, TREE_OPERAND (arg0, 0), tree01);
4539 else if (code01 == MINUS_EXPR
4540 && TREE_CODE (TREE_OPERAND (tree01, 0)) == INTEGER_CST
4541 && TREE_INT_CST_HIGH (TREE_OPERAND (tree01, 0)) == 0
4542 && TREE_INT_CST_LOW (TREE_OPERAND (tree01, 0))
4543 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4544 && operand_equal_p (tree11, TREE_OPERAND (tree01, 1), 0))
4545 return build (code0 != LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4546 type, TREE_OPERAND (arg0, 0), tree11);
4549 goto associate;
4552 case BIT_XOR_EXPR:
4553 if (integer_zerop (arg1))
4554 return non_lvalue (convert (type, arg0));
4555 if (integer_all_onesp (arg1))
4556 return fold (build1 (BIT_NOT_EXPR, type, arg0));
4557 goto associate;
4559 case BIT_AND_EXPR:
4560 bit_and:
4561 if (integer_all_onesp (arg1))
4562 return non_lvalue (convert (type, arg0));
4563 if (integer_zerop (arg1))
4564 return omit_one_operand (type, arg1, arg0);
4565 t1 = distribute_bit_expr (code, type, arg0, arg1);
4566 if (t1 != NULL_TREE)
4567 return t1;
4568 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
4569 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
4570 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
4572 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
4573 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4574 && (~TREE_INT_CST_LOW (arg0)
4575 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4576 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
4578 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
4579 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4581 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
4582 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4583 && (~TREE_INT_CST_LOW (arg1)
4584 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4585 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
4587 goto associate;
4589 case BIT_ANDTC_EXPR:
4590 if (integer_all_onesp (arg0))
4591 return non_lvalue (convert (type, arg1));
4592 if (integer_zerop (arg0))
4593 return omit_one_operand (type, arg0, arg1);
4594 if (TREE_CODE (arg1) == INTEGER_CST)
4596 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
4597 code = BIT_AND_EXPR;
4598 goto bit_and;
4600 goto binary;
4602 case RDIV_EXPR:
4603 /* In most cases, do nothing with a divide by zero. */
4604 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4605 #ifndef REAL_INFINITY
4606 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
4607 return t;
4608 #endif
4609 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4611 /* In IEEE floating point, x/1 is not equivalent to x for snans.
4612 However, ANSI says we can drop signals, so we can do this anyway. */
4613 if (real_onep (arg1))
4614 return non_lvalue (convert (type, arg0));
4616 /* If ARG1 is a constant, we can convert this to a multiply by the
4617 reciprocal. This does not have the same rounding properties,
4618 so only do this if -ffast-math. We can actually always safely
4619 do it if ARG1 is a power of two, but it's hard to tell if it is
4620 or not in a portable manner. */
4621 if (TREE_CODE (arg1) == REAL_CST)
4623 if (flag_fast_math
4624 && 0 != (tem = const_binop (code, build_real (type, dconst1),
4625 arg1, 0)))
4626 return fold (build (MULT_EXPR, type, arg0, tem));
4627 /* Find the reciprocal if optimizing and the result is exact. */
4628 else if (optimize)
4630 REAL_VALUE_TYPE r;
4631 r = TREE_REAL_CST (arg1);
4632 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
4634 tem = build_real (type, r);
4635 return fold (build (MULT_EXPR, type, arg0, tem));
4639 goto binary;
4641 case TRUNC_DIV_EXPR:
4642 case ROUND_DIV_EXPR:
4643 case FLOOR_DIV_EXPR:
4644 case CEIL_DIV_EXPR:
4645 case EXACT_DIV_EXPR:
4646 if (integer_onep (arg1))
4647 return non_lvalue (convert (type, arg0));
4648 if (integer_zerop (arg1))
4649 return t;
4651 /* If we have ((a / C1) / C2) where both division are the same type, try
4652 to simplify. First see if C1 * C2 overflows or not. */
4653 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
4654 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4656 tree new_divisor;
4658 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
4659 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
4661 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
4662 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
4664 /* If no overflow, divide by C1*C2. */
4665 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
4669 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4670 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4671 expressions, which often appear in the offsets or sizes of
4672 objects with a varying size. Only deal with positive divisors
4673 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4675 Look for NOPs and SAVE_EXPRs inside. */
4677 if (TREE_CODE (arg1) == INTEGER_CST
4678 && tree_int_cst_sgn (arg1) >= 0)
4680 int have_save_expr = 0;
4681 tree c2 = integer_zero_node;
4682 tree xarg0 = arg0;
4684 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4685 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4687 STRIP_NOPS (xarg0);
4689 if (TREE_CODE (xarg0) == PLUS_EXPR
4690 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4691 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4692 else if (TREE_CODE (xarg0) == MINUS_EXPR
4693 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4694 /* If we are doing this computation unsigned, the negate
4695 is incorrect. */
4696 && ! TREE_UNSIGNED (type))
4698 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4699 xarg0 = TREE_OPERAND (xarg0, 0);
4702 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4703 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4705 STRIP_NOPS (xarg0);
4707 if (TREE_CODE (xarg0) == MULT_EXPR
4708 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4709 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4710 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4711 TREE_OPERAND (xarg0, 1), arg1, 1))
4712 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4713 TREE_OPERAND (xarg0, 1), 1)))
4714 && (tree_int_cst_sgn (c2) >= 0
4715 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4716 arg1, 1))))
4718 tree outer_div = integer_one_node;
4719 tree c1 = TREE_OPERAND (xarg0, 1);
4720 tree c3 = arg1;
4722 /* If C3 > C1, set them equal and do a divide by
4723 C3/C1 at the end of the operation. */
4724 if (tree_int_cst_lt (c1, c3))
4725 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4727 /* The result is A * (C1/C3) + (C2/C3). */
4728 t = fold (build (PLUS_EXPR, type,
4729 fold (build (MULT_EXPR, type,
4730 TREE_OPERAND (xarg0, 0),
4731 const_binop (code, c1, c3, 1))),
4732 const_binop (code, c2, c3, 1)));
4734 if (! integer_onep (outer_div))
4735 t = fold (build (code, type, t, convert (type, outer_div)));
4737 if (have_save_expr)
4738 t = save_expr (t);
4740 return t;
4744 goto binary;
4746 case CEIL_MOD_EXPR:
4747 case FLOOR_MOD_EXPR:
4748 case ROUND_MOD_EXPR:
4749 case TRUNC_MOD_EXPR:
4750 if (integer_onep (arg1))
4751 return omit_one_operand (type, integer_zero_node, arg0);
4752 if (integer_zerop (arg1))
4753 return t;
4755 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4756 where C1 % C3 == 0. Handle similarly to the division case,
4757 but don't bother with SAVE_EXPRs. */
4759 if (TREE_CODE (arg1) == INTEGER_CST
4760 && ! integer_zerop (arg1))
4762 tree c2 = integer_zero_node;
4763 tree xarg0 = arg0;
4765 if (TREE_CODE (xarg0) == PLUS_EXPR
4766 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4767 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4768 else if (TREE_CODE (xarg0) == MINUS_EXPR
4769 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4770 && ! TREE_UNSIGNED (type))
4772 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4773 xarg0 = TREE_OPERAND (xarg0, 0);
4776 STRIP_NOPS (xarg0);
4778 if (TREE_CODE (xarg0) == MULT_EXPR
4779 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4780 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4781 TREE_OPERAND (xarg0, 1),
4782 arg1, 1))
4783 && tree_int_cst_sgn (c2) >= 0)
4784 /* The result is (C2%C3). */
4785 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4786 TREE_OPERAND (xarg0, 0));
4789 goto binary;
4791 case LSHIFT_EXPR:
4792 case RSHIFT_EXPR:
4793 case LROTATE_EXPR:
4794 case RROTATE_EXPR:
4795 if (integer_zerop (arg1))
4796 return non_lvalue (convert (type, arg0));
4797 /* Since negative shift count is not well-defined,
4798 don't try to compute it in the compiler. */
4799 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
4800 return t;
4801 /* Rewrite an LROTATE_EXPR by a constant into an
4802 RROTATE_EXPR by a new constant. */
4803 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
4805 TREE_SET_CODE (t, RROTATE_EXPR);
4806 code = RROTATE_EXPR;
4807 TREE_OPERAND (t, 1) = arg1
4808 = const_binop
4809 (MINUS_EXPR,
4810 convert (TREE_TYPE (arg1),
4811 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
4812 arg1, 0);
4813 if (tree_int_cst_sgn (arg1) < 0)
4814 return t;
4817 /* If we have a rotate of a bit operation with the rotate count and
4818 the second operand of the bit operation both constant,
4819 permute the two operations. */
4820 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4821 && (TREE_CODE (arg0) == BIT_AND_EXPR
4822 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
4823 || TREE_CODE (arg0) == BIT_IOR_EXPR
4824 || TREE_CODE (arg0) == BIT_XOR_EXPR)
4825 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4826 return fold (build (TREE_CODE (arg0), type,
4827 fold (build (code, type,
4828 TREE_OPERAND (arg0, 0), arg1)),
4829 fold (build (code, type,
4830 TREE_OPERAND (arg0, 1), arg1))));
4832 /* Two consecutive rotates adding up to the width of the mode can
4833 be ignored. */
4834 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4835 && TREE_CODE (arg0) == RROTATE_EXPR
4836 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4837 && TREE_INT_CST_HIGH (arg1) == 0
4838 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
4839 && ((TREE_INT_CST_LOW (arg1)
4840 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
4841 == GET_MODE_BITSIZE (TYPE_MODE (type))))
4842 return TREE_OPERAND (arg0, 0);
4844 goto binary;
4846 case MIN_EXPR:
4847 if (operand_equal_p (arg0, arg1, 0))
4848 return arg0;
4849 if (INTEGRAL_TYPE_P (type)
4850 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4851 return omit_one_operand (type, arg1, arg0);
4852 goto associate;
4854 case MAX_EXPR:
4855 if (operand_equal_p (arg0, arg1, 0))
4856 return arg0;
4857 if (INTEGRAL_TYPE_P (type)
4858 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4859 return omit_one_operand (type, arg1, arg0);
4860 goto associate;
4862 case TRUTH_NOT_EXPR:
4863 /* Note that the operand of this must be an int
4864 and its values must be 0 or 1.
4865 ("true" is a fixed value perhaps depending on the language,
4866 but we don't handle values other than 1 correctly yet.) */
4867 tem = invert_truthvalue (arg0);
4868 /* Avoid infinite recursion. */
4869 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
4870 return t;
4871 return convert (type, tem);
4873 case TRUTH_ANDIF_EXPR:
4874 /* Note that the operands of this must be ints
4875 and their values must be 0 or 1.
4876 ("true" is a fixed value perhaps depending on the language.) */
4877 /* If first arg is constant zero, return it. */
4878 if (integer_zerop (arg0))
4879 return arg0;
4880 case TRUTH_AND_EXPR:
4881 /* If either arg is constant true, drop it. */
4882 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4883 return non_lvalue (arg1);
4884 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4885 return non_lvalue (arg0);
4886 /* If second arg is constant zero, result is zero, but first arg
4887 must be evaluated. */
4888 if (integer_zerop (arg1))
4889 return omit_one_operand (type, arg1, arg0);
4890 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
4891 case will be handled here. */
4892 if (integer_zerop (arg0))
4893 return omit_one_operand (type, arg0, arg1);
4895 truth_andor:
4896 /* We only do these simplifications if we are optimizing. */
4897 if (!optimize)
4898 return t;
4900 /* Check for things like (A || B) && (A || C). We can convert this
4901 to A || (B && C). Note that either operator can be any of the four
4902 truth and/or operations and the transformation will still be
4903 valid. Also note that we only care about order for the
4904 ANDIF and ORIF operators. If B contains side effects, this
4905 might change the truth-value of A. */
4906 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4907 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4908 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4909 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4910 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
4911 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
4913 tree a00 = TREE_OPERAND (arg0, 0);
4914 tree a01 = TREE_OPERAND (arg0, 1);
4915 tree a10 = TREE_OPERAND (arg1, 0);
4916 tree a11 = TREE_OPERAND (arg1, 1);
4917 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4918 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4919 && (code == TRUTH_AND_EXPR
4920 || code == TRUTH_OR_EXPR));
4922 if (operand_equal_p (a00, a10, 0))
4923 return fold (build (TREE_CODE (arg0), type, a00,
4924 fold (build (code, type, a01, a11))));
4925 else if (commutative && operand_equal_p (a00, a11, 0))
4926 return fold (build (TREE_CODE (arg0), type, a00,
4927 fold (build (code, type, a01, a10))));
4928 else if (commutative && operand_equal_p (a01, a10, 0))
4929 return fold (build (TREE_CODE (arg0), type, a01,
4930 fold (build (code, type, a00, a11))));
4932 /* This case if tricky because we must either have commutative
4933 operators or else A10 must not have side-effects. */
4935 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4936 && operand_equal_p (a01, a11, 0))
4937 return fold (build (TREE_CODE (arg0), type,
4938 fold (build (code, type, a00, a10)),
4939 a01));
4942 /* See if we can build a range comparison. */
4943 if (0 != (tem = fold_range_test (t)))
4944 return tem;
4946 /* Check for the possibility of merging component references. If our
4947 lhs is another similar operation, try to merge its rhs with our
4948 rhs. Then try to merge our lhs and rhs. */
4949 if (TREE_CODE (arg0) == code
4950 && 0 != (tem = fold_truthop (code, type,
4951 TREE_OPERAND (arg0, 1), arg1)))
4952 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4954 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
4955 return tem;
4957 return t;
4959 case TRUTH_ORIF_EXPR:
4960 /* Note that the operands of this must be ints
4961 and their values must be 0 or true.
4962 ("true" is a fixed value perhaps depending on the language.) */
4963 /* If first arg is constant true, return it. */
4964 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4965 return arg0;
4966 case TRUTH_OR_EXPR:
4967 /* If either arg is constant zero, drop it. */
4968 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
4969 return non_lvalue (arg1);
4970 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
4971 return non_lvalue (arg0);
4972 /* If second arg is constant true, result is true, but we must
4973 evaluate first arg. */
4974 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4975 return omit_one_operand (type, arg1, arg0);
4976 /* Likewise for first arg, but note this only occurs here for
4977 TRUTH_OR_EXPR. */
4978 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4979 return omit_one_operand (type, arg0, arg1);
4980 goto truth_andor;
4982 case TRUTH_XOR_EXPR:
4983 /* If either arg is constant zero, drop it. */
4984 if (integer_zerop (arg0))
4985 return non_lvalue (arg1);
4986 if (integer_zerop (arg1))
4987 return non_lvalue (arg0);
4988 /* If either arg is constant true, this is a logical inversion. */
4989 if (integer_onep (arg0))
4990 return non_lvalue (invert_truthvalue (arg1));
4991 if (integer_onep (arg1))
4992 return non_lvalue (invert_truthvalue (arg0));
4993 return t;
4995 case EQ_EXPR:
4996 case NE_EXPR:
4997 case LT_EXPR:
4998 case GT_EXPR:
4999 case LE_EXPR:
5000 case GE_EXPR:
5001 /* If one arg is a constant integer, put it last. */
5002 if (TREE_CODE (arg0) == INTEGER_CST
5003 && TREE_CODE (arg1) != INTEGER_CST)
5005 TREE_OPERAND (t, 0) = arg1;
5006 TREE_OPERAND (t, 1) = arg0;
5007 arg0 = TREE_OPERAND (t, 0);
5008 arg1 = TREE_OPERAND (t, 1);
5009 code = swap_tree_comparison (code);
5010 TREE_SET_CODE (t, code);
5013 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5014 First, see if one arg is constant; find the constant arg
5015 and the other one. */
5017 tree constop = 0, varop;
5018 int constopnum = -1;
5020 if (TREE_CONSTANT (arg1))
5021 constopnum = 1, constop = arg1, varop = arg0;
5022 if (TREE_CONSTANT (arg0))
5023 constopnum = 0, constop = arg0, varop = arg1;
5025 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5027 /* This optimization is invalid for ordered comparisons
5028 if CONST+INCR overflows or if foo+incr might overflow.
5029 This optimization is invalid for floating point due to rounding.
5030 For pointer types we assume overflow doesn't happen. */
5031 if (POINTER_TYPE_P (TREE_TYPE (varop))
5032 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5033 && (code == EQ_EXPR || code == NE_EXPR)))
5035 tree newconst
5036 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5037 constop, TREE_OPERAND (varop, 1)));
5038 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
5040 /* If VAROP is a reference to a bitfield, we must mask
5041 the constant by the width of the field. */
5042 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5043 && DECL_BIT_FIELD(TREE_OPERAND
5044 (TREE_OPERAND (varop, 0), 1)))
5046 int size
5047 = TREE_INT_CST_LOW (DECL_SIZE
5048 (TREE_OPERAND
5049 (TREE_OPERAND (varop, 0), 1)));
5051 newconst = fold (build (BIT_AND_EXPR,
5052 TREE_TYPE (varop), newconst,
5053 convert (TREE_TYPE (varop),
5054 build_int_2 (size, 0))));
5058 t = build (code, type, TREE_OPERAND (t, 0),
5059 TREE_OPERAND (t, 1));
5060 TREE_OPERAND (t, constopnum) = newconst;
5061 return t;
5064 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5066 if (POINTER_TYPE_P (TREE_TYPE (varop))
5067 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5068 && (code == EQ_EXPR || code == NE_EXPR)))
5070 tree newconst
5071 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5072 constop, TREE_OPERAND (varop, 1)));
5073 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
5075 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5076 && DECL_BIT_FIELD(TREE_OPERAND
5077 (TREE_OPERAND (varop, 0), 1)))
5079 int size
5080 = TREE_INT_CST_LOW (DECL_SIZE
5081 (TREE_OPERAND
5082 (TREE_OPERAND (varop, 0), 1)));
5084 newconst = fold (build (BIT_AND_EXPR,
5085 TREE_TYPE (varop), newconst,
5086 convert (TREE_TYPE (varop),
5087 build_int_2 (size, 0))));
5091 t = build (code, type, TREE_OPERAND (t, 0),
5092 TREE_OPERAND (t, 1));
5093 TREE_OPERAND (t, constopnum) = newconst;
5094 return t;
5099 /* Change X >= CST to X > (CST - 1) and X < CST to X <= (CST - 1)
5100 if (TREE_CODE (arg1) == INTEGER_CST
5101 && TREE_CODE (arg0) != INTEGER_CST
5102 && tree_int_cst_sgn (arg1) > 0)
5104 switch (TREE_CODE (t))
5106 case GE_EXPR:
5107 code = GT_EXPR;
5108 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5109 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5110 break;
5112 case LT_EXPR:
5113 code = LE_EXPR;
5114 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5115 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5116 break;
5118 default:
5119 break;
5123 /* If this is an EQ or NE comparison with zero and ARG0 is
5124 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
5125 two operations, but the latter can be done in one less insn
5126 on machines that have only two-operand insns or on which a
5127 constant cannot be the first operand. */
5128 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
5129 && TREE_CODE (arg0) == BIT_AND_EXPR)
5131 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
5132 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
5133 return
5134 fold (build (code, type,
5135 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5136 build (RSHIFT_EXPR,
5137 TREE_TYPE (TREE_OPERAND (arg0, 0)),
5138 TREE_OPERAND (arg0, 1),
5139 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
5140 convert (TREE_TYPE (arg0),
5141 integer_one_node)),
5142 arg1));
5143 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
5144 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
5145 return
5146 fold (build (code, type,
5147 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5148 build (RSHIFT_EXPR,
5149 TREE_TYPE (TREE_OPERAND (arg0, 1)),
5150 TREE_OPERAND (arg0, 0),
5151 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
5152 convert (TREE_TYPE (arg0),
5153 integer_one_node)),
5154 arg1));
5157 /* If this is an NE or EQ comparison of zero against the result of a
5158 signed MOD operation whose second operand is a power of 2, make
5159 the MOD operation unsigned since it is simpler and equivalent. */
5160 if ((code == NE_EXPR || code == EQ_EXPR)
5161 && integer_zerop (arg1)
5162 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
5163 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
5164 || TREE_CODE (arg0) == CEIL_MOD_EXPR
5165 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
5166 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
5167 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5169 tree newtype = unsigned_type (TREE_TYPE (arg0));
5170 tree newmod = build (TREE_CODE (arg0), newtype,
5171 convert (newtype, TREE_OPERAND (arg0, 0)),
5172 convert (newtype, TREE_OPERAND (arg0, 1)));
5174 return build (code, type, newmod, convert (newtype, arg1));
5177 /* If this is an NE comparison of zero with an AND of one, remove the
5178 comparison since the AND will give the correct value. */
5179 if (code == NE_EXPR && integer_zerop (arg1)
5180 && TREE_CODE (arg0) == BIT_AND_EXPR
5181 && integer_onep (TREE_OPERAND (arg0, 1)))
5182 return convert (type, arg0);
5184 /* If we have (A & C) == C where C is a power of 2, convert this into
5185 (A & C) != 0. Similarly for NE_EXPR. */
5186 if ((code == EQ_EXPR || code == NE_EXPR)
5187 && TREE_CODE (arg0) == BIT_AND_EXPR
5188 && integer_pow2p (TREE_OPERAND (arg0, 1))
5189 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
5190 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
5191 arg0, integer_zero_node);
5193 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
5194 and similarly for >= into !=. */
5195 if ((code == LT_EXPR || code == GE_EXPR)
5196 && TREE_UNSIGNED (TREE_TYPE (arg0))
5197 && TREE_CODE (arg1) == LSHIFT_EXPR
5198 && integer_onep (TREE_OPERAND (arg1, 0)))
5199 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5200 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5201 TREE_OPERAND (arg1, 1)),
5202 convert (TREE_TYPE (arg0), integer_zero_node));
5204 else if ((code == LT_EXPR || code == GE_EXPR)
5205 && TREE_UNSIGNED (TREE_TYPE (arg0))
5206 && (TREE_CODE (arg1) == NOP_EXPR
5207 || TREE_CODE (arg1) == CONVERT_EXPR)
5208 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
5209 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
5210 return
5211 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5212 convert (TREE_TYPE (arg0),
5213 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5214 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
5215 convert (TREE_TYPE (arg0), integer_zero_node));
5217 /* Simplify comparison of something with itself. (For IEEE
5218 floating-point, we can only do some of these simplifications.) */
5219 if (operand_equal_p (arg0, arg1, 0))
5221 switch (code)
5223 case EQ_EXPR:
5224 case GE_EXPR:
5225 case LE_EXPR:
5226 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5228 if (type == integer_type_node)
5229 return integer_one_node;
5231 t = build_int_2 (1, 0);
5232 TREE_TYPE (t) = type;
5233 return t;
5235 code = EQ_EXPR;
5236 TREE_SET_CODE (t, code);
5237 break;
5239 case NE_EXPR:
5240 /* For NE, we can only do this simplification if integer. */
5241 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5242 break;
5243 /* ... fall through ... */
5244 case GT_EXPR:
5245 case LT_EXPR:
5246 if (type == integer_type_node)
5247 return integer_zero_node;
5249 t = build_int_2 (0, 0);
5250 TREE_TYPE (t) = type;
5251 return t;
5252 default:
5253 abort ();
5257 /* An unsigned comparison against 0 can be simplified. */
5258 if (integer_zerop (arg1)
5259 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5260 || POINTER_TYPE_P (TREE_TYPE (arg1)))
5261 && TREE_UNSIGNED (TREE_TYPE (arg1)))
5263 switch (TREE_CODE (t))
5265 case GT_EXPR:
5266 code = NE_EXPR;
5267 TREE_SET_CODE (t, NE_EXPR);
5268 break;
5269 case LE_EXPR:
5270 code = EQ_EXPR;
5271 TREE_SET_CODE (t, EQ_EXPR);
5272 break;
5273 case GE_EXPR:
5274 return omit_one_operand (type,
5275 convert (type, integer_one_node),
5276 arg0);
5277 case LT_EXPR:
5278 return omit_one_operand (type,
5279 convert (type, integer_zero_node),
5280 arg0);
5281 default:
5282 break;
5286 /* An unsigned <= 0x7fffffff can be simplified. */
5288 int width = TYPE_PRECISION (TREE_TYPE (arg1));
5289 if (TREE_CODE (arg1) == INTEGER_CST
5290 && ! TREE_CONSTANT_OVERFLOW (arg1)
5291 && width <= HOST_BITS_PER_WIDE_INT
5292 && TREE_INT_CST_LOW (arg1) == ((HOST_WIDE_INT) 1 << (width - 1)) - 1
5293 && TREE_INT_CST_HIGH (arg1) == 0
5294 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5295 || POINTER_TYPE_P (TREE_TYPE (arg1)))
5296 && TREE_UNSIGNED (TREE_TYPE (arg1)))
5298 switch (TREE_CODE (t))
5300 case LE_EXPR:
5301 return fold (build (GE_EXPR, type,
5302 convert (signed_type (TREE_TYPE (arg0)),
5303 arg0),
5304 convert (signed_type (TREE_TYPE (arg1)),
5305 integer_zero_node)));
5306 case GT_EXPR:
5307 return fold (build (LT_EXPR, type,
5308 convert (signed_type (TREE_TYPE (arg0)),
5309 arg0),
5310 convert (signed_type (TREE_TYPE (arg1)),
5311 integer_zero_node)));
5316 /* If we are comparing an expression that just has comparisons
5317 of two integer values, arithmetic expressions of those comparisons,
5318 and constants, we can simplify it. There are only three cases
5319 to check: the two values can either be equal, the first can be
5320 greater, or the second can be greater. Fold the expression for
5321 those three values. Since each value must be 0 or 1, we have
5322 eight possibilities, each of which corresponds to the constant 0
5323 or 1 or one of the six possible comparisons.
5325 This handles common cases like (a > b) == 0 but also handles
5326 expressions like ((x > y) - (y > x)) > 0, which supposedly
5327 occur in macroized code. */
5329 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
5331 tree cval1 = 0, cval2 = 0;
5332 int save_p = 0;
5334 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
5335 /* Don't handle degenerate cases here; they should already
5336 have been handled anyway. */
5337 && cval1 != 0 && cval2 != 0
5338 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
5339 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
5340 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
5341 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
5342 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
5344 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
5345 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
5347 /* We can't just pass T to eval_subst in case cval1 or cval2
5348 was the same as ARG1. */
5350 tree high_result
5351 = fold (build (code, type,
5352 eval_subst (arg0, cval1, maxval, cval2, minval),
5353 arg1));
5354 tree equal_result
5355 = fold (build (code, type,
5356 eval_subst (arg0, cval1, maxval, cval2, maxval),
5357 arg1));
5358 tree low_result
5359 = fold (build (code, type,
5360 eval_subst (arg0, cval1, minval, cval2, maxval),
5361 arg1));
5363 /* All three of these results should be 0 or 1. Confirm they
5364 are. Then use those values to select the proper code
5365 to use. */
5367 if ((integer_zerop (high_result)
5368 || integer_onep (high_result))
5369 && (integer_zerop (equal_result)
5370 || integer_onep (equal_result))
5371 && (integer_zerop (low_result)
5372 || integer_onep (low_result)))
5374 /* Make a 3-bit mask with the high-order bit being the
5375 value for `>', the next for '=', and the low for '<'. */
5376 switch ((integer_onep (high_result) * 4)
5377 + (integer_onep (equal_result) * 2)
5378 + integer_onep (low_result))
5380 case 0:
5381 /* Always false. */
5382 return omit_one_operand (type, integer_zero_node, arg0);
5383 case 1:
5384 code = LT_EXPR;
5385 break;
5386 case 2:
5387 code = EQ_EXPR;
5388 break;
5389 case 3:
5390 code = LE_EXPR;
5391 break;
5392 case 4:
5393 code = GT_EXPR;
5394 break;
5395 case 5:
5396 code = NE_EXPR;
5397 break;
5398 case 6:
5399 code = GE_EXPR;
5400 break;
5401 case 7:
5402 /* Always true. */
5403 return omit_one_operand (type, integer_one_node, arg0);
5406 t = build (code, type, cval1, cval2);
5407 if (save_p)
5408 return save_expr (t);
5409 else
5410 return fold (t);
5415 /* If this is a comparison of a field, we may be able to simplify it. */
5416 if ((TREE_CODE (arg0) == COMPONENT_REF
5417 || TREE_CODE (arg0) == BIT_FIELD_REF)
5418 && (code == EQ_EXPR || code == NE_EXPR)
5419 /* Handle the constant case even without -O
5420 to make sure the warnings are given. */
5421 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
5423 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
5424 return t1 ? t1 : t;
5427 /* If this is a comparison of complex values and either or both
5428 sides are a COMPLEX_EXPR, it is best to split up the comparisons
5429 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
5430 may prevent needless evaluations. */
5431 if ((code == EQ_EXPR || code == NE_EXPR)
5432 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
5433 && (TREE_CODE (arg0) == COMPLEX_EXPR
5434 || TREE_CODE (arg1) == COMPLEX_EXPR))
5436 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
5437 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
5438 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
5439 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
5440 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
5442 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
5443 : TRUTH_ORIF_EXPR),
5444 type,
5445 fold (build (code, type, real0, real1)),
5446 fold (build (code, type, imag0, imag1))));
5449 /* From here on, the only cases we handle are when the result is
5450 known to be a constant.
5452 To compute GT, swap the arguments and do LT.
5453 To compute GE, do LT and invert the result.
5454 To compute LE, swap the arguments, do LT and invert the result.
5455 To compute NE, do EQ and invert the result.
5457 Therefore, the code below must handle only EQ and LT. */
5459 if (code == LE_EXPR || code == GT_EXPR)
5461 tem = arg0, arg0 = arg1, arg1 = tem;
5462 code = swap_tree_comparison (code);
5465 /* Note that it is safe to invert for real values here because we
5466 will check below in the one case that it matters. */
5468 invert = 0;
5469 if (code == NE_EXPR || code == GE_EXPR)
5471 invert = 1;
5472 code = invert_tree_comparison (code);
5475 /* Compute a result for LT or EQ if args permit;
5476 otherwise return T. */
5477 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
5479 if (code == EQ_EXPR)
5480 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
5481 == TREE_INT_CST_LOW (arg1))
5482 && (TREE_INT_CST_HIGH (arg0)
5483 == TREE_INT_CST_HIGH (arg1)),
5485 else
5486 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
5487 ? INT_CST_LT_UNSIGNED (arg0, arg1)
5488 : INT_CST_LT (arg0, arg1)),
5492 #if 0 /* This is no longer useful, but breaks some real code. */
5493 /* Assume a nonexplicit constant cannot equal an explicit one,
5494 since such code would be undefined anyway.
5495 Exception: on sysvr4, using #pragma weak,
5496 a label can come out as 0. */
5497 else if (TREE_CODE (arg1) == INTEGER_CST
5498 && !integer_zerop (arg1)
5499 && TREE_CONSTANT (arg0)
5500 && TREE_CODE (arg0) == ADDR_EXPR
5501 && code == EQ_EXPR)
5502 t1 = build_int_2 (0, 0);
5503 #endif
5504 /* Two real constants can be compared explicitly. */
5505 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
5507 /* If either operand is a NaN, the result is false with two
5508 exceptions: First, an NE_EXPR is true on NaNs, but that case
5509 is already handled correctly since we will be inverting the
5510 result for NE_EXPR. Second, if we had inverted a LE_EXPR
5511 or a GE_EXPR into a LT_EXPR, we must return true so that it
5512 will be inverted into false. */
5514 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
5515 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
5516 t1 = build_int_2 (invert && code == LT_EXPR, 0);
5518 else if (code == EQ_EXPR)
5519 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
5520 TREE_REAL_CST (arg1)),
5522 else
5523 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
5524 TREE_REAL_CST (arg1)),
5528 if (t1 == NULL_TREE)
5529 return t;
5531 if (invert)
5532 TREE_INT_CST_LOW (t1) ^= 1;
5534 TREE_TYPE (t1) = type;
5535 return t1;
5537 case COND_EXPR:
5538 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
5539 so all simple results must be passed through pedantic_non_lvalue. */
5540 if (TREE_CODE (arg0) == INTEGER_CST)
5541 return pedantic_non_lvalue
5542 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
5543 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
5544 return pedantic_omit_one_operand (type, arg1, arg0);
5546 /* If the second operand is zero, invert the comparison and swap
5547 the second and third operands. Likewise if the second operand
5548 is constant and the third is not or if the third operand is
5549 equivalent to the first operand of the comparison. */
5551 if (integer_zerop (arg1)
5552 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
5553 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5554 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5555 TREE_OPERAND (t, 2),
5556 TREE_OPERAND (arg0, 1))))
5558 /* See if this can be inverted. If it can't, possibly because
5559 it was a floating-point inequality comparison, don't do
5560 anything. */
5561 tem = invert_truthvalue (arg0);
5563 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5565 t = build (code, type, tem,
5566 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5567 arg0 = tem;
5568 arg1 = TREE_OPERAND (t, 2);
5569 STRIP_NOPS (arg1);
5573 /* If we have A op B ? A : C, we may be able to convert this to a
5574 simpler expression, depending on the operation and the values
5575 of B and C. IEEE floating point prevents this though,
5576 because A or B might be -0.0 or a NaN. */
5578 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5579 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5580 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
5581 || flag_fast_math)
5582 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5583 arg1, TREE_OPERAND (arg0, 1)))
5585 tree arg2 = TREE_OPERAND (t, 2);
5586 enum tree_code comp_code = TREE_CODE (arg0);
5588 STRIP_NOPS (arg2);
5590 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
5591 depending on the comparison operation. */
5592 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
5593 ? real_zerop (TREE_OPERAND (arg0, 1))
5594 : integer_zerop (TREE_OPERAND (arg0, 1)))
5595 && TREE_CODE (arg2) == NEGATE_EXPR
5596 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5597 switch (comp_code)
5599 case EQ_EXPR:
5600 return pedantic_non_lvalue
5601 (fold (build1 (NEGATE_EXPR, type, arg1)));
5602 case NE_EXPR:
5603 return pedantic_non_lvalue (convert (type, arg1));
5604 case GE_EXPR:
5605 case GT_EXPR:
5606 return pedantic_non_lvalue
5607 (convert (type, fold (build1 (ABS_EXPR,
5608 TREE_TYPE (arg1), arg1))));
5609 case LE_EXPR:
5610 case LT_EXPR:
5611 return pedantic_non_lvalue
5612 (fold (build1 (NEGATE_EXPR, type,
5613 convert (type,
5614 fold (build1 (ABS_EXPR,
5615 TREE_TYPE (arg1),
5616 arg1))))));
5617 default:
5618 abort ();
5621 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
5622 always zero. */
5624 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
5626 if (comp_code == NE_EXPR)
5627 return pedantic_non_lvalue (convert (type, arg1));
5628 else if (comp_code == EQ_EXPR)
5629 return pedantic_non_lvalue (convert (type, integer_zero_node));
5632 /* If this is A op B ? A : B, this is either A, B, min (A, B),
5633 or max (A, B), depending on the operation. */
5635 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
5636 arg2, TREE_OPERAND (arg0, 0)))
5638 tree comp_op0 = TREE_OPERAND (arg0, 0);
5639 tree comp_op1 = TREE_OPERAND (arg0, 1);
5640 tree comp_type = TREE_TYPE (comp_op0);
5642 switch (comp_code)
5644 case EQ_EXPR:
5645 return pedantic_non_lvalue (convert (type, arg2));
5646 case NE_EXPR:
5647 return pedantic_non_lvalue (convert (type, arg1));
5648 case LE_EXPR:
5649 case LT_EXPR:
5650 /* In C++ a ?: expression can be an lvalue, so put the
5651 operand which will be used if they are equal first
5652 so that we can convert this back to the
5653 corresponding COND_EXPR. */
5654 return pedantic_non_lvalue
5655 (convert (type, (fold (build (MIN_EXPR, comp_type,
5656 (comp_code == LE_EXPR
5657 ? comp_op0 : comp_op1),
5658 (comp_code == LE_EXPR
5659 ? comp_op1 : comp_op0))))));
5660 break;
5661 case GE_EXPR:
5662 case GT_EXPR:
5663 return pedantic_non_lvalue
5664 (convert (type, fold (build (MAX_EXPR, comp_type,
5665 (comp_code == GE_EXPR
5666 ? comp_op0 : comp_op1),
5667 (comp_code == GE_EXPR
5668 ? comp_op1 : comp_op0)))));
5669 break;
5670 default:
5671 abort ();
5675 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5676 we might still be able to simplify this. For example,
5677 if C1 is one less or one more than C2, this might have started
5678 out as a MIN or MAX and been transformed by this function.
5679 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5681 if (INTEGRAL_TYPE_P (type)
5682 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5683 && TREE_CODE (arg2) == INTEGER_CST)
5684 switch (comp_code)
5686 case EQ_EXPR:
5687 /* We can replace A with C1 in this case. */
5688 arg1 = convert (type, TREE_OPERAND (arg0, 1));
5689 t = build (code, type, TREE_OPERAND (t, 0), arg1,
5690 TREE_OPERAND (t, 2));
5691 break;
5693 case LT_EXPR:
5694 /* If C1 is C2 + 1, this is min(A, C2). */
5695 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5696 && operand_equal_p (TREE_OPERAND (arg0, 1),
5697 const_binop (PLUS_EXPR, arg2,
5698 integer_one_node, 0), 1))
5699 return pedantic_non_lvalue
5700 (fold (build (MIN_EXPR, type, arg1, arg2)));
5701 break;
5703 case LE_EXPR:
5704 /* If C1 is C2 - 1, this is min(A, C2). */
5705 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5706 && operand_equal_p (TREE_OPERAND (arg0, 1),
5707 const_binop (MINUS_EXPR, arg2,
5708 integer_one_node, 0), 1))
5709 return pedantic_non_lvalue
5710 (fold (build (MIN_EXPR, type, arg1, arg2)));
5711 break;
5713 case GT_EXPR:
5714 /* If C1 is C2 - 1, this is max(A, C2). */
5715 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5716 && operand_equal_p (TREE_OPERAND (arg0, 1),
5717 const_binop (MINUS_EXPR, arg2,
5718 integer_one_node, 0), 1))
5719 return pedantic_non_lvalue
5720 (fold (build (MAX_EXPR, type, arg1, arg2)));
5721 break;
5723 case GE_EXPR:
5724 /* If C1 is C2 + 1, this is max(A, C2). */
5725 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5726 && operand_equal_p (TREE_OPERAND (arg0, 1),
5727 const_binop (PLUS_EXPR, arg2,
5728 integer_one_node, 0), 1))
5729 return pedantic_non_lvalue
5730 (fold (build (MAX_EXPR, type, arg1, arg2)));
5731 break;
5732 case NE_EXPR:
5733 break;
5734 default:
5735 abort ();
5739 /* If the second operand is simpler than the third, swap them
5740 since that produces better jump optimization results. */
5741 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
5742 || TREE_CODE (arg1) == SAVE_EXPR)
5743 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
5744 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
5745 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
5747 /* See if this can be inverted. If it can't, possibly because
5748 it was a floating-point inequality comparison, don't do
5749 anything. */
5750 tem = invert_truthvalue (arg0);
5752 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5754 t = build (code, type, tem,
5755 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5756 arg0 = tem;
5757 arg1 = TREE_OPERAND (t, 2);
5758 STRIP_NOPS (arg1);
5762 /* Convert A ? 1 : 0 to simply A. */
5763 if (integer_onep (TREE_OPERAND (t, 1))
5764 && integer_zerop (TREE_OPERAND (t, 2))
5765 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
5766 call to fold will try to move the conversion inside
5767 a COND, which will recurse. In that case, the COND_EXPR
5768 is probably the best choice, so leave it alone. */
5769 && type == TREE_TYPE (arg0))
5770 return pedantic_non_lvalue (arg0);
5772 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5773 operation is simply A & 2. */
5775 if (integer_zerop (TREE_OPERAND (t, 2))
5776 && TREE_CODE (arg0) == NE_EXPR
5777 && integer_zerop (TREE_OPERAND (arg0, 1))
5778 && integer_pow2p (arg1)
5779 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
5780 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
5781 arg1, 1))
5782 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
5784 return t;
5786 case COMPOUND_EXPR:
5787 /* When pedantic, a compound expression can be neither an lvalue
5788 nor an integer constant expression. */
5789 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
5790 return t;
5791 /* Don't let (0, 0) be null pointer constant. */
5792 if (integer_zerop (arg1))
5793 return non_lvalue (arg1);
5794 return arg1;
5796 case COMPLEX_EXPR:
5797 if (wins)
5798 return build_complex (type, arg0, arg1);
5799 return t;
5801 case REALPART_EXPR:
5802 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5803 return t;
5804 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5805 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
5806 TREE_OPERAND (arg0, 1));
5807 else if (TREE_CODE (arg0) == COMPLEX_CST)
5808 return TREE_REALPART (arg0);
5809 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5810 return fold (build (TREE_CODE (arg0), type,
5811 fold (build1 (REALPART_EXPR, type,
5812 TREE_OPERAND (arg0, 0))),
5813 fold (build1 (REALPART_EXPR,
5814 type, TREE_OPERAND (arg0, 1)))));
5815 return t;
5817 case IMAGPART_EXPR:
5818 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5819 return convert (type, integer_zero_node);
5820 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5821 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
5822 TREE_OPERAND (arg0, 0));
5823 else if (TREE_CODE (arg0) == COMPLEX_CST)
5824 return TREE_IMAGPART (arg0);
5825 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5826 return fold (build (TREE_CODE (arg0), type,
5827 fold (build1 (IMAGPART_EXPR, type,
5828 TREE_OPERAND (arg0, 0))),
5829 fold (build1 (IMAGPART_EXPR, type,
5830 TREE_OPERAND (arg0, 1)))));
5831 return t;
5833 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
5834 appropriate. */
5835 case CLEANUP_POINT_EXPR:
5836 if (! TREE_SIDE_EFFECTS (arg0))
5837 return TREE_OPERAND (t, 0);
5840 enum tree_code code0 = TREE_CODE (arg0);
5841 int kind0 = TREE_CODE_CLASS (code0);
5842 tree arg00 = TREE_OPERAND (arg0, 0);
5843 tree arg01;
5845 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
5846 return fold (build1 (code0, type,
5847 fold (build1 (CLEANUP_POINT_EXPR,
5848 TREE_TYPE (arg00), arg00))));
5850 if (kind0 == '<' || kind0 == '2'
5851 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
5852 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
5853 || code0 == TRUTH_XOR_EXPR)
5855 arg01 = TREE_OPERAND (arg0, 1);
5857 if (! TREE_SIDE_EFFECTS (arg00))
5858 return fold (build (code0, type, arg00,
5859 fold (build1 (CLEANUP_POINT_EXPR,
5860 TREE_TYPE (arg01), arg01))));
5862 if (! TREE_SIDE_EFFECTS (arg01))
5863 return fold (build (code0, type,
5864 fold (build1 (CLEANUP_POINT_EXPR,
5865 TREE_TYPE (arg00), arg00)),
5866 arg01));
5869 return t;
5872 default:
5873 return t;
5874 } /* switch (code) */