import of gcc-2.8
[official-gcc.git] / gcc / fold-const.c
blobb5b843f4f260213032330a7d6525492199006da5
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92-96, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
25 @@ warn if precision et. al. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
45 #include "config.h"
46 #include <stdio.h>
47 #include <setjmp.h>
48 #include "flags.h"
49 #include "tree.h"
51 /* Handle floating overflow for `const_binop'. */
52 static jmp_buf float_error;
54 static void encode PROTO((HOST_WIDE_INT *,
55 HOST_WIDE_INT, HOST_WIDE_INT));
56 static void decode PROTO((HOST_WIDE_INT *,
57 HOST_WIDE_INT *, HOST_WIDE_INT *));
58 int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
59 HOST_WIDE_INT, HOST_WIDE_INT,
60 HOST_WIDE_INT, HOST_WIDE_INT *,
61 HOST_WIDE_INT *, HOST_WIDE_INT *,
62 HOST_WIDE_INT *));
63 static int split_tree PROTO((tree, enum tree_code, tree *,
64 tree *, int *));
65 static tree int_const_binop PROTO((enum tree_code, tree, tree, int, int));
66 static tree const_binop PROTO((enum tree_code, tree, tree, int));
67 static tree fold_convert PROTO((tree, tree));
68 static enum tree_code invert_tree_comparison PROTO((enum tree_code));
69 static enum tree_code swap_tree_comparison PROTO((enum tree_code));
70 static int truth_value_p PROTO((enum tree_code));
71 static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
72 static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
73 static tree eval_subst PROTO((tree, tree, tree, tree, tree));
74 static tree omit_one_operand PROTO((tree, tree, tree));
75 static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
76 static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
77 static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
78 static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
79 tree, tree));
80 static tree decode_field_reference PROTO((tree, int *, int *,
81 enum machine_mode *, int *,
82 int *, tree *, tree *));
83 static int all_ones_mask_p PROTO((tree, int));
84 static int simple_operand_p PROTO((tree));
85 static tree range_binop PROTO((enum tree_code, tree, tree, int,
86 tree, int));
87 static tree make_range PROTO((tree, int *, tree *, tree *));
88 static tree build_range_check PROTO((tree, tree, int, tree, tree));
89 static int merge_ranges PROTO((int *, tree *, tree *, int, tree, tree,
90 int, tree, tree));
91 static tree fold_range_test PROTO((tree));
92 static tree unextend PROTO((tree, int, int, tree));
93 static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
94 static tree strip_compound_expr PROTO((tree, tree));
96 #ifndef BRANCH_COST
97 #define BRANCH_COST 1
98 #endif
100 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
101 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
102 Then this yields nonzero if overflow occurred during the addition.
103 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
104 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
105 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
107 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
108 We do that by representing the two-word integer in 4 words, with only
109 HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number. */
111 #define LOWPART(x) \
112 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
113 #define HIGHPART(x) \
114 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
115 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
117 /* Unpack a two-word integer into 4 words.
118 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
119 WORDS points to the array of HOST_WIDE_INTs. */
121 static void
122 encode (words, low, hi)
123 HOST_WIDE_INT *words;
124 HOST_WIDE_INT low, hi;
126 words[0] = LOWPART (low);
127 words[1] = HIGHPART (low);
128 words[2] = LOWPART (hi);
129 words[3] = HIGHPART (hi);
132 /* Pack an array of 4 words into a two-word integer.
133 WORDS points to the array of words.
134 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
136 static void
137 decode (words, low, hi)
138 HOST_WIDE_INT *words;
139 HOST_WIDE_INT *low, *hi;
141 *low = words[0] | words[1] * BASE;
142 *hi = words[2] | words[3] * BASE;
145 /* Make the integer constant T valid for its type
146 by setting to 0 or 1 all the bits in the constant
147 that don't belong in the type.
148 Yield 1 if a signed overflow occurs, 0 otherwise.
149 If OVERFLOW is nonzero, a signed overflow has already occurred
150 in calculating T, so propagate it.
152 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
153 if it exists. */
int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  HOST_WIDE_INT low, high;   /* original value, kept to detect change */
  register int prec;         /* precision of T's type, in bits */

  /* Real constants: delegate range checking to the target hook, if any;
     the tree node itself is not modified here.  */
  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
                         overflow);
#endif
      return overflow;
    }
  /* Anything that is neither a real nor an integer constant is passed
     through untouched.  */
  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  /* Remember the incoming value so the final overflow test can see
     whether masking/sign-extension changed it.  */
  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  /* Pointers are sized by POINTER_SIZE rather than TYPE_PRECISION.  */
  if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;  /* Value already fills the whole double word; nothing to clear.  */
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Excess bits live only in the high word.  */
      TREE_INT_CST_HIGH (t)
        &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
    }
  else
    {
      /* Precision fits in the low word: high word is entirely excess.  */
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow.  */
  if (TREE_UNSIGNED (TREE_TYPE (t)))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? (TREE_INT_CST_HIGH (t)
             & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          TREE_INT_CST_HIGH (t)
            |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
        }
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Yield nonzero if signed overflow occurred: either the caller said so
     via OVERFLOW, or the fit-to-type adjustment changed the value.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
231 /* Add two doubleword integers with doubleword result.
232 Each argument is given as two `HOST_WIDE_INT' pieces.
233 One argument is L1 and H1; the other, L2 and H2.
234 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
237 add_double (l1, h1, l2, h2, lv, hv)
238 HOST_WIDE_INT l1, h1, l2, h2;
239 HOST_WIDE_INT *lv, *hv;
241 HOST_WIDE_INT l, h;
243 l = l1 + l2;
244 h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
246 *lv = l;
247 *hv = h;
248 return overflow_sum_sign (h1, h2, h);
251 /* Negate a doubleword integer with doubleword result.
252 Return nonzero if the operation overflows, assuming it's signed.
253 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
254 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
257 neg_double (l1, h1, lv, hv)
258 HOST_WIDE_INT l1, h1;
259 HOST_WIDE_INT *lv, *hv;
261 if (l1 == 0)
263 *lv = 0;
264 *hv = - h1;
265 return (*hv & h1) < 0;
267 else
269 *lv = - l1;
270 *hv = ~ h1;
271 return 0;
275 /* Multiply two doubleword integers with doubleword result.
276 Return nonzero if the operation overflows, assuming it's signed.
277 Each argument is given as two `HOST_WIDE_INT' pieces.
278 One argument is L1 and H1; the other, L2 and H2.
279 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
int
mul_double (l1, h1, l2, h2, lv, hv)
     HOST_WIDE_INT l1, h1, l2, h2;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT arg1[4];        /* first operand as 4 half-word digits */
  HOST_WIDE_INT arg2[4];        /* second operand as 4 half-word digits */
  HOST_WIDE_INT prod[4 * 2];    /* full 8-digit (quadword) product */
  register unsigned HOST_WIDE_INT carry;
  register int i, j, k;
  HOST_WIDE_INT toplow, tophigh, neglow, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  bzero ((char *) prod, sizeof prod);

  /* Schoolbook multiplication on the half-word digits; each inner step
     accumulates one digit product plus the running carry.  */
  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod+4, &toplow, &tophigh);

  /* The digit loop treated both operands as unsigned; for each negative
     operand, correct the top half by adding the negation of the other
     operand (standard unsigned->signed product fixup).  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }

  /* No overflow iff the top half is the sign extension of the low half:
     all-ones when the result is negative, all-zeros otherwise.  */
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
332 /* Shift the doubleword integer in L1, H1 left by COUNT places
333 keeping only PREC bits of result.
334 Shift right if COUNT is negative.
335 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
336 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
     int arith;
{
  /* A negative count means shift right instead.  */
  if (count < 0)
    {
      rshift_double (l1, h1, - count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* The whole low word shifts into the high word.
         NOTE(review): if count >= 2*HOST_BITS_PER_WIDE_INT the shift
         amount here is >= the word width — confirm callers bound COUNT.  */
      *hv = (unsigned HOST_WIDE_INT) l1 << count - HOST_BITS_PER_WIDE_INT;
      *lv = 0;
    }
  else
    {
      /* Bits leaving the top of the low word enter the high word.  The
         two-step ">> (W - count - 1) >> 1" avoids an undefined shift by
         the full word width when COUNT is 0.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | ((unsigned HOST_WIDE_INT) l1 >> HOST_BITS_PER_WIDE_INT - count - 1 >> 1));
      *lv = (unsigned HOST_WIDE_INT) l1 << count;
    }
}
369 /* Shift the doubleword integer in L1, H1 right by COUNT places
370 keeping only PREC bits of result. COUNT must be positive.
371 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
372 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  /* For an arithmetic shift, the vacated high bits replicate the sign
     bit of H1; for a logical shift they are zero.  */
  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* The high word shifts entirely into the low word; the split
         "<< (2W - count - 1) << 1" keeps each shift below the width.  */
      *hv = signmask;
      *lv = ((signmask << 2 * HOST_BITS_PER_WIDE_INT - count - 1 << 1)
             | ((unsigned HOST_WIDE_INT) h1 >> count - HOST_BITS_PER_WIDE_INT));
    }
  else
    {
      /* Bits leaving the bottom of the high word enter the low word.
         NOTE(review): "signmask << HOST_BITS_PER_WIDE_INT - count" is a
         full-width shift when COUNT is 0 — the header comment requires
         COUNT positive; confirm callers honor that.  */
      *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << HOST_BITS_PER_WIDE_INT - count - 1 << 1));
      *hv = ((signmask << HOST_BITS_PER_WIDE_INT - count)
             | ((unsigned HOST_WIDE_INT) h1 >> count));
    }
}
406 /* Rotate the doubleword integer in L1, H1 left by COUNT places
407 keeping only PREC bits of result.
408 Rotate right if COUNT is negative.
409 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
411 void
412 lrotate_double (l1, h1, count, prec, lv, hv)
413 HOST_WIDE_INT l1, h1, count;
414 int prec;
415 HOST_WIDE_INT *lv, *hv;
417 HOST_WIDE_INT s1l, s1h, s2l, s2h;
419 count %= prec;
420 if (count < 0)
421 count += prec;
423 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
424 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
425 *lv = s1l | s2l;
426 *hv = s1h | s2h;
429 /* Rotate the doubleword integer in L1, H1 left by COUNT places
430 keeping only PREC bits of result. COUNT must be positive.
431 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
433 void
434 rrotate_double (l1, h1, count, prec, lv, hv)
435 HOST_WIDE_INT l1, h1, count;
436 int prec;
437 HOST_WIDE_INT *lv, *hv;
439 HOST_WIDE_INT s1l, s1h, s2l, s2h;
441 count %= prec;
442 if (count < 0)
443 count += prec;
445 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
446 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
447 *lv = s1l | s2l;
448 *hv = s1h | s2h;
451 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
452 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
453 CODE is a tree code for a kind of division, one of
454 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
455 or EXACT_DIV_EXPR
456 It controls how the quotient is rounded to a integer.
457 Return nonzero if the operation overflows.
458 UNS nonzero says do unsigned division. */
int
div_and_round_double (code, uns,
                      lnum_orig, hnum_orig, lden_orig, hden_orig,
                      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
     HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
     HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
{
  int quo_neg = 0;              /* nonzero if quotient is negative */
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  register int i, j;
  unsigned HOST_WIDE_INT work;
  register unsigned HOST_WIDE_INT carry = 0;
  HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  /* Division by zero: flag overflow and divide by 1 instead of
     crashing, so the caller still gets a well-defined result.  */
  if ((hden == 0) && (lden == 0))
    overflow = 1, lden = 1;

  /* calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  bzero ((char *) quo, sizeof quo);

  bzero ((char *) num, sizeof num);     /* to zero 9th element */
  bzero ((char *) den, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < BASE)
    {
      /* hnum != 0 already checked.  */
      /* Single-digit divisor: ordinary long division over the digits,
         most significant first.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / (unsigned HOST_WIDE_INT) lden;
          carry = work % (unsigned HOST_WIDE_INT) lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
    int num_hi_sig, den_hi_sig;
    unsigned HOST_WIDE_INT quo_est, scale;

    /* Find the highest non-zero divisor digit.  */
    for (i = 4 - 1; ; i--)
      if (den[i] != 0) {
        den_hi_sig = i;
        break;
      }

    /* Insure that the first digit of the divisor is at least BASE/2.
       This is required by the quotient digit estimation algorithm.  */

    scale = BASE / (den[den_hi_sig] + 1);
    if (scale > 1) {            /* scale divisor and dividend */
      carry = 0;
      for (i = 0; i <= 4 - 1; i++) {
        work = (num[i] * scale) + carry;
        num[i] = LOWPART (work);
        carry = HIGHPART (work);
      } num[4] = carry;
      carry = 0;
      for (i = 0; i <= 4 - 1; i++) {
        work = (den[i] * scale) + carry;
        den[i] = LOWPART (work);
        carry = HIGHPART (work);
        if (den[i] != 0) den_hi_sig = i;
      }
    }

    num_hi_sig = 4;

    /* Main loop */
    for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
      /* guess the next quotient digit, quo_est, by dividing the first
         two remaining dividend digits by the high order quotient digit.
         quo_est is never low and is at most 2 high.  */
      unsigned HOST_WIDE_INT tmp;

      num_hi_sig = i + den_hi_sig + 1;
      work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
      if (num[num_hi_sig] != den[den_hi_sig])
        quo_est = work / den[den_hi_sig];
      else
        quo_est = BASE - 1;

      /* refine quo_est so it's usually correct, and at most one high.  */
      tmp = work - quo_est * den[den_hi_sig];
      if (tmp < BASE
          && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
        quo_est--;

      /* Try QUO_EST as the quotient digit, by multiplying the
         divisor by QUO_EST and subtracting from the remaining dividend.
         Keep in mind that QUO_EST is the I - 1st digit.  */

      carry = 0;
      for (j = 0; j <= den_hi_sig; j++)
        {
          work = quo_est * den[j] + carry;
          carry = HIGHPART (work);
          work = num[i + j] - LOWPART (work);
          num[i + j] = LOWPART (work);
          carry += HIGHPART (work) != 0;
        }

      /* if quo_est was high by one, then num[i] went negative and
         we need to correct things.  */

      if (num[num_hi_sig] < carry)
        {
          quo_est--;
          carry = 0;            /* add divisor back in */
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = num[i + j] + den[j] + carry;
              carry = HIGHPART (work);
              num[i + j] = LOWPART (work);
            }
          num [num_hi_sig] += carry;
        }

      /* store the quotient digit.  */
      quo[i] = quo_est;
    }
  }

  decode (quo, lquo, hquo);

 finish_up:
  /* if result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  /* The quotient so far rounds toward zero; adjust it (and later the
     remainder) according to the requested rounding mode.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
        HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;

        /* get absolute values */
        if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);

        /* if (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);
        /* NOTE(review): "(HOST_WIDE_INT unsigned)" below depends on
           HOST_WIDE_INT being a macro for a plain type keyword (int/long);
           it would not compile if HOST_WIDE_INT were a typedef.  */
        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && ((HOST_WIDE_INT unsigned) labs_den
                    < (unsigned HOST_WIDE_INT) ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1; */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
713 #ifndef REAL_ARITHMETIC
714 /* Effectively truncate a real value to represent the nearest possible value
715 in a narrower mode. The result is actually represented in the same data
716 type as the argument, but its value is usually different.
718 A trap may occur during the FP operations and it is the responsibility
719 of the calling function to have a handler established. */
721 REAL_VALUE_TYPE
722 real_value_truncate (mode, arg)
723 enum machine_mode mode;
724 REAL_VALUE_TYPE arg;
726 return REAL_VALUE_TRUNCATE (mode, arg);
729 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
731 /* Check for infinity in an IEEE double precision number. */
int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } big_endian;
  } u;

  /* Detect the host's layout at run time: store -1.0 and see which
     struct variant reports the sign bit set.  */
  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      /* Infinity: maximum exponent (2047) with an all-zero mantissa.  */
      return (u.big_endian.exponent == 2047
              && u.big_endian.mantissa1 == 0
              && u.big_endian.mantissa2 == 0);
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && u.little_endian.mantissa1 == 0
              && u.little_endian.mantissa2 == 0);
    }
}
771 /* Check whether an IEEE double precision number is a NaN. */
int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } big_endian;
  } u;

  /* Detect the host's layout at run time: store -1.0 and see which
     struct variant reports the sign bit set.  */
  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      /* NaN: maximum exponent (2047) with a nonzero mantissa.  */
      return (u.big_endian.exponent == 2047
              && (u.big_endian.mantissa1 != 0
                  || u.big_endian.mantissa2 != 0));
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && (u.little_endian.mantissa1 != 0
                  || u.little_endian.mantissa2 != 0));
    }
}
811 /* Check for a negative IEEE double precision number. */
int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } big_endian;
  } u;

  /* Detect the host's layout at run time: store -1.0 and see which
     struct variant reports the sign bit set, then read X's sign bit
     through that variant (catches -0.0, unlike "x < 0").  */
  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return u.big_endian.sign;
    }
  else
    {
      u.d = x;
      return u.little_endian.sign;
    }
}
846 #else /* Target not IEEE */
848 /* Let's assume other float formats don't have infinity.
849 (This can be overridden by redefining REAL_VALUE_ISINF.) */
int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  /* Non-IEEE fallback: assume the format has no infinities.  */
  return 0;
}
857 /* Let's assume other float formats don't have NaNs.
858 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  /* Non-IEEE fallback: assume the format has no NaNs.  */
  return 0;
}
866 /* Let's assume other float formats don't have minus zero.
867 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* Non-IEEE fallback: no minus zero, so an ordinary comparison works.  */
  return x < 0;
}
874 #endif /* Target not IEEE */
876 /* Try to change R into its exact multiplicative inverse in machine mode
877 MODE. Return nonzero function value if successful. */
int
exact_real_inverse (mode, r)
     enum machine_mode mode;
     REAL_VALUE_TYPE *r;
{
  /* View a host double both as a value and as four 16-bit chunks, so
     the low-order significand bits can be inspected directly.  */
  union
    {
      double d;
      unsigned short i[4];
    }x, t, y;
  int i;

  /* Usually disable if bounds checks are not reliable.  */
  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
    return 0;

  /* Set array index to the less significant bits in the unions, depending
     on the endian-ness of the host doubles.
     Disable if insufficient information on the data structure.  */
#if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
  return 0;
#else
#if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
#define K 2
#else
#if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
#define K 2
#else
#define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
#endif
#endif
#endif

  /* Arm a non-local exit: any FP trap during the computation below
     lands here and abandons the optimization.  */
  if (setjmp (float_error))
    {
      /* Don't do the optimization if there was an arithmetic error.  */
fail:
      set_float_handler (NULL_PTR);
      return 0;
    }
  set_float_handler (float_error);

  /* Domain check the argument.  */
  x.d = *r;
  if (x.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
    goto fail;
#endif

  /* Compute the reciprocal and check for numerical exactness.
     It is unnecessary to check all the significand bits to determine
     whether X is a power of 2.  If X is not, then it is impossible for
     the bottom half significand of both X and 1/X to be all zero bits.
     Hence we ignore the data structure of the top half and examine only
     the low order bits of the two significands.  */
  t.d = 1.0 / x.d;
  if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
    goto fail;

  /* Truncate to the required mode and range-check the result.  */
  y.d = REAL_VALUE_TRUNCATE (mode, t.d);
#ifdef CHECK_FLOAT_VALUE
  i = 0;
  if (CHECK_FLOAT_VALUE (mode, y.d, i))
    goto fail;
#endif

  /* Fail if truncation changed the value.  */
  if (y.d != t.d || y.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
    goto fail;
#endif

  /* Output the reciprocal and return success flag.  */
  set_float_handler (NULL_PTR);
  *r = y.d;
  return 1;
}
963 #endif /* no REAL_ARITHMETIC */
965 /* Split a tree IN into a constant and a variable part
966 that could be combined with CODE to make IN.
967 CODE must be a commutative arithmetic operation.
968 Store the constant part into *CONP and the variable in &VARP.
969 Return 1 if this was done; zero means the tree IN did not decompose
970 this way.
972 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
973 Therefore, we must tell the caller whether the variable part
974 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
975 The value stored is the coefficient for the variable term.
976 The constant term we return should always be added;
977 we negate it if necessary. */
static int
split_tree (in, code, varp, conp, varsignp)
     tree in;
     enum tree_code code;
     tree *varp, *conp;
     int *varsignp;
{
  register tree outtype = TREE_TYPE (in);
  *varp = 0;
  *conp = 0;

  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (in) == NOP_EXPR
          || TREE_CODE (in) == CONVERT_EXPR)
         && (TYPE_MODE (TREE_TYPE (in))
             == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
    in = TREE_OPERAND (in, 0);

  if (TREE_CODE (in) == code
      || (! FLOAT_TYPE_P (TREE_TYPE (in))
          /* We can associate addition and subtraction together
             (even though the C standard doesn't say so)
             for integers because the value is not affected.
             For reals, the value might be affected, so we can't.  */
          && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
              || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      /* NOTE(review): this local deliberately shadows the CODE parameter;
         from here down, "code" is the code of IN's first operand.  */
      enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));

      /* Case 1: first operand is an integer constant — it is the
         constant part, the second operand is the variable part.  */
      if (code == INTEGER_CST)
        {
          *conp = TREE_OPERAND (in, 0);
          *varp = TREE_OPERAND (in, 1);
          if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
              && TREE_TYPE (*varp) != outtype)
            *varp = convert (outtype, *varp);
          /* For MINUS the variable is subtracted: coefficient -1.  */
          *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
          return 1;
        }
      /* Case 2: second operand is constant (possibly an address
         constant, not just INTEGER_CST).  */
      if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
        {
          *conp = TREE_OPERAND (in, 1);
          *varp = TREE_OPERAND (in, 0);
          *varsignp = 1;
          if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
              && TREE_TYPE (*varp) != outtype)
            *varp = convert (outtype, *varp);
          if (TREE_CODE (in) == MINUS_EXPR)
            {
              /* If operation is subtraction and constant is second,
                 must negate it to get an additive constant.
                 And this cannot be done unless it is a manifest constant.
                 It could also be the address of a static variable.
                 We cannot negate that, so give up.  */
              if (TREE_CODE (*conp) == INTEGER_CST)
                /* Subtracting from integer_zero_node loses for long long.  */
                *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
              else
                return 0;
            }
          return 1;
        }
      /* Case 3: first operand is constant but not INTEGER_CST
         (e.g. an address constant); it needs no negation fixup.  */
      if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
        {
          *conp = TREE_OPERAND (in, 0);
          *varp = TREE_OPERAND (in, 1);
          if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
              && TREE_TYPE (*varp) != outtype)
            *varp = convert (outtype, *varp);
          *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
          return 1;
        }
    }
  /* IN does not decompose into constant + variable under CODE.  */
  return 0;
}
1054 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1055 to produce a new constant.
1057 If NOTRUNC is nonzero, do not truncate the result to fit the data type.
1058 If FORSIZE is nonzero, compute overflow for unsigned types. */
static tree
int_const_binop (code, arg1, arg2, notrunc, forsize)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc, forsize;
{
  /* The two operands and the result, each held as a double-word
     (low, high) pair of host words.  */
  HOST_WIDE_INT int1l, int1h, int2l, int2h;
  HOST_WIDE_INT low, hi;
  /* Scratch outputs for the half of a div/mod result we discard.  */
  HOST_WIDE_INT garbagel, garbageh;
  register tree t;
  int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
  int overflow = 0;
  /* Set for operations (shifts) whose result is never treated as
     having overflowed.  */
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      /* AND with the complement of the second operand.  */
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift by a negated count.  */
      int2l = - int2l;
      /* ... fall through ... */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l,
                     TYPE_PRECISION (TREE_TYPE (arg1)),
                     &low, &hi,
                     !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      /* A right rotate is a left rotate by a negated count.  */
      int2l = - int2l;
      /* ... fall through ... */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l,
                      TYPE_PRECISION (TREE_TYPE (arg1)),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Compute ARG1 + (-ARG2); the sign combination detects overflow.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = overflow_sum_sign (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case: both operands
         nonnegative and fitting in a single host word.  */
      if (int2h == 0 && int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;
          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ... */

    case ROUND_DIV_EXPR:
      /* Dividing by one, or a value by itself, needs no arithmetic.  */
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ... */

    case ROUND_MOD_EXPR:
      /* Same helper as division, but keep the remainder instead.  */
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* LOW temporarily holds 1 iff ARG1 < ARG2, comparing the
         double-word values with the type's signedness.  */
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && ((unsigned HOST_WIDE_INT) int1l
                       < (unsigned HOST_WIDE_INT) int2l)));
      else
        low = ((int1h < int2h)
               || ((int1h == int2h)
                   && ((unsigned HOST_WIDE_INT) int1l
                       < (unsigned HOST_WIDE_INT) int2l)));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* Small non-overflowing sizetype results use the shared nodes
     that size_int caches.  */
  if (TREE_TYPE (arg1) == sizetype && hi == 0
      && low >= 0 && low <= TREE_INT_CST_LOW (TYPE_MAX_VALUE (sizetype))
      && ! overflow
      && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    t = size_int (low);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc ? (!uns || forsize) && overflow
        : force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));
  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (forsize
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;
  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
1245 /* Combine two constants ARG1 and ARG2 under operation CODE
1246 to produce a new constant.
1247 We assume ARG1 and ARG2 have the same data type,
1248 or at least are the same kind of constant and the same machine mode.
1250 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1); STRIP_NOPS (arg2);

  /* Integer constants: delegate to the double-word integer folder.  */
  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc, 0);

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      int overflow = 0;
      REAL_VALUE_TYPE value;
      tree t;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;
      else if (setjmp (float_error))
        {
          /* The host arithmetic trapped: mark overflow and finish up
             using a copy of ARG1 as the result node.  */
          t = copy_node (arg1);
          overflow = 1;
          goto got_float;
        }

      set_float_handler (float_error);

#ifdef REAL_ARITHMETIC
      REAL_ARITHMETIC (value, code, d1, d2);
#else
      switch (code)
        {
        case PLUS_EXPR:
          value = d1 + d2;
          break;

        case MINUS_EXPR:
          value = d1 - d2;
          break;

        case MULT_EXPR:
          value = d1 * d2;
          break;

        case RDIV_EXPR:
#ifndef REAL_INFINITY
          if (d2 == 0)
            abort ();
#endif

          value = d1 / d2;
          break;

        case MIN_EXPR:
          value = MIN (d1, d2);
          break;

        case MAX_EXPR:
          value = MAX (d1, d2);
          break;

        default:
          abort ();
        }
#endif /* no REAL_ARITHMETIC */
      t = build_real (TREE_TYPE (arg1),
                      real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
    got_float:
      set_float_handler (NULL_PTR);

      TREE_OVERFLOW (t)
        = (force_fit_type (t, overflow)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      register tree type = TREE_TYPE (arg1);
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);
      register tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          /* (r1 + i1 i) * (r2 + i2 i)
             = (r1 r2 - i1 i2) + (r1 i2 + i1 r2) i.  */
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            /* Divide by multiplying by the conjugate of the divisor
               and dividing both parts by |r2 + i2 i|^2.  */
            register tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  /* Not a combination of constants we know how to fold.  */
  return 0;
}
1422 /* Return an INTEGER_CST with value V and type from `sizetype'. */
1424 tree
1425 size_int (number)
1426 unsigned HOST_WIDE_INT number;
1428 register tree t;
1429 /* Type-size nodes already made for small sizes. */
1430 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];
1432 if (number < 2*HOST_BITS_PER_WIDE_INT + 1
1433 && size_table[number] != 0)
1434 return size_table[number];
1435 if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
1437 push_obstacks_nochange ();
1438 /* Make this a permanent node. */
1439 end_temporary_allocation ();
1440 t = build_int_2 (number, 0);
1441 TREE_TYPE (t) = sizetype;
1442 size_table[number] = t;
1443 pop_obstacks ();
1445 else
1447 t = build_int_2 (number, 0);
1448 TREE_TYPE (t) = sizetype;
1449 TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
1451 return t;
1454 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1455 CODE is a tree code. Data type is taken from `sizetype',
1456 If the operands are constant, so is the result. */
1458 tree
1459 size_binop (code, arg0, arg1)
1460 enum tree_code code;
1461 tree arg0, arg1;
1463 /* Handle the special case of two integer constants faster. */
1464 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1466 /* And some specific cases even faster than that. */
1467 if (code == PLUS_EXPR && integer_zerop (arg0))
1468 return arg1;
1469 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1470 && integer_zerop (arg1))
1471 return arg0;
1472 else if (code == MULT_EXPR && integer_onep (arg0))
1473 return arg1;
1475 /* Handle general case of two integer constants. */
1476 return int_const_binop (code, arg0, arg1, 0, 1);
1479 if (arg0 == error_mark_node || arg1 == error_mark_node)
1480 return error_mark_node;
1482 return fold (build (code, sizetype, arg0, arg1));
1485 /* Given T, a tree representing type conversion of ARG1, a constant,
1486 return a constant tree representing the result of conversion. */
static tree
fold_convert (t, arg1)
     register tree t;
     register tree arg1;
{
  register tree type = TREE_TYPE (t);
  int overflow = 0;

  if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return t;

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             (Bitwise & is deliberate: both sides are 0 or 1.)  */
          TREE_OVERFLOW (t)
            = (TREE_OVERFLOW (arg1)
               | force_fit_type (t,
                                 (TREE_INT_CST_HIGH (arg1) < 0
                                  & (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1))))));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* Don't initialize these, use assignments.
             Initialized local aggregates don't work on old compilers.  */
          REAL_VALUE_TYPE x;
          REAL_VALUE_TYPE l;
          REAL_VALUE_TYPE u;
          tree type1 = TREE_TYPE (arg1);

          x = TREE_REAL_CST (arg1);
          l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
          u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */
#ifdef REAL_ARITHMETIC
          REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
          REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
#else
          l--;
          u++;
#endif
          /* If X is a NaN, use zero instead and show we have an overflow.
             Otherwise, range check.  */
          if (REAL_VALUE_ISNAN (x))
            overflow = 1, x = dconst0;
          else if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
            overflow = 1;

#ifndef REAL_ARITHMETIC
          {
            /* Convert |X| to a double-word integer a half word at a
               time, then restore the sign.  */
            HOST_WIDE_INT low, high;
            HOST_WIDE_INT half_word
              = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);

            if (x < 0)
              x = -x;

            high = (HOST_WIDE_INT) (x / half_word / half_word);
            x -= (REAL_VALUE_TYPE) high * half_word * half_word;
            if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
              {
                low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
                low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
              }
            else
              low = (HOST_WIDE_INT) x;
            if (TREE_REAL_CST (arg1) < 0)
              neg_double (low, high, &low, &high);
            t = build_int_2 (low, high);
          }
#else
          {
            HOST_WIDE_INT low, high;
            REAL_VALUE_TO_INT (&low, &high, x);
            t = build_int_2 (low, high);
          }
#endif
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          /* A NaN converts to a NaN of the new type, unchanged
             in value.  */
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              t = arg1;
              TREE_TYPE (arg1) = type;
              return t;
            }
          else if (setjmp (float_error))
            {
              /* Host arithmetic trapped during truncation: record
                 overflow and use a copy of ARG1.  */
              overflow = 1;
              t = copy_node (arg1);
              goto got_it;
            }
          set_float_handler (float_error);

          t = build_real (type, real_value_truncate (TYPE_MODE (type),
                                                     TREE_REAL_CST (arg1)));
          set_float_handler (NULL_PTR);

        got_it:
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
1629 /* Return an expr equal to X but certainly not valid as an lvalue.
   Also make sure it is not valid as a null pointer constant.  */
1632 tree
1633 non_lvalue (x)
1634 tree x;
1636 tree result;
1638 /* These things are certainly not lvalues. */
1639 if (TREE_CODE (x) == NON_LVALUE_EXPR
1640 || TREE_CODE (x) == INTEGER_CST
1641 || TREE_CODE (x) == REAL_CST
1642 || TREE_CODE (x) == STRING_CST
1643 || TREE_CODE (x) == ADDR_EXPR)
1645 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1647 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1648 so convert_for_assignment won't strip it.
1649 This is so this 0 won't be treated as a null pointer constant. */
1650 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1651 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1652 return result;
1654 return x;
1657 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1658 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1659 return result;
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  Consulted by
   pedantic_non_lvalue below.  */

int pedantic_lvalues;
1667 /* When pedantic, return an expr equal to X but certainly not valid as a
1668 pedantic lvalue. Otherwise, return X. */
1670 tree
1671 pedantic_non_lvalue (x)
1672 tree x;
1674 if (pedantic_lvalues)
1675 return non_lvalue (x);
1676 else
1677 return x;
1680 /* Given a tree comparison code, return the code that is the logical inverse
1681 of the given code. It is not safe to do this for floating-point
1682 comparisons, except for NE_EXPR and EQ_EXPR. */
1684 static enum tree_code
1685 invert_tree_comparison (code)
1686 enum tree_code code;
1688 switch (code)
1690 case EQ_EXPR:
1691 return NE_EXPR;
1692 case NE_EXPR:
1693 return EQ_EXPR;
1694 case GT_EXPR:
1695 return LE_EXPR;
1696 case GE_EXPR:
1697 return LT_EXPR;
1698 case LT_EXPR:
1699 return GE_EXPR;
1700 case LE_EXPR:
1701 return GT_EXPR;
1702 default:
1703 abort ();
1707 /* Similar, but return the comparison that results if the operands are
1708 swapped. This is safe for floating-point. */
1710 static enum tree_code
1711 swap_tree_comparison (code)
1712 enum tree_code code;
1714 switch (code)
1716 case EQ_EXPR:
1717 case NE_EXPR:
1718 return code;
1719 case GT_EXPR:
1720 return LT_EXPR;
1721 case GE_EXPR:
1722 return LE_EXPR;
1723 case LT_EXPR:
1724 return GT_EXPR;
1725 case LE_EXPR:
1726 return GE_EXPR;
1727 default:
1728 abort ();
1732 /* Return nonzero if CODE is a tree code that represents a truth value. */
1734 static int
1735 truth_value_p (code)
1736 enum tree_code code;
1738 return (TREE_CODE_CLASS (code) == '<'
1739 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1740 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1741 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1744 /* Return nonzero if two operands are necessarily equal.
1745 If ONLY_CONST is non-zero, only return non-zero for constants.
1746 This function tests whether the operands are indistinguishable;
1747 it does not test whether they are equal using C's == operation.
1748 The distinction is important for IEEE floating point, because
1749 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1750 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
                && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1));

      case REAL_CST:
        /* Bitwise identity, not ==, so -0.0 != 0.0 here.  */
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 only_const)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    only_const));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! strncmp (TREE_STRING_POINTER (arg0),
                              TREE_STRING_POINTER (arg1),
                              TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
          && (TREE_UNSIGNED (TREE_TYPE (arg0))
              != TREE_UNSIGNED (TREE_TYPE (arg1))))
        return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
          && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
                              0))
        return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
               || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
               || TREE_CODE (arg0) == BIT_IOR_EXPR
               || TREE_CODE (arg0) == BIT_XOR_EXPR
               || TREE_CODE (arg0) == BIT_AND_EXPR
               || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* References: compare the referenced objects (and, where present,
         position operands) recursively.  */
      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case COMPONENT_REF:
        case ARRAY_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0));

        case BIT_FIELD_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), 0));
        default:
          return 0;
        }

    default:
      return 0;
    }
}
1884 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1885 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1887 When in doubt, return 0. */
static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg1, primother;
  unsigned correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* The shortening transformation only applies to integral types.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = convert (signed_or_unsigned_type (unsignedp1,
                                                   TREE_TYPE (primarg1)),
                          primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
1934 /* See if ARG is an expression that is either a comparison or is performing
1935 arithmetic on comparisons. The comparisons must only be comparing
1936 two different values, which will be stored in *CVAL1 and *CVAL2; if
1937 they are non-zero it means that some operands have already been found.
1938 No variables may be used anywhere else in the expression except in the
1939 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1940 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1942 If this is true, return 1. Otherwise, return zero. */
static int
twoval_comparison_p (arg, cval1, cval2, save_p)
     tree arg;
     tree *cval1, *cval2;
     int *save_p;
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = '2';

  /* ??? Disable this since the SAVE_EXPR might already be in use outside
     the expression.  There may be no way to make this work, but it needs
     to be looked at again for 2.6.  */
#if 0
  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = '1';
      *save_p = 1;
    }
#endif

  switch (class)
    {
    case '1':
      /* Unary: look through to the single operand.  */
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      /* Binary: both operands must qualify.  */
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case 'c':
      /* A constant mentions no variables at all.  */
      return 1;

    case 'e':
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
2038 /* ARG is a tree that is known to contain just arithmetic operations and
2039 comparisons. Evaluate the operations in the tree substituting NEW0 for
2040 any occurrence of OLD0 as an operand of a comparison and likewise for
2041 NEW1 and OLD1. */
static tree
eval_subst (arg, old0, new0, old1, new1)
     tree arg;
     tree old0, new0, old1, new1;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      /* Unary: substitute in the operand and refold.  */
      return fold (build1 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1)));

    case '2':
      /* Binary: substitute in both operands and refold.  */
      return fold (build (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1)));

    case 'e':
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          /* Only the second operand contributes the value.  */
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold (build (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1)));
        default:
          break;
        }
      /* fall through (???) */

    case '<':
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold (build (code, type, arg0, arg1));
      }

    default:
      return arg;
    }
}
2122 /* Return a tree for the case when the result of an expression is RESULT
2123 converted to TYPE and OMITTED was previously an operand of the expression
2124 but is now not needed (e.g., we folded OMITTED * 0).
2126 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2127 the conversion of RESULT to TYPE. */
2129 static tree
2130 omit_one_operand (type, result, omitted)
2131 tree type, result, omitted;
2133 tree t = convert (type, result);
2135 if (TREE_SIDE_EFFECTS (omitted))
2136 return build (COMPOUND_EXPR, type, omitted, t);
2138 return non_lvalue (t);
2141 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2143 static tree
2144 pedantic_omit_one_operand (type, result, omitted)
2145 tree type, result, omitted;
2147 tree t = convert (type, result);
2149 if (TREE_SIDE_EFFECTS (omitted))
2150 return build (COMPOUND_EXPR, type, omitted, t);
2152 return pedantic_non_lvalue (t);
2157 /* Return a simplified tree node for the truth-negation of ARG. This
2158 never alters ARG itself. We assume that ARG is an operation that
2159 returns a truth value (0 or 1). */
tree
invert_truthvalue (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        return build (invert_tree_comparison (code), type,
                      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      /* A constant inverts to 1 if it is zero, else 0.  */
      return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
                                         && TREE_INT_CST_HIGH (arg) == 0, 0));

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a & b) == !a | !b.  */
      return build (TRUTH_OR_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a | b) == !a & !b.  */
      return build (TRUTH_AND_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build (TRUTH_XOR_EXPR, type,
                      invert_truthvalue (TREE_OPERAND (arg, 0)),
                      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      /* Invert both arms; the condition is unchanged.  */
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)),
                    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      /* Only the second operand carries the value.  */
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Push the inversion inside the conversion.  */
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* (x & 1) inverts to (x & 1) == 0.  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
2266 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2267 operands are another bit-wise operation with a common input. If so,
2268 distribute the bit operations to save an operation and possibly two if
2269 constants are involved. For example, convert
2270 (A | B) & (A | C) into A | (B & C)
2271 Further simplification will occur if B and C are constants.
2273 If this optimization cannot be done, 0 will be returned. */
2275 static tree
2276 distribute_bit_expr (code, type, arg0, arg1)
2277 enum tree_code code;
2278 tree type;
2279 tree arg0, arg1;
2281 tree common;
2282 tree left, right;
2284 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2285 || TREE_CODE (arg0) == code
2286 || (TREE_CODE (arg0) != BIT_AND_EXPR
2287 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2288 return 0;
2290 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2292 common = TREE_OPERAND (arg0, 0);
2293 left = TREE_OPERAND (arg0, 1);
2294 right = TREE_OPERAND (arg1, 1);
2296 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2298 common = TREE_OPERAND (arg0, 0);
2299 left = TREE_OPERAND (arg0, 1);
2300 right = TREE_OPERAND (arg1, 0);
2302 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2304 common = TREE_OPERAND (arg0, 1);
2305 left = TREE_OPERAND (arg0, 0);
2306 right = TREE_OPERAND (arg1, 1);
2308 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2310 common = TREE_OPERAND (arg0, 1);
2311 left = TREE_OPERAND (arg0, 0);
2312 right = TREE_OPERAND (arg1, 0);
2314 else
2315 return 0;
2317 return fold (build (TREE_CODE (arg0), type, common,
2318 fold (build (code, type, left, right))));
2321 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2322 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2324 static tree
2325 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2326 tree inner;
2327 tree type;
2328 int bitsize, bitpos;
2329 int unsignedp;
2331 tree result = build (BIT_FIELD_REF, type, inner,
2332 size_int (bitsize), size_int (bitpos));
2334 TREE_UNSIGNED (result) = unsignedp;
2336 return result;
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  int lbitpos, lbitsize, rbitpos, rbitsize;
  int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, lnmode, rnmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  int alignment;
  tree linner, rinner;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing
     an extraction at all and so can do nothing.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, &alignment);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, &alignment);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  lnmode = get_best_mode (lbitsize, lbitpos,
			  TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
			  lvolatilep);
  if (lnmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = type_for_mode (lnmode, 0);
  unsigned_type = type_for_mode (lnmode, 1);

  if (! const_p)
    {
      rnmode = get_best_mode (rbitsize, rbitpos,
			      TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
			      rvolatilep);
      if (rnmode == VOIDmode)
	return 0;
    }

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  lnbitsize = GET_MODE_BITSIZE (lnmode);
  /* Round the bit position down to a chunk boundary; LBITPOS becomes the
     offset of the field within the chunk.  */
  lnbitpos = lbitpos & ~ (lnbitsize - 1);
  lbitpos -= lnbitpos;
  if (lnbitsize == lbitsize)
    return 0;

  if (! const_p)
    {
      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = rbitpos & ~ (rnbitsize - 1);
      rbitpos -= rnbitpos;
      if (rnbitsize == rbitsize)
	return 0;
    }

  if (BYTES_BIG_ENDIAN)
    lbitpos = lnbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: an all-ones
     constant shifted left then right so only the field's bits remain.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (lnbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     lnbitsize, lnbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     rnbitsize, rnbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %s due to width of bitfield",
		   code == NE_EXPR ? "one" : "zero");
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %s due to width of bitfield",
		   code == NE_EXPR ? "one" : "zero");
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
			pvolatilep, pmask, pand_mask)
     tree exp;
     int *pbitsize, *pbitpos;
     enum machine_mode *pmode;
     int *punsignedp, *pvolatilep;
     tree *pmask;
     tree *pand_mask;
{
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  int precision;
  int alignment;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  STRIP_NOPS (exp);

  /* Peel off an explicit masking operation, remembering its mask.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, &alignment);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0)
    return 0;

  /* Compute the mask to access the bitfield.  */
  unsigned_type = type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  /* Build an all-ones constant, then shift out all but *PBITSIZE
     low-order bits.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
2611 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2612 bit positions. */
2614 static int
2615 all_ones_mask_p (mask, size)
2616 tree mask;
2617 int size;
2619 tree type = TREE_TYPE (mask);
2620 int precision = TYPE_PRECISION (type);
2621 tree tmask;
2623 tmask = build_int_2 (~0, ~0);
2624 TREE_TYPE (tmask) = signed_type (type);
2625 force_fit_type (tmask, 0);
2626 return
2627 tree_int_cst_equal (mask,
2628 const_binop (RSHIFT_EXPR,
2629 const_binop (LSHIFT_EXPR, tmask,
2630 size_int (precision - size),
2632 size_int (precision - size), 0));
2635 /* Subroutine for fold_truthop: determine if an operand is simple enough
2636 to be evaluated unconditionally. */
2638 static int
2639 simple_operand_p (exp)
2640 tree exp;
2642 /* Strip any conversions that don't change the machine mode. */
2643 while ((TREE_CODE (exp) == NOP_EXPR
2644 || TREE_CODE (exp) == CONVERT_EXPR)
2645 && (TYPE_MODE (TREE_TYPE (exp))
2646 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2647 exp = TREE_OPERAND (exp, 0);
2649 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2650 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2651 && ! TREE_ADDRESSABLE (exp)
2652 && ! TREE_THIS_VOLATILE (exp)
2653 && ! DECL_NONLOCAL (exp)
2654 /* Don't regard global variables as simple. They may be
2655 allocated in ways unknown to the compiler (shared memory,
2656 #pragma weak, etc). */
2657 && ! TREE_PUBLIC (exp)
2658 && ! DECL_EXTERNAL (exp)
2659 /* Loading a static variable is unduly expensive, but global
2660 registers aren't expensive. */
2661 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2664 /* The following functions are subroutines to fold_range_test and allow it to
2665 try to change a logical combination of comparisons into a range test.
2667 For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
2671 are converted to
2672 (unsigned) (X - 2) <= 3
2674 We describe each set of comparisons as being either inside or outside
2675 a range, using a variable named like IN_P, and then describe the
2676 range with a lower and upper bound. If one of the bounds is omitted,
2677 it represents either the highest or lowest value of the type.
2679 In the comments below, we represent a range by two numbers in brackets
2680 preceded by a "+" to designate being inside that range, or a "-" to
2681 designate being outside that range, so the condition can be inverted by
2682 flipping the prefix. An omitted bound is represented by a "-". For
2683 example, "- [-, 10]" means being outside the range starting at the lowest
2684 possible value and ending at 10, in other words, being greater than 10.
2685 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2686 always false.
2688 We set up things so that the missing bounds are handled in a consistent
2689 manner so neither a missing bound nor "true" and "false" need to be
2690 handled using a special case. */
2692 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2693 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2694 and UPPER1_P are nonzero if the respective argument is an upper bound
2695 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2696 must be specified for a comparison. ARG1 will be converted to ARG0's
2697 type if both are specified. */
2699 static tree
2700 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2701 enum tree_code code;
2702 tree type;
2703 tree arg0, arg1;
2704 int upper0_p, upper1_p;
2706 tree tem;
2707 int result;
2708 int sgn0, sgn1;
2710 /* If neither arg represents infinity, do the normal operation.
2711 Else, if not a comparison, return infinity. Else handle the special
2712 comparison rules. Note that most of the cases below won't occur, but
2713 are handled for consistency. */
2715 if (arg0 != 0 && arg1 != 0)
2717 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2718 arg0, convert (TREE_TYPE (arg0), arg1)));
2719 STRIP_NOPS (tem);
2720 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2723 if (TREE_CODE_CLASS (code) != '<')
2724 return 0;
2726 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2727 for neither. Then compute our result treating them as never equal
2728 and comparing bounds to non-bounds as above. */
2729 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2730 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2731 switch (code)
2733 case EQ_EXPR: case NE_EXPR:
2734 result = (code == NE_EXPR);
2735 break;
2736 case LT_EXPR: case LE_EXPR:
2737 result = sgn0 < sgn1;
2738 break;
2739 case GT_EXPR: case GE_EXPR:
2740 result = sgn0 > sgn1;
2741 break;
2742 default:
2743 abort ();
2746 return convert (type, result ? integer_one_node : integer_zero_node);
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will have been made the same
   type as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (exp, pin_p, plow, phigh)
     tree exp;
     int *pin_p;
     tree *plow, *phigh;
{
  enum tree_code code;
  tree arg0, arg1, type;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      arg0 = TREE_OPERAND (exp, 0), arg1 = TREE_OPERAND (exp, 1);
      if (TREE_CODE_CLASS (code) == '<' || TREE_CODE_CLASS (code) == '1'
	  || TREE_CODE_CLASS (code) == '2')
	type = TREE_TYPE (arg0);

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* Negation just flips the sense of the range.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      abort ();
	    }

	  exp = arg0;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, convert (type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, reverse the range so it
		 goes from zero to the low bound minus 1.  */
	      if (high == 0)
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = convert (type, integer_zero_node);
		}
	    }
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       convert (type, integer_zero_node), 0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				convert (type, integer_zero_node), 0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1; re-express and let the loop re-examine it.  */
	  exp = build (MINUS_EXPR, type, build1 (NEGATE_EXPR, type, arg0),
		       convert (type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);
	      in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = convert (type, n_low);

	  if (n_high != 0)
	    n_high = convert (type, n_high);

	  /* If we're converting from an unsigned to a signed type,
	     we will be doing the comparison as unsigned.  The tests above
	     have already verified that LOW and HIGH are both positive.

	     So we have to make sure that the original unsigned value will
	     be interpreted as positive.  */
	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
	    {
	      tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
	      /* HIGH_POSITIVE is the largest value of the inner (signed)
		 type that reads as positive: its maximum shifted right
		 by one.  */
	      tree high_positive
		= fold (build (RSHIFT_EXPR, type,
			       convert (type,
					TYPE_MAX_VALUE (equiv_type)),
			       convert (type, integer_one_node)));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
2981 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
2982 type, TYPE, return an expression to test if EXP is in (or out of, depending
2983 on IN_P) the range. */
2985 static tree
2986 build_range_check (type, exp, in_p, low, high)
2987 tree type;
2988 tree exp;
2989 int in_p;
2990 tree low, high;
2992 tree etype = TREE_TYPE (exp);
2993 tree utype, value;
2995 if (! in_p
2996 && (0 != (value = build_range_check (type, exp, 1, low, high))))
2997 return invert_truthvalue (value);
2999 else if (low == 0 && high == 0)
3000 return convert (type, integer_one_node);
3002 else if (low == 0)
3003 return fold (build (LE_EXPR, type, exp, high));
3005 else if (high == 0)
3006 return fold (build (GE_EXPR, type, exp, low));
3008 else if (operand_equal_p (low, high, 0))
3009 return fold (build (EQ_EXPR, type, exp, low));
3011 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3012 return build_range_check (type, exp, 1, 0, high);
3014 else if (integer_zerop (low))
3016 utype = unsigned_type (etype);
3017 return build_range_check (type, convert (utype, exp), 1, 0,
3018 convert (utype, high));
3021 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3022 && ! TREE_OVERFLOW (value))
3023 return build_range_check (type,
3024 fold (build (MINUS_EXPR, etype, exp, low)),
3025 1, convert (etype, integer_zero_node), value);
3026 else
3027 return 0;
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
     int *pin_p;
     tree *plow, *phigh;
     int in0_p, in1_p;
     tree low0, high0, low1, high1;
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* A null bound means an omitted (infinite) bound, so two null bounds
     are equal; otherwise compare them with range_binop.  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the first range to the end of the second.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  in_p = 1, high = high0;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
			     integer_one_node, 0);
	}
      else if (! subset || highequal)
	{
	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset)
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  /* Adjacent iff high0 + 1 == low1.  */
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,
						      high0, 1,
						      integer_one_node, 1),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    return 0;
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
3166 /* EXP is some logical combination of boolean tests. See if we can
3167 merge it into some range test. Return the new tree if so. */
3169 static tree
3170 fold_range_test (exp)
3171 tree exp;
3173 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3174 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3175 int in0_p, in1_p, in_p;
3176 tree low0, low1, low, high0, high1, high;
3177 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3178 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3179 tree tem;
3181 /* If this is an OR operation, invert both sides; we will invert
3182 again at the end. */
3183 if (or_op)
3184 in0_p = ! in0_p, in1_p = ! in1_p;
3186 /* If both expressions are the same, if we can merge the ranges, and we
3187 can build the range test, return it or it inverted. If one of the
3188 ranges is always true or always false, consider it to be the same
3189 expression as the other. */
3190 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3191 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3192 in1_p, low1, high1)
3193 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3194 lhs != 0 ? lhs
3195 : rhs != 0 ? rhs : integer_zero_node,
3196 in_p, low, high))))
3197 return or_op ? invert_truthvalue (tem) : tem;
3199 /* On machines where the branch cost is expensive, if this is a
3200 short-circuited branch and the underlying object on both sides
3201 is the same, make a non-short-circuit operation. */
3202 else if (BRANCH_COST >= 2
3203 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3204 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3205 && operand_equal_p (lhs, rhs, 0))
3207 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3208 unless we are at top level, in which case we can't do this. */
3209 if (simple_operand_p (lhs))
3210 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3211 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3212 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3213 TREE_OPERAND (exp, 1));
3215 else if (current_function_decl != 0)
3217 tree common = save_expr (lhs);
3219 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3220 or_op ? ! in0_p : in0_p,
3221 low0, high0))
3222 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3223 or_op ? ! in1_p : in1_p,
3224 low1, high1))))
3225 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3226 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3227 TREE_TYPE (exp), lhs, rhs);
3230 else
3231 return 0;
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (c, p, unsignedp, mask)
     tree c;
     int p;
     int unsignedp;
     tree mask;
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  /* Nothing to do if C already occupies its full mode or is unsigned:
     no sign extension takes place in either case.  */
  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = convert (signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = convert (type, temp);

  return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (code, truth_type, lhs, rhs)
     enum tree_code code;
     tree truth_type, lhs, rhs;
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree type, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);

  /* Both sides must be comparisons for anything below to apply.  */
  if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
    return 0;

  /* Canonicalize CODE to the non-short-circuit form.  */
  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
     are with zero (tmw).  */

  if (BRANCH_COST >= 2
      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    return build (code, truth_type, lhs, rhs);

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  /* Decode each comparison operand as a bit-field reference; volatilep
     is accumulated across all four so one volatile ref aborts the fold.  */
  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	l_const = ll_mask;
      else
	return 0;
    }

  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	r_const = rl_mask;
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  type = type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  /* On big-endian targets bit positions count from the other end of
     the containing word, so mirror the offsets.  */
  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = convert (type, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      /* If the constant has bits outside the field's mask, the comparison
	 can never match (or never fail, for NE) — fold to a constant.  */
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold (build1 (BIT_NOT_EXPR,
						      type, ll_mask)),
					0)))
	{
	  warning ("comparison is always %s",
		   wanted_code == NE_EXPR ? "one" : "zero");

	  return convert (truth_type,
			  wanted_code == NE_EXPR
			  ? integer_one_node : integer_zero_node);
	}
    }

  if (r_const)
    {
      r_const = convert (type, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold (build1 (BIT_NOT_EXPR,
						      type, rl_mask)),
					0)))
	{
	  warning ("comparison is always %s",
		   wanted_code == NE_EXPR ? "one" : "zero");

	  return convert (truth_type,
			  wanted_code == NE_EXPR
			  ? integer_one_node : integer_zero_node);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the masks agree,
	 we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
	{
	  lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    {
	      lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
	      rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
	    }

	  return build (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	return build (wanted_code, truth_type,
		      make_bit_field_ref (ll_inner, type,
					  ll_bitsize + rl_bitsize,
					  MIN (ll_bitpos, rl_bitpos),
					  ll_unsignedp),
		      make_bit_field_ref (lr_inner, type,
					  lr_bitsize + rr_bitsize,
					  MIN (lr_bitpos, rr_bitpos),
					  lr_unsignedp));

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("`or' of unmatched not-equal tests is always 1");
	  return convert (truth_type, integer_one_node);
	}
      else
	{
	  warning ("`and' of mutually exclusive equal-tests is always zero");
	  return convert (truth_type, integer_zero_node);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build (BIT_AND_EXPR, type, result, ll_mask);

  return build (wanted_code, truth_type, result,
		const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
3614 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3615 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3616 that we may sometimes modify the tree. */
3618 static tree
3619 strip_compound_expr (t, s)
3620 tree t;
3621 tree s;
3623 tree type = TREE_TYPE (t);
3624 enum tree_code code = TREE_CODE (t);
3626 /* See if this is the COMPOUND_EXPR we want to eliminate. */
3627 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
3628 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
3629 return TREE_OPERAND (t, 1);
3631 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3632 don't bother handling any other types. */
3633 else if (code == COND_EXPR)
3635 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3636 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3637 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
3639 else if (TREE_CODE_CLASS (code) == '1')
3640 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3641 else if (TREE_CODE_CLASS (code) == '<'
3642 || TREE_CODE_CLASS (code) == '2')
3644 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3645 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3648 return t;
3651 /* Perform constant folding and related simplification of EXPR.
3652 The related simplifications include x*1 => x, x*0 => 0, etc.,
3653 and application of the associative law.
3654 NOP_EXPR conversions may be removed freely (as long as we
3655 are careful not to change the C type of the overall expression)
3656 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3657 but we can constant-fold them if they have constant operands. */
3659 tree
3660 fold (expr)
3661 tree expr;
3663 register tree t = expr;
3664 tree t1 = NULL_TREE;
3665 tree tem;
3666 tree type = TREE_TYPE (expr);
3667 register tree arg0, arg1;
3668 register enum tree_code code = TREE_CODE (t);
3669 register int kind;
3670 int invert;
3672 /* WINS will be nonzero when the switch is done
3673 if all operands are constant. */
3675 int wins = 1;
3677 /* Don't try to process an RTL_EXPR since its operands aren't trees.
3678 Likewise for a SAVE_EXPR that's already been evaluated. */
3679 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t)) != 0)
3680 return t;
3682 /* Return right away if already constant. */
3683 if (TREE_CONSTANT (t))
3685 if (code == CONST_DECL)
3686 return DECL_INITIAL (t);
3687 return t;
3690 kind = TREE_CODE_CLASS (code);
3691 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3693 tree subop;
3695 /* Special case for conversion ops that can have fixed point args. */
3696 arg0 = TREE_OPERAND (t, 0);
3698 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3699 if (arg0 != 0)
3700 STRIP_TYPE_NOPS (arg0);
3702 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3703 subop = TREE_REALPART (arg0);
3704 else
3705 subop = arg0;
3707 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3708 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3709 && TREE_CODE (subop) != REAL_CST
3710 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3712 /* Note that TREE_CONSTANT isn't enough:
3713 static var addresses are constant but we can't
3714 do arithmetic on them. */
3715 wins = 0;
3717 else if (kind == 'e' || kind == '<'
3718 || kind == '1' || kind == '2' || kind == 'r')
3720 register int len = tree_code_length[(int) code];
3721 register int i;
3722 for (i = 0; i < len; i++)
3724 tree op = TREE_OPERAND (t, i);
3725 tree subop;
3727 if (op == 0)
3728 continue; /* Valid for CALL_EXPR, at least. */
3730 if (kind == '<' || code == RSHIFT_EXPR)
3732 /* Signedness matters here. Perhaps we can refine this
3733 later. */
3734 STRIP_TYPE_NOPS (op);
3736 else
3738 /* Strip any conversions that don't change the mode. */
3739 STRIP_NOPS (op);
3742 if (TREE_CODE (op) == COMPLEX_CST)
3743 subop = TREE_REALPART (op);
3744 else
3745 subop = op;
3747 if (TREE_CODE (subop) != INTEGER_CST
3748 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3749 && TREE_CODE (subop) != REAL_CST
3750 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3752 /* Note that TREE_CONSTANT isn't enough:
3753 static var addresses are constant but we can't
3754 do arithmetic on them. */
3755 wins = 0;
3757 if (i == 0)
3758 arg0 = op;
3759 else if (i == 1)
3760 arg1 = op;
3764 /* If this is a commutative operation, and ARG0 is a constant, move it
3765 to ARG1 to reduce the number of tests below. */
3766 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3767 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3768 || code == BIT_AND_EXPR)
3769 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3771 tem = arg0; arg0 = arg1; arg1 = tem;
3773 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3774 TREE_OPERAND (t, 1) = tem;
3777 /* Now WINS is set as described above,
3778 ARG0 is the first operand of EXPR,
3779 and ARG1 is the second operand (if it has more than one operand).
3781 First check for cases where an arithmetic operation is applied to a
3782 compound, conditional, or comparison operation. Push the arithmetic
3783 operation inside the compound or conditional to see if any folding
3784 can then be done. Convert comparison to conditional for this purpose.
3785 The also optimizes non-constant cases that used to be done in
3786 expand_expr.
3788 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3789 one of the operands is a comparison and the other is a comparison, a
3790 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3791 code below would make the expression more complex. Change it to a
3792 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3793 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3795 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3796 || code == EQ_EXPR || code == NE_EXPR)
3797 && ((truth_value_p (TREE_CODE (arg0))
3798 && (truth_value_p (TREE_CODE (arg1))
3799 || (TREE_CODE (arg1) == BIT_AND_EXPR
3800 && integer_onep (TREE_OPERAND (arg1, 1)))))
3801 || (truth_value_p (TREE_CODE (arg1))
3802 && (truth_value_p (TREE_CODE (arg0))
3803 || (TREE_CODE (arg0) == BIT_AND_EXPR
3804 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3806 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3807 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3808 : TRUTH_XOR_EXPR,
3809 type, arg0, arg1));
3811 if (code == EQ_EXPR)
3812 t = invert_truthvalue (t);
3814 return t;
3817 if (TREE_CODE_CLASS (code) == '1')
3819 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3820 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3821 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3822 else if (TREE_CODE (arg0) == COND_EXPR)
3824 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3825 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3826 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3828 /* If this was a conversion, and all we did was to move into
3829 inside the COND_EXPR, bring it back out. But leave it if
3830 it is a conversion from integer to integer and the
3831 result precision is no wider than a word since such a
3832 conversion is cheap and may be optimized away by combine,
3833 while it couldn't if it were outside the COND_EXPR. Then return
3834 so we don't get into an infinite recursion loop taking the
3835 conversion out and then back in. */
3837 if ((code == NOP_EXPR || code == CONVERT_EXPR
3838 || code == NON_LVALUE_EXPR)
3839 && TREE_CODE (t) == COND_EXPR
3840 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3841 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3842 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3843 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
3844 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
3845 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
3846 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
3847 t = build1 (code, type,
3848 build (COND_EXPR,
3849 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3850 TREE_OPERAND (t, 0),
3851 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3852 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3853 return t;
3855 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3856 return fold (build (COND_EXPR, type, arg0,
3857 fold (build1 (code, type, integer_one_node)),
3858 fold (build1 (code, type, integer_zero_node))));
3860 else if (TREE_CODE_CLASS (code) == '2'
3861 || TREE_CODE_CLASS (code) == '<')
3863 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3864 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3865 fold (build (code, type,
3866 arg0, TREE_OPERAND (arg1, 1))));
3867 else if ((TREE_CODE (arg1) == COND_EXPR
3868 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
3869 && TREE_CODE_CLASS (code) != '<'))
3870 && (! TREE_SIDE_EFFECTS (arg0) || current_function_decl != 0))
3872 tree test, true_value, false_value;
3874 if (TREE_CODE (arg1) == COND_EXPR)
3876 test = TREE_OPERAND (arg1, 0);
3877 true_value = TREE_OPERAND (arg1, 1);
3878 false_value = TREE_OPERAND (arg1, 2);
3880 else
3882 tree testtype = TREE_TYPE (arg1);
3883 test = arg1;
3884 true_value = convert (testtype, integer_one_node);
3885 false_value = convert (testtype, integer_zero_node);
3888 /* If ARG0 is complex we want to make sure we only evaluate
3889 it once. Though this is only required if it is volatile, it
3890 might be more efficient even if it is not. However, if we
3891 succeed in folding one part to a constant, we do not need
3892 to make this SAVE_EXPR. Since we do this optimization
3893 primarily to see if we do end up with constant and this
3894 SAVE_EXPR interferes with later optimizations, suppressing
3895 it when we can is important. */
3897 if (TREE_CODE (arg0) != SAVE_EXPR
3898 && ((TREE_CODE (arg0) != VAR_DECL
3899 && TREE_CODE (arg0) != PARM_DECL)
3900 || TREE_SIDE_EFFECTS (arg0)))
3902 tree lhs = fold (build (code, type, arg0, true_value));
3903 tree rhs = fold (build (code, type, arg0, false_value));
3905 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3906 return fold (build (COND_EXPR, type, test, lhs, rhs));
3908 if (current_function_decl != 0)
3909 arg0 = save_expr (arg0);
3912 test = fold (build (COND_EXPR, type, test,
3913 fold (build (code, type, arg0, true_value)),
3914 fold (build (code, type, arg0, false_value))));
3915 if (TREE_CODE (arg0) == SAVE_EXPR)
3916 return build (COMPOUND_EXPR, type,
3917 convert (void_type_node, arg0),
3918 strip_compound_expr (test, arg0));
3919 else
3920 return convert (type, test);
3923 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3924 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3925 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3926 else if ((TREE_CODE (arg0) == COND_EXPR
3927 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
3928 && TREE_CODE_CLASS (code) != '<'))
3929 && (! TREE_SIDE_EFFECTS (arg1) || current_function_decl != 0))
3931 tree test, true_value, false_value;
3933 if (TREE_CODE (arg0) == COND_EXPR)
3935 test = TREE_OPERAND (arg0, 0);
3936 true_value = TREE_OPERAND (arg0, 1);
3937 false_value = TREE_OPERAND (arg0, 2);
3939 else
3941 tree testtype = TREE_TYPE (arg0);
3942 test = arg0;
3943 true_value = convert (testtype, integer_one_node);
3944 false_value = convert (testtype, integer_zero_node);
3947 if (TREE_CODE (arg1) != SAVE_EXPR
3948 && ((TREE_CODE (arg1) != VAR_DECL
3949 && TREE_CODE (arg1) != PARM_DECL)
3950 || TREE_SIDE_EFFECTS (arg1)))
3952 tree lhs = fold (build (code, type, true_value, arg1));
3953 tree rhs = fold (build (code, type, false_value, arg1));
3955 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs)
3956 || TREE_CONSTANT (arg1))
3957 return fold (build (COND_EXPR, type, test, lhs, rhs));
3959 if (current_function_decl != 0)
3960 arg1 = save_expr (arg1);
3963 test = fold (build (COND_EXPR, type, test,
3964 fold (build (code, type, true_value, arg1)),
3965 fold (build (code, type, false_value, arg1))));
3966 if (TREE_CODE (arg1) == SAVE_EXPR)
3967 return build (COMPOUND_EXPR, type,
3968 convert (void_type_node, arg1),
3969 strip_compound_expr (test, arg1));
3970 else
3971 return convert (type, test);
3974 else if (TREE_CODE_CLASS (code) == '<'
3975 && TREE_CODE (arg0) == COMPOUND_EXPR)
3976 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3977 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3978 else if (TREE_CODE_CLASS (code) == '<'
3979 && TREE_CODE (arg1) == COMPOUND_EXPR)
3980 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3981 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
3983 switch (code)
3985 case INTEGER_CST:
3986 case REAL_CST:
3987 case STRING_CST:
3988 case COMPLEX_CST:
3989 case CONSTRUCTOR:
3990 return t;
3992 case CONST_DECL:
3993 return fold (DECL_INITIAL (t));
3995 case NOP_EXPR:
3996 case FLOAT_EXPR:
3997 case CONVERT_EXPR:
3998 case FIX_TRUNC_EXPR:
3999 /* Other kinds of FIX are not handled properly by fold_convert. */
4001 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4002 return TREE_OPERAND (t, 0);
4004 /* Handle cases of two conversions in a row. */
4005 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4006 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4008 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4009 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4010 tree final_type = TREE_TYPE (t);
4011 int inside_int = INTEGRAL_TYPE_P (inside_type);
4012 int inside_ptr = POINTER_TYPE_P (inside_type);
4013 int inside_float = FLOAT_TYPE_P (inside_type);
4014 int inside_prec = TYPE_PRECISION (inside_type);
4015 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4016 int inter_int = INTEGRAL_TYPE_P (inter_type);
4017 int inter_ptr = POINTER_TYPE_P (inter_type);
4018 int inter_float = FLOAT_TYPE_P (inter_type);
4019 int inter_prec = TYPE_PRECISION (inter_type);
4020 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4021 int final_int = INTEGRAL_TYPE_P (final_type);
4022 int final_ptr = POINTER_TYPE_P (final_type);
4023 int final_float = FLOAT_TYPE_P (final_type);
4024 int final_prec = TYPE_PRECISION (final_type);
4025 int final_unsignedp = TREE_UNSIGNED (final_type);
4027 /* In addition to the cases of two conversions in a row
4028 handled below, if we are converting something to its own
4029 type via an object of identical or wider precision, neither
4030 conversion is needed. */
4031 if (inside_type == final_type
4032 && ((inter_int && final_int) || (inter_float && final_float))
4033 && inter_prec >= final_prec)
4034 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4036 /* Likewise, if the intermediate and final types are either both
4037 float or both integer, we don't need the middle conversion if
4038 it is wider than the final type and doesn't change the signedness
4039 (for integers). Avoid this if the final type is a pointer
4040 since then we sometimes need the inner conversion. Likewise if
4041 the outer has a precision not equal to the size of its mode. */
4042 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4043 || (inter_float && inside_float))
4044 && inter_prec >= inside_prec
4045 && (inter_float || inter_unsignedp == inside_unsignedp)
4046 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4047 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4048 && ! final_ptr)
4049 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4051 /* Two conversions in a row are not needed unless:
4052 - some conversion is floating-point (overstrict for now), or
4053 - the intermediate type is narrower than both initial and
4054 final, or
4055 - the intermediate type and innermost type differ in signedness,
4056 and the outermost type is wider than the intermediate, or
4057 - the initial type is a pointer type and the precisions of the
4058 intermediate and final types differ, or
4059 - the final type is a pointer type and the precisions of the
4060 initial and intermediate types differ. */
4061 if (! inside_float && ! inter_float && ! final_float
4062 && (inter_prec > inside_prec || inter_prec > final_prec)
4063 && ! (inside_int && inter_int
4064 && inter_unsignedp != inside_unsignedp
4065 && inter_prec < final_prec)
4066 && ((inter_unsignedp && inter_prec > inside_prec)
4067 == (final_unsignedp && final_prec > inter_prec))
4068 && ! (inside_ptr && inter_prec != final_prec)
4069 && ! (final_ptr && inside_prec != inter_prec)
4070 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4071 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4072 && ! final_ptr)
4073 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4076 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4077 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4078 /* Detect assigning a bitfield. */
4079 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4080 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4082 /* Don't leave an assignment inside a conversion
4083 unless assigning a bitfield. */
4084 tree prev = TREE_OPERAND (t, 0);
4085 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4086 /* First do the assignment, then return converted constant. */
4087 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4088 TREE_USED (t) = 1;
4089 return t;
4091 if (!wins)
4093 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4094 return t;
4096 return fold_convert (t, arg0);
4098 #if 0 /* This loses on &"foo"[0]. */
4099 case ARRAY_REF:
4101 int i;
4103 /* Fold an expression like: "foo"[2] */
4104 if (TREE_CODE (arg0) == STRING_CST
4105 && TREE_CODE (arg1) == INTEGER_CST
4106 && !TREE_INT_CST_HIGH (arg1)
4107 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4109 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4110 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
4111 force_fit_type (t, 0);
4114 return t;
4115 #endif /* 0 */
4117 case COMPONENT_REF:
4118 if (TREE_CODE (arg0) == CONSTRUCTOR)
4120 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4121 if (m)
4122 t = TREE_VALUE (m);
4124 return t;
4126 case RANGE_EXPR:
4127 TREE_CONSTANT (t) = wins;
4128 return t;
4130 case NEGATE_EXPR:
4131 if (wins)
4133 if (TREE_CODE (arg0) == INTEGER_CST)
4135 HOST_WIDE_INT low, high;
4136 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4137 TREE_INT_CST_HIGH (arg0),
4138 &low, &high);
4139 t = build_int_2 (low, high);
4140 TREE_TYPE (t) = type;
4141 TREE_OVERFLOW (t)
4142 = (TREE_OVERFLOW (arg0)
4143 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4144 TREE_CONSTANT_OVERFLOW (t)
4145 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4147 else if (TREE_CODE (arg0) == REAL_CST)
4148 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4150 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4151 return TREE_OPERAND (arg0, 0);
4153 /* Convert - (a - b) to (b - a) for non-floating-point. */
4154 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
4155 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4156 TREE_OPERAND (arg0, 0));
4158 return t;
4160 case ABS_EXPR:
4161 if (wins)
4163 if (TREE_CODE (arg0) == INTEGER_CST)
4165 if (! TREE_UNSIGNED (type)
4166 && TREE_INT_CST_HIGH (arg0) < 0)
4168 HOST_WIDE_INT low, high;
4169 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4170 TREE_INT_CST_HIGH (arg0),
4171 &low, &high);
4172 t = build_int_2 (low, high);
4173 TREE_TYPE (t) = type;
4174 TREE_OVERFLOW (t)
4175 = (TREE_OVERFLOW (arg0)
4176 | force_fit_type (t, overflow));
4177 TREE_CONSTANT_OVERFLOW (t)
4178 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4181 else if (TREE_CODE (arg0) == REAL_CST)
4183 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4184 t = build_real (type,
4185 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4188 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4189 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4190 return t;
4192 case CONJ_EXPR:
4193 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4194 return arg0;
4195 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4196 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
4197 TREE_OPERAND (arg0, 0),
4198 fold (build1 (NEGATE_EXPR,
4199 TREE_TYPE (TREE_TYPE (arg0)),
4200 TREE_OPERAND (arg0, 1))));
4201 else if (TREE_CODE (arg0) == COMPLEX_CST)
4202 return build_complex (type, TREE_OPERAND (arg0, 0),
4203 fold (build1 (NEGATE_EXPR,
4204 TREE_TYPE (TREE_TYPE (arg0)),
4205 TREE_OPERAND (arg0, 1))));
4206 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4207 return fold (build (TREE_CODE (arg0), type,
4208 fold (build1 (CONJ_EXPR, type,
4209 TREE_OPERAND (arg0, 0))),
4210 fold (build1 (CONJ_EXPR,
4211 type, TREE_OPERAND (arg0, 1)))));
4212 else if (TREE_CODE (arg0) == CONJ_EXPR)
4213 return TREE_OPERAND (arg0, 0);
4214 return t;
4216 case BIT_NOT_EXPR:
4217 if (wins)
4219 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4220 ~ TREE_INT_CST_HIGH (arg0));
4221 TREE_TYPE (t) = type;
4222 force_fit_type (t, 0);
4223 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4224 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4226 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4227 return TREE_OPERAND (arg0, 0);
4228 return t;
4230 case PLUS_EXPR:
4231 /* A + (-B) -> A - B */
4232 if (TREE_CODE (arg1) == NEGATE_EXPR)
4233 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4234 else if (! FLOAT_TYPE_P (type))
4236 if (integer_zerop (arg1))
4237 return non_lvalue (convert (type, arg0));
4239 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4240 with a constant, and the two constants have no bits in common,
4241 we should treat this as a BIT_IOR_EXPR since this may produce more
4242 simplifications. */
4243 if (TREE_CODE (arg0) == BIT_AND_EXPR
4244 && TREE_CODE (arg1) == BIT_AND_EXPR
4245 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4246 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
4247 && integer_zerop (const_binop (BIT_AND_EXPR,
4248 TREE_OPERAND (arg0, 1),
4249 TREE_OPERAND (arg1, 1), 0)))
4251 code = BIT_IOR_EXPR;
4252 goto bit_ior;
4255 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
4256 about the case where C is a constant, just try one of the
4257 four possibilities. */
4259 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4260 && operand_equal_p (TREE_OPERAND (arg0, 1),
4261 TREE_OPERAND (arg1, 1), 0))
4262 return fold (build (MULT_EXPR, type,
4263 fold (build (PLUS_EXPR, type,
4264 TREE_OPERAND (arg0, 0),
4265 TREE_OPERAND (arg1, 0))),
4266 TREE_OPERAND (arg0, 1)));
4268 /* In IEEE floating point, x+0 may not equal x. */
4269 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4270 || flag_fast_math)
4271 && real_zerop (arg1))
4272 return non_lvalue (convert (type, arg0));
4273 associate:
4274 /* In most languages, can't associate operations on floats
4275 through parentheses. Rather than remember where the parentheses
4276 were, we don't associate floats at all. It shouldn't matter much.
4277 However, associating multiplications is only very slightly
4278 inaccurate, so do that if -ffast-math is specified. */
4279 if (FLOAT_TYPE_P (type)
4280 && ! (flag_fast_math && code == MULT_EXPR))
4281 goto binary;
4283 /* The varsign == -1 cases happen only for addition and subtraction.
4284 It says that the arg that was split was really CON minus VAR.
4285 The rest of the code applies to all associative operations. */
4286 if (!wins)
4288 tree var, con;
4289 int varsign;
4291 if (split_tree (arg0, code, &var, &con, &varsign))
4293 if (varsign == -1)
4295 /* EXPR is (CON-VAR) +- ARG1. */
4296 /* If it is + and VAR==ARG1, return just CONST. */
4297 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
4298 return convert (TREE_TYPE (t), con);
4300 /* If ARG0 is a constant, don't change things around;
4301 instead keep all the constant computations together. */
4303 if (TREE_CONSTANT (arg0))
4304 return t;
4306 /* Otherwise return (CON +- ARG1) - VAR. */
4307 t = build (MINUS_EXPR, type,
4308 fold (build (code, type, con, arg1)), var);
4310 else
4312 /* EXPR is (VAR+CON) +- ARG1. */
4313 /* If it is - and VAR==ARG1, return just CONST. */
4314 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
4315 return convert (TREE_TYPE (t), con);
4317 /* If ARG0 is a constant, don't change things around;
4318 instead keep all the constant computations together. */
4320 if (TREE_CONSTANT (arg0))
4321 return t;
4323 /* Otherwise return VAR +- (ARG1 +- CON). */
4324 tem = fold (build (code, type, arg1, con));
4325 t = build (code, type, var, tem);
4327 if (integer_zerop (tem)
4328 && (code == PLUS_EXPR || code == MINUS_EXPR))
4329 return convert (type, var);
4330 /* If we have x +/- (c - d) [c an explicit integer]
4331 change it to x -/+ (d - c) since if d is relocatable
4332 then the latter can be a single immediate insn
4333 and the former cannot. */
4334 if (TREE_CODE (tem) == MINUS_EXPR
4335 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
4337 tree tem1 = TREE_OPERAND (tem, 1);
4338 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
4339 TREE_OPERAND (tem, 0) = tem1;
4340 TREE_SET_CODE (t,
4341 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4344 return t;
4347 if (split_tree (arg1, code, &var, &con, &varsign))
4349 if (TREE_CONSTANT (arg1))
4350 return t;
4352 if (varsign == -1)
4353 TREE_SET_CODE (t,
4354 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4356 /* EXPR is ARG0 +- (CON +- VAR). */
4357 if (TREE_CODE (t) == MINUS_EXPR
4358 && operand_equal_p (var, arg0, 0))
4360 /* If VAR and ARG0 cancel, return just CON or -CON. */
4361 if (code == PLUS_EXPR)
4362 return convert (TREE_TYPE (t), con);
4363 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
4364 convert (TREE_TYPE (t), con)));
4367 t = build (TREE_CODE (t), type,
4368 fold (build (code, TREE_TYPE (t), arg0, con)), var);
4370 if (integer_zerop (TREE_OPERAND (t, 0))
4371 && TREE_CODE (t) == PLUS_EXPR)
4372 return convert (TREE_TYPE (t), var);
4373 return t;
4376 binary:
4377 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
4378 if (TREE_CODE (arg1) == REAL_CST)
4379 return t;
4380 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
4381 if (wins)
4382 t1 = const_binop (code, arg0, arg1, 0);
4383 if (t1 != NULL_TREE)
4385 /* The return value should always have
4386 the same type as the original expression. */
4387 if (TREE_TYPE (t1) != TREE_TYPE (t))
4388 t1 = convert (TREE_TYPE (t), t1);
4390 return t1;
4392 return t;
4394 case MINUS_EXPR:
4395 if (! FLOAT_TYPE_P (type))
4397 if (! wins && integer_zerop (arg0))
4398 return build1 (NEGATE_EXPR, type, arg1);
4399 if (integer_zerop (arg1))
4400 return non_lvalue (convert (type, arg0));
4402 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
4403 about the case where C is a constant, just try one of the
4404 four possibilities. */
4406 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4407 && operand_equal_p (TREE_OPERAND (arg0, 1),
4408 TREE_OPERAND (arg1, 1), 0))
4409 return fold (build (MULT_EXPR, type,
4410 fold (build (MINUS_EXPR, type,
4411 TREE_OPERAND (arg0, 0),
4412 TREE_OPERAND (arg1, 0))),
4413 TREE_OPERAND (arg0, 1)));
4415 /* Convert A - (-B) to A + B. */
4416 else if (TREE_CODE (arg1) == NEGATE_EXPR)
4417 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4419 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4420 || flag_fast_math)
4422 /* Except with IEEE floating point, 0-x equals -x. */
4423 if (! wins && real_zerop (arg0))
4424 return build1 (NEGATE_EXPR, type, arg1);
4425 /* Except with IEEE floating point, x-0 equals x. */
4426 if (real_zerop (arg1))
4427 return non_lvalue (convert (type, arg0));
4430 /* Fold &x - &x. This can happen from &x.foo - &x.
4431 This is unsafe for certain floats even in non-IEEE formats.
4432 In IEEE, it is unsafe because it does wrong for NaNs.
4433 Also note that operand_equal_p is always false if an operand
4434 is volatile. */
4436 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
4437 && operand_equal_p (arg0, arg1, 0))
4438 return convert (type, integer_zero_node);
4440 goto associate;
4442 case MULT_EXPR:
4443 if (! FLOAT_TYPE_P (type))
4445 if (integer_zerop (arg1))
4446 return omit_one_operand (type, arg1, arg0);
4447 if (integer_onep (arg1))
4448 return non_lvalue (convert (type, arg0));
4450 /* ((A / C) * C) is A if the division is an
4451 EXACT_DIV_EXPR. Since C is normally a constant,
4452 just check for one of the four possibilities. */
4454 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
4455 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4456 return TREE_OPERAND (arg0, 0);
4458 /* (a * (1 << b)) is (a << b) */
4459 if (TREE_CODE (arg1) == LSHIFT_EXPR
4460 && integer_onep (TREE_OPERAND (arg1, 0)))
4461 return fold (build (LSHIFT_EXPR, type, arg0,
4462 TREE_OPERAND (arg1, 1)));
4463 if (TREE_CODE (arg0) == LSHIFT_EXPR
4464 && integer_onep (TREE_OPERAND (arg0, 0)))
4465 return fold (build (LSHIFT_EXPR, type, arg1,
4466 TREE_OPERAND (arg0, 1)));
4468 else
4470 /* x*0 is 0, except for IEEE floating point. */
4471 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4472 || flag_fast_math)
4473 && real_zerop (arg1))
4474 return omit_one_operand (type, arg1, arg0);
4475 /* In IEEE floating point, x*1 is not equivalent to x for snans.
4476 However, ANSI says we can drop signals,
4477 so we can do this anyway. */
4478 if (real_onep (arg1))
4479 return non_lvalue (convert (type, arg0));
4480 /* x*2 is x+x */
4481 if (! wins && real_twop (arg1) && current_function_decl != 0)
4483 tree arg = save_expr (arg0);
4484 return build (PLUS_EXPR, type, arg, arg);
4487 goto associate;
4489 case BIT_IOR_EXPR:
4490 bit_ior:
4492 register enum tree_code code0, code1;
4494 if (integer_all_onesp (arg1))
4495 return omit_one_operand (type, arg1, arg0);
4496 if (integer_zerop (arg1))
4497 return non_lvalue (convert (type, arg0));
4498 t1 = distribute_bit_expr (code, type, arg0, arg1);
4499 if (t1 != NULL_TREE)
4500 return t1;
4502 /* (A << C1) | (A >> C2) if A is unsigned and C1+C2 is the size of A
4503 is a rotate of A by C1 bits. */
4504 /* (A << B) | (A >> (Z - B)) if A is unsigned and Z is the size of A
4505 is a rotate of A by B bits. */
4507 code0 = TREE_CODE (arg0);
4508 code1 = TREE_CODE (arg1);
4509 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
4510 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
4511 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
4512 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4514 register tree tree01, tree11;
4515 register enum tree_code code01, code11;
4517 tree01 = TREE_OPERAND (arg0, 1);
4518 tree11 = TREE_OPERAND (arg1, 1);
4519 code01 = TREE_CODE (tree01);
4520 code11 = TREE_CODE (tree11);
4521 if (code01 == INTEGER_CST
4522 && code11 == INTEGER_CST
4523 && TREE_INT_CST_HIGH (tree01) == 0
4524 && TREE_INT_CST_HIGH (tree11) == 0
4525 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
4526 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
4527 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
4528 code0 == LSHIFT_EXPR ? tree01 : tree11);
4529 else if (code11 == MINUS_EXPR
4530 && TREE_CODE (TREE_OPERAND (tree11, 0)) == INTEGER_CST
4531 && TREE_INT_CST_HIGH (TREE_OPERAND (tree11, 0)) == 0
4532 && TREE_INT_CST_LOW (TREE_OPERAND (tree11, 0))
4533 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4534 && operand_equal_p (tree01, TREE_OPERAND (tree11, 1), 0))
4535 return build (code0 == LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4536 type, TREE_OPERAND (arg0, 0), tree01);
4537 else if (code01 == MINUS_EXPR
4538 && TREE_CODE (TREE_OPERAND (tree01, 0)) == INTEGER_CST
4539 && TREE_INT_CST_HIGH (TREE_OPERAND (tree01, 0)) == 0
4540 && TREE_INT_CST_LOW (TREE_OPERAND (tree01, 0))
4541 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4542 && operand_equal_p (tree11, TREE_OPERAND (tree01, 1), 0))
4543 return build (code0 != LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4544 type, TREE_OPERAND (arg0, 0), tree11);
4547 goto associate;
4550 case BIT_XOR_EXPR:
4551 if (integer_zerop (arg1))
4552 return non_lvalue (convert (type, arg0));
4553 if (integer_all_onesp (arg1))
4554 return fold (build1 (BIT_NOT_EXPR, type, arg0));
4555 goto associate;
4557 case BIT_AND_EXPR:
4558 bit_and:
4559 if (integer_all_onesp (arg1))
4560 return non_lvalue (convert (type, arg0));
4561 if (integer_zerop (arg1))
4562 return omit_one_operand (type, arg1, arg0);
4563 t1 = distribute_bit_expr (code, type, arg0, arg1);
4564 if (t1 != NULL_TREE)
4565 return t1;
4566 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
4567 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
4568 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
4570 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
4571 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4572 && (~TREE_INT_CST_LOW (arg0)
4573 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4574 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
4576 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
4577 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4579 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
4580 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4581 && (~TREE_INT_CST_LOW (arg1)
4582 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4583 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
4585 goto associate;
4587 case BIT_ANDTC_EXPR:
4588 if (integer_all_onesp (arg0))
4589 return non_lvalue (convert (type, arg1));
4590 if (integer_zerop (arg0))
4591 return omit_one_operand (type, arg0, arg1);
4592 if (TREE_CODE (arg1) == INTEGER_CST)
4594 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
4595 code = BIT_AND_EXPR;
4596 goto bit_and;
4598 goto binary;
4600 case RDIV_EXPR:
4601 /* In most cases, do nothing with a divide by zero. */
4602 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4603 #ifndef REAL_INFINITY
4604 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
4605 return t;
4606 #endif
4607 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4609 /* In IEEE floating point, x/1 is not equivalent to x for snans.
4610 However, ANSI says we can drop signals, so we can do this anyway. */
4611 if (real_onep (arg1))
4612 return non_lvalue (convert (type, arg0));
4614 /* If ARG1 is a constant, we can convert this to a multiply by the
4615 reciprocal. This does not have the same rounding properties,
4616 so only do this if -ffast-math. We can actually always safely
4617 do it if ARG1 is a power of two, but it's hard to tell if it is
4618 or not in a portable manner. */
4619 if (TREE_CODE (arg1) == REAL_CST)
4621 if (flag_fast_math
4622 && 0 != (tem = const_binop (code, build_real (type, dconst1),
4623 arg1, 0)))
4624 return fold (build (MULT_EXPR, type, arg0, tem));
4625 /* Find the reciprocal if optimizing and the result is exact. */
4626 else if (optimize)
4628 REAL_VALUE_TYPE r;
4629 r = TREE_REAL_CST (arg1);
4630 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
4632 tem = build_real (type, r);
4633 return fold (build (MULT_EXPR, type, arg0, tem));
4637 goto binary;
4639 case TRUNC_DIV_EXPR:
4640 case ROUND_DIV_EXPR:
4641 case FLOOR_DIV_EXPR:
4642 case CEIL_DIV_EXPR:
4643 case EXACT_DIV_EXPR:
4644 if (integer_onep (arg1))
4645 return non_lvalue (convert (type, arg0));
4646 if (integer_zerop (arg1))
4647 return t;
4649 /* If we have ((a / C1) / C2) where both division are the same type, try
4650 to simplify. First see if C1 * C2 overflows or not. */
4651 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
4652 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4654 tree new_divisor;
4656 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
4657 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
4659 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
4660 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
4662 /* If no overflow, divide by C1*C2. */
4663 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
4667 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4668 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4669 expressions, which often appear in the offsets or sizes of
4670 objects with a varying size. Only deal with positive divisors
4671 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4673 Look for NOPs and SAVE_EXPRs inside. */
4675 if (TREE_CODE (arg1) == INTEGER_CST
4676 && tree_int_cst_sgn (arg1) >= 0)
4678 int have_save_expr = 0;
4679 tree c2 = integer_zero_node;
4680 tree xarg0 = arg0;
4682 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4683 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4685 STRIP_NOPS (xarg0);
4687 if (TREE_CODE (xarg0) == PLUS_EXPR
4688 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4689 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4690 else if (TREE_CODE (xarg0) == MINUS_EXPR
4691 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4692 /* If we are doing this computation unsigned, the negate
4693 is incorrect. */
4694 && ! TREE_UNSIGNED (type))
4696 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4697 xarg0 = TREE_OPERAND (xarg0, 0);
4700 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4701 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4703 STRIP_NOPS (xarg0);
4705 if (TREE_CODE (xarg0) == MULT_EXPR
4706 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4707 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4708 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4709 TREE_OPERAND (xarg0, 1), arg1, 1))
4710 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4711 TREE_OPERAND (xarg0, 1), 1)))
4712 && (tree_int_cst_sgn (c2) >= 0
4713 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4714 arg1, 1))))
4716 tree outer_div = integer_one_node;
4717 tree c1 = TREE_OPERAND (xarg0, 1);
4718 tree c3 = arg1;
4720 /* If C3 > C1, set them equal and do a divide by
4721 C3/C1 at the end of the operation. */
4722 if (tree_int_cst_lt (c1, c3))
4723 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4725 /* The result is A * (C1/C3) + (C2/C3). */
4726 t = fold (build (PLUS_EXPR, type,
4727 fold (build (MULT_EXPR, type,
4728 TREE_OPERAND (xarg0, 0),
4729 const_binop (code, c1, c3, 1))),
4730 const_binop (code, c2, c3, 1)));
4732 if (! integer_onep (outer_div))
4733 t = fold (build (code, type, t, convert (type, outer_div)));
4735 if (have_save_expr)
4736 t = save_expr (t);
4738 return t;
4742 goto binary;
4744 case CEIL_MOD_EXPR:
4745 case FLOOR_MOD_EXPR:
4746 case ROUND_MOD_EXPR:
4747 case TRUNC_MOD_EXPR:
4748 if (integer_onep (arg1))
4749 return omit_one_operand (type, integer_zero_node, arg0);
4750 if (integer_zerop (arg1))
4751 return t;
4753 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4754 where C1 % C3 == 0. Handle similarly to the division case,
4755 but don't bother with SAVE_EXPRs. */
4757 if (TREE_CODE (arg1) == INTEGER_CST
4758 && ! integer_zerop (arg1))
4760 tree c2 = integer_zero_node;
4761 tree xarg0 = arg0;
4763 if (TREE_CODE (xarg0) == PLUS_EXPR
4764 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4765 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4766 else if (TREE_CODE (xarg0) == MINUS_EXPR
4767 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4768 && ! TREE_UNSIGNED (type))
4770 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4771 xarg0 = TREE_OPERAND (xarg0, 0);
4774 STRIP_NOPS (xarg0);
4776 if (TREE_CODE (xarg0) == MULT_EXPR
4777 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4778 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4779 TREE_OPERAND (xarg0, 1),
4780 arg1, 1))
4781 && tree_int_cst_sgn (c2) >= 0)
4782 /* The result is (C2%C3). */
4783 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4784 TREE_OPERAND (xarg0, 0));
4787 goto binary;
4789 case LSHIFT_EXPR:
4790 case RSHIFT_EXPR:
4791 case LROTATE_EXPR:
4792 case RROTATE_EXPR:
4793 if (integer_zerop (arg1))
4794 return non_lvalue (convert (type, arg0));
4795 /* Since negative shift count is not well-defined,
4796 don't try to compute it in the compiler. */
4797 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
4798 return t;
4799 /* Rewrite an LROTATE_EXPR by a constant into an
4800 RROTATE_EXPR by a new constant. */
4801 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
4803 TREE_SET_CODE (t, RROTATE_EXPR);
4804 code = RROTATE_EXPR;
4805 TREE_OPERAND (t, 1) = arg1
4806 = const_binop
4807 (MINUS_EXPR,
4808 convert (TREE_TYPE (arg1),
4809 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
4810 arg1, 0);
4811 if (tree_int_cst_sgn (arg1) < 0)
4812 return t;
4815 /* If we have a rotate of a bit operation with the rotate count and
4816 the second operand of the bit operation both constant,
4817 permute the two operations. */
4818 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4819 && (TREE_CODE (arg0) == BIT_AND_EXPR
4820 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
4821 || TREE_CODE (arg0) == BIT_IOR_EXPR
4822 || TREE_CODE (arg0) == BIT_XOR_EXPR)
4823 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4824 return fold (build (TREE_CODE (arg0), type,
4825 fold (build (code, type,
4826 TREE_OPERAND (arg0, 0), arg1)),
4827 fold (build (code, type,
4828 TREE_OPERAND (arg0, 1), arg1))));
4830 /* Two consecutive rotates adding up to the width of the mode can
4831 be ignored. */
4832 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4833 && TREE_CODE (arg0) == RROTATE_EXPR
4834 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4835 && TREE_INT_CST_HIGH (arg1) == 0
4836 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
4837 && ((TREE_INT_CST_LOW (arg1)
4838 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
4839 == GET_MODE_BITSIZE (TYPE_MODE (type))))
4840 return TREE_OPERAND (arg0, 0);
4842 goto binary;
4844 case MIN_EXPR:
4845 if (operand_equal_p (arg0, arg1, 0))
4846 return arg0;
4847 if (INTEGRAL_TYPE_P (type)
4848 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4849 return omit_one_operand (type, arg1, arg0);
4850 goto associate;
4852 case MAX_EXPR:
4853 if (operand_equal_p (arg0, arg1, 0))
4854 return arg0;
4855 if (INTEGRAL_TYPE_P (type)
4856 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4857 return omit_one_operand (type, arg1, arg0);
4858 goto associate;
4860 case TRUTH_NOT_EXPR:
4861 /* Note that the operand of this must be an int
4862 and its values must be 0 or 1.
4863 ("true" is a fixed value perhaps depending on the language,
4864 but we don't handle values other than 1 correctly yet.) */
4865 tem = invert_truthvalue (arg0);
4866 /* Avoid infinite recursion. */
4867 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
4868 return t;
4869 return convert (type, tem);
4871 case TRUTH_ANDIF_EXPR:
4872 /* Note that the operands of this must be ints
4873 and their values must be 0 or 1.
4874 ("true" is a fixed value perhaps depending on the language.) */
4875 /* If first arg is constant zero, return it. */
4876 if (integer_zerop (arg0))
4877 return arg0;
4878 case TRUTH_AND_EXPR:
4879 /* If either arg is constant true, drop it. */
4880 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4881 return non_lvalue (arg1);
4882 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4883 return non_lvalue (arg0);
4884 /* If second arg is constant zero, result is zero, but first arg
4885 must be evaluated. */
4886 if (integer_zerop (arg1))
4887 return omit_one_operand (type, arg1, arg0);
4888 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
4889 case will be handled here. */
4890 if (integer_zerop (arg0))
4891 return omit_one_operand (type, arg0, arg1);
4893 truth_andor:
4894 /* We only do these simplifications if we are optimizing. */
4895 if (!optimize)
4896 return t;
4898 /* Check for things like (A || B) && (A || C). We can convert this
4899 to A || (B && C). Note that either operator can be any of the four
4900 truth and/or operations and the transformation will still be
4901 valid. Also note that we only care about order for the
4902 ANDIF and ORIF operators. If B contains side effects, this
4903 might change the truth-value of A. */
4904 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4905 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4906 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4907 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4908 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
4909 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
4911 tree a00 = TREE_OPERAND (arg0, 0);
4912 tree a01 = TREE_OPERAND (arg0, 1);
4913 tree a10 = TREE_OPERAND (arg1, 0);
4914 tree a11 = TREE_OPERAND (arg1, 1);
4915 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4916 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4917 && (code == TRUTH_AND_EXPR
4918 || code == TRUTH_OR_EXPR));
4920 if (operand_equal_p (a00, a10, 0))
4921 return fold (build (TREE_CODE (arg0), type, a00,
4922 fold (build (code, type, a01, a11))));
4923 else if (commutative && operand_equal_p (a00, a11, 0))
4924 return fold (build (TREE_CODE (arg0), type, a00,
4925 fold (build (code, type, a01, a10))));
4926 else if (commutative && operand_equal_p (a01, a10, 0))
4927 return fold (build (TREE_CODE (arg0), type, a01,
4928 fold (build (code, type, a00, a11))));
4930 /* This case if tricky because we must either have commutative
4931 operators or else A10 must not have side-effects. */
4933 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4934 && operand_equal_p (a01, a11, 0))
4935 return fold (build (TREE_CODE (arg0), type,
4936 fold (build (code, type, a00, a10)),
4937 a01));
4940 /* See if we can build a range comparison. */
4941 if (0 != (tem = fold_range_test (t)))
4942 return tem;
4944 /* Check for the possibility of merging component references. If our
4945 lhs is another similar operation, try to merge its rhs with our
4946 rhs. Then try to merge our lhs and rhs. */
4947 if (TREE_CODE (arg0) == code
4948 && 0 != (tem = fold_truthop (code, type,
4949 TREE_OPERAND (arg0, 1), arg1)))
4950 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4952 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
4953 return tem;
4955 return t;
4957 case TRUTH_ORIF_EXPR:
4958 /* Note that the operands of this must be ints
4959 and their values must be 0 or true.
4960 ("true" is a fixed value perhaps depending on the language.) */
4961 /* If first arg is constant true, return it. */
4962 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4963 return arg0;
4964 case TRUTH_OR_EXPR:
4965 /* If either arg is constant zero, drop it. */
4966 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
4967 return non_lvalue (arg1);
4968 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
4969 return non_lvalue (arg0);
4970 /* If second arg is constant true, result is true, but we must
4971 evaluate first arg. */
4972 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4973 return omit_one_operand (type, arg1, arg0);
4974 /* Likewise for first arg, but note this only occurs here for
4975 TRUTH_OR_EXPR. */
4976 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4977 return omit_one_operand (type, arg0, arg1);
4978 goto truth_andor;
4980 case TRUTH_XOR_EXPR:
4981 /* If either arg is constant zero, drop it. */
4982 if (integer_zerop (arg0))
4983 return non_lvalue (arg1);
4984 if (integer_zerop (arg1))
4985 return non_lvalue (arg0);
4986 /* If either arg is constant true, this is a logical inversion. */
4987 if (integer_onep (arg0))
4988 return non_lvalue (invert_truthvalue (arg1));
4989 if (integer_onep (arg1))
4990 return non_lvalue (invert_truthvalue (arg0));
4991 return t;
4993 case EQ_EXPR:
4994 case NE_EXPR:
4995 case LT_EXPR:
4996 case GT_EXPR:
4997 case LE_EXPR:
4998 case GE_EXPR:
4999 /* If one arg is a constant integer, put it last. */
5000 if (TREE_CODE (arg0) == INTEGER_CST
5001 && TREE_CODE (arg1) != INTEGER_CST)
5003 TREE_OPERAND (t, 0) = arg1;
5004 TREE_OPERAND (t, 1) = arg0;
5005 arg0 = TREE_OPERAND (t, 0);
5006 arg1 = TREE_OPERAND (t, 1);
5007 code = swap_tree_comparison (code);
5008 TREE_SET_CODE (t, code);
5011 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5012 First, see if one arg is constant; find the constant arg
5013 and the other one. */
5015 tree constop = 0, varop;
5016 int constopnum = -1;
5018 if (TREE_CONSTANT (arg1))
5019 constopnum = 1, constop = arg1, varop = arg0;
5020 if (TREE_CONSTANT (arg0))
5021 constopnum = 0, constop = arg0, varop = arg1;
5023 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5025 /* This optimization is invalid for ordered comparisons
5026 if CONST+INCR overflows or if foo+incr might overflow.
5027 This optimization is invalid for floating point due to rounding.
5028 For pointer types we assume overflow doesn't happen. */
5029 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
5030 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5031 && (code == EQ_EXPR || code == NE_EXPR)))
5033 tree newconst
5034 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5035 constop, TREE_OPERAND (varop, 1)));
5036 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
5038 /* If VAROP is a reference to a bitfield, we must mask
5039 the constant by the width of the field. */
5040 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5041 && DECL_BIT_FIELD(TREE_OPERAND
5042 (TREE_OPERAND (varop, 0), 1)))
5044 int size
5045 = TREE_INT_CST_LOW (DECL_SIZE
5046 (TREE_OPERAND
5047 (TREE_OPERAND (varop, 0), 1)));
5049 newconst = fold (build (BIT_AND_EXPR,
5050 TREE_TYPE (varop), newconst,
5051 convert (TREE_TYPE (varop),
5052 build_int_2 (size, 0))));
5056 t = build (code, type, TREE_OPERAND (t, 0),
5057 TREE_OPERAND (t, 1));
5058 TREE_OPERAND (t, constopnum) = newconst;
5059 return t;
5062 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5064 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
5065 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5066 && (code == EQ_EXPR || code == NE_EXPR)))
5068 tree newconst
5069 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5070 constop, TREE_OPERAND (varop, 1)));
5071 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
5073 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5074 && DECL_BIT_FIELD(TREE_OPERAND
5075 (TREE_OPERAND (varop, 0), 1)))
5077 int size
5078 = TREE_INT_CST_LOW (DECL_SIZE
5079 (TREE_OPERAND
5080 (TREE_OPERAND (varop, 0), 1)));
5082 newconst = fold (build (BIT_AND_EXPR,
5083 TREE_TYPE (varop), newconst,
5084 convert (TREE_TYPE (varop),
5085 build_int_2 (size, 0))));
5089 t = build (code, type, TREE_OPERAND (t, 0),
5090 TREE_OPERAND (t, 1));
5091 TREE_OPERAND (t, constopnum) = newconst;
5092 return t;
5097 /* Change X >= CST to X > (CST - 1) if CST is positive. */
5098 if (TREE_CODE (arg1) == INTEGER_CST
5099 && TREE_CODE (arg0) != INTEGER_CST
5100 && tree_int_cst_sgn (arg1) > 0)
5102 switch (TREE_CODE (t))
5104 case GE_EXPR:
5105 code = GT_EXPR;
5106 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5107 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5108 break;
5110 case LT_EXPR:
5111 code = LE_EXPR;
5112 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5113 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5114 break;
5116 default:
5117 break;
5121 /* If this is an EQ or NE comparison with zero and ARG0 is
5122 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
5123 two operations, but the latter can be done in one less insn
5124 on machines that have only two-operand insns or on which a
5125 constant cannot be the first operand. */
5126 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
5127 && TREE_CODE (arg0) == BIT_AND_EXPR)
5129 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
5130 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
5131 return
5132 fold (build (code, type,
5133 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5134 build (RSHIFT_EXPR,
5135 TREE_TYPE (TREE_OPERAND (arg0, 0)),
5136 TREE_OPERAND (arg0, 1),
5137 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
5138 convert (TREE_TYPE (arg0),
5139 integer_one_node)),
5140 arg1));
5141 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
5142 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
5143 return
5144 fold (build (code, type,
5145 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5146 build (RSHIFT_EXPR,
5147 TREE_TYPE (TREE_OPERAND (arg0, 1)),
5148 TREE_OPERAND (arg0, 0),
5149 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
5150 convert (TREE_TYPE (arg0),
5151 integer_one_node)),
5152 arg1));
5155 /* If this is an NE or EQ comparison of zero against the result of a
5156 signed MOD operation whose second operand is a power of 2, make
5157 the MOD operation unsigned since it is simpler and equivalent. */
5158 if ((code == NE_EXPR || code == EQ_EXPR)
5159 && integer_zerop (arg1)
5160 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
5161 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
5162 || TREE_CODE (arg0) == CEIL_MOD_EXPR
5163 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
5164 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
5165 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5167 tree newtype = unsigned_type (TREE_TYPE (arg0));
5168 tree newmod = build (TREE_CODE (arg0), newtype,
5169 convert (newtype, TREE_OPERAND (arg0, 0)),
5170 convert (newtype, TREE_OPERAND (arg0, 1)));
5172 return build (code, type, newmod, convert (newtype, arg1));
5175 /* If this is an NE comparison of zero with an AND of one, remove the
5176 comparison since the AND will give the correct value. */
5177 if (code == NE_EXPR && integer_zerop (arg1)
5178 && TREE_CODE (arg0) == BIT_AND_EXPR
5179 && integer_onep (TREE_OPERAND (arg0, 1)))
5180 return convert (type, arg0);
5182 /* If we have (A & C) == C where C is a power of 2, convert this into
5183 (A & C) != 0. Similarly for NE_EXPR. */
5184 if ((code == EQ_EXPR || code == NE_EXPR)
5185 && TREE_CODE (arg0) == BIT_AND_EXPR
5186 && integer_pow2p (TREE_OPERAND (arg0, 1))
5187 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
5188 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
5189 arg0, integer_zero_node);
5191 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
5192 and similarly for >= into !=. */
5193 if ((code == LT_EXPR || code == GE_EXPR)
5194 && TREE_UNSIGNED (TREE_TYPE (arg0))
5195 && TREE_CODE (arg1) == LSHIFT_EXPR
5196 && integer_onep (TREE_OPERAND (arg1, 0)))
5197 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5198 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5199 TREE_OPERAND (arg1, 1)),
5200 convert (TREE_TYPE (arg0), integer_zero_node));
5202 else if ((code == LT_EXPR || code == GE_EXPR)
5203 && TREE_UNSIGNED (TREE_TYPE (arg0))
5204 && (TREE_CODE (arg1) == NOP_EXPR
5205 || TREE_CODE (arg1) == CONVERT_EXPR)
5206 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
5207 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
5208 return
5209 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5210 convert (TREE_TYPE (arg0),
5211 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5212 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
5213 convert (TREE_TYPE (arg0), integer_zero_node));
5215 /* Simplify comparison of something with itself. (For IEEE
5216 floating-point, we can only do some of these simplifications.) */
5217 if (operand_equal_p (arg0, arg1, 0))
5219 switch (code)
5221 case EQ_EXPR:
5222 case GE_EXPR:
5223 case LE_EXPR:
5224 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5226 if (type == integer_type_node)
5227 return integer_one_node;
5229 t = build_int_2 (1, 0);
5230 TREE_TYPE (t) = type;
5231 return t;
5233 code = EQ_EXPR;
5234 TREE_SET_CODE (t, code);
5235 break;
5237 case NE_EXPR:
5238 /* For NE, we can only do this simplification if integer. */
5239 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5240 break;
5241 /* ... fall through ... */
5242 case GT_EXPR:
5243 case LT_EXPR:
5244 if (type == integer_type_node)
5245 return integer_zero_node;
5247 t = build_int_2 (0, 0);
5248 TREE_TYPE (t) = type;
5249 return t;
5250 default:
5251 abort ();
5255 /* An unsigned comparison against 0 can be simplified. */
5256 if (integer_zerop (arg1)
5257 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5258 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
5259 && TREE_UNSIGNED (TREE_TYPE (arg1)))
5261 switch (TREE_CODE (t))
5263 case GT_EXPR:
5264 code = NE_EXPR;
5265 TREE_SET_CODE (t, NE_EXPR);
5266 break;
5267 case LE_EXPR:
5268 code = EQ_EXPR;
5269 TREE_SET_CODE (t, EQ_EXPR);
5270 break;
5271 case GE_EXPR:
5272 return omit_one_operand (type,
5273 convert (type, integer_one_node),
5274 arg0);
5275 case LT_EXPR:
5276 return omit_one_operand (type,
5277 convert (type, integer_zero_node),
5278 arg0);
5279 default:
5280 break;
5284 /* An unsigned <= 0x7fffffff can be simplified. */
5286 int width = TYPE_PRECISION (TREE_TYPE (arg1));
5287 if (TREE_CODE (arg1) == INTEGER_CST
5288 && ! TREE_CONSTANT_OVERFLOW (arg1)
5289 && width <= HOST_BITS_PER_WIDE_INT
5290 && TREE_INT_CST_LOW (arg1) == ((HOST_WIDE_INT) 1 << (width - 1)) - 1
5291 && TREE_INT_CST_HIGH (arg1) == 0
5292 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5293 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
5294 && TREE_UNSIGNED (TREE_TYPE (arg1)))
5296 switch (TREE_CODE (t))
5298 case LE_EXPR:
5299 return fold (build (GE_EXPR, type,
5300 convert (signed_type (TREE_TYPE (arg0)),
5301 arg0),
5302 convert (signed_type (TREE_TYPE (arg1)),
5303 integer_zero_node)));
5304 case GT_EXPR:
5305 return fold (build (LT_EXPR, type,
5306 convert (signed_type (TREE_TYPE (arg0)),
5307 arg0),
5308 convert (signed_type (TREE_TYPE (arg1)),
5309 integer_zero_node)));
5314 /* If we are comparing an expression that just has comparisons
5315 of two integer values, arithmetic expressions of those comparisons,
5316 and constants, we can simplify it. There are only three cases
5317 to check: the two values can either be equal, the first can be
5318 greater, or the second can be greater. Fold the expression for
5319 those three values. Since each value must be 0 or 1, we have
5320 eight possibilities, each of which corresponds to the constant 0
5321 or 1 or one of the six possible comparisons.
5323 This handles common cases like (a > b) == 0 but also handles
5324 expressions like ((x > y) - (y > x)) > 0, which supposedly
5325 occur in macroized code. */
5327 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
5329 tree cval1 = 0, cval2 = 0;
5330 int save_p = 0;
5332 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
5333 /* Don't handle degenerate cases here; they should already
5334 have been handled anyway. */
5335 && cval1 != 0 && cval2 != 0
5336 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
5337 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
5338 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
5339 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
5340 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
5342 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
5343 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
5345 /* We can't just pass T to eval_subst in case cval1 or cval2
5346 was the same as ARG1. */
5348 tree high_result
5349 = fold (build (code, type,
5350 eval_subst (arg0, cval1, maxval, cval2, minval),
5351 arg1));
5352 tree equal_result
5353 = fold (build (code, type,
5354 eval_subst (arg0, cval1, maxval, cval2, maxval),
5355 arg1));
5356 tree low_result
5357 = fold (build (code, type,
5358 eval_subst (arg0, cval1, minval, cval2, maxval),
5359 arg1));
5361 /* All three of these results should be 0 or 1. Confirm they
5362 are. Then use those values to select the proper code
5363 to use. */
5365 if ((integer_zerop (high_result)
5366 || integer_onep (high_result))
5367 && (integer_zerop (equal_result)
5368 || integer_onep (equal_result))
5369 && (integer_zerop (low_result)
5370 || integer_onep (low_result)))
5372 /* Make a 3-bit mask with the high-order bit being the
5373 value for `>', the next for '=', and the low for '<'. */
5374 switch ((integer_onep (high_result) * 4)
5375 + (integer_onep (equal_result) * 2)
5376 + integer_onep (low_result))
5378 case 0:
5379 /* Always false. */
5380 return omit_one_operand (type, integer_zero_node, arg0);
5381 case 1:
5382 code = LT_EXPR;
5383 break;
5384 case 2:
5385 code = EQ_EXPR;
5386 break;
5387 case 3:
5388 code = LE_EXPR;
5389 break;
5390 case 4:
5391 code = GT_EXPR;
5392 break;
5393 case 5:
5394 code = NE_EXPR;
5395 break;
5396 case 6:
5397 code = GE_EXPR;
5398 break;
5399 case 7:
5400 /* Always true. */
5401 return omit_one_operand (type, integer_one_node, arg0);
5404 t = build (code, type, cval1, cval2);
5405 if (save_p)
5406 return save_expr (t);
5407 else
5408 return fold (t);
5413 /* If this is a comparison of a field, we may be able to simplify it. */
5414 if ((TREE_CODE (arg0) == COMPONENT_REF
5415 || TREE_CODE (arg0) == BIT_FIELD_REF)
5416 && (code == EQ_EXPR || code == NE_EXPR)
5417 /* Handle the constant case even without -O
5418 to make sure the warnings are given. */
5419 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
5421 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
5422 return t1 ? t1 : t;
5425 /* If this is a comparison of complex values and either or both
5426 sizes are a COMPLEX_EXPR, it is best to split up the comparisons
5427 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
5428 may prevent needless evaluations. */
5429 if ((code == EQ_EXPR || code == NE_EXPR)
5430 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
5431 && (TREE_CODE (arg0) == COMPLEX_EXPR
5432 || TREE_CODE (arg1) == COMPLEX_EXPR))
5434 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
5435 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
5436 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
5437 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
5438 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
5440 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
5441 : TRUTH_ORIF_EXPR),
5442 type,
5443 fold (build (code, type, real0, real1)),
5444 fold (build (code, type, imag0, imag1))));
5447 /* From here on, the only cases we handle are when the result is
5448 known to be a constant.
5450 To compute GT, swap the arguments and do LT.
5451 To compute GE, do LT and invert the result.
5452 To compute LE, swap the arguments, do LT and invert the result.
5453 To compute NE, do EQ and invert the result.
5455 Therefore, the code below must handle only EQ and LT. */
5457 if (code == LE_EXPR || code == GT_EXPR)
5459 tem = arg0, arg0 = arg1, arg1 = tem;
5460 code = swap_tree_comparison (code);
5463 /* Note that it is safe to invert for real values here because we
5464 will check below in the one case that it matters. */
5466 invert = 0;
5467 if (code == NE_EXPR || code == GE_EXPR)
5469 invert = 1;
5470 code = invert_tree_comparison (code);
5473 /* Compute a result for LT or EQ if args permit;
5474 otherwise return T. */
5475 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
5477 if (code == EQ_EXPR)
5478 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
5479 == TREE_INT_CST_LOW (arg1))
5480 && (TREE_INT_CST_HIGH (arg0)
5481 == TREE_INT_CST_HIGH (arg1)),
5483 else
5484 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
5485 ? INT_CST_LT_UNSIGNED (arg0, arg1)
5486 : INT_CST_LT (arg0, arg1)),
5490 #if 0 /* This is no longer useful, but breaks some real code. */
5491 /* Assume a nonexplicit constant cannot equal an explicit one,
5492 since such code would be undefined anyway.
5493 Exception: on sysvr4, using #pragma weak,
5494 a label can come out as 0. */
5495 else if (TREE_CODE (arg1) == INTEGER_CST
5496 && !integer_zerop (arg1)
5497 && TREE_CONSTANT (arg0)
5498 && TREE_CODE (arg0) == ADDR_EXPR
5499 && code == EQ_EXPR)
5500 t1 = build_int_2 (0, 0);
5501 #endif
5502 /* Two real constants can be compared explicitly. */
5503 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
5505 /* If either operand is a NaN, the result is false with two
5506 exceptions: First, an NE_EXPR is true on NaNs, but that case
5507 is already handled correctly since we will be inverting the
5508 result for NE_EXPR. Second, if we had inverted a LE_EXPR
5509 or a GE_EXPR into a LT_EXPR, we must return true so that it
5510 will be inverted into false. */
5512 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
5513 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
5514 t1 = build_int_2 (invert && code == LT_EXPR, 0);
5516 else if (code == EQ_EXPR)
5517 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
5518 TREE_REAL_CST (arg1)),
5520 else
5521 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
5522 TREE_REAL_CST (arg1)),
5526 if (t1 == NULL_TREE)
5527 return t;
5529 if (invert)
5530 TREE_INT_CST_LOW (t1) ^= 1;
5532 TREE_TYPE (t1) = type;
5533 return t1;
5535 case COND_EXPR:
5536 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
5537 so all simple results must be passed through pedantic_non_lvalue. */
5538 if (TREE_CODE (arg0) == INTEGER_CST)
5539 return pedantic_non_lvalue
5540 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
5541 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
5542 return pedantic_omit_one_operand (type, arg1, arg0);
5544 /* If the second operand is zero, invert the comparison and swap
5545 the second and third operands. Likewise if the second operand
5546 is constant and the third is not or if the third operand is
5547 equivalent to the first operand of the comparison. */
5549 if (integer_zerop (arg1)
5550 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
5551 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5552 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5553 TREE_OPERAND (t, 2),
5554 TREE_OPERAND (arg0, 1))))
5556 /* See if this can be inverted. If it can't, possibly because
5557 it was a floating-point inequality comparison, don't do
5558 anything. */
5559 tem = invert_truthvalue (arg0);
5561 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5563 t = build (code, type, tem,
5564 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5565 arg0 = tem;
5566 arg1 = TREE_OPERAND (t, 2);
5567 STRIP_NOPS (arg1);
5571 /* If we have A op B ? A : C, we may be able to convert this to a
5572 simpler expression, depending on the operation and the values
5573 of B and C. IEEE floating point prevents this though,
5574 because A or B might be -0.0 or a NaN. */
5576 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5577 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5578 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
5579 || flag_fast_math)
5580 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5581 arg1, TREE_OPERAND (arg0, 1)))
5583 tree arg2 = TREE_OPERAND (t, 2);
5584 enum tree_code comp_code = TREE_CODE (arg0);
5586 STRIP_NOPS (arg2);
5588 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
5589 depending on the comparison operation. */
5590 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
5591 ? real_zerop (TREE_OPERAND (arg0, 1))
5592 : integer_zerop (TREE_OPERAND (arg0, 1)))
5593 && TREE_CODE (arg2) == NEGATE_EXPR
5594 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5595 switch (comp_code)
5597 case EQ_EXPR:
5598 return pedantic_non_lvalue
5599 (fold (build1 (NEGATE_EXPR, type, arg1)));
5600 case NE_EXPR:
5601 return pedantic_non_lvalue (convert (type, arg1));
5602 case GE_EXPR:
5603 case GT_EXPR:
5604 return pedantic_non_lvalue
5605 (convert (type, fold (build1 (ABS_EXPR,
5606 TREE_TYPE (arg1), arg1))));
5607 case LE_EXPR:
5608 case LT_EXPR:
5609 return pedantic_non_lvalue
5610 (fold (build1 (NEGATE_EXPR, type,
5611 convert (type,
5612 fold (build1 (ABS_EXPR,
5613 TREE_TYPE (arg1),
5614 arg1))))));
5615 default:
5616 abort ();
5619 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
5620 always zero. */
5622 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
5624 if (comp_code == NE_EXPR)
5625 return pedantic_non_lvalue (convert (type, arg1));
5626 else if (comp_code == EQ_EXPR)
5627 return pedantic_non_lvalue (convert (type, integer_zero_node));
5630 /* If this is A op B ? A : B, this is either A, B, min (A, B),
5631 or max (A, B), depending on the operation. */
5633 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
5634 arg2, TREE_OPERAND (arg0, 0)))
5636 tree comp_op0 = TREE_OPERAND (arg0, 0);
5637 tree comp_op1 = TREE_OPERAND (arg0, 1);
5638 tree comp_type = TREE_TYPE (comp_op0);
5640 switch (comp_code)
5642 case EQ_EXPR:
5643 return pedantic_non_lvalue (convert (type, arg2));
5644 case NE_EXPR:
5645 return pedantic_non_lvalue (convert (type, arg1));
5646 case LE_EXPR:
5647 case LT_EXPR:
5648 /* In C++ a ?: expression can be an lvalue, so put the
5649 operand which will be used if they are equal first
5650 so that we can convert this back to the
5651 corresponding COND_EXPR. */
5652 return pedantic_non_lvalue
5653 (convert (type, (fold (build (MIN_EXPR, comp_type,
5654 (comp_code == LE_EXPR
5655 ? comp_op0 : comp_op1),
5656 (comp_code == LE_EXPR
5657 ? comp_op1 : comp_op0))))));
5658 break;
5659 case GE_EXPR:
5660 case GT_EXPR:
5661 return pedantic_non_lvalue
5662 (convert (type, fold (build (MAX_EXPR, comp_type,
5663 (comp_code == GE_EXPR
5664 ? comp_op0 : comp_op1),
5665 (comp_code == GE_EXPR
5666 ? comp_op1 : comp_op0)))));
5667 break;
5668 default:
5669 abort ();
5673 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5674 we might still be able to simplify this. For example,
5675 if C1 is one less or one more than C2, this might have started
5676 out as a MIN or MAX and been transformed by this function.
5677 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5679 if (INTEGRAL_TYPE_P (type)
5680 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5681 && TREE_CODE (arg2) == INTEGER_CST)
5682 switch (comp_code)
5684 case EQ_EXPR:
5685 /* We can replace A with C1 in this case. */
5686 arg1 = convert (type, TREE_OPERAND (arg0, 1));
5687 t = build (code, type, TREE_OPERAND (t, 0), arg1,
5688 TREE_OPERAND (t, 2));
5689 break;
5691 case LT_EXPR:
5692 /* If C1 is C2 + 1, this is min(A, C2). */
5693 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5694 && operand_equal_p (TREE_OPERAND (arg0, 1),
5695 const_binop (PLUS_EXPR, arg2,
5696 integer_one_node, 0), 1))
5697 return pedantic_non_lvalue
5698 (fold (build (MIN_EXPR, type, arg1, arg2)));
5699 break;
5701 case LE_EXPR:
5702 /* If C1 is C2 - 1, this is min(A, C2). */
5703 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5704 && operand_equal_p (TREE_OPERAND (arg0, 1),
5705 const_binop (MINUS_EXPR, arg2,
5706 integer_one_node, 0), 1))
5707 return pedantic_non_lvalue
5708 (fold (build (MIN_EXPR, type, arg1, arg2)));
5709 break;
5711 case GT_EXPR:
5712 /* If C1 is C2 - 1, this is max(A, C2). */
5713 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5714 && operand_equal_p (TREE_OPERAND (arg0, 1),
5715 const_binop (MINUS_EXPR, arg2,
5716 integer_one_node, 0), 1))
5717 return pedantic_non_lvalue
5718 (fold (build (MAX_EXPR, type, arg1, arg2)));
5719 break;
5721 case GE_EXPR:
5722 /* If C1 is C2 + 1, this is max(A, C2). */
5723 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5724 && operand_equal_p (TREE_OPERAND (arg0, 1),
5725 const_binop (PLUS_EXPR, arg2,
5726 integer_one_node, 0), 1))
5727 return pedantic_non_lvalue
5728 (fold (build (MAX_EXPR, type, arg1, arg2)));
5729 break;
5730 case NE_EXPR:
5731 break;
5732 default:
5733 abort ();
5737 /* If the second operand is simpler than the third, swap them
5738 since that produces better jump optimization results. */
5739 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
5740 || TREE_CODE (arg1) == SAVE_EXPR)
5741 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
5742 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
5743 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
5745 /* See if this can be inverted. If it can't, possibly because
5746 it was a floating-point inequality comparison, don't do
5747 anything. */
5748 tem = invert_truthvalue (arg0);
5750 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5752 t = build (code, type, tem,
5753 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5754 arg0 = tem;
5755 arg1 = TREE_OPERAND (t, 2);
5756 STRIP_NOPS (arg1);
5760 /* Convert A ? 1 : 0 to simply A. */
5761 if (integer_onep (TREE_OPERAND (t, 1))
5762 && integer_zerop (TREE_OPERAND (t, 2))
5763 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
5764 call to fold will try to move the conversion inside
5765 a COND, which will recurse. In that case, the COND_EXPR
5766 is probably the best choice, so leave it alone. */
5767 && type == TREE_TYPE (arg0))
5768 return pedantic_non_lvalue (arg0);
5770 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5771 operation is simply A & 2. */
5773 if (integer_zerop (TREE_OPERAND (t, 2))
5774 && TREE_CODE (arg0) == NE_EXPR
5775 && integer_zerop (TREE_OPERAND (arg0, 1))
5776 && integer_pow2p (arg1)
5777 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
5778 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
5779 arg1, 1))
5780 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
5782 return t;
5784 case COMPOUND_EXPR:
5785 /* When pedantic, a compound expression can be neither an lvalue
5786 nor an integer constant expression. */
5787 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
5788 return t;
5789 /* Don't let (0, 0) be null pointer constant. */
5790 if (integer_zerop (arg1))
5791 return non_lvalue (arg1);
5792 return arg1;
5794 case COMPLEX_EXPR:
5795 if (wins)
5796 return build_complex (type, arg0, arg1);
5797 return t;
5799 case REALPART_EXPR:
5800 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5801 return t;
5802 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5803 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
5804 TREE_OPERAND (arg0, 1));
5805 else if (TREE_CODE (arg0) == COMPLEX_CST)
5806 return TREE_REALPART (arg0);
5807 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5808 return fold (build (TREE_CODE (arg0), type,
5809 fold (build1 (REALPART_EXPR, type,
5810 TREE_OPERAND (arg0, 0))),
5811 fold (build1 (REALPART_EXPR,
5812 type, TREE_OPERAND (arg0, 1)))));
5813 return t;
5815 case IMAGPART_EXPR:
5816 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5817 return convert (type, integer_zero_node);
5818 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5819 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
5820 TREE_OPERAND (arg0, 0));
5821 else if (TREE_CODE (arg0) == COMPLEX_CST)
5822 return TREE_IMAGPART (arg0);
5823 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5824 return fold (build (TREE_CODE (arg0), type,
5825 fold (build1 (IMAGPART_EXPR, type,
5826 TREE_OPERAND (arg0, 0))),
5827 fold (build1 (IMAGPART_EXPR, type,
5828 TREE_OPERAND (arg0, 1)))));
5829 return t;
5831 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
5832 appropriate. */
5833 case CLEANUP_POINT_EXPR:
5834 if (! TREE_SIDE_EFFECTS (arg0))
5835 return TREE_OPERAND (t, 0);
5838 enum tree_code code0 = TREE_CODE (arg0);
5839 int kind0 = TREE_CODE_CLASS (code0);
5840 tree arg00 = TREE_OPERAND (arg0, 0);
5841 tree arg01;
5843 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
5844 return fold (build1 (code0, type,
5845 fold (build1 (CLEANUP_POINT_EXPR,
5846 TREE_TYPE (arg00), arg00))));
5848 if (kind0 == '<' || kind0 == '2'
5849 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
5850 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
5851 || code0 == TRUTH_XOR_EXPR)
5853 arg01 = TREE_OPERAND (arg0, 1);
5855 if (! TREE_SIDE_EFFECTS (arg00))
5856 return fold (build (code0, type, arg00,
5857 fold (build1 (CLEANUP_POINT_EXPR,
5858 TREE_TYPE (arg01), arg01))));
5860 if (! TREE_SIDE_EFFECTS (arg01))
5861 return fold (build (code0, type,
5862 fold (build1 (CLEANUP_POINT_EXPR,
5863 TREE_TYPE (arg00), arg00)),
5864 arg01));
5867 return t;
5870 default:
5871 return t;
5872 } /* switch (code) */