Initial revision
[official-gcc.git] / gcc / fold-const.c
blobb146e8b1fac6f93d3e14d105301f4b7c289c433b
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92-96, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
25 @@ warn if precision et. al. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
45 #include <stdio.h>
46 #include <setjmp.h>
47 #include "config.h"
48 #include "flags.h"
49 #include "tree.h"
51 /* Handle floating overflow for `const_binop'. */
52 static jmp_buf float_error;
54 static void encode PROTO((HOST_WIDE_INT *,
55 HOST_WIDE_INT, HOST_WIDE_INT));
56 static void decode PROTO((HOST_WIDE_INT *,
57 HOST_WIDE_INT *, HOST_WIDE_INT *));
58 int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
59 HOST_WIDE_INT, HOST_WIDE_INT,
60 HOST_WIDE_INT, HOST_WIDE_INT *,
61 HOST_WIDE_INT *, HOST_WIDE_INT *,
62 HOST_WIDE_INT *));
63 static int split_tree PROTO((tree, enum tree_code, tree *,
64 tree *, int *));
65 static tree const_binop PROTO((enum tree_code, tree, tree, int));
66 static tree fold_convert PROTO((tree, tree));
67 static enum tree_code invert_tree_comparison PROTO((enum tree_code));
68 static enum tree_code swap_tree_comparison PROTO((enum tree_code));
69 static int truth_value_p PROTO((enum tree_code));
70 static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
71 static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
72 static tree eval_subst PROTO((tree, tree, tree, tree, tree));
73 static tree omit_one_operand PROTO((tree, tree, tree));
74 static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
75 static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
76 static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
77 static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
78 tree, tree));
79 static tree decode_field_reference PROTO((tree, int *, int *,
80 enum machine_mode *, int *,
81 int *, tree *, tree *));
82 static int all_ones_mask_p PROTO((tree, int));
83 static int simple_operand_p PROTO((tree));
84 static tree range_binop PROTO((enum tree_code, tree, tree, int,
85 tree, int));
86 static tree make_range PROTO((tree, int *, tree *, tree *));
87 static tree build_range_check PROTO((tree, tree, int, tree, tree));
88 static int merge_ranges PROTO((int *, tree *, tree *, int, tree, tree,
89 int, tree, tree));
90 static tree fold_range_test PROTO((tree));
91 static tree unextend PROTO((tree, int, int, tree));
92 static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
93 static tree strip_compound_expr PROTO((tree, tree));
95 #ifndef BRANCH_COST
96 #define BRANCH_COST 1
97 #endif
99 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
100 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
101 Then this yields nonzero if overflow occurred during the addition.
102 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
103 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
104 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
106 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
107 We do that by representing the two-word integer in 4 words, with only
108 HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number. */
110 #define LOWPART(x) \
111 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
112 #define HIGHPART(x) \
113 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
114 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
116 /* Unpack a two-word integer into 4 words.
117 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
118 WORDS points to the array of HOST_WIDE_INTs. */
/* Unpack the two-word integer LOW/HI into WORDS[0..3].
   Each element of WORDS holds HOST_BITS_PER_WIDE_INT/2 bits, stored as a
   non-negative value (see the LOWPART/HIGHPART/BASE macros above); this
   4-digit radix-BASE form is what mul_double and div_and_round_double
   operate on.
   NOTE(review): brace-only lines were lost when this file was extracted;
   the code text itself is unchanged.  */
120 static void
121 encode (words, low, hi)
122 HOST_WIDE_INT *words;
123 HOST_WIDE_INT low, hi;
125 words[0] = LOWPART (low);
126 words[1] = HIGHPART (low);
127 words[2] = LOWPART (hi);
128 words[3] = HIGHPART (hi);
131 /* Pack an array of 4 words into a two-word integer.
132 WORDS points to the array of words.
133 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
/* Inverse of encode: pack WORDS[0..3] (4 half-width digits, radix BASE)
   back into the two-word integer *LOW / *HI.
   Each words[2k+1] * BASE restores the high half of the corresponding
   HOST_WIDE_INT piece.  */
135 static void
136 decode (words, low, hi)
137 HOST_WIDE_INT *words;
138 HOST_WIDE_INT *low, *hi;
140 *low = words[0] | words[1] * BASE;
141 *hi = words[2] | words[3] * BASE;
144 /* Make the integer constant T valid for its type
145 by setting to 0 or 1 all the bits in the constant
146 that don't belong in the type.
147 Yield 1 if a signed overflow occurs, 0 otherwise.
148 If OVERFLOW is nonzero, a signed overflow has already occurred
149 in calculating T, so propagate it.
151 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
152 if it exists. */
/* Truncate/sign-extend the constant T in place so its value fits
   TREE_TYPE (T), and return nonzero iff a signed overflow occurred
   (including a prior overflow passed in via OVERFLOW).
   For REAL_CST the work is delegated to CHECK_FLOAT_VALUE if defined;
   non-INTEGER_CST, non-REAL_CST nodes are returned unchanged.
   NOTE(review): return type is implicit int (pre-ANSI style kept as-is).  */
155 force_fit_type (t, overflow)
156 tree t;
157 int overflow;
159 HOST_WIDE_INT low, high;
160 register int prec;
162 if (TREE_CODE (t) == REAL_CST)
164 #ifdef CHECK_FLOAT_VALUE
165 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
166 overflow);
167 #endif
168 return overflow;
171 else if (TREE_CODE (t) != INTEGER_CST)
172 return overflow;
/* Remember the original value so overflow can be detected by comparison
   after masking/extension below.  */
174 low = TREE_INT_CST_LOW (t);
175 high = TREE_INT_CST_HIGH (t);
/* Pointers use POINTER_SIZE rather than the type's recorded precision.  */
177 if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
178 prec = POINTER_SIZE;
179 else
180 prec = TYPE_PRECISION (TREE_TYPE (t));
182 /* First clear all bits that are beyond the type's precision. */
184 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
186 else if (prec > HOST_BITS_PER_WIDE_INT)
188 TREE_INT_CST_HIGH (t)
189 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
191 else
193 TREE_INT_CST_HIGH (t) = 0;
194 if (prec < HOST_BITS_PER_WIDE_INT)
195 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
198 /* Unsigned types do not suffer sign extension or overflow. */
199 if (TREE_UNSIGNED (TREE_TYPE (t)))
200 return overflow;
202 /* If the value's sign bit is set, extend the sign. */
/* The sign bit tested is bit (prec-1) of the two-word value, which lives
   in either the high or the low word depending on prec.  */
203 if (prec != 2 * HOST_BITS_PER_WIDE_INT
204 && (prec > HOST_BITS_PER_WIDE_INT
205 ? (TREE_INT_CST_HIGH (t)
206 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
207 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
209 /* Value is negative:
210 set to 1 all the bits that are outside this type's precision. */
211 if (prec > HOST_BITS_PER_WIDE_INT)
213 TREE_INT_CST_HIGH (t)
214 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
216 else
218 TREE_INT_CST_HIGH (t) = -1;
219 if (prec < HOST_BITS_PER_WIDE_INT)
220 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
224 /* Yield nonzero if signed overflow occurred. */
/* Overflow iff any bit changed during masking/extension, or OVERFLOW
   was already set on entry.  */
225 return
226 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
227 != 0);
230 /* Add two doubleword integers with doubleword result.
231 Each argument is given as two `HOST_WIDE_INT' pieces.
232 One argument is L1 and H1; the other, L2 and H2.
233 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* *LV/*HV = (L1,H1) + (L2,H2) as doubleword 2's-complement addition.
   Returns nonzero iff signed overflow occurred (see overflow_sum_sign).
   The unsigned comparison (l < l1) detects carry out of the low word.
   NOTE(review): return type is implicit int (pre-ANSI style kept as-is).  */
236 add_double (l1, h1, l2, h2, lv, hv)
237 HOST_WIDE_INT l1, h1, l2, h2;
238 HOST_WIDE_INT *lv, *hv;
240 HOST_WIDE_INT l, h;
242 l = l1 + l2;
243 h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
245 *lv = l;
246 *hv = h;
247 return overflow_sum_sign (h1, h2, h);
250 /* Negate a doubleword integer with doubleword result.
251 Return nonzero if the operation overflows, assuming it's signed.
252 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
253 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* *LV/*HV = -(L1,H1) in doubleword 2's complement.
   Returns nonzero iff the negation overflows as a signed value, which
   happens only for the most negative number (its negation equals itself,
   so *HV and H1 share a set sign bit when L1 == 0).
   NOTE(review): return type is implicit int (pre-ANSI style kept as-is).  */
256 neg_double (l1, h1, lv, hv)
257 HOST_WIDE_INT l1, h1;
258 HOST_WIDE_INT *lv, *hv;
260 if (l1 == 0)
262 *lv = 0;
263 *hv = - h1;
264 return (*hv & h1) < 0;
266 else
/* General case: -(x) == ~x + 1; the +1 carries into the low word only.  */
268 *lv = - l1;
269 *hv = ~ h1;
270 return 0;
274 /* Multiply two doubleword integers with doubleword result.
275 Return nonzero if the operation overflows, assuming it's signed.
276 Each argument is given as two `HOST_WIDE_INT' pieces.
277 One argument is L1 and H1; the other, L2 and H2.
278 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* *LV/*HV = (L1,H1) * (L2,H2), doubleword result.
   Returns nonzero iff the signed product overflows a doubleword.
   Works by schoolbook multiplication on the 4-half-word radix-BASE form
   produced by encode; the full 8-digit product is kept in PROD so the
   top half can be checked for overflow.
   NOTE(review): return type is implicit int (pre-ANSI style kept as-is).  */
281 mul_double (l1, h1, l2, h2, lv, hv)
282 HOST_WIDE_INT l1, h1, l2, h2;
283 HOST_WIDE_INT *lv, *hv;
285 HOST_WIDE_INT arg1[4];
286 HOST_WIDE_INT arg2[4];
287 HOST_WIDE_INT prod[4 * 2];
288 register unsigned HOST_WIDE_INT carry;
289 register int i, j, k;
290 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
292 encode (arg1, l1, h1);
293 encode (arg2, l2, h2);
295 bzero ((char *) prod, sizeof prod);
297 for (i = 0; i < 4; i++)
299 carry = 0;
300 for (j = 0; j < 4; j++)
302 k = i + j;
303 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
304 carry += arg1[i] * arg2[j];
305 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
306 carry += prod[k];
307 prod[k] = LOWPART (carry);
308 carry = HIGHPART (carry);
310 prod[i + 4] = carry;
313 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
315 /* Check for overflow by calculating the top half of the answer in full;
316 it should agree with the low half's sign bit. */
/* The unsigned top half must be corrected for negative operands:
   signed_top = unsigned_top - (h1<0 ? arg2 : 0) - (h2<0 ? arg1 : 0).  */
317 decode (prod+4, &toplow, &tophigh);
318 if (h1 < 0)
320 neg_double (l2, h2, &neglow, &neghigh);
321 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
323 if (h2 < 0)
325 neg_double (l1, h1, &neglow, &neghigh);
326 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* No overflow iff the top half is all zeros (non-negative result) or all
   ones (negative result), matching the low half's sign.  */
328 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
331 /* Shift the doubleword integer in L1, H1 left by COUNT places
332 keeping only PREC bits of result.
333 Shift right if COUNT is negative.
334 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
335 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Left-shift the doubleword (L1,H1) by COUNT bits into *LV/*HV,
   delegating to rshift_double when COUNT is negative.  ARITH selects
   arithmetic vs. logical shifting for the delegated right shift; PREC is
   used only for SHIFT_COUNT_TRUNCATED hosts.
   NOTE(review): the double-shift idiom `>> HOST_BITS_PER_WIDE_INT - count
   - 1 >> 1' avoids a shift by the full word width when count == 0.  */
337 void
338 lshift_double (l1, h1, count, prec, lv, hv, arith)
339 HOST_WIDE_INT l1, h1, count;
340 int prec;
341 HOST_WIDE_INT *lv, *hv;
342 int arith;
344 if (count < 0)
346 rshift_double (l1, h1, - count, prec, lv, hv, arith);
347 return;
350 #ifdef SHIFT_COUNT_TRUNCATED
351 if (SHIFT_COUNT_TRUNCATED)
352 count %= prec;
353 #endif
355 if (count >= HOST_BITS_PER_WIDE_INT)
/* Whole low word shifts into the high word; low word becomes zero.  */
357 *hv = (unsigned HOST_WIDE_INT) l1 << count - HOST_BITS_PER_WIDE_INT;
358 *lv = 0;
360 else
362 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
363 | ((unsigned HOST_WIDE_INT) l1 >> HOST_BITS_PER_WIDE_INT - count - 1 >> 1));
364 *lv = (unsigned HOST_WIDE_INT) l1 << count;
368 /* Shift the doubleword integer in L1, H1 right by COUNT places
369 keeping only PREC bits of result. COUNT must be positive.
370 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
371 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Right-shift the doubleword (L1,H1) by COUNT (>= 0) bits into *LV/*HV.
   ARITH nonzero gives an arithmetic shift: SIGNMASK is all-ones when the
   sign bit of H1 is set (computed via unsigned negation of the extracted
   sign bit), and is shifted in from the top; otherwise zeros are shifted
   in (logical shift).  PREC matters only on SHIFT_COUNT_TRUNCATED hosts.
   NOTE(review): as in lshift_double, the split `<< ... - 1 << 1' shift
   idiom avoids an undefined full-width shift at the boundary count.  */
373 void
374 rshift_double (l1, h1, count, prec, lv, hv, arith)
375 HOST_WIDE_INT l1, h1, count;
376 int prec;
377 HOST_WIDE_INT *lv, *hv;
378 int arith;
380 unsigned HOST_WIDE_INT signmask;
381 signmask = (arith
382 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
383 : 0);
385 #ifdef SHIFT_COUNT_TRUNCATED
386 if (SHIFT_COUNT_TRUNCATED)
387 count %= prec;
388 #endif
390 if (count >= HOST_BITS_PER_WIDE_INT)
/* High word moves entirely into the low word; high word fills with sign.  */
392 *hv = signmask;
393 *lv = ((signmask << 2 * HOST_BITS_PER_WIDE_INT - count - 1 << 1)
394 | ((unsigned HOST_WIDE_INT) h1 >> count - HOST_BITS_PER_WIDE_INT));
396 else
398 *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
399 | ((unsigned HOST_WIDE_INT) h1 << HOST_BITS_PER_WIDE_INT - count - 1 << 1));
400 *hv = ((signmask << HOST_BITS_PER_WIDE_INT - count)
401 | ((unsigned HOST_WIDE_INT) h1 >> count));
405 /* Rotate the doubleword integer in L1, H1 left by COUNT places
406 keeping only PREC bits of result.
407 Rotate right if COUNT is negative.
408 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Left-rotate the doubleword (L1,H1) by COUNT within a PREC-bit field,
   storing the result in *LV/*HV.  A negative COUNT rotates right.
   Implemented as (x << count) | (x >> (prec - count)) using the logical
   shift helpers above.
   NOTE(review): when COUNT %= PREC leaves 0, the complementary shift is
   by PREC bits — confirm the shift helpers handle that count on all
   hosts (SHIFT_COUNT_TRUNCATED reduces it to 0 where defined).  */
410 void
411 lrotate_double (l1, h1, count, prec, lv, hv)
412 HOST_WIDE_INT l1, h1, count;
413 int prec;
414 HOST_WIDE_INT *lv, *hv;
416 HOST_WIDE_INT s1l, s1h, s2l, s2h;
418 count %= prec;
419 if (count < 0)
420 count += prec;
422 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
423 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
424 *lv = s1l | s2l;
425 *hv = s1h | s2h;
428 /* Rotate the doubleword integer in L1, H1 right by COUNT places
429 keeping only PREC bits of result. COUNT must be positive.
430 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* (Fixed stale comment: this is the RIGHT-rotate counterpart of
   lrotate_double — note the swapped shift directions below.  The same
   count == 0 boundary caveat as lrotate_double applies.)  */
432 void
433 rrotate_double (l1, h1, count, prec, lv, hv)
434 HOST_WIDE_INT l1, h1, count;
435 int prec;
436 HOST_WIDE_INT *lv, *hv;
438 HOST_WIDE_INT s1l, s1h, s2l, s2h;
440 count %= prec;
441 if (count < 0)
442 count += prec;
444 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
445 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
446 *lv = s1l | s2l;
447 *hv = s1h | s2h;
450 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
451 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
452 CODE is a tree code for a kind of division, one of
453 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
454 or EXACT_DIV_EXPR
455 It controls how the quotient is rounded to a integer.
456 Return nonzero if the operation overflows.
457 UNS nonzero says do unsigned division. */
/* Doubleword division with rounding.
   Divides (LNUM_ORIG,HNUM_ORIG) by (LDEN_ORIG,HDEN_ORIG), storing the
   quotient in *LQUO/*HQUO and the remainder in *LREM/*HREM, rounding the
   quotient per CODE (TRUNC/FLOOR/CEIL/ROUND variants).  UNS nonzero
   means unsigned division.  Returns nonzero on signed overflow (only
   possible for most-negative / -1).  Aborts on a zero divisor.
   The general path is Knuth's Algorithm D on the 4-digit radix-BASE
   representation; fast paths handle single-precision operands and
   small divisors.
   NOTE(review): return type is implicit int (pre-ANSI style kept as-is).  */
460 div_and_round_double (code, uns,
461 lnum_orig, hnum_orig, lden_orig, hden_orig,
462 lquo, hquo, lrem, hrem)
463 enum tree_code code;
464 int uns;
465 HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
466 HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
467 HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
469 int quo_neg = 0;
470 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
471 HOST_WIDE_INT den[4], quo[4];
472 register int i, j;
473 unsigned HOST_WIDE_INT work;
474 register unsigned HOST_WIDE_INT carry = 0;
475 HOST_WIDE_INT lnum = lnum_orig;
476 HOST_WIDE_INT hnum = hnum_orig;
477 HOST_WIDE_INT lden = lden_orig;
478 HOST_WIDE_INT hden = hden_orig;
479 int overflow = 0;
481 if ((hden == 0) && (lden == 0))
482 abort ();
484 /* calculate quotient sign and convert operands to unsigned. */
485 if (!uns)
487 if (hnum < 0)
489 quo_neg = ~ quo_neg;
490 /* (minimum integer) / (-1) is the only overflow case. */
/* (lden & hden) == -1 tests both divisor words all-ones, i.e. -1.  */
491 if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
492 overflow = 1;
494 if (hden < 0)
496 quo_neg = ~ quo_neg;
497 neg_double (lden, hden, &lden, &hden);
501 if (hnum == 0 && hden == 0)
502 { /* single precision */
503 *hquo = *hrem = 0;
504 /* This unsigned division rounds toward zero. */
505 *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
506 goto finish_up;
509 if (hnum == 0)
510 { /* trivial case: dividend < divisor */
511 /* hden != 0 already checked. */
512 *hquo = *lquo = 0;
513 *hrem = hnum;
514 *lrem = lnum;
515 goto finish_up;
518 bzero ((char *) quo, sizeof quo);
520 bzero ((char *) num, sizeof num); /* to zero 9th element */
521 bzero ((char *) den, sizeof den);
523 encode (num, lnum, hnum);
524 encode (den, lden, hden);
526 /* Special code for when the divisor < BASE. */
527 if (hden == 0 && lden < BASE)
529 /* hnum != 0 already checked. */
/* Short division: one radix-BASE digit of divisor, carry is remainder.  */
530 for (i = 4 - 1; i >= 0; i--)
532 work = num[i] + carry * BASE;
533 quo[i] = work / (unsigned HOST_WIDE_INT) lden;
534 carry = work % (unsigned HOST_WIDE_INT) lden;
537 else
539 /* Full double precision division,
540 with thanks to Don Knuth's "Seminumerical Algorithms". */
541 int num_hi_sig, den_hi_sig;
542 unsigned HOST_WIDE_INT quo_est, scale;
544 /* Find the highest non-zero divisor digit. */
545 for (i = 4 - 1; ; i--)
546 if (den[i] != 0) {
547 den_hi_sig = i;
548 break;
551 /* Insure that the first digit of the divisor is at least BASE/2.
552 This is required by the quotient digit estimation algorithm. */
554 scale = BASE / (den[den_hi_sig] + 1);
555 if (scale > 1) { /* scale divisor and dividend */
556 carry = 0;
557 for (i = 0; i <= 4 - 1; i++) {
558 work = (num[i] * scale) + carry;
559 num[i] = LOWPART (work);
560 carry = HIGHPART (work);
561 } num[4] = carry;
562 carry = 0;
563 for (i = 0; i <= 4 - 1; i++) {
564 work = (den[i] * scale) + carry;
565 den[i] = LOWPART (work);
566 carry = HIGHPART (work);
567 if (den[i] != 0) den_hi_sig = i;
571 num_hi_sig = 4;
573 /* Main loop */
574 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
575 /* guess the next quotient digit, quo_est, by dividing the first
576 two remaining dividend digits by the high order quotient digit.
577 quo_est is never low and is at most 2 high. */
578 unsigned HOST_WIDE_INT tmp;
580 num_hi_sig = i + den_hi_sig + 1;
581 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
582 if (num[num_hi_sig] != den[den_hi_sig])
583 quo_est = work / den[den_hi_sig];
584 else
585 quo_est = BASE - 1;
587 /* refine quo_est so it's usually correct, and at most one high. */
588 tmp = work - quo_est * den[den_hi_sig];
589 if (tmp < BASE
590 && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
591 quo_est--;
593 /* Try QUO_EST as the quotient digit, by multiplying the
594 divisor by QUO_EST and subtracting from the remaining dividend.
595 Keep in mind that QUO_EST is the I - 1st digit. */
597 carry = 0;
598 for (j = 0; j <= den_hi_sig; j++)
600 work = quo_est * den[j] + carry;
601 carry = HIGHPART (work);
602 work = num[i + j] - LOWPART (work);
603 num[i + j] = LOWPART (work);
604 carry += HIGHPART (work) != 0;
607 /* if quo_est was high by one, then num[i] went negative and
608 we need to correct things. */
610 if (num[num_hi_sig] < carry)
612 quo_est--;
613 carry = 0; /* add divisor back in */
614 for (j = 0; j <= den_hi_sig; j++)
616 work = num[i + j] + den[j] + carry;
617 carry = HIGHPART (work);
618 num[i + j] = LOWPART (work);
620 num [num_hi_sig] += carry;
623 /* store the quotient digit. */
624 quo[i] = quo_est;
628 decode (quo, lquo, hquo);
630 finish_up:
631 /* if result is negative, make it so. */
632 if (quo_neg)
633 neg_double (*lquo, *hquo, lquo, hquo);
635 /* compute trial remainder: rem = num - (quo * den) */
636 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
637 neg_double (*lrem, *hrem, lrem, hrem);
638 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Adjust the truncated quotient according to the rounding mode; each
   adjusting branch falls out to recompute the true remainder below,
   while the no-adjustment branches return early with the trial
   remainder already correct.  */
640 switch (code)
642 case TRUNC_DIV_EXPR:
643 case TRUNC_MOD_EXPR: /* round toward zero */
644 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
645 return overflow;
647 case FLOOR_DIV_EXPR:
648 case FLOOR_MOD_EXPR: /* round toward negative infinity */
649 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
651 /* quo = quo - 1; */
652 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
653 lquo, hquo);
655 else return overflow;
656 break;
658 case CEIL_DIV_EXPR:
659 case CEIL_MOD_EXPR: /* round toward positive infinity */
660 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
662 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
663 lquo, hquo);
665 else return overflow;
666 break;
668 case ROUND_DIV_EXPR:
669 case ROUND_MOD_EXPR: /* round to closest integer */
671 HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
672 HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
674 /* get absolute values */
675 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
676 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
678 /* if (2 * abs (lrem) >= abs (lden)) */
679 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
680 labs_rem, habs_rem, &ltwice, &htwice);
681 if (((unsigned HOST_WIDE_INT) habs_den
682 < (unsigned HOST_WIDE_INT) htwice)
683 || (((unsigned HOST_WIDE_INT) habs_den
684 == (unsigned HOST_WIDE_INT) htwice)
/* NOTE(review): `(HOST_WIDE_INT unsigned)' has the qualifier order
   reversed relative to every other cast here; it expands to a valid
   type (e.g. `long unsigned') but should be normalized to
   `(unsigned HOST_WIDE_INT)' for consistency.  */
685 && ((HOST_WIDE_INT unsigned) labs_den
686 < (unsigned HOST_WIDE_INT) ltwice)))
688 if (*hquo < 0)
689 /* quo = quo - 1; */
690 add_double (*lquo, *hquo,
691 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
692 else
693 /* quo = quo + 1; */
694 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
695 lquo, hquo);
697 else return overflow;
699 break;
701 default:
702 abort ();
705 /* compute true remainder: rem = num - (quo * den) */
706 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
707 neg_double (*lrem, *hrem, lrem, hrem);
708 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
709 return overflow;
712 #ifndef REAL_ARITHMETIC
713 /* Effectively truncate a real value to represent the nearest possible value
714 in a narrower mode. The result is actually represented in the same data
715 type as the argument, but its value is usually different.
717 A trap may occur during the FP operations and it is the responsibility
718 of the calling function to have a handler established. */
/* Round ARG to the nearest value representable in MODE, returning the
   result still stored in REAL_VALUE_TYPE.  Thin wrapper around the
   target macro REAL_VALUE_TRUNCATE; callers must have a float-trap
   handler installed (see the comment above).  */
720 REAL_VALUE_TYPE
721 real_value_truncate (mode, arg)
722 enum machine_mode mode;
723 REAL_VALUE_TYPE arg;
725 return REAL_VALUE_TRUNCATE (mode, arg);
728 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
730 /* Check for infinity in an IEEE double precision number. */
/* Return nonzero iff X is an IEEE double-precision infinity
   (exponent all ones == 2047, mantissa all zeros).
   Host endianness is detected at run time: dconstm1 (-1.0) has its sign
   bit set, so whichever union view reports sign == 1 matches the host
   layout.  NOTE(review): return type is implicit int (pre-ANSI style).  */
733 target_isinf (x)
734 REAL_VALUE_TYPE x;
736 /* The IEEE 64-bit double format. */
737 union {
738 REAL_VALUE_TYPE d;
739 struct {
740 unsigned sign : 1;
741 unsigned exponent : 11;
742 unsigned mantissa1 : 20;
743 unsigned mantissa2;
744 } little_endian;
745 struct {
746 unsigned mantissa2;
747 unsigned mantissa1 : 20;
748 unsigned exponent : 11;
749 unsigned sign : 1;
750 } big_endian;
751 } u;
753 u.d = dconstm1;
754 if (u.big_endian.sign == 1)
756 u.d = x;
757 return (u.big_endian.exponent == 2047
758 && u.big_endian.mantissa1 == 0
759 && u.big_endian.mantissa2 == 0);
761 else
763 u.d = x;
764 return (u.little_endian.exponent == 2047
765 && u.little_endian.mantissa1 == 0
766 && u.little_endian.mantissa2 == 0);
770 /* Check whether an IEEE double precision number is a NaN. */
/* Return nonzero iff X is an IEEE double-precision NaN
   (exponent all ones == 2047, mantissa nonzero).
   Endianness detection is the same dconstm1 sign-bit probe used in
   target_isinf above.
   NOTE(review): return type is implicit int (pre-ANSI style).  */
773 target_isnan (x)
774 REAL_VALUE_TYPE x;
776 /* The IEEE 64-bit double format. */
777 union {
778 REAL_VALUE_TYPE d;
779 struct {
780 unsigned sign : 1;
781 unsigned exponent : 11;
782 unsigned mantissa1 : 20;
783 unsigned mantissa2;
784 } little_endian;
785 struct {
786 unsigned mantissa2;
787 unsigned mantissa1 : 20;
788 unsigned exponent : 11;
789 unsigned sign : 1;
790 } big_endian;
791 } u;
793 u.d = dconstm1;
794 if (u.big_endian.sign == 1)
796 u.d = x;
797 return (u.big_endian.exponent == 2047
798 && (u.big_endian.mantissa1 != 0
799 || u.big_endian.mantissa2 != 0));
801 else
803 u.d = x;
804 return (u.little_endian.exponent == 2047
805 && (u.little_endian.mantissa1 != 0
806 || u.little_endian.mantissa2 != 0));
810 /* Check for a negative IEEE double precision number. */
/* Return the sign bit of the IEEE double X (nonzero for negative,
   including -0.0).  Uses the same dconstm1 endianness probe as
   target_isinf/target_isnan above.
   NOTE(review): return type is implicit int (pre-ANSI style).  */
813 target_negative (x)
814 REAL_VALUE_TYPE x;
816 /* The IEEE 64-bit double format. */
817 union {
818 REAL_VALUE_TYPE d;
819 struct {
820 unsigned sign : 1;
821 unsigned exponent : 11;
822 unsigned mantissa1 : 20;
823 unsigned mantissa2;
824 } little_endian;
825 struct {
826 unsigned mantissa2;
827 unsigned mantissa1 : 20;
828 unsigned exponent : 11;
829 unsigned sign : 1;
830 } big_endian;
831 } u;
833 u.d = dconstm1;
834 if (u.big_endian.sign == 1)
836 u.d = x;
837 return u.big_endian.sign;
839 else
841 u.d = x;
842 return u.little_endian.sign;
845 #else /* Target not IEEE */
847 /* Let's assume other float formats don't have infinity.
848 (This can be overridden by redefining REAL_VALUE_ISINF.) */
/* Non-IEEE fallback: report "not infinity" unconditionally
   (per the comment above, overridable via REAL_VALUE_ISINF).
   NOTE(review): return type is implicit int (pre-ANSI style).  */
850 target_isinf (x)
851 REAL_VALUE_TYPE x;
853 return 0;
856 /* Let's assume other float formats don't have NaNs.
857 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
/* Non-IEEE fallback: report "not NaN" unconditionally
   (overridable via REAL_VALUE_ISNAN).
   NOTE(review): return type is implicit int (pre-ANSI style).  */
859 target_isnan (x)
860 REAL_VALUE_TYPE x;
862 return 0;
865 /* Let's assume other float formats don't have minus zero.
866 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
/* Non-IEEE fallback: no minus zero assumed, so an ordinary comparison
   suffices (overridable via REAL_VALUE_NEGATIVE).
   NOTE(review): return type is implicit int (pre-ANSI style).  */
868 target_negative (x)
869 REAL_VALUE_TYPE x;
871 return x < 0;
873 #endif /* Target not IEEE */
875 /* Try to change R into its exact multiplicative inverse in machine mode
876 MODE. Return nonzero function value if successful. */
/* If 1/(*R) is exactly representable in MODE, store it back into *R and
   return 1; otherwise leave *R alone and return 0.
   Strategy: 1/X is exact only when X is a power of two; instead of
   inspecting the exponent directly, compute 1/X on the host and check
   that the low-order significand halves of both X and 1/X are zero,
   then truncate to MODE and verify nothing changed.  Host FP traps are
   caught via the file-scope float_error setjmp handler.
   NOTE(review): return type is implicit int (pre-ANSI style); the K
   macro selects the host-double low-order significand words.  */
879 exact_real_inverse (mode, r)
880 enum machine_mode mode;
881 REAL_VALUE_TYPE *r;
883 union
885 double d;
886 unsigned short i[4];
887 }x, t, y;
888 int i;
890 /* Usually disable if bounds checks are not reliable. */
891 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
892 return 0;
894 /* Set array index to the less significant bits in the unions, depending
895 on the endian-ness of the host doubles.
896 Disable if insufficient information on the data structure. */
897 #if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
898 return 0;
899 #else
900 #if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
901 #define K 2
902 #else
903 #if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
904 #define K 2
905 #else
906 #define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
907 #endif
908 #endif
909 #endif
911 if (setjmp (float_error))
913 /* Don't do the optimization if there was an arithmetic error. */
914 fail:
915 set_float_handler (NULL_PTR);
916 return 0;
918 set_float_handler (float_error);
920 /* Domain check the argument. */
921 x.d = *r;
922 if (x.d == 0.0)
923 goto fail;
925 #ifdef REAL_INFINITY
926 if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
927 goto fail;
928 #endif
930 /* Compute the reciprocal and check for numerical exactness.
931 It is unnecessary to check all the significand bits to determine
932 whether X is a power of 2. If X is not, then it is impossible for
933 the bottom half significand of both X and 1/X to be all zero bits.
934 Hence we ignore the data structure of the top half and examine only
935 the low order bits of the two significands. */
936 t.d = 1.0 / x.d;
937 if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
938 goto fail;
940 /* Truncate to the required mode and range-check the result. */
941 y.d = REAL_VALUE_TRUNCATE (mode, t.d);
942 #ifdef CHECK_FLOAT_VALUE
943 i = 0;
944 if (CHECK_FLOAT_VALUE (mode, y.d, i))
945 goto fail;
946 #endif
948 /* Fail if truncation changed the value. */
949 if (y.d != t.d || y.d == 0.0)
950 goto fail;
952 #ifdef REAL_INFINITY
953 if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
954 goto fail;
955 #endif
957 /* Output the reciprocal and return success flag. */
958 set_float_handler (NULL_PTR);
959 *r = y.d;
960 return 1;
962 #endif /* no REAL_ARITHMETIC */
964 /* Split a tree IN into a constant and a variable part
965 that could be combined with CODE to make IN.
966 CODE must be a commutative arithmetic operation.
967 Store the constant part into *CONP and the variable in &VARP.
968 Return 1 if this was done; zero means the tree IN did not decompose
969 this way.
971 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
972 Therefore, we must tell the caller whether the variable part
973 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
974 The value stored is the coefficient for the variable term.
975 The constant term we return should always be added;
976 we negate it if necessary. */
/* Decompose IN as (variable CODE constant), storing the variable part in
   *VARP, the constant part in *CONP, and the variable's sign (+1/-1, the
   coefficient) in *VARSIGNP.  Returns 1 on success, 0 if IN does not
   decompose this way.  The stored constant is always to be ADDED; it is
   negated here when necessary (see header comment above for the
   PLUS/MINUS association rule).  */
978 static int
979 split_tree (in, code, varp, conp, varsignp)
980 tree in;
981 enum tree_code code;
982 tree *varp, *conp;
983 int *varsignp;
985 register tree outtype = TREE_TYPE (in);
986 *varp = 0;
987 *conp = 0;
989 /* Strip any conversions that don't change the machine mode. */
990 while ((TREE_CODE (in) == NOP_EXPR
991 || TREE_CODE (in) == CONVERT_EXPR)
992 && (TYPE_MODE (TREE_TYPE (in))
993 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
994 in = TREE_OPERAND (in, 0);
996 if (TREE_CODE (in) == code
997 || (! FLOAT_TYPE_P (TREE_TYPE (in))
998 /* We can associate addition and subtraction together
999 (even though the C standard doesn't say so)
1000 for integers because the value is not affected.
1001 For reals, the value might be affected, so we can't. */
1002 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1003 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
/* NOTE(review): the inner `code' below shadows the parameter `code';
   from here on it is the tree code of operand 0, not the caller's
   operation.  */
1005 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
/* Case 1: constant is the first operand; the variable's sign follows
   from whether IN is a MINUS_EXPR.  */
1006 if (code == INTEGER_CST)
1008 *conp = TREE_OPERAND (in, 0);
1009 *varp = TREE_OPERAND (in, 1);
1010 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1011 && TREE_TYPE (*varp) != outtype)
1012 *varp = convert (outtype, *varp);
1013 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1014 return 1;
/* Case 2: constant is the second operand; for MINUS it must be negated
   so it can be added, which only works for a manifest INTEGER_CST.  */
1016 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
1018 *conp = TREE_OPERAND (in, 1);
1019 *varp = TREE_OPERAND (in, 0);
1020 *varsignp = 1;
1021 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1022 && TREE_TYPE (*varp) != outtype)
1023 *varp = convert (outtype, *varp);
1024 if (TREE_CODE (in) == MINUS_EXPR)
1026 /* If operation is subtraction and constant is second,
1027 must negate it to get an additive constant.
1028 And this cannot be done unless it is a manifest constant.
1029 It could also be the address of a static variable.
1030 We cannot negate that, so give up. */
1031 if (TREE_CODE (*conp) == INTEGER_CST)
1032 /* Subtracting from integer_zero_node loses for long long. */
1033 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
1034 else
1035 return 0;
1037 return 1;
/* Case 3: first operand is a non-INTEGER_CST constant (e.g. an address).  */
1039 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
1041 *conp = TREE_OPERAND (in, 0);
1042 *varp = TREE_OPERAND (in, 1);
1043 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1044 && TREE_TYPE (*varp) != outtype)
1045 *varp = convert (outtype, *varp);
1046 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1047 return 1;
1050 return 0;
1053 /* Combine two constants ARG1 and ARG2 under operation CODE
1054 to produce a new constant.
1055 We assume ARG1 and ARG2 have the same data type,
1056 or at least are the same kind of constant and the same machine mode.
1058 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1060 static tree
1061 const_binop (code, arg1, arg2, notrunc)
1062 enum tree_code code;
1063 register tree arg1, arg2;
1064 int notrunc;
1066 STRIP_NOPS (arg1); STRIP_NOPS (arg2);
/* Case 1: integer constants.  The arithmetic is done by hand on the
   (high, low) word pair so it works regardless of host word size.  */
1068 if (TREE_CODE (arg1) == INTEGER_CST)
1070 register HOST_WIDE_INT int1l = TREE_INT_CST_LOW (arg1);
1071 register HOST_WIDE_INT int1h = TREE_INT_CST_HIGH (arg1);
1072 HOST_WIDE_INT int2l = TREE_INT_CST_LOW (arg2);
1073 HOST_WIDE_INT int2h = TREE_INT_CST_HIGH (arg2);
1074 HOST_WIDE_INT low, hi;
/* Receives the unwanted half of a div/mod result (quotient or remainder).  */
1075 HOST_WIDE_INT garbagel, garbageh;
1076 register tree t;
1077 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
1078 int overflow = 0;
1079 int no_overflow = 0;
1081 switch (code)
1083 case BIT_IOR_EXPR:
1084 low = int1l | int2l, hi = int1h | int2h;
1085 break;
1087 case BIT_XOR_EXPR:
1088 low = int1l ^ int2l, hi = int1h ^ int2h;
1089 break;
1091 case BIT_AND_EXPR:
1092 low = int1l & int2l, hi = int1h & int2h;
1093 break;
1095 case BIT_ANDTC_EXPR:
1096 low = int1l & ~int2l, hi = int1h & ~int2h;
1097 break;
1099 case RSHIFT_EXPR:
1100 int2l = - int2l;
/* ... fall through: a right shift is handled as a left shift by the
   negated count.  */
1101 case LSHIFT_EXPR:
1102 /* It's unclear from the C standard whether shifts can overflow.
1103 The following code ignores overflow; perhaps a C standard
1104 interpretation ruling is needed. */
1105 lshift_double (int1l, int1h, int2l,
1106 TYPE_PRECISION (TREE_TYPE (arg1)),
1107 &low, &hi,
1108 !uns);
1109 no_overflow = 1;
1110 break;
1112 case RROTATE_EXPR:
1113 int2l = - int2l;
/* ... fall through: a right rotate is a left rotate by the negated count.  */
1114 case LROTATE_EXPR:
1115 lrotate_double (int1l, int1h, int2l,
1116 TYPE_PRECISION (TREE_TYPE (arg1)),
1117 &low, &hi);
1118 break;
1120 case PLUS_EXPR:
1121 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1122 break;
1124 case MINUS_EXPR:
/* Subtraction is negate-then-add; the overflow test looks at the
   sign of the negated second operand vs. the result sign.  */
1125 neg_double (int2l, int2h, &low, &hi);
1126 add_double (int1l, int1h, low, hi, &low, &hi);
1127 overflow = overflow_sum_sign (hi, int2h, int1h);
1128 break;
1130 case MULT_EXPR:
1131 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1132 break;
1134 case TRUNC_DIV_EXPR:
1135 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1136 case EXACT_DIV_EXPR:
1137 /* This is a shortcut for a common special case. */
1138 if (int2h == 0 && int2l > 0
1139 && ! TREE_CONSTANT_OVERFLOW (arg1)
1140 && ! TREE_CONSTANT_OVERFLOW (arg2)
1141 && int1h == 0 && int1l >= 0)
1143 if (code == CEIL_DIV_EXPR)
1144 int1l += int2l - 1;
1145 low = int1l / int2l, hi = 0;
1146 break;
1149 /* ... fall through ... */
1151 case ROUND_DIV_EXPR:
/* Dividing by one or by the value itself needs no real division.  */
1152 if (int2h == 0 && int2l == 1)
1154 low = int1l, hi = int1h;
1155 break;
1157 if (int1l == int2l && int1h == int2h
1158 && ! (int1l == 0 && int1h == 0))
1160 low = 1, hi = 0;
1161 break;
1163 overflow = div_and_round_double (code, uns,
1164 int1l, int1h, int2l, int2h,
1165 &low, &hi, &garbagel, &garbageh);
1166 break;
1168 case TRUNC_MOD_EXPR:
1169 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1170 /* This is a shortcut for a common special case. */
1171 if (int2h == 0 && int2l > 0
1172 && ! TREE_CONSTANT_OVERFLOW (arg1)
1173 && ! TREE_CONSTANT_OVERFLOW (arg2)
1174 && int1h == 0 && int1l >= 0)
1176 if (code == CEIL_MOD_EXPR)
1177 int1l += int2l - 1;
1178 low = int1l % int2l, hi = 0;
1179 break;
1182 /* ... fall through ... */
1184 case ROUND_MOD_EXPR:
/* Same helper as division, but keep the remainder and discard the
   quotient.  */
1185 overflow = div_and_round_double (code, uns,
1186 int1l, int1h, int2l, int2h,
1187 &garbagel, &garbageh, &low, &hi);
1188 break;
1190 case MIN_EXPR:
1191 case MAX_EXPR:
/* LOW is first used as a flag: nonzero iff arg1 < arg2 (in the
   appropriate signedness), then overwritten with the chosen value.  */
1192 if (uns)
1194 low = (((unsigned HOST_WIDE_INT) int1h
1195 < (unsigned HOST_WIDE_INT) int2h)
1196 || (((unsigned HOST_WIDE_INT) int1h
1197 == (unsigned HOST_WIDE_INT) int2h)
1198 && ((unsigned HOST_WIDE_INT) int1l
1199 < (unsigned HOST_WIDE_INT) int2l)));
1201 else
1203 low = ((int1h < int2h)
1204 || ((int1h == int2h)
1205 && ((unsigned HOST_WIDE_INT) int1l
1206 < (unsigned HOST_WIDE_INT) int2l)));
1208 if (low == (code == MIN_EXPR))
1209 low = int1l, hi = int1h;
1210 else
1211 low = int2l, hi = int2h;
1212 break;
1214 default:
1215 abort ();
/* NOTE(review): no goto targeting this label is visible in this
   function; it looks vestigial -- confirm before removing.  */
1217 got_it:
/* Prefer a shared, permanent size_int node when the result is a
   small in-range size value with no overflow anywhere.  */
1218 if (TREE_TYPE (arg1) == sizetype && hi == 0
1219 && low >= 0 && low <= TREE_INT_CST_LOW (TYPE_MAX_VALUE (sizetype))
1220 && ! overflow
1221 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1222 t = size_int (low);
1223 else
1225 t = build_int_2 (low, hi);
1226 TREE_TYPE (t) = TREE_TYPE (arg1);
/* Overflow is sticky: propagate it from either operand, and add any
   overflow detected here (only for signed types; see !uns tests).  */
1229 TREE_OVERFLOW (t)
1230 = ((notrunc ? !uns && overflow
1231 : force_fit_type (t, overflow && !uns) && ! no_overflow)
1232 | TREE_OVERFLOW (arg1)
1233 | TREE_OVERFLOW (arg2));
1234 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1235 | TREE_CONSTANT_OVERFLOW (arg1)
1236 | TREE_CONSTANT_OVERFLOW (arg2));
1237 return t;
/* Case 2: floating constants.  Arithmetic is done in the host's
   REAL_VALUE_TYPE; floating-point traps are caught via the
   float_error setjmp buffer and turned into an overflow result.  */
1239 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1240 if (TREE_CODE (arg1) == REAL_CST)
1242 REAL_VALUE_TYPE d1;
1243 REAL_VALUE_TYPE d2;
1244 int overflow = 0;
1245 REAL_VALUE_TYPE value;
1246 tree t;
1248 d1 = TREE_REAL_CST (arg1);
1249 d2 = TREE_REAL_CST (arg2);
1251 /* If either operand is a NaN, just return it. Otherwise, set up
1252 for floating-point trap; we return an overflow. */
1253 if (REAL_VALUE_ISNAN (d1))
1254 return arg1;
1255 else if (REAL_VALUE_ISNAN (d2))
1256 return arg2;
1257 else if (setjmp (float_error))
1259 t = copy_node (arg1);
1260 overflow = 1;
1261 goto got_float;
1264 set_float_handler (float_error);
1266 #ifdef REAL_ARITHMETIC
1267 REAL_ARITHMETIC (value, code, d1, d2);
1268 #else
1269 switch (code)
1271 case PLUS_EXPR:
1272 value = d1 + d2;
1273 break;
1275 case MINUS_EXPR:
1276 value = d1 - d2;
1277 break;
1279 case MULT_EXPR:
1280 value = d1 * d2;
1281 break;
1283 case RDIV_EXPR:
1284 #ifndef REAL_INFINITY
1285 if (d2 == 0)
1286 abort ();
1287 #endif
1289 value = d1 / d2;
1290 break;
1292 case MIN_EXPR:
1293 value = MIN (d1, d2);
1294 break;
1296 case MAX_EXPR:
1297 value = MAX (d1, d2);
1298 break;
1300 default:
1301 abort ();
1303 #endif /* no REAL_ARITHMETIC */
1304 t = build_real (TREE_TYPE (arg1),
1305 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
/* Reached both normally and from the setjmp trap branch above.  */
1306 got_float:
1307 set_float_handler (NULL_PTR);
1309 TREE_OVERFLOW (t)
1310 = (force_fit_type (t, overflow)
1311 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1312 TREE_CONSTANT_OVERFLOW (t)
1313 = TREE_OVERFLOW (t)
1314 | TREE_CONSTANT_OVERFLOW (arg1)
1315 | TREE_CONSTANT_OVERFLOW (arg2);
1316 return t;
1318 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
/* Case 3: complex constants.  Fold component-wise via recursive
   calls; division uses the standard conjugate/|z|^2 formulation.  */
1319 if (TREE_CODE (arg1) == COMPLEX_CST)
1321 register tree type = TREE_TYPE (arg1);
1322 register tree r1 = TREE_REALPART (arg1);
1323 register tree i1 = TREE_IMAGPART (arg1);
1324 register tree r2 = TREE_REALPART (arg2);
1325 register tree i2 = TREE_IMAGPART (arg2);
1326 register tree t;
1328 switch (code)
1330 case PLUS_EXPR:
1331 t = build_complex (type,
1332 const_binop (PLUS_EXPR, r1, r2, notrunc),
1333 const_binop (PLUS_EXPR, i1, i2, notrunc));
1334 break;
1336 case MINUS_EXPR:
1337 t = build_complex (type,
1338 const_binop (MINUS_EXPR, r1, r2, notrunc),
1339 const_binop (MINUS_EXPR, i1, i2, notrunc));
1340 break;
1342 case MULT_EXPR:
1343 t = build_complex (type,
1344 const_binop (MINUS_EXPR,
1345 const_binop (MULT_EXPR,
1346 r1, r2, notrunc),
1347 const_binop (MULT_EXPR,
1348 i1, i2, notrunc),
1349 notrunc),
1350 const_binop (PLUS_EXPR,
1351 const_binop (MULT_EXPR,
1352 r1, i2, notrunc),
1353 const_binop (MULT_EXPR,
1354 i1, r2, notrunc),
1355 notrunc));
1356 break;
1358 case RDIV_EXPR:
1360 register tree magsquared
1361 = const_binop (PLUS_EXPR,
1362 const_binop (MULT_EXPR, r2, r2, notrunc),
1363 const_binop (MULT_EXPR, i2, i2, notrunc),
1364 notrunc);
1366 t = build_complex (type,
1367 const_binop
1368 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1369 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1370 const_binop (PLUS_EXPR,
1371 const_binop (MULT_EXPR, r1, r2,
1372 notrunc),
1373 const_binop (MULT_EXPR, i1, i2,
1374 notrunc),
1375 notrunc),
1376 magsquared, notrunc),
1377 const_binop
1378 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1379 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1380 const_binop (MINUS_EXPR,
1381 const_binop (MULT_EXPR, i1, r2,
1382 notrunc),
1383 const_binop (MULT_EXPR, r1, i2,
1384 notrunc),
1385 notrunc),
1386 magsquared, notrunc));
1388 break;
1390 default:
1391 abort ();
1393 return t;
/* Operands are not a kind of constant this function folds.  */
1395 return 0;
1398 /* Return an INTEGER_CST with value V and type from `sizetype'. */
1400 tree
1401 size_int (number)
1402 unsigned HOST_WIDE_INT number;
1404 register tree t;
1405 /* Type-size nodes already made for small sizes. */
1406 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];
1408 if (number < 2*HOST_BITS_PER_WIDE_INT + 1
1409 && size_table[number] != 0)
1410 return size_table[number];
1411 if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
1413 push_obstacks_nochange ();
1414 /* Make this a permanent node. */
1415 end_temporary_allocation ();
1416 t = build_int_2 (number, 0);
1417 TREE_TYPE (t) = sizetype;
1418 size_table[number] = t;
1419 pop_obstacks ();
1421 else
1423 t = build_int_2 (number, 0);
1424 TREE_TYPE (t) = sizetype;
1425 TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
1427 return t;
1430 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1431 CODE is a tree code. Data type is taken from `sizetype',
1432 If the operands are constant, so is the result. */
1434 tree
1435 size_binop (code, arg0, arg1)
1436 enum tree_code code;
1437 tree arg0, arg1;
1439 /* Handle the special case of two integer constants faster. */
1440 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1442 /* And some specific cases even faster than that. */
1443 if (code == PLUS_EXPR && integer_zerop (arg0))
1444 return arg1;
1445 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1446 && integer_zerop (arg1))
1447 return arg0;
1448 else if (code == MULT_EXPR && integer_onep (arg0))
1449 return arg1;
1451 /* Handle general case of two integer constants. */
1452 return const_binop (code, arg0, arg1, 0);
1455 if (arg0 == error_mark_node || arg1 == error_mark_node)
1456 return error_mark_node;
1458 return fold (build (code, sizetype, arg0, arg1));
1461 /* Given T, a tree representing type conversion of ARG1, a constant,
1462 return a constant tree representing the result of conversion. */
1464 static tree
1465 fold_convert (t, arg1)
1466 register tree t;
1467 register tree arg1;
1469 register tree type = TREE_TYPE (t);
1470 int overflow = 0;
/* Target is an integral or pointer type.  */
1472 if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
1474 if (TREE_CODE (arg1) == INTEGER_CST)
1476 /* If we would build a constant wider than GCC supports,
1477 leave the conversion unfolded. */
1478 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1479 return t;
1481 /* Given an integer constant, make new constant with new type,
1482 appropriately sign-extended or truncated. */
1483 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1484 TREE_INT_CST_HIGH (arg1));
1485 TREE_TYPE (t) = type;
1486 /* Indicate an overflow if (1) ARG1 already overflowed,
1487 or (2) force_fit_type indicates an overflow.
1488 Tell force_fit_type that an overflow has already occurred
1489 if ARG1 is a too-large unsigned value and T is signed. */
1490 TREE_OVERFLOW (t)
1491 = (TREE_OVERFLOW (arg1)
1492 | force_fit_type (t,
1493 (TREE_INT_CST_HIGH (arg1) < 0
1494 & (TREE_UNSIGNED (type)
1495 < TREE_UNSIGNED (TREE_TYPE (arg1))))));
1496 TREE_CONSTANT_OVERFLOW (t)
1497 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1499 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
/* Real constant converted to an integral/pointer type: range-check
   first, then truncate toward zero.  */
1500 else if (TREE_CODE (arg1) == REAL_CST)
1502 /* Don't initialize these, use assignments.
1503 Initialized local aggregates don't work on old compilers. */
1504 REAL_VALUE_TYPE x;
1505 REAL_VALUE_TYPE l;
1506 REAL_VALUE_TYPE u;
1507 tree type1 = TREE_TYPE (arg1);
1509 x = TREE_REAL_CST (arg1);
1510 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1511 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1512 /* See if X will be in range after truncation towards 0.
1513 To compensate for truncation, move the bounds away from 0,
1514 but reject if X exactly equals the adjusted bounds. */
1515 #ifdef REAL_ARITHMETIC
1516 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1517 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1518 #else
1519 l--;
1520 u++;
1521 #endif
1522 /* If X is a NaN, use zero instead and show we have an overflow.
1523 Otherwise, range check. */
1524 if (REAL_VALUE_ISNAN (x))
1525 overflow = 1, x = dconst0;
1526 else if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
1527 overflow = 1;
1529 #ifndef REAL_ARITHMETIC
/* Split |X| into two half-words by explicit division, since the host
   double->integer conversion may not cover the full double-word range.  */
1531 HOST_WIDE_INT low, high;
1532 HOST_WIDE_INT half_word
1533 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1535 if (x < 0)
1536 x = -x;
1538 high = (HOST_WIDE_INT) (x / half_word / half_word);
1539 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
1540 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1542 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
1543 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1545 else
1546 low = (HOST_WIDE_INT) x;
1547 if (TREE_REAL_CST (arg1) < 0)
1548 neg_double (low, high, &low, &high);
1549 t = build_int_2 (low, high);
1551 #else
1553 HOST_WIDE_INT low, high;
1554 REAL_VALUE_TO_INT (&low, &high, x);
1555 t = build_int_2 (low, high);
1557 #endif
1558 TREE_TYPE (t) = type;
1559 TREE_OVERFLOW (t)
1560 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1561 TREE_CONSTANT_OVERFLOW (t)
1562 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1564 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1565 TREE_TYPE (t) = type;
/* Target is a real type.  */
1567 else if (TREE_CODE (type) == REAL_TYPE)
1569 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1570 if (TREE_CODE (arg1) == INTEGER_CST)
1571 return build_real_from_int_cst (type, arg1);
1572 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1573 if (TREE_CODE (arg1) == REAL_CST)
/* A NaN stays a NaN; just retype the node in place.  */
1575 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1577 t = arg1;
1578 TREE_TYPE (arg1) = type;
1579 return t;
/* Trap handler: a floating-point error during truncation lands here
   via longjmp and is reported as an overflow (see got_it below).  */
1581 else if (setjmp (float_error))
1583 overflow = 1;
1584 t = copy_node (arg1);
1585 goto got_it;
1587 set_float_handler (float_error);
1589 t = build_real (type, real_value_truncate (TYPE_MODE (type),
1590 TREE_REAL_CST (arg1)));
1591 set_float_handler (NULL_PTR);
/* Reached both normally and from the setjmp trap branch above.  */
1593 got_it:
1594 TREE_OVERFLOW (t)
1595 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1596 TREE_CONSTANT_OVERFLOW (t)
1597 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1598 return t;
/* The result of converting a constant is itself constant.  */
1601 TREE_CONSTANT (t) = 1;
1602 return t;
1605 /* Return an expr equal to X but certainly not valid as an lvalue.
1606 Also make sure it is not valid as an null pointer constant. */
1608 tree
1609 non_lvalue (x)
1610 tree x;
1612 tree result;
1614 /* These things are certainly not lvalues. */
1615 if (TREE_CODE (x) == NON_LVALUE_EXPR
1616 || TREE_CODE (x) == INTEGER_CST
1617 || TREE_CODE (x) == REAL_CST
1618 || TREE_CODE (x) == STRING_CST
1619 || TREE_CODE (x) == ADDR_EXPR)
1621 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1623 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1624 so convert_for_assignment won't strip it.
1625 This is so this 0 won't be treated as a null pointer constant. */
1626 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1627 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1628 return result;
1630 return x;
1633 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1634 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1635 return result;
1638 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1639 Zero means allow extended lvalues. */
/* NOTE(review): consulted by pedantic_non_lvalue below; presumably set
   by the language front end -- confirm where it is assigned.  */
1641 int pedantic_lvalues;
1643 /* When pedantic, return an expr equal to X but certainly not valid as a
1644 pedantic lvalue. Otherwise, return X. */
1646 tree
1647 pedantic_non_lvalue (x)
1648 tree x;
1650 if (pedantic_lvalues)
1651 return non_lvalue (x);
1652 else
1653 return x;
1656 /* Given a tree comparison code, return the code that is the logical inverse
1657 of the given code. It is not safe to do this for floating-point
1658 comparisons, except for NE_EXPR and EQ_EXPR. */
1660 static enum tree_code
1661 invert_tree_comparison (code)
1662 enum tree_code code;
1664 switch (code)
1666 case EQ_EXPR:
1667 return NE_EXPR;
1668 case NE_EXPR:
1669 return EQ_EXPR;
1670 case GT_EXPR:
1671 return LE_EXPR;
1672 case GE_EXPR:
1673 return LT_EXPR;
1674 case LT_EXPR:
1675 return GE_EXPR;
1676 case LE_EXPR:
1677 return GT_EXPR;
1678 default:
1679 abort ();
1683 /* Similar, but return the comparison that results if the operands are
1684 swapped. This is safe for floating-point. */
1686 static enum tree_code
1687 swap_tree_comparison (code)
1688 enum tree_code code;
1690 switch (code)
1692 case EQ_EXPR:
1693 case NE_EXPR:
1694 return code;
1695 case GT_EXPR:
1696 return LT_EXPR;
1697 case GE_EXPR:
1698 return LE_EXPR;
1699 case LT_EXPR:
1700 return GT_EXPR;
1701 case LE_EXPR:
1702 return GE_EXPR;
1703 default:
1704 abort ();
1708 /* Return nonzero if CODE is a tree code that represents a truth value. */
1710 static int
1711 truth_value_p (code)
1712 enum tree_code code;
1714 return (TREE_CODE_CLASS (code) == '<'
1715 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1716 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1717 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1720 /* Return nonzero if two operands are necessarily equal.
1721 If ONLY_CONST is non-zero, only return non-zero for constants.
1722 This function tests whether the operands are indistinguishable;
1723 it does not test whether they are equal using C's == operation.
1724 The distinction is important for IEEE floating point, because
1725 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1726 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1729 operand_equal_p (arg0, arg1, only_const)
1730 tree arg0, arg1;
1731 int only_const;
1733 /* If both types don't have the same signedness, then we can't consider
1734 them equal. We must check this before the STRIP_NOPS calls
1735 because they may change the signedness of the arguments. */
1736 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1737 return 0;
1739 STRIP_NOPS (arg0);
1740 STRIP_NOPS (arg1);
1742 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1743 /* This is needed for conversions and for COMPONENT_REF.
1744 Might as well play it safe and always test this. */
1745 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1746 return 0;
1748 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1749 We don't care about side effects in that case because the SAVE_EXPR
1750 takes care of that for us. In all other cases, two expressions are
1751 equal if they have no side effects. If we have two identical
1752 expressions with side effects that should be treated the same due
1753 to the only side effects being identical SAVE_EXPR's, that will
1754 be detected in the recursive calls below. */
1755 if (arg0 == arg1 && ! only_const
1756 && (TREE_CODE (arg0) == SAVE_EXPR
1757 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1758 return 1;
1760 /* Next handle constant cases, those for which we can return 1 even
1761 if ONLY_CONST is set. */
1762 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1763 switch (TREE_CODE (arg0))
1765 case INTEGER_CST:
/* Overflowed constants are never considered equal to anything.  */
1766 return (! TREE_CONSTANT_OVERFLOW (arg0)
1767 && ! TREE_CONSTANT_OVERFLOW (arg1)
1768 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1769 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1));
1771 case REAL_CST:
1772 return (! TREE_CONSTANT_OVERFLOW (arg0)
1773 && ! TREE_CONSTANT_OVERFLOW (arg1)
1774 && REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
1775 TREE_REAL_CST (arg1)));
1777 case COMPLEX_CST:
1778 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1779 only_const)
1780 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1781 only_const));
1783 case STRING_CST:
1784 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1785 && ! strncmp (TREE_STRING_POINTER (arg0),
1786 TREE_STRING_POINTER (arg1),
1787 TREE_STRING_LENGTH (arg0)));
1789 case ADDR_EXPR:
1790 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1794 if (only_const)
1795 return 0;
/* Non-constant expressions: compare structurally by tree code class.  */
1797 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1799 case '1':
1800 /* Two conversions are equal only if signedness and modes match. */
1801 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1802 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1803 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1804 return 0;
1806 return operand_equal_p (TREE_OPERAND (arg0, 0),
1807 TREE_OPERAND (arg1, 0), 0);
1809 case '<':
1810 case '2':
1811 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1812 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1814 return 1;
1816 /* For commutative ops, allow the other order. */
1817 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1818 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1819 || TREE_CODE (arg0) == BIT_IOR_EXPR
1820 || TREE_CODE (arg0) == BIT_XOR_EXPR
1821 || TREE_CODE (arg0) == BIT_AND_EXPR
1822 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1823 && operand_equal_p (TREE_OPERAND (arg0, 0),
1824 TREE_OPERAND (arg1, 1), 0)
1825 && operand_equal_p (TREE_OPERAND (arg0, 1),
1826 TREE_OPERAND (arg1, 0), 0));
1828 case 'r':
/* References: only a few reference codes can be matched up.  */
1829 switch (TREE_CODE (arg0))
1831 case INDIRECT_REF:
1832 return operand_equal_p (TREE_OPERAND (arg0, 0),
1833 TREE_OPERAND (arg1, 0), 0);
1835 case COMPONENT_REF:
1836 case ARRAY_REF:
1837 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1838 TREE_OPERAND (arg1, 0), 0)
1839 && operand_equal_p (TREE_OPERAND (arg0, 1),
1840 TREE_OPERAND (arg1, 1), 0));
1842 case BIT_FIELD_REF:
1843 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1844 TREE_OPERAND (arg1, 0), 0)
1845 && operand_equal_p (TREE_OPERAND (arg0, 1),
1846 TREE_OPERAND (arg1, 1), 0)
1847 && operand_equal_p (TREE_OPERAND (arg0, 2),
1848 TREE_OPERAND (arg1, 2), 0));
1850 break;
/* Anything unrecognized is conservatively considered unequal.  */
1853 return 0;
1856 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1857 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1859 When in doubt, return 0. */
1861 static int
1862 operand_equal_for_comparison_p (arg0, arg1, other)
1863 tree arg0, arg1;
1864 tree other;
1866 int unsignedp1, unsignedpo;
1867 tree primarg1, primother;
1868 unsigned correct_width;
1870 if (operand_equal_p (arg0, arg1, 0))
1871 return 1;
1873 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1874 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1875 return 0;
1877 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1878 actual comparison operand, ARG0.
1880 First throw away any conversions to wider types
1881 already present in the operands. */
1883 primarg1 = get_narrower (arg1, &unsignedp1);
1884 primother = get_narrower (other, &unsignedpo);
1886 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1887 if (unsignedp1 == unsignedpo
1888 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1889 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1891 tree type = TREE_TYPE (arg0);
1893 /* Make sure shorter operand is extended the right way
1894 to match the longer operand. */
1895 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1896 TREE_TYPE (primarg1)),
1897 primarg1);
1899 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1900 return 1;
1903 return 0;
1906 /* See if ARG is an expression that is either a comparison or is performing
1907 arithmetic on comparisons. The comparisons must only be comparing
1908 two different values, which will be stored in *CVAL1 and *CVAL2; if
1909 they are non-zero it means that some operands have already been found.
1910 No variables may be used anywhere else in the expression except in the
1911 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1912 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1914 If this is true, return 1. Otherwise, return zero. */
1916 static int
1917 twoval_comparison_p (arg, cval1, cval2, save_p)
1918 tree arg;
1919 tree *cval1, *cval2;
1920 int *save_p;
1922 enum tree_code code = TREE_CODE (arg);
1923 char class = TREE_CODE_CLASS (code);
1925 /* We can handle some of the 'e' cases here. */
1926 if (class == 'e' && code == TRUTH_NOT_EXPR)
1927 class = '1';
1928 else if (class == 'e'
1929 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1930 || code == COMPOUND_EXPR))
1931 class = '2';
1933 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1934 the expression. There may be no way to make this work, but it needs
1935 to be looked at again for 2.6. */
1936 #if 0
1937 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1939 /* If we've already found a CVAL1 or CVAL2, this expression is
1940 two complex to handle. */
1941 if (*cval1 || *cval2)
1942 return 0;
1944 class = '1';
1945 *save_p = 1;
1947 #endif
/* Recurse over the operands according to the (possibly reclassified)
   tree code class.  */
1949 switch (class)
1951 case '1':
1952 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
1954 case '2':
1955 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1956 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1957 cval1, cval2, save_p));
1959 case 'c':
1960 return 1;
1962 case 'e':
1963 if (code == COND_EXPR)
1964 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
1965 cval1, cval2, save_p)
1966 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1967 cval1, cval2, save_p)
1968 && twoval_comparison_p (TREE_OPERAND (arg, 2),
1969 cval1, cval2, save_p));
1970 return 0;
1972 case '<':
1973 /* First see if we can handle the first operand, then the second. For
1974 the second operand, we know *CVAL1 can't be zero. It must be that
1975 one side of the comparison is each of the values; test for the
1976 case where this isn't true by failing if the two operands
1977 are the same. */
1979 if (operand_equal_p (TREE_OPERAND (arg, 0),
1980 TREE_OPERAND (arg, 1), 0))
1981 return 0;
/* Record operand 0 in *CVAL1 or *CVAL2 unless it matches a value
   already recorded; fail once more than two distinct values appear.  */
1983 if (*cval1 == 0)
1984 *cval1 = TREE_OPERAND (arg, 0);
1985 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
/* Already recorded as *CVAL1; nothing to do.  */
1987 else if (*cval2 == 0)
1988 *cval2 = TREE_OPERAND (arg, 0);
1989 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Already recorded as *CVAL2; nothing to do.  */
1991 else
1992 return 0;
/* Likewise for operand 1.  */
1994 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
/* Already recorded as *CVAL1; nothing to do.  */
1996 else if (*cval2 == 0)
1997 *cval2 = TREE_OPERAND (arg, 1);
1998 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
/* Already recorded as *CVAL2; nothing to do.  */
2000 else
2001 return 0;
2003 return 1;
2006 return 0;
2009 /* ARG is a tree that is known to contain just arithmetic operations and
2010 comparisons. Evaluate the operations in the tree substituting NEW0 for
2011 any occurrence of OLD0 as an operand of a comparison and likewise for
2012 NEW1 and OLD1. */
2014 static tree
2015 eval_subst (arg, old0, new0, old1, new1)
2016 tree arg;
2017 tree old0, new0, old1, new1;
2019 tree type = TREE_TYPE (arg);
2020 enum tree_code code = TREE_CODE (arg);
2021 char class = TREE_CODE_CLASS (code);
2023 /* We can handle some of the 'e' cases here. */
2024 if (class == 'e' && code == TRUTH_NOT_EXPR)
2025 class = '1';
2026 else if (class == 'e'
2027 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2028 class = '2';
/* Rebuild the expression, substituting recursively in the operands.  */
2030 switch (class)
2032 case '1':
2033 return fold (build1 (code, type,
2034 eval_subst (TREE_OPERAND (arg, 0),
2035 old0, new0, old1, new1)));
2037 case '2':
2038 return fold (build (code, type,
2039 eval_subst (TREE_OPERAND (arg, 0),
2040 old0, new0, old1, new1),
2041 eval_subst (TREE_OPERAND (arg, 1),
2042 old0, new0, old1, new1)));
2044 case 'e':
2045 switch (code)
2047 case SAVE_EXPR:
2048 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2050 case COMPOUND_EXPR:
/* Only the value operand matters for evaluation.  */
2051 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2053 case COND_EXPR:
2054 return fold (build (code, type,
2055 eval_subst (TREE_OPERAND (arg, 0),
2056 old0, new0, old1, new1),
2057 eval_subst (TREE_OPERAND (arg, 1),
2058 old0, new0, old1, new1),
2059 eval_subst (TREE_OPERAND (arg, 2),
2060 old0, new0, old1, new1)));
2063 case '<':
/* A comparison: this is where the actual substitution happens.  */
2065 tree arg0 = TREE_OPERAND (arg, 0);
2066 tree arg1 = TREE_OPERAND (arg, 1);
2068 /* We need to check both for exact equality and tree equality. The
2069 former will be true if the operand has a side-effect. In that
2070 case, we know the operand occurred exactly once. */
2072 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2073 arg0 = new0;
2074 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2075 arg0 = new1;
2077 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2078 arg1 = new0;
2079 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2080 arg1 = new1;
2082 return fold (build (code, type, arg0, arg1));
/* Constants and anything else come back unchanged.  */
2086 return arg;
2089 /* Return a tree for the case when the result of an expression is RESULT
2090 converted to TYPE and OMITTED was previously an operand of the expression
2091 but is now not needed (e.g., we folded OMITTED * 0).
2093 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2094 the conversion of RESULT to TYPE. */
2096 static tree
2097 omit_one_operand (type, result, omitted)
2098 tree type, result, omitted;
2100 tree t = convert (type, result);
2102 if (TREE_SIDE_EFFECTS (omitted))
2103 return build (COMPOUND_EXPR, type, omitted, t);
2105 return non_lvalue (t);
2108 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2110 static tree
2111 pedantic_omit_one_operand (type, result, omitted)
2112 tree type, result, omitted;
2114 tree t = convert (type, result);
2116 if (TREE_SIDE_EFFECTS (omitted))
2117 return build (COMPOUND_EXPR, type, omitted, t);
2119 return pedantic_non_lvalue (t);
2124 /* Return a simplified tree node for the truth-negation of ARG. This
2125 never alters ARG itself. We assume that ARG is an operation that
2126 returns a truth value (0 or 1). */
2128 tree
2129 invert_truthvalue (arg)
2130 tree arg;
2132 tree type = TREE_TYPE (arg);
2133 enum tree_code code = TREE_CODE (arg);
2135 if (code == ERROR_MARK)
2136 return arg;
2138 /* If this is a comparison, we can simply invert it, except for
2139 floating-point non-equality comparisons, in which case we just
2140 enclose a TRUTH_NOT_EXPR around what we have. */
2142 if (TREE_CODE_CLASS (code) == '<')
2144 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2145 && code != NE_EXPR && code != EQ_EXPR)
2146 return build1 (TRUTH_NOT_EXPR, type, arg);
2147 else
2148 return build (invert_tree_comparison (code), type,
2149 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* For the remaining codes, push the negation inward where possible.  */
2152 switch (code)
2154 case INTEGER_CST:
/* Constant: the inversion is 1 iff the constant is zero.  */
2155 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2156 && TREE_INT_CST_HIGH (arg) == 0, 0));
2158 case TRUTH_AND_EXPR:
/* De Morgan: !(a && b) == !a || !b.  */
2159 return build (TRUTH_OR_EXPR, type,
2160 invert_truthvalue (TREE_OPERAND (arg, 0)),
2161 invert_truthvalue (TREE_OPERAND (arg, 1)));
2163 case TRUTH_OR_EXPR:
/* De Morgan: !(a || b) == !a && !b.  */
2164 return build (TRUTH_AND_EXPR, type,
2165 invert_truthvalue (TREE_OPERAND (arg, 0)),
2166 invert_truthvalue (TREE_OPERAND (arg, 1)));
2168 case TRUTH_XOR_EXPR:
2169 /* Here we can invert either operand. We invert the first operand
2170 unless the second operand is a TRUTH_NOT_EXPR in which case our
2171 result is the XOR of the first operand with the inside of the
2172 negation of the second operand. */
2174 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2175 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2176 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2177 else
2178 return build (TRUTH_XOR_EXPR, type,
2179 invert_truthvalue (TREE_OPERAND (arg, 0)),
2180 TREE_OPERAND (arg, 1));
2182 case TRUTH_ANDIF_EXPR:
2183 return build (TRUTH_ORIF_EXPR, type,
2184 invert_truthvalue (TREE_OPERAND (arg, 0)),
2185 invert_truthvalue (TREE_OPERAND (arg, 1)));
2187 case TRUTH_ORIF_EXPR:
2188 return build (TRUTH_ANDIF_EXPR, type,
2189 invert_truthvalue (TREE_OPERAND (arg, 0)),
2190 invert_truthvalue (TREE_OPERAND (arg, 1)));
2192 case TRUTH_NOT_EXPR:
/* Double negation cancels.  */
2193 return TREE_OPERAND (arg, 0);
2195 case COND_EXPR:
/* Invert both arms; the condition is untouched.  */
2196 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2197 invert_truthvalue (TREE_OPERAND (arg, 1)),
2198 invert_truthvalue (TREE_OPERAND (arg, 2)));
2200 case COMPOUND_EXPR:
/* Only the value operand is negated.  */
2201 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2202 invert_truthvalue (TREE_OPERAND (arg, 1)));
2204 case NON_LVALUE_EXPR:
2205 return invert_truthvalue (TREE_OPERAND (arg, 0));
2207 case NOP_EXPR:
2208 case CONVERT_EXPR:
2209 case FLOAT_EXPR:
/* Push the negation through the conversion.  */
2210 return build1 (TREE_CODE (arg), type,
2211 invert_truthvalue (TREE_OPERAND (arg, 0)));
2213 case BIT_AND_EXPR:
/* !(x & 1) == (x & 1) == 0, but only for single-bit masks.  */
2214 if (!integer_onep (TREE_OPERAND (arg, 1)))
2215 break;
2216 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2218 case SAVE_EXPR:
2219 return build1 (TRUTH_NOT_EXPR, type, arg);
2221 case CLEANUP_POINT_EXPR:
2222 return build1 (CLEANUP_POINT_EXPR, type,
2223 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Any code not handled above must already be boolean-valued.  */
2225 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2226 abort ();
2227 return build1 (TRUTH_NOT_EXPR, type, arg);
2230 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2231 operands are another bit-wise operation with a common input. If so,
2232 distribute the bit operations to save an operation and possibly two if
2233 constants are involved. For example, convert
2234 (A | B) & (A | C) into A | (B & C)
2235 Further simplification will occur if B and C are constants.
2237 If this optimization cannot be done, 0 will be returned. */
2239 static tree
2240 distribute_bit_expr (code, type, arg0, arg1)
2241 enum tree_code code;
2242 tree type;
2243 tree arg0, arg1;
2245 tree common;
2246 tree left, right;
2248 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2249 || TREE_CODE (arg0) == code
2250 || (TREE_CODE (arg0) != BIT_AND_EXPR
2251 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2252 return 0;
2254 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2256 common = TREE_OPERAND (arg0, 0);
2257 left = TREE_OPERAND (arg0, 1);
2258 right = TREE_OPERAND (arg1, 1);
2260 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2262 common = TREE_OPERAND (arg0, 0);
2263 left = TREE_OPERAND (arg0, 1);
2264 right = TREE_OPERAND (arg1, 0);
2266 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2268 common = TREE_OPERAND (arg0, 1);
2269 left = TREE_OPERAND (arg0, 0);
2270 right = TREE_OPERAND (arg1, 1);
2272 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2274 common = TREE_OPERAND (arg0, 1);
2275 left = TREE_OPERAND (arg0, 0);
2276 right = TREE_OPERAND (arg1, 0);
2278 else
2279 return 0;
2281 return fold (build (TREE_CODE (arg0), type, common,
2282 fold (build (code, type, left, right))));
2285 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2286 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2288 static tree
2289 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2290 tree inner;
2291 tree type;
2292 int bitsize, bitpos;
2293 int unsignedp;
2295 tree result = build (BIT_FIELD_REF, type, inner,
2296 size_int (bitsize), size_int (bitpos));
2298 TREE_UNSIGNED (result) = unsignedp;
2300 return result;
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  int lbitpos, lbitsize, rbitpos, rbitsize;
  int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, lnmode, rnmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  int alignment;
  tree linner, rinner;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing
     an extraction at all and so can do nothing.  We also punt on a variable
     offset (nonzero OFFSET).  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, &alignment);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, &alignment);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  lnmode = get_best_mode (lbitsize, lbitpos,
			  TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
			  lvolatilep);
  if (lnmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = type_for_mode (lnmode, 0);
  unsigned_type = type_for_mode (lnmode, 1);

  if (! const_p)
    {
      rnmode = get_best_mode (rbitsize, rbitpos,
			      TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
			      rvolatilep);
      if (rnmode == VOIDmode)
	return 0;
    }

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = lbitpos & ~ (lnbitsize - 1);
  lbitpos -= lnbitpos;
  if (lnbitsize == lbitsize)
    return 0;

  if (! const_p)
    {
      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = rbitpos & ~ (rnbitsize - 1);
      rbitpos -= rnbitpos;
      if (rnbitsize == rbitsize)
	return 0;
    }

  /* On a big-endian machine bit numbering within the chunk is reversed.  */
  if (BYTES_BIG_ENDIAN)
    lbitpos = lnbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: all ones, shifted
     left then right so only the LBITSIZE bits at LBITPOS remain set.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (lnbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     lnbitsize, lnbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     rnbitsize, rnbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length
     should be all zero.  For signed fields, the high-order bits should agree
     with the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %s due to width of bitfield",
		   code == NE_EXPR ? "one" : "zero");
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %s due to width of bitfield",
		   code == NE_EXPR ? "one" : "zero");
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
			pvolatilep, pmask, pand_mask)
     tree exp;
     int *pbitsize, *pbitpos;
     enum machine_mode *pmode;
     int *punsignedp, *pvolatilep;
     tree *pmask;
     tree *pand_mask;
{
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  int precision;
  int alignment;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  STRIP_NOPS (exp);

  /* Peel off an outer BIT_AND_EXPR with a constant mask, remembering the
     mask so it can be merged with the field-width mask below.  A variable
     mask means we can't do anything.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, &alignment);
  /* Fail if EXP is not a reference at all (unless an AND mask gave us
     something to work with), or is a variable-position reference.  */
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0)
    return 0;

  /* Compute the mask to access the bitfield.  */
  unsigned_type = type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  /* Build all ones, then shift left and back right so exactly *PBITSIZE
     low-order one bits remain.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
2575 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2576 bit positions. */
2578 static int
2579 all_ones_mask_p (mask, size)
2580 tree mask;
2581 int size;
2583 tree type = TREE_TYPE (mask);
2584 int precision = TYPE_PRECISION (type);
2585 tree tmask;
2587 tmask = build_int_2 (~0, ~0);
2588 TREE_TYPE (tmask) = signed_type (type);
2589 force_fit_type (tmask, 0);
2590 return
2591 tree_int_cst_equal (mask,
2592 const_binop (RSHIFT_EXPR,
2593 const_binop (LSHIFT_EXPR, tmask,
2594 size_int (precision - size),
2596 size_int (precision - size), 0));
2599 /* Subroutine for fold_truthop: determine if an operand is simple enough
2600 to be evaluated unconditionally. */
2602 static int
2603 simple_operand_p (exp)
2604 tree exp;
2606 /* Strip any conversions that don't change the machine mode. */
2607 while ((TREE_CODE (exp) == NOP_EXPR
2608 || TREE_CODE (exp) == CONVERT_EXPR)
2609 && (TYPE_MODE (TREE_TYPE (exp))
2610 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2611 exp = TREE_OPERAND (exp, 0);
2613 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2614 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2615 && ! TREE_ADDRESSABLE (exp)
2616 && ! TREE_THIS_VOLATILE (exp)
2617 && ! DECL_NONLOCAL (exp)
2618 /* Don't regard global variables as simple. They may be
2619 allocated in ways unknown to the compiler (shared memory,
2620 #pragma weak, etc). */
2621 && ! TREE_PUBLIC (exp)
2622 && ! DECL_EXTERNAL (exp)
2623 /* Loading a static variable is unduly expensive, but global
2624 registers aren't expensive. */
2625 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2628 /* The following functions are subroutines to fold_range_test and allow it to
2629 try to change a logical combination of comparisons into a range test.
2631 For example, both
2632 X == 2 || X == 3 || X == 4 || X == 5
2634 X >= 2 && X <= 5
2635 are converted to
2636 (unsigned) (X - 2) <= 3
2638 We describe each set of comparisons as being either inside or outside
2639 a range, using a variable named like IN_P, and then describe the
2640 range with a lower and upper bound. If one of the bounds is omitted,
2641 it represents either the highest or lowest value of the type.
2643 In the comments below, we represent a range by two numbers in brackets
2644 preceded by a "+" to designate being inside that range, or a "-" to
2645 designate being outside that range, so the condition can be inverted by
2646 flipping the prefix. An omitted bound is represented by a "-". For
2647 example, "- [-, 10]" means being outside the range starting at the lowest
2648 possible value and ending at 10, in other words, being greater than 10.
2649 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2650 always false.
2652 We set up things so that the missing bounds are handled in a consistent
2653 manner so neither a missing bound nor "true" and "false" need to be
2654 handled using a special case. */
2656 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2657 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2658 and UPPER1_P are nonzero if the respective argument is an upper bound
2659 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2660 must be specified for a comparison. ARG1 will be converted to ARG0's
2661 type if both are specified. */
2663 static tree
2664 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2665 enum tree_code code;
2666 tree type;
2667 tree arg0, arg1;
2668 int upper0_p, upper1_p;
2670 tree tem;
2671 int result;
2672 int sgn0, sgn1;
2674 /* If neither arg represents infinity, do the normal operation.
2675 Else, if not a comparison, return infinity. Else handle the special
2676 comparison rules. Note that most of the cases below won't occur, but
2677 are handled for consistency. */
2679 if (arg0 != 0 && arg1 != 0)
2681 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2682 arg0, convert (TREE_TYPE (arg0), arg1)));
2683 STRIP_NOPS (tem);
2684 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2687 if (TREE_CODE_CLASS (code) != '<')
2688 return 0;
2690 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2691 for neither. Then compute our result treating them as never equal
2692 and comparing bounds to non-bounds as above. */
2693 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2694 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2695 switch (code)
2697 case EQ_EXPR: case NE_EXPR:
2698 result = (code == NE_EXPR);
2699 break;
2700 case LT_EXPR: case LE_EXPR:
2701 result = sgn0 < sgn1;
2702 break;
2703 case GT_EXPR: case GE_EXPR:
2704 result = sgn0 > sgn1;
2705 break;
2708 return convert (type, result ? integer_one_node : integer_zero_node);
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (exp, pin_p, plow, phigh)
     tree exp;
     int *pin_p;
     tree *plow, *phigh;
{
  enum tree_code code;
  tree arg0, arg1, type;		/* operands of EXP and their type */
  int in_p, n_in_p;			/* current / tentative in-range flag */
  tree low, high, n_low, n_high;	/* current / tentative bounds */

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      arg0 = TREE_OPERAND (exp, 0), arg1 = TREE_OPERAND (exp, 1);
      if (TREE_CODE_CLASS (code) == '<' || TREE_CODE_CLASS (code) == '1'
	  || TREE_CODE_CLASS (code) == '2')
	type = TREE_TYPE (arg0);

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* Logical negation just inverts the sense of the range.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c]  */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -]  */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -]  */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c]  */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    }

	  exp = arg0;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, convert (type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, reverse the range so it
		 goes from zero to the low bound minus 1.  */
	      if (high == 0)
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = convert (type, integer_zero_node);
		}
	    }
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       convert (type, integer_zero_node), 0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				convert (type, integer_zero_node), 0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1; re-express and let the loop re-examine it.  */
	  exp = build (MINUS_EXPR, type, build1 (NEGATE_EXPR, type, arg0),
		       convert (type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);
	      in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  /* A conversion preserves the range only if the target type is
	     integral and can represent both bounds.  */
	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))
	    break;

	  if (low != 0)
	    low = convert (type, low);

	  if (high != 0)
	    high = convert (type, high);

	  exp = arg0;
	  continue;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
2893 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
2894 type, TYPE, return an expression to test if EXP is in (or out of, depending
2895 on IN_P) the range. */
2897 static tree
2898 build_range_check (type, exp, in_p, low, high)
2899 tree type;
2900 tree exp;
2901 int in_p;
2902 tree low, high;
2904 tree etype = TREE_TYPE (exp);
2905 tree utype, value;
2907 if (! in_p
2908 && (0 != (value = build_range_check (type, exp, 1, low, high))))
2909 return invert_truthvalue (value);
2911 else if (low == 0 && high == 0)
2912 return convert (type, integer_one_node);
2914 else if (low == 0)
2915 return fold (build (LE_EXPR, type, exp, high));
2917 else if (high == 0)
2918 return fold (build (GE_EXPR, type, exp, low));
2920 else if (operand_equal_p (low, high, 0))
2921 return fold (build (EQ_EXPR, type, exp, low));
2923 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
2924 return build_range_check (type, exp, 1, 0, high);
2926 else if (integer_zerop (low))
2928 utype = unsigned_type (etype);
2929 return build_range_check (type, convert (utype, exp), 1, 0,
2930 convert (utype, high));
2933 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
2934 && ! TREE_OVERFLOW (value))
2935 return build_range_check (type,
2936 fold (build (MINUS_EXPR, etype, exp, low)),
2937 1, convert (etype, integer_zero_node), value);
2938 else
2939 return 0;
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.
   A null bound stands for the lowest (for a low bound) or highest (for a
   high bound) value of the type, per the conventions described above.  */

static int
merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
     int *pin_p;
     tree *plow, *phigh;
     int in0_p, in1_p;
     tree low0, high0, low1, high1;
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;

  /* Make range 0 be the range that starts first.  Swap them if it isn't.
     (Ties on the low bound are broken by the larger high bound.)  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (((low0 == 0 && low1 == 0)
	   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 low0, 0, low1, 0)))
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high0, 1, high1, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If the
	 second range is a subset of the first, we can't describe this as
	 a single range unless both ranges end at the same place.  If both
	 ranges start in the same place, then the result is false.
	 Otherwise, we go from the start of the first range to just before
	 the start of the second.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (subset
	       && integer_zerop (range_binop (EQ_EXPR, integer_type_node,
					      high0, 1, high1, 0)))
	return 0;
      else if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					  low0, 0, low1, 0)))
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);
	}
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset)
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,
						      high0, 1,
						      integer_one_node, 1),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    return 0;
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
3068 /* EXP is some logical combination of boolean tests. See if we can
3069 merge it into some range test. Return the new tree if so. */
3071 static tree
3072 fold_range_test (exp)
3073 tree exp;
3075 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3076 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3077 int in0_p, in1_p, in_p;
3078 tree low0, low1, low, high0, high1, high;
3079 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3080 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3081 tree tem;
3083 /* If this is an OR operation, invert both sides; we will invert
3084 again at the end. */
3085 if (or_op)
3086 in0_p = ! in0_p, in1_p = ! in1_p;
3088 /* If both expressions are the same, if we can merge the ranges, and we
3089 can build the range test, return it or it inverted. If one of the
3090 ranges is always true or always false, consider it to be the same
3091 expression as the other. */
3092 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3093 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3094 in1_p, low1, high1)
3095 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3096 lhs != 0 ? lhs
3097 : rhs != 0 ? rhs : integer_zero_node,
3098 in_p, low, high))))
3099 return or_op ? invert_truthvalue (tem) : tem;
3101 /* On machines where the branch cost is expensive, if this is a
3102 short-circuited branch and the underlying object on both sides
3103 is the same, make a non-short-circuit operation. */
3104 else if (BRANCH_COST >= 2
3105 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3106 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3107 && operand_equal_p (lhs, rhs, 0))
3109 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3110 unless we are at top level, in which case we can't do this. */
3111 if (simple_operand_p (lhs))
3112 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3113 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3114 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3115 TREE_OPERAND (exp, 1));
3117 else if (current_function_decl != 0)
3119 tree common = save_expr (lhs);
3121 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3122 or_op ? ! in0_p : in0_p,
3123 low0, high0))
3124 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3125 or_op ? ! in1_p : in1_p,
3126 low1, high1))))
3127 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3128 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3129 TREE_TYPE (exp), lhs, rhs);
3132 else
3133 return 0;
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (c, p, unsignedp, mask)
     tree c;
     int p;
     int unsignedp;
     tree mask;
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  /* If C already fills its mode, or is unsigned (and so has no sign bit
     to replicate), there is nothing to do.  */
  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = convert (signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);

  /* Clear (via MASK) any of the replicated sign bits the caller does not
     care about.  */
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);

  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = convert (type, temp);

  return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
3181 /* Find ways of folding logical expressions of LHS and RHS:
3182 Try to merge two comparisons to the same innermost item.
3183 Look for range tests like "ch >= '0' && ch <= '9'".
3184 Look for combinations of simple terms on machines with expensive branches
3185 and evaluate the RHS unconditionally.
3187 For example, if we have p->a == 2 && p->b == 4 and we can make an
3188 object large enough to span both A and B, we can do this with a comparison
3189 against the object ANDed with the a mask.
3191 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3192 operations to do this with one comparison.
3194 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3195 function and the one above.
3197 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3198 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3200 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3201 two operands.
3203 We return the simplified tree or 0 if no optimization is possible. */
3205 static tree
3206 fold_truthop (code, truth_type, lhs, rhs)
3207 enum tree_code code;
3208 tree truth_type, lhs, rhs;
3210 /* If this is the "or" of two comparisons, we can do something if
3211 the comparisons are NE_EXPR. If this is the "and", we can do something
3212 if the comparisons are EQ_EXPR. I.e.,
3213 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3215 WANTED_CODE is this operation code. For single bit fields, we can
3216 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3217 comparison for one-bit fields. */
3219 enum tree_code wanted_code;
3220 enum tree_code lcode, rcode;
3221 tree ll_arg, lr_arg, rl_arg, rr_arg;
3222 tree ll_inner, lr_inner, rl_inner, rr_inner;
3223 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3224 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3225 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3226 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3227 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3228 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3229 enum machine_mode lnmode, rnmode;
3230 tree ll_mask, lr_mask, rl_mask, rr_mask;
3231 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3232 tree l_const, r_const;
3233 tree type, result;
3234 int first_bit, end_bit;
3235 int volatilep;
3237 /* Start by getting the comparison codes. Fail if anything is volatile.
3238 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3239 it were surrounded with a NE_EXPR. */
3241 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3242 return 0;
3244 lcode = TREE_CODE (lhs);
3245 rcode = TREE_CODE (rhs);
3247 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3248 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3250 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3251 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3253 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3254 return 0;
3256 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3257 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3259 ll_arg = TREE_OPERAND (lhs, 0);
3260 lr_arg = TREE_OPERAND (lhs, 1);
3261 rl_arg = TREE_OPERAND (rhs, 0);
3262 rr_arg = TREE_OPERAND (rhs, 1);
3264 /* If the RHS can be evaluated unconditionally and its operands are
3265 simple, it wins to evaluate the RHS unconditionally on machines
3266 with expensive branches. In this case, this isn't a comparison
3267 that can be merged. */
3269 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
3270 are with zero (tmw). */
3272 if (BRANCH_COST >= 2
3273 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
3274 && simple_operand_p (rl_arg)
3275 && simple_operand_p (rr_arg))
3276 return build (code, truth_type, lhs, rhs);
3278 /* See if the comparisons can be merged. Then get all the parameters for
3279 each side. */
3281 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3282 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3283 return 0;
3285 volatilep = 0;
3286 ll_inner = decode_field_reference (ll_arg,
3287 &ll_bitsize, &ll_bitpos, &ll_mode,
3288 &ll_unsignedp, &volatilep, &ll_mask,
3289 &ll_and_mask);
3290 lr_inner = decode_field_reference (lr_arg,
3291 &lr_bitsize, &lr_bitpos, &lr_mode,
3292 &lr_unsignedp, &volatilep, &lr_mask,
3293 &lr_and_mask);
3294 rl_inner = decode_field_reference (rl_arg,
3295 &rl_bitsize, &rl_bitpos, &rl_mode,
3296 &rl_unsignedp, &volatilep, &rl_mask,
3297 &rl_and_mask);
3298 rr_inner = decode_field_reference (rr_arg,
3299 &rr_bitsize, &rr_bitpos, &rr_mode,
3300 &rr_unsignedp, &volatilep, &rr_mask,
3301 &rr_and_mask);
3303 /* It must be true that the inner operation on the lhs of each
3304 comparison must be the same if we are to be able to do anything.
3305 Then see if we have constants. If not, the same must be true for
3306 the rhs's. */
3307 if (volatilep || ll_inner == 0 || rl_inner == 0
3308 || ! operand_equal_p (ll_inner, rl_inner, 0))
3309 return 0;
3311 if (TREE_CODE (lr_arg) == INTEGER_CST
3312 && TREE_CODE (rr_arg) == INTEGER_CST)
3313 l_const = lr_arg, r_const = rr_arg;
3314 else if (lr_inner == 0 || rr_inner == 0
3315 || ! operand_equal_p (lr_inner, rr_inner, 0))
3316 return 0;
3317 else
3318 l_const = r_const = 0;
3320 /* If either comparison code is not correct for our logical operation,
3321 fail. However, we can convert a one-bit comparison against zero into
3322 the opposite comparison against that bit being set in the field. */
3324 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3325 if (lcode != wanted_code)
3327 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3328 l_const = ll_mask;
3329 else
3330 return 0;
3333 if (rcode != wanted_code)
3335 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3336 r_const = rl_mask;
3337 else
3338 return 0;
3341 /* See if we can find a mode that contains both fields being compared on
3342 the left. If we can't, fail. Otherwise, update all constants and masks
3343 to be relative to a field of that size. */
3344 first_bit = MIN (ll_bitpos, rl_bitpos);
3345 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3346 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3347 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3348 volatilep);
3349 if (lnmode == VOIDmode)
3350 return 0;
3352 lnbitsize = GET_MODE_BITSIZE (lnmode);
3353 lnbitpos = first_bit & ~ (lnbitsize - 1);
3354 type = type_for_size (lnbitsize, 1);
3355 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3357 if (BYTES_BIG_ENDIAN)
3359 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3360 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3363 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
3364 size_int (xll_bitpos), 0);
3365 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
3366 size_int (xrl_bitpos), 0);
3368 if (l_const)
3370 l_const = convert (type, l_const);
3371 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3372 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3373 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3374 fold (build1 (BIT_NOT_EXPR,
3375 type, ll_mask)),
3376 0)))
3378 warning ("comparison is always %s",
3379 wanted_code == NE_EXPR ? "one" : "zero");
3381 return convert (truth_type,
3382 wanted_code == NE_EXPR
3383 ? integer_one_node : integer_zero_node);
3386 if (r_const)
3388 r_const = convert (type, r_const);
3389 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3390 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3391 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3392 fold (build1 (BIT_NOT_EXPR,
3393 type, rl_mask)),
3394 0)))
3396 warning ("comparison is always %s",
3397 wanted_code == NE_EXPR ? "one" : "zero");
3399 return convert (truth_type,
3400 wanted_code == NE_EXPR
3401 ? integer_one_node : integer_zero_node);
3405 /* If the right sides are not constant, do the same for it. Also,
3406 disallow this optimization if a size or signedness mismatch occurs
3407 between the left and right sides. */
3408 if (l_const == 0)
3410 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3411 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3412 /* Make sure the two fields on the right
3413 correspond to the left without being swapped. */
3414 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3415 return 0;
3417 first_bit = MIN (lr_bitpos, rr_bitpos);
3418 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3419 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3420 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3421 volatilep);
3422 if (rnmode == VOIDmode)
3423 return 0;
3425 rnbitsize = GET_MODE_BITSIZE (rnmode);
3426 rnbitpos = first_bit & ~ (rnbitsize - 1);
3427 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3429 if (BYTES_BIG_ENDIAN)
3431 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3432 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3435 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
3436 size_int (xlr_bitpos), 0);
3437 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
3438 size_int (xrr_bitpos), 0);
3440 /* Make a mask that corresponds to both fields being compared.
3441 Do this for both items being compared. If the masks agree,
3442 we can do this by masking both and comparing the masked
3443 results. */
3444 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3445 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3446 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
3448 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3449 ll_unsignedp || rl_unsignedp);
3450 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
3451 lr_unsignedp || rr_unsignedp);
3452 if (! all_ones_mask_p (ll_mask, lnbitsize))
3454 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3455 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
3457 return build (wanted_code, truth_type, lhs, rhs);
3460 /* There is still another way we can do something: If both pairs of
3461 fields being compared are adjacent, we may be able to make a wider
3462 field containing them both. */
3463 if ((ll_bitsize + ll_bitpos == rl_bitpos
3464 && lr_bitsize + lr_bitpos == rr_bitpos)
3465 || (ll_bitpos == rl_bitpos + rl_bitsize
3466 && lr_bitpos == rr_bitpos + rr_bitsize))
3467 return build (wanted_code, truth_type,
3468 make_bit_field_ref (ll_inner, type,
3469 ll_bitsize + rl_bitsize,
3470 MIN (ll_bitpos, rl_bitpos),
3471 ll_unsignedp),
3472 make_bit_field_ref (lr_inner, type,
3473 lr_bitsize + rr_bitsize,
3474 MIN (lr_bitpos, rr_bitpos),
3475 lr_unsignedp));
3477 return 0;
3480 /* Handle the case of comparisons with constants. If there is something in
3481 common between the masks, those bits of the constants must be the same.
3482 If not, the condition is always false. Test for this to avoid generating
3483 incorrect code below. */
3484 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3485 if (! integer_zerop (result)
3486 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3487 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3489 if (wanted_code == NE_EXPR)
3491 warning ("`or' of unmatched not-equal tests is always 1");
3492 return convert (truth_type, integer_one_node);
3494 else
3496 warning ("`and' of mutually exclusive equal-tests is always zero");
3497 return convert (truth_type, integer_zero_node);
3501 /* Construct the expression we will return. First get the component
3502 reference we will make. Unless the mask is all ones the width of
3503 that field, perform the mask operation. Then compare with the
3504 merged constant. */
3505 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3506 ll_unsignedp || rl_unsignedp);
3508 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3509 if (! all_ones_mask_p (ll_mask, lnbitsize))
3510 result = build (BIT_AND_EXPR, type, result, ll_mask);
3512 return build (wanted_code, truth_type, result,
3513 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3516 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3517 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3518 that we may sometimes modify the tree. */
3520 static tree
3521 strip_compound_expr (t, s)
3522 tree t;
3523 tree s;
3525 tree type = TREE_TYPE (t);
3526 enum tree_code code = TREE_CODE (t);
3528 /* See if this is the COMPOUND_EXPR we want to eliminate. */
3529 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
3530 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
3531 return TREE_OPERAND (t, 1);
3533 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3534 don't bother handling any other types. */
3535 else if (code == COND_EXPR)
3537 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3538 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3539 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
3541 else if (TREE_CODE_CLASS (code) == '1')
3542 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3543 else if (TREE_CODE_CLASS (code) == '<'
3544 || TREE_CODE_CLASS (code) == '2')
3546 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3547 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3550 return t;
3553 /* Perform constant folding and related simplification of EXPR.
3554 The related simplifications include x*1 => x, x*0 => 0, etc.,
3555 and application of the associative law.
3556 NOP_EXPR conversions may be removed freely (as long as we
3557 are careful not to change the C type of the overall expression)
3558 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3559 but we can constant-fold them if they have constant operands. */
3561 tree
3562 fold (expr)
3563 tree expr;
3565 register tree t = expr;
3566 tree t1 = NULL_TREE;
3567 tree tem;
3568 tree type = TREE_TYPE (expr);
3569 register tree arg0, arg1;
3570 register enum tree_code code = TREE_CODE (t);
3571 register int kind;
3572 int invert;
3574 /* WINS will be nonzero when the switch is done
3575 if all operands are constant. */
3577 int wins = 1;
3579 /* Don't try to process an RTL_EXPR since its operands aren't trees.
3580 Likewise for a SAVE_EXPR that's already been evaluated. */
3581 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t)) != 0)
3582 return t;
3584 /* Return right away if already constant. */
3585 if (TREE_CONSTANT (t))
3587 if (code == CONST_DECL)
3588 return DECL_INITIAL (t);
3589 return t;
3592 kind = TREE_CODE_CLASS (code);
3593 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3595 tree subop;
3597 /* Special case for conversion ops that can have fixed point args. */
3598 arg0 = TREE_OPERAND (t, 0);
3600 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3601 if (arg0 != 0)
3602 STRIP_TYPE_NOPS (arg0);
3604 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3605 subop = TREE_REALPART (arg0);
3606 else
3607 subop = arg0;
3609 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3610 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3611 && TREE_CODE (subop) != REAL_CST
3612 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3614 /* Note that TREE_CONSTANT isn't enough:
3615 static var addresses are constant but we can't
3616 do arithmetic on them. */
3617 wins = 0;
3619 else if (kind == 'e' || kind == '<'
3620 || kind == '1' || kind == '2' || kind == 'r')
3622 register int len = tree_code_length[(int) code];
3623 register int i;
3624 for (i = 0; i < len; i++)
3626 tree op = TREE_OPERAND (t, i);
3627 tree subop;
3629 if (op == 0)
3630 continue; /* Valid for CALL_EXPR, at least. */
3632 if (kind == '<' || code == RSHIFT_EXPR)
3634 /* Signedness matters here. Perhaps we can refine this
3635 later. */
3636 STRIP_TYPE_NOPS (op);
3638 else
3640 /* Strip any conversions that don't change the mode. */
3641 STRIP_NOPS (op);
3644 if (TREE_CODE (op) == COMPLEX_CST)
3645 subop = TREE_REALPART (op);
3646 else
3647 subop = op;
3649 if (TREE_CODE (subop) != INTEGER_CST
3650 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3651 && TREE_CODE (subop) != REAL_CST
3652 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3654 /* Note that TREE_CONSTANT isn't enough:
3655 static var addresses are constant but we can't
3656 do arithmetic on them. */
3657 wins = 0;
3659 if (i == 0)
3660 arg0 = op;
3661 else if (i == 1)
3662 arg1 = op;
3666 /* If this is a commutative operation, and ARG0 is a constant, move it
3667 to ARG1 to reduce the number of tests below. */
3668 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3669 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3670 || code == BIT_AND_EXPR)
3671 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3673 tem = arg0; arg0 = arg1; arg1 = tem;
3675 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3676 TREE_OPERAND (t, 1) = tem;
3679 /* Now WINS is set as described above,
3680 ARG0 is the first operand of EXPR,
3681 and ARG1 is the second operand (if it has more than one operand).
3683 First check for cases where an arithmetic operation is applied to a
3684 compound, conditional, or comparison operation. Push the arithmetic
3685 operation inside the compound or conditional to see if any folding
3686 can then be done. Convert comparison to conditional for this purpose.
3687 This also optimizes non-constant cases that used to be done in
3688 expand_expr.
3690 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3691 one of the operands is a comparison and the other is a comparison, a
3692 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3693 code below would make the expression more complex. Change it to a
3694 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3695 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3697 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3698 || code == EQ_EXPR || code == NE_EXPR)
3699 && ((truth_value_p (TREE_CODE (arg0))
3700 && (truth_value_p (TREE_CODE (arg1))
3701 || (TREE_CODE (arg1) == BIT_AND_EXPR
3702 && integer_onep (TREE_OPERAND (arg1, 1)))))
3703 || (truth_value_p (TREE_CODE (arg1))
3704 && (truth_value_p (TREE_CODE (arg0))
3705 || (TREE_CODE (arg0) == BIT_AND_EXPR
3706 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3708 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3709 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3710 : TRUTH_XOR_EXPR,
3711 type, arg0, arg1));
3713 if (code == EQ_EXPR)
3714 t = invert_truthvalue (t);
3716 return t;
3719 if (TREE_CODE_CLASS (code) == '1')
3721 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3722 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3723 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3724 else if (TREE_CODE (arg0) == COND_EXPR)
3726 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3727 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3728 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3730 /* If this was a conversion, and all we did was to move into
3731 inside the COND_EXPR, bring it back out. But leave it if
3732 it is a conversion from integer to integer and the
3733 result precision is no wider than a word since such a
3734 conversion is cheap and may be optimized away by combine,
3735 while it couldn't if it were outside the COND_EXPR. Then return
3736 so we don't get into an infinite recursion loop taking the
3737 conversion out and then back in. */
3739 if ((code == NOP_EXPR || code == CONVERT_EXPR
3740 || code == NON_LVALUE_EXPR)
3741 && TREE_CODE (t) == COND_EXPR
3742 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3743 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3744 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3745 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
3746 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
3747 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
3748 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
3749 t = build1 (code, type,
3750 build (COND_EXPR,
3751 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3752 TREE_OPERAND (t, 0),
3753 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3754 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3755 return t;
3757 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3758 return fold (build (COND_EXPR, type, arg0,
3759 fold (build1 (code, type, integer_one_node)),
3760 fold (build1 (code, type, integer_zero_node))));
3762 else if (TREE_CODE_CLASS (code) == '2'
3763 || TREE_CODE_CLASS (code) == '<')
3765 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3766 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3767 fold (build (code, type,
3768 arg0, TREE_OPERAND (arg1, 1))));
3769 else if ((TREE_CODE (arg1) == COND_EXPR
3770 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
3771 && TREE_CODE_CLASS (code) != '<'))
3772 && (! TREE_SIDE_EFFECTS (arg0) || current_function_decl != 0))
3774 tree test, true_value, false_value;
3776 if (TREE_CODE (arg1) == COND_EXPR)
3778 test = TREE_OPERAND (arg1, 0);
3779 true_value = TREE_OPERAND (arg1, 1);
3780 false_value = TREE_OPERAND (arg1, 2);
3782 else
3784 tree testtype = TREE_TYPE (arg1);
3785 test = arg1;
3786 true_value = convert (testtype, integer_one_node);
3787 false_value = convert (testtype, integer_zero_node);
3790 /* If ARG0 is complex we want to make sure we only evaluate
3791 it once. Though this is only required if it is volatile, it
3792 might be more efficient even if it is not. However, if we
3793 succeed in folding one part to a constant, we do not need
3794 to make this SAVE_EXPR. Since we do this optimization
3795 primarily to see if we do end up with constant and this
3796 SAVE_EXPR interferes with later optimizations, suppressing
3797 it when we can is important. */
3799 if (TREE_CODE (arg0) != SAVE_EXPR
3800 && ((TREE_CODE (arg0) != VAR_DECL
3801 && TREE_CODE (arg0) != PARM_DECL)
3802 || TREE_SIDE_EFFECTS (arg0)))
3804 tree lhs = fold (build (code, type, arg0, true_value));
3805 tree rhs = fold (build (code, type, arg0, false_value));
3807 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3808 return fold (build (COND_EXPR, type, test, lhs, rhs));
3810 if (current_function_decl != 0)
3811 arg0 = save_expr (arg0);
3814 test = fold (build (COND_EXPR, type, test,
3815 fold (build (code, type, arg0, true_value)),
3816 fold (build (code, type, arg0, false_value))));
3817 if (TREE_CODE (arg0) == SAVE_EXPR)
3818 return build (COMPOUND_EXPR, type,
3819 convert (void_type_node, arg0),
3820 strip_compound_expr (test, arg0));
3821 else
3822 return convert (type, test);
3825 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3826 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3827 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3828 else if ((TREE_CODE (arg0) == COND_EXPR
3829 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
3830 && TREE_CODE_CLASS (code) != '<'))
3831 && (! TREE_SIDE_EFFECTS (arg1) || current_function_decl != 0))
3833 tree test, true_value, false_value;
3835 if (TREE_CODE (arg0) == COND_EXPR)
3837 test = TREE_OPERAND (arg0, 0);
3838 true_value = TREE_OPERAND (arg0, 1);
3839 false_value = TREE_OPERAND (arg0, 2);
3841 else
3843 tree testtype = TREE_TYPE (arg0);
3844 test = arg0;
3845 true_value = convert (testtype, integer_one_node);
3846 false_value = convert (testtype, integer_zero_node);
3849 if (TREE_CODE (arg1) != SAVE_EXPR
3850 && ((TREE_CODE (arg1) != VAR_DECL
3851 && TREE_CODE (arg1) != PARM_DECL)
3852 || TREE_SIDE_EFFECTS (arg1)))
3854 tree lhs = fold (build (code, type, true_value, arg1));
3855 tree rhs = fold (build (code, type, false_value, arg1));
3857 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs)
3858 || TREE_CONSTANT (arg1))
3859 return fold (build (COND_EXPR, type, test, lhs, rhs));
3861 if (current_function_decl != 0)
3862 arg1 = save_expr (arg1);
3865 test = fold (build (COND_EXPR, type, test,
3866 fold (build (code, type, true_value, arg1)),
3867 fold (build (code, type, false_value, arg1))));
3868 if (TREE_CODE (arg1) == SAVE_EXPR)
3869 return build (COMPOUND_EXPR, type,
3870 convert (void_type_node, arg1),
3871 strip_compound_expr (test, arg1));
3872 else
3873 return convert (type, test);
3876 else if (TREE_CODE_CLASS (code) == '<'
3877 && TREE_CODE (arg0) == COMPOUND_EXPR)
3878 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3879 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3880 else if (TREE_CODE_CLASS (code) == '<'
3881 && TREE_CODE (arg1) == COMPOUND_EXPR)
3882 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3883 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
3885 switch (code)
3887 case INTEGER_CST:
3888 case REAL_CST:
3889 case STRING_CST:
3890 case COMPLEX_CST:
3891 case CONSTRUCTOR:
3892 return t;
3894 case CONST_DECL:
3895 return fold (DECL_INITIAL (t));
3897 case NOP_EXPR:
3898 case FLOAT_EXPR:
3899 case CONVERT_EXPR:
3900 case FIX_TRUNC_EXPR:
3901 /* Other kinds of FIX are not handled properly by fold_convert. */
3903 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
3904 return TREE_OPERAND (t, 0);
3906 /* Handle cases of two conversions in a row. */
3907 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3908 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3910 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3911 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
3912 tree final_type = TREE_TYPE (t);
3913 int inside_int = INTEGRAL_TYPE_P (inside_type);
3914 int inside_ptr = POINTER_TYPE_P (inside_type);
3915 int inside_float = FLOAT_TYPE_P (inside_type);
3916 int inside_prec = TYPE_PRECISION (inside_type);
3917 int inside_unsignedp = TREE_UNSIGNED (inside_type);
3918 int inter_int = INTEGRAL_TYPE_P (inter_type);
3919 int inter_ptr = POINTER_TYPE_P (inter_type);
3920 int inter_float = FLOAT_TYPE_P (inter_type);
3921 int inter_prec = TYPE_PRECISION (inter_type);
3922 int inter_unsignedp = TREE_UNSIGNED (inter_type);
3923 int final_int = INTEGRAL_TYPE_P (final_type);
3924 int final_ptr = POINTER_TYPE_P (final_type);
3925 int final_float = FLOAT_TYPE_P (final_type);
3926 int final_prec = TYPE_PRECISION (final_type);
3927 int final_unsignedp = TREE_UNSIGNED (final_type);
3929 /* In addition to the cases of two conversions in a row
3930 handled below, if we are converting something to its own
3931 type via an object of identical or wider precision, neither
3932 conversion is needed. */
3933 if (inside_type == final_type
3934 && ((inter_int && final_int) || (inter_float && final_float))
3935 && inter_prec >= final_prec)
3936 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3938 /* Likewise, if the intermediate and final types are either both
3939 float or both integer, we don't need the middle conversion if
3940 it is wider than the final type and doesn't change the signedness
3941 (for integers). Avoid this if the final type is a pointer
3942 since then we sometimes need the inner conversion. Likewise if
3943 the outer has a precision not equal to the size of its mode. */
3944 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
3945 || (inter_float && inside_float))
3946 && inter_prec >= inside_prec
3947 && (inter_float || inter_unsignedp == inside_unsignedp)
3948 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
3949 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
3950 && ! final_ptr)
3951 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3953 /* Two conversions in a row are not needed unless:
3954 - some conversion is floating-point (overstrict for now), or
3955 - the intermediate type is narrower than both initial and
3956 final, or
3957 - the intermediate type and innermost type differ in signedness,
3958 and the outermost type is wider than the intermediate, or
3959 - the initial type is a pointer type and the precisions of the
3960 intermediate and final types differ, or
3961 - the final type is a pointer type and the precisions of the
3962 initial and intermediate types differ. */
3963 if (! inside_float && ! inter_float && ! final_float
3964 && (inter_prec > inside_prec || inter_prec > final_prec)
3965 && ! (inside_int && inter_int
3966 && inter_unsignedp != inside_unsignedp
3967 && inter_prec < final_prec)
3968 && ((inter_unsignedp && inter_prec > inside_prec)
3969 == (final_unsignedp && final_prec > inter_prec))
3970 && ! (inside_ptr && inter_prec != final_prec)
3971 && ! (final_ptr && inside_prec != inter_prec)
3972 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
3973 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
3974 && ! final_ptr)
3975 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3978 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
3979 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
3980 /* Detect assigning a bitfield. */
3981 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
3982 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
3984 /* Don't leave an assignment inside a conversion
3985 unless assigning a bitfield. */
3986 tree prev = TREE_OPERAND (t, 0);
3987 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
3988 /* First do the assignment, then return converted constant. */
3989 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
3990 TREE_USED (t) = 1;
3991 return t;
3993 if (!wins)
3995 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
3996 return t;
3998 return fold_convert (t, arg0);
4000 #if 0 /* This loses on &"foo"[0]. */
4001 case ARRAY_REF:
4003 int i;
4005 /* Fold an expression like: "foo"[2] */
4006 if (TREE_CODE (arg0) == STRING_CST
4007 && TREE_CODE (arg1) == INTEGER_CST
4008 && !TREE_INT_CST_HIGH (arg1)
4009 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4011 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4012 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
4013 force_fit_type (t, 0);
4016 return t;
4017 #endif /* 0 */
4019 case COMPONENT_REF:
4020 if (TREE_CODE (arg0) == CONSTRUCTOR)
4022 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4023 if (m)
4024 t = TREE_VALUE (m);
4026 return t;
4028 case RANGE_EXPR:
4029 TREE_CONSTANT (t) = wins;
4030 return t;
4032 case NEGATE_EXPR:
4033 if (wins)
4035 if (TREE_CODE (arg0) == INTEGER_CST)
4037 HOST_WIDE_INT low, high;
4038 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4039 TREE_INT_CST_HIGH (arg0),
4040 &low, &high);
4041 t = build_int_2 (low, high);
4042 TREE_TYPE (t) = type;
4043 TREE_OVERFLOW (t)
4044 = (TREE_OVERFLOW (arg0)
4045 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4046 TREE_CONSTANT_OVERFLOW (t)
4047 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4049 else if (TREE_CODE (arg0) == REAL_CST)
4050 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4052 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4053 return TREE_OPERAND (arg0, 0);
4055 /* Convert - (a - b) to (b - a) for non-floating-point. */
4056 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
4057 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4058 TREE_OPERAND (arg0, 0));
4060 return t;
4062 case ABS_EXPR:
4063 if (wins)
4065 if (TREE_CODE (arg0) == INTEGER_CST)
4067 if (! TREE_UNSIGNED (type)
4068 && TREE_INT_CST_HIGH (arg0) < 0)
4070 HOST_WIDE_INT low, high;
4071 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4072 TREE_INT_CST_HIGH (arg0),
4073 &low, &high);
4074 t = build_int_2 (low, high);
4075 TREE_TYPE (t) = type;
4076 TREE_OVERFLOW (t)
4077 = (TREE_OVERFLOW (arg0)
4078 | force_fit_type (t, overflow));
4079 TREE_CONSTANT_OVERFLOW (t)
4080 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4083 else if (TREE_CODE (arg0) == REAL_CST)
4085 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4086 t = build_real (type,
4087 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4090 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4091 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4092 return t;
4094 case CONJ_EXPR:
4095 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4096 return arg0;
4097 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4098 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
4099 TREE_OPERAND (arg0, 0),
4100 fold (build1 (NEGATE_EXPR,
4101 TREE_TYPE (TREE_TYPE (arg0)),
4102 TREE_OPERAND (arg0, 1))));
4103 else if (TREE_CODE (arg0) == COMPLEX_CST)
4104 return build_complex (type, TREE_OPERAND (arg0, 0),
4105 fold (build1 (NEGATE_EXPR,
4106 TREE_TYPE (TREE_TYPE (arg0)),
4107 TREE_OPERAND (arg0, 1))));
4108 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4109 return fold (build (TREE_CODE (arg0), type,
4110 fold (build1 (CONJ_EXPR, type,
4111 TREE_OPERAND (arg0, 0))),
4112 fold (build1 (CONJ_EXPR,
4113 type, TREE_OPERAND (arg0, 1)))));
4114 else if (TREE_CODE (arg0) == CONJ_EXPR)
4115 return TREE_OPERAND (arg0, 0);
4116 return t;
4118 case BIT_NOT_EXPR:
4119 if (wins)
4121 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4122 ~ TREE_INT_CST_HIGH (arg0));
4123 TREE_TYPE (t) = type;
4124 force_fit_type (t, 0);
4125 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4126 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4128 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4129 return TREE_OPERAND (arg0, 0);
4130 return t;
4132 case PLUS_EXPR:
4133 /* A + (-B) -> A - B */
4134 if (TREE_CODE (arg1) == NEGATE_EXPR)
4135 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4136 else if (! FLOAT_TYPE_P (type))
4138 if (integer_zerop (arg1))
4139 return non_lvalue (convert (type, arg0));
4141 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4142 with a constant, and the two constants have no bits in common,
4143 we should treat this as a BIT_IOR_EXPR since this may produce more
4144 simplifications. */
4145 if (TREE_CODE (arg0) == BIT_AND_EXPR
4146 && TREE_CODE (arg1) == BIT_AND_EXPR
4147 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4148 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
4149 && integer_zerop (const_binop (BIT_AND_EXPR,
4150 TREE_OPERAND (arg0, 1),
4151 TREE_OPERAND (arg1, 1), 0)))
4153 code = BIT_IOR_EXPR;
4154 goto bit_ior;
4157 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
4158 about the case where C is a constant, just try one of the
4159 four possibilities. */
4161 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4162 && operand_equal_p (TREE_OPERAND (arg0, 1),
4163 TREE_OPERAND (arg1, 1), 0))
4164 return fold (build (MULT_EXPR, type,
4165 fold (build (PLUS_EXPR, type,
4166 TREE_OPERAND (arg0, 0),
4167 TREE_OPERAND (arg1, 0))),
4168 TREE_OPERAND (arg0, 1)));
4170 /* In IEEE floating point, x+0 may not equal x. */
4171 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4172 || flag_fast_math)
4173 && real_zerop (arg1))
4174 return non_lvalue (convert (type, arg0));
4175 associate:
4176 /* In most languages, can't associate operations on floats
4177 through parentheses. Rather than remember where the parentheses
4178 were, we don't associate floats at all. It shouldn't matter much.
4179 However, associating multiplications is only very slightly
4180 inaccurate, so do that if -ffast-math is specified. */
4181 if (FLOAT_TYPE_P (type)
4182 && ! (flag_fast_math && code == MULT_EXPR))
4183 goto binary;
4185 /* The varsign == -1 cases happen only for addition and subtraction.
4186 It says that the arg that was split was really CON minus VAR.
4187 The rest of the code applies to all associative operations. */
4188 if (!wins)
4190 tree var, con;
4191 int varsign;
4193 if (split_tree (arg0, code, &var, &con, &varsign))
4195 if (varsign == -1)
4197 /* EXPR is (CON-VAR) +- ARG1. */
4198 /* If it is + and VAR==ARG1, return just CONST. */
4199 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
4200 return convert (TREE_TYPE (t), con);
4202 /* If ARG0 is a constant, don't change things around;
4203 instead keep all the constant computations together. */
4205 if (TREE_CONSTANT (arg0))
4206 return t;
4208 /* Otherwise return (CON +- ARG1) - VAR. */
4209 t = build (MINUS_EXPR, type,
4210 fold (build (code, type, con, arg1)), var);
4212 else
4214 /* EXPR is (VAR+CON) +- ARG1. */
4215 /* If it is - and VAR==ARG1, return just CONST. */
4216 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
4217 return convert (TREE_TYPE (t), con);
4219 /* If ARG0 is a constant, don't change things around;
4220 instead keep all the constant computations together. */
4222 if (TREE_CONSTANT (arg0))
4223 return t;
4225 /* Otherwise return VAR +- (ARG1 +- CON). */
4226 tem = fold (build (code, type, arg1, con));
4227 t = build (code, type, var, tem);
4229 if (integer_zerop (tem)
4230 && (code == PLUS_EXPR || code == MINUS_EXPR))
4231 return convert (type, var);
4232 /* If we have x +/- (c - d) [c an explicit integer]
4233 change it to x -/+ (d - c) since if d is relocatable
4234 then the latter can be a single immediate insn
4235 and the former cannot. */
4236 if (TREE_CODE (tem) == MINUS_EXPR
4237 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
4239 tree tem1 = TREE_OPERAND (tem, 1);
4240 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
4241 TREE_OPERAND (tem, 0) = tem1;
4242 TREE_SET_CODE (t,
4243 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4246 return t;
4249 if (split_tree (arg1, code, &var, &con, &varsign))
4251 if (TREE_CONSTANT (arg1))
4252 return t;
4254 if (varsign == -1)
4255 TREE_SET_CODE (t,
4256 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4258 /* EXPR is ARG0 +- (CON +- VAR). */
4259 if (TREE_CODE (t) == MINUS_EXPR
4260 && operand_equal_p (var, arg0, 0))
4262 /* If VAR and ARG0 cancel, return just CON or -CON. */
4263 if (code == PLUS_EXPR)
4264 return convert (TREE_TYPE (t), con);
4265 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
4266 convert (TREE_TYPE (t), con)));
4269 t = build (TREE_CODE (t), type,
4270 fold (build (code, TREE_TYPE (t), arg0, con)), var);
4272 if (integer_zerop (TREE_OPERAND (t, 0))
4273 && TREE_CODE (t) == PLUS_EXPR)
4274 return convert (TREE_TYPE (t), var);
4275 return t;
4278 binary:
4279 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
4280 if (TREE_CODE (arg1) == REAL_CST)
4281 return t;
4282 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
4283 if (wins)
4284 t1 = const_binop (code, arg0, arg1, 0);
4285 if (t1 != NULL_TREE)
4287 /* The return value should always have
4288 the same type as the original expression. */
4289 if (TREE_TYPE (t1) != TREE_TYPE (t))
4290 t1 = convert (TREE_TYPE (t), t1);
4292 return t1;
4294 return t;
4296 case MINUS_EXPR:
4297 if (! FLOAT_TYPE_P (type))
4299 if (! wins && integer_zerop (arg0))
4300 return build1 (NEGATE_EXPR, type, arg1);
4301 if (integer_zerop (arg1))
4302 return non_lvalue (convert (type, arg0));
4304 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
4305 about the case where C is a constant, just try one of the
4306 four possibilities. */
4308 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4309 && operand_equal_p (TREE_OPERAND (arg0, 1),
4310 TREE_OPERAND (arg1, 1), 0))
4311 return fold (build (MULT_EXPR, type,
4312 fold (build (MINUS_EXPR, type,
4313 TREE_OPERAND (arg0, 0),
4314 TREE_OPERAND (arg1, 0))),
4315 TREE_OPERAND (arg0, 1)));
4317 /* Convert A - (-B) to A + B. */
4318 else if (TREE_CODE (arg1) == NEGATE_EXPR)
4319 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4321 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4322 || flag_fast_math)
4324 /* Except with IEEE floating point, 0-x equals -x. */
4325 if (! wins && real_zerop (arg0))
4326 return build1 (NEGATE_EXPR, type, arg1);
4327 /* Except with IEEE floating point, x-0 equals x. */
4328 if (real_zerop (arg1))
4329 return non_lvalue (convert (type, arg0));
4332 /* Fold &x - &x. This can happen from &x.foo - &x.
4333 This is unsafe for certain floats even in non-IEEE formats.
4334 In IEEE, it is unsafe because it does wrong for NaNs.
4335 Also note that operand_equal_p is always false if an operand
4336 is volatile. */
4338 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
4339 && operand_equal_p (arg0, arg1, 0))
4340 return convert (type, integer_zero_node);
4342 goto associate;
4344 case MULT_EXPR:
4345 if (! FLOAT_TYPE_P (type))
4347 if (integer_zerop (arg1))
4348 return omit_one_operand (type, arg1, arg0);
4349 if (integer_onep (arg1))
4350 return non_lvalue (convert (type, arg0));
4352 /* ((A / C) * C) is A if the division is an
4353 EXACT_DIV_EXPR. Since C is normally a constant,
4354 just check for one of the four possibilities. */
4356 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
4357 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4358 return TREE_OPERAND (arg0, 0);
4360 /* (a * (1 << b)) is (a << b) */
4361 if (TREE_CODE (arg1) == LSHIFT_EXPR
4362 && integer_onep (TREE_OPERAND (arg1, 0)))
4363 return fold (build (LSHIFT_EXPR, type, arg0,
4364 TREE_OPERAND (arg1, 1)));
4365 if (TREE_CODE (arg0) == LSHIFT_EXPR
4366 && integer_onep (TREE_OPERAND (arg0, 0)))
4367 return fold (build (LSHIFT_EXPR, type, arg1,
4368 TREE_OPERAND (arg0, 1)));
4370 else
4372 /* x*0 is 0, except for IEEE floating point. */
4373 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4374 || flag_fast_math)
4375 && real_zerop (arg1))
4376 return omit_one_operand (type, arg1, arg0);
4377 /* In IEEE floating point, x*1 is not equivalent to x for snans.
4378 However, ANSI says we can drop signals,
4379 so we can do this anyway. */
4380 if (real_onep (arg1))
4381 return non_lvalue (convert (type, arg0));
4382 /* x*2 is x+x */
4383 if (! wins && real_twop (arg1) && current_function_decl != 0)
4385 tree arg = save_expr (arg0);
4386 return build (PLUS_EXPR, type, arg, arg);
4389 goto associate;
4391 case BIT_IOR_EXPR:
4392 bit_ior:
4394 register enum tree_code code0, code1;
4396 if (integer_all_onesp (arg1))
4397 return omit_one_operand (type, arg1, arg0);
4398 if (integer_zerop (arg1))
4399 return non_lvalue (convert (type, arg0));
4400 t1 = distribute_bit_expr (code, type, arg0, arg1);
4401 if (t1 != NULL_TREE)
4402 return t1;
4404 /* (A << C1) | (A >> C2) if A is unsigned and C1+C2 is the size of A
4405 is a rotate of A by C1 bits. */
4406 /* (A << B) | (A >> (Z - B)) if A is unsigned and Z is the size of A
4407 is a rotate of A by B bits. */
4409 code0 = TREE_CODE (arg0);
4410 code1 = TREE_CODE (arg1);
4411 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
4412 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
4413 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
4414 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4416 register tree tree01, tree11;
4417 register enum tree_code code01, code11;
4419 tree01 = TREE_OPERAND (arg0, 1);
4420 tree11 = TREE_OPERAND (arg1, 1);
4421 code01 = TREE_CODE (tree01);
4422 code11 = TREE_CODE (tree11);
4423 if (code01 == INTEGER_CST
4424 && code11 == INTEGER_CST
4425 && TREE_INT_CST_HIGH (tree01) == 0
4426 && TREE_INT_CST_HIGH (tree11) == 0
4427 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
4428 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
4429 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
4430 code0 == LSHIFT_EXPR ? tree01 : tree11);
4431 else if (code11 == MINUS_EXPR
4432 && TREE_CODE (TREE_OPERAND (tree11, 0)) == INTEGER_CST
4433 && TREE_INT_CST_HIGH (TREE_OPERAND (tree11, 0)) == 0
4434 && TREE_INT_CST_LOW (TREE_OPERAND (tree11, 0))
4435 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4436 && operand_equal_p (tree01, TREE_OPERAND (tree11, 1), 0))
4437 return build (code0 == LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4438 type, TREE_OPERAND (arg0, 0), tree01);
4439 else if (code01 == MINUS_EXPR
4440 && TREE_CODE (TREE_OPERAND (tree01, 0)) == INTEGER_CST
4441 && TREE_INT_CST_HIGH (TREE_OPERAND (tree01, 0)) == 0
4442 && TREE_INT_CST_LOW (TREE_OPERAND (tree01, 0))
4443 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4444 && operand_equal_p (tree11, TREE_OPERAND (tree01, 1), 0))
4445 return build (code0 != LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4446 type, TREE_OPERAND (arg0, 0), tree11);
4449 goto associate;
4452 case BIT_XOR_EXPR:
4453 if (integer_zerop (arg1))
4454 return non_lvalue (convert (type, arg0));
4455 if (integer_all_onesp (arg1))
4456 return fold (build1 (BIT_NOT_EXPR, type, arg0));
4457 goto associate;
4459 case BIT_AND_EXPR:
4460 bit_and:
4461 if (integer_all_onesp (arg1))
4462 return non_lvalue (convert (type, arg0));
4463 if (integer_zerop (arg1))
4464 return omit_one_operand (type, arg1, arg0);
4465 t1 = distribute_bit_expr (code, type, arg0, arg1);
4466 if (t1 != NULL_TREE)
4467 return t1;
4468 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
4469 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
4470 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
4472 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
4473 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4474 && (~TREE_INT_CST_LOW (arg0)
4475 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4476 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
4478 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
4479 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4481 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
4482 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4483 && (~TREE_INT_CST_LOW (arg1)
4484 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4485 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
4487 goto associate;
4489 case BIT_ANDTC_EXPR:
4490 if (integer_all_onesp (arg0))
4491 return non_lvalue (convert (type, arg1));
4492 if (integer_zerop (arg0))
4493 return omit_one_operand (type, arg0, arg1);
4494 if (TREE_CODE (arg1) == INTEGER_CST)
4496 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
4497 code = BIT_AND_EXPR;
4498 goto bit_and;
4500 goto binary;
4502 case RDIV_EXPR:
4503 /* In most cases, do nothing with a divide by zero. */
4504 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4505 #ifndef REAL_INFINITY
4506 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
4507 return t;
4508 #endif
4509 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4511 /* In IEEE floating point, x/1 is not equivalent to x for snans.
4512 However, ANSI says we can drop signals, so we can do this anyway. */
4513 if (real_onep (arg1))
4514 return non_lvalue (convert (type, arg0));
4516 /* If ARG1 is a constant, we can convert this to a multiply by the
4517 reciprocal. This does not have the same rounding properties,
4518 so only do this if -ffast-math. We can actually always safely
4519 do it if ARG1 is a power of two, but it's hard to tell if it is
4520 or not in a portable manner. */
4521 if (TREE_CODE (arg1) == REAL_CST)
4523 if (flag_fast_math
4524 && 0 != (tem = const_binop (code, build_real (type, dconst1),
4525 arg1, 0)))
4526 return fold (build (MULT_EXPR, type, arg0, tem));
4527 /* Find the reciprocal if optimizing and the result is exact. */
4528 else if (optimize)
4530 REAL_VALUE_TYPE r;
4531 r = TREE_REAL_CST (arg1);
4532 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
4534 tem = build_real (type, r);
4535 return fold (build (MULT_EXPR, type, arg0, tem));
4539 goto binary;
4541 case TRUNC_DIV_EXPR:
4542 case ROUND_DIV_EXPR:
4543 case FLOOR_DIV_EXPR:
4544 case CEIL_DIV_EXPR:
4545 case EXACT_DIV_EXPR:
4546 if (integer_onep (arg1))
4547 return non_lvalue (convert (type, arg0));
4548 if (integer_zerop (arg1))
4549 return t;
4551 /* If we have ((a / C1) / C2) where both division are the same type, try
4552 to simplify. First see if C1 * C2 overflows or not. */
4553 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
4554 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4556 tree new_divisor;
4558 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
4559 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
4561 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
4562 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
4564 /* If no overflow, divide by C1*C2. */
4565 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
4569 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4570 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4571 expressions, which often appear in the offsets or sizes of
4572 objects with a varying size. Only deal with positive divisors
4573 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4575 Look for NOPs and SAVE_EXPRs inside. */
4577 if (TREE_CODE (arg1) == INTEGER_CST
4578 && tree_int_cst_sgn (arg1) >= 0)
4580 int have_save_expr = 0;
4581 tree c2 = integer_zero_node;
4582 tree xarg0 = arg0;
4584 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4585 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4587 STRIP_NOPS (xarg0);
4589 if (TREE_CODE (xarg0) == PLUS_EXPR
4590 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4591 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4592 else if (TREE_CODE (xarg0) == MINUS_EXPR
4593 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4594 /* If we are doing this computation unsigned, the negate
4595 is incorrect. */
4596 && ! TREE_UNSIGNED (type))
4598 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4599 xarg0 = TREE_OPERAND (xarg0, 0);
4602 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4603 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4605 STRIP_NOPS (xarg0);
4607 if (TREE_CODE (xarg0) == MULT_EXPR
4608 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4609 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4610 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4611 TREE_OPERAND (xarg0, 1), arg1, 1))
4612 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4613 TREE_OPERAND (xarg0, 1), 1)))
4614 && (tree_int_cst_sgn (c2) >= 0
4615 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4616 arg1, 1))))
4618 tree outer_div = integer_one_node;
4619 tree c1 = TREE_OPERAND (xarg0, 1);
4620 tree c3 = arg1;
4622 /* If C3 > C1, set them equal and do a divide by
4623 C3/C1 at the end of the operation. */
4624 if (tree_int_cst_lt (c1, c3))
4625 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4627 /* The result is A * (C1/C3) + (C2/C3). */
4628 t = fold (build (PLUS_EXPR, type,
4629 fold (build (MULT_EXPR, type,
4630 TREE_OPERAND (xarg0, 0),
4631 const_binop (code, c1, c3, 1))),
4632 const_binop (code, c2, c3, 1)));
4634 if (! integer_onep (outer_div))
4635 t = fold (build (code, type, t, convert (type, outer_div)));
4637 if (have_save_expr)
4638 t = save_expr (t);
4640 return t;
4644 goto binary;
4646 case CEIL_MOD_EXPR:
4647 case FLOOR_MOD_EXPR:
4648 case ROUND_MOD_EXPR:
4649 case TRUNC_MOD_EXPR:
4650 if (integer_onep (arg1))
4651 return omit_one_operand (type, integer_zero_node, arg0);
4652 if (integer_zerop (arg1))
4653 return t;
4655 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4656 where C1 % C3 == 0. Handle similarly to the division case,
4657 but don't bother with SAVE_EXPRs. */
4659 if (TREE_CODE (arg1) == INTEGER_CST
4660 && ! integer_zerop (arg1))
4662 tree c2 = integer_zero_node;
4663 tree xarg0 = arg0;
4665 if (TREE_CODE (xarg0) == PLUS_EXPR
4666 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4667 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4668 else if (TREE_CODE (xarg0) == MINUS_EXPR
4669 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4670 && ! TREE_UNSIGNED (type))
4672 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4673 xarg0 = TREE_OPERAND (xarg0, 0);
4676 STRIP_NOPS (xarg0);
4678 if (TREE_CODE (xarg0) == MULT_EXPR
4679 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4680 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4681 TREE_OPERAND (xarg0, 1),
4682 arg1, 1))
4683 && tree_int_cst_sgn (c2) >= 0)
4684 /* The result is (C2%C3). */
4685 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4686 TREE_OPERAND (xarg0, 0));
4689 goto binary;
4691 case LSHIFT_EXPR:
4692 case RSHIFT_EXPR:
4693 case LROTATE_EXPR:
4694 case RROTATE_EXPR:
4695 if (integer_zerop (arg1))
4696 return non_lvalue (convert (type, arg0));
4697 /* Since negative shift count is not well-defined,
4698 don't try to compute it in the compiler. */
4699 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
4700 return t;
4701 /* Rewrite an LROTATE_EXPR by a constant into an
4702 RROTATE_EXPR by a new constant. */
4703 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
4705 TREE_SET_CODE (t, RROTATE_EXPR);
4706 code = RROTATE_EXPR;
4707 TREE_OPERAND (t, 1) = arg1
4708 = const_binop
4709 (MINUS_EXPR,
4710 convert (TREE_TYPE (arg1),
4711 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
4712 arg1, 0);
4713 if (tree_int_cst_sgn (arg1) < 0)
4714 return t;
4717 /* If we have a rotate of a bit operation with the rotate count and
4718 the second operand of the bit operation both constant,
4719 permute the two operations. */
4720 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4721 && (TREE_CODE (arg0) == BIT_AND_EXPR
4722 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
4723 || TREE_CODE (arg0) == BIT_IOR_EXPR
4724 || TREE_CODE (arg0) == BIT_XOR_EXPR)
4725 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4726 return fold (build (TREE_CODE (arg0), type,
4727 fold (build (code, type,
4728 TREE_OPERAND (arg0, 0), arg1)),
4729 fold (build (code, type,
4730 TREE_OPERAND (arg0, 1), arg1))));
4732 /* Two consecutive rotates adding up to the width of the mode can
4733 be ignored. */
4734 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4735 && TREE_CODE (arg0) == RROTATE_EXPR
4736 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4737 && TREE_INT_CST_HIGH (arg1) == 0
4738 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
4739 && ((TREE_INT_CST_LOW (arg1)
4740 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
4741 == GET_MODE_BITSIZE (TYPE_MODE (type))))
4742 return TREE_OPERAND (arg0, 0);
4744 goto binary;
4746 case MIN_EXPR:
4747 if (operand_equal_p (arg0, arg1, 0))
4748 return arg0;
4749 if (INTEGRAL_TYPE_P (type)
4750 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4751 return omit_one_operand (type, arg1, arg0);
4752 goto associate;
4754 case MAX_EXPR:
4755 if (operand_equal_p (arg0, arg1, 0))
4756 return arg0;
4757 if (INTEGRAL_TYPE_P (type)
4758 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4759 return omit_one_operand (type, arg1, arg0);
4760 goto associate;
4762 case TRUTH_NOT_EXPR:
4763 /* Note that the operand of this must be an int
4764 and its values must be 0 or 1.
4765 ("true" is a fixed value perhaps depending on the language,
4766 but we don't handle values other than 1 correctly yet.) */
4767 tem = invert_truthvalue (arg0);
4768 /* Avoid infinite recursion. */
4769 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
4770 return t;
4771 return convert (type, tem);
4773 case TRUTH_ANDIF_EXPR:
4774 /* Note that the operands of this must be ints
4775 and their values must be 0 or 1.
4776 ("true" is a fixed value perhaps depending on the language.) */
4777 /* If first arg is constant zero, return it. */
4778 if (integer_zerop (arg0))
4779 return arg0;
4780 case TRUTH_AND_EXPR:
4781 /* If either arg is constant true, drop it. */
4782 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4783 return non_lvalue (arg1);
4784 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4785 return non_lvalue (arg0);
4786 /* If second arg is constant zero, result is zero, but first arg
4787 must be evaluated. */
4788 if (integer_zerop (arg1))
4789 return omit_one_operand (type, arg1, arg0);
4790 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
4791 case will be handled here. */
4792 if (integer_zerop (arg0))
4793 return omit_one_operand (type, arg0, arg1);
4795 truth_andor:
4796 /* We only do these simplifications if we are optimizing. */
4797 if (!optimize)
4798 return t;
4800 /* Check for things like (A || B) && (A || C). We can convert this
4801 to A || (B && C). Note that either operator can be any of the four
4802 truth and/or operations and the transformation will still be
4803 valid. Also note that we only care about order for the
4804 ANDIF and ORIF operators. If B contains side effects, this
4805 might change the truth-value of A. */
4806 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4807 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4808 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4809 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4810 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
4811 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
4813 tree a00 = TREE_OPERAND (arg0, 0);
4814 tree a01 = TREE_OPERAND (arg0, 1);
4815 tree a10 = TREE_OPERAND (arg1, 0);
4816 tree a11 = TREE_OPERAND (arg1, 1);
4817 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4818 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4819 && (code == TRUTH_AND_EXPR
4820 || code == TRUTH_OR_EXPR));
4822 if (operand_equal_p (a00, a10, 0))
4823 return fold (build (TREE_CODE (arg0), type, a00,
4824 fold (build (code, type, a01, a11))));
4825 else if (commutative && operand_equal_p (a00, a11, 0))
4826 return fold (build (TREE_CODE (arg0), type, a00,
4827 fold (build (code, type, a01, a10))));
4828 else if (commutative && operand_equal_p (a01, a10, 0))
4829 return fold (build (TREE_CODE (arg0), type, a01,
4830 fold (build (code, type, a00, a11))));
4832 /* This case is tricky because we must either have commutative
4833 operators or else A10 must not have side-effects. */
4835 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4836 && operand_equal_p (a01, a11, 0))
4837 return fold (build (TREE_CODE (arg0), type,
4838 fold (build (code, type, a00, a10)),
4839 a01));
4842 /* See if we can build a range comparison. */
4843 if (0 != (tem = fold_range_test (t)))
4844 return tem;
4846 /* Check for the possibility of merging component references. If our
4847 lhs is another similar operation, try to merge its rhs with our
4848 rhs. Then try to merge our lhs and rhs. */
4849 if (TREE_CODE (arg0) == code
4850 && 0 != (tem = fold_truthop (code, type,
4851 TREE_OPERAND (arg0, 1), arg1)))
4852 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4854 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
4855 return tem;
4857 return t;
4859 case TRUTH_ORIF_EXPR:
4860 /* Note that the operands of this must be ints
4861 and their values must be 0 or true.
4862 ("true" is a fixed value perhaps depending on the language.) */
4863 /* If first arg is constant true, return it. */
4864 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4865 return arg0;
4866 case TRUTH_OR_EXPR:
4867 /* If either arg is constant zero, drop it. */
4868 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
4869 return non_lvalue (arg1);
4870 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
4871 return non_lvalue (arg0);
4872 /* If second arg is constant true, result is true, but we must
4873 evaluate first arg. */
4874 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4875 return omit_one_operand (type, arg1, arg0);
4876 /* Likewise for first arg, but note this only occurs here for
4877 TRUTH_OR_EXPR. */
4878 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4879 return omit_one_operand (type, arg0, arg1);
4880 goto truth_andor;
4882 case TRUTH_XOR_EXPR:
4883 /* If either arg is constant zero, drop it. */
4884 if (integer_zerop (arg0))
4885 return non_lvalue (arg1);
4886 if (integer_zerop (arg1))
4887 return non_lvalue (arg0);
4888 /* If either arg is constant true, this is a logical inversion. */
4889 if (integer_onep (arg0))
4890 return non_lvalue (invert_truthvalue (arg1));
4891 if (integer_onep (arg1))
4892 return non_lvalue (invert_truthvalue (arg0));
4893 return t;
4895 case EQ_EXPR:
4896 case NE_EXPR:
4897 case LT_EXPR:
4898 case GT_EXPR:
4899 case LE_EXPR:
4900 case GE_EXPR:
4901 /* If one arg is a constant integer, put it last. */
4902 if (TREE_CODE (arg0) == INTEGER_CST
4903 && TREE_CODE (arg1) != INTEGER_CST)
4905 TREE_OPERAND (t, 0) = arg1;
4906 TREE_OPERAND (t, 1) = arg0;
4907 arg0 = TREE_OPERAND (t, 0);
4908 arg1 = TREE_OPERAND (t, 1);
4909 code = swap_tree_comparison (code);
4910 TREE_SET_CODE (t, code);
4913 /* Convert foo++ == CONST into ++foo == CONST + INCR.
4914 First, see if one arg is constant; find the constant arg
4915 and the other one. */
4917 tree constop = 0, varop;
4918 int constopnum = -1;
4920 if (TREE_CONSTANT (arg1))
4921 constopnum = 1, constop = arg1, varop = arg0;
4922 if (TREE_CONSTANT (arg0))
4923 constopnum = 0, constop = arg0, varop = arg1;
4925 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
4927 /* This optimization is invalid for ordered comparisons
4928 if CONST+INCR overflows or if foo+incr might overflow.
4929 This optimization is invalid for floating point due to rounding.
4930 For pointer types we assume overflow doesn't happen. */
4931 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4932 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4933 && (code == EQ_EXPR || code == NE_EXPR)))
4935 tree newconst
4936 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
4937 constop, TREE_OPERAND (varop, 1)));
4938 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
4940 /* If VAROP is a reference to a bitfield, we must mask
4941 the constant by the width of the field. */
4942 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
4943 && DECL_BIT_FIELD(TREE_OPERAND
4944 (TREE_OPERAND (varop, 0), 1)))
4946 int size
4947 = TREE_INT_CST_LOW (DECL_SIZE
4948 (TREE_OPERAND
4949 (TREE_OPERAND (varop, 0), 1)));
4951 newconst = fold (build (BIT_AND_EXPR,
4952 TREE_TYPE (varop), newconst,
4953 convert (TREE_TYPE (varop),
4954 build_int_2 (size, 0))));
4958 t = build (code, type, TREE_OPERAND (t, 0),
4959 TREE_OPERAND (t, 1));
4960 TREE_OPERAND (t, constopnum) = newconst;
4961 return t;
4964 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
4966 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4967 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4968 && (code == EQ_EXPR || code == NE_EXPR)))
4970 tree newconst
4971 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
4972 constop, TREE_OPERAND (varop, 1)));
4973 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
4975 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
4976 && DECL_BIT_FIELD(TREE_OPERAND
4977 (TREE_OPERAND (varop, 0), 1)))
4979 int size
4980 = TREE_INT_CST_LOW (DECL_SIZE
4981 (TREE_OPERAND
4982 (TREE_OPERAND (varop, 0), 1)));
4984 newconst = fold (build (BIT_AND_EXPR,
4985 TREE_TYPE (varop), newconst,
4986 convert (TREE_TYPE (varop),
4987 build_int_2 (size, 0))));
4991 t = build (code, type, TREE_OPERAND (t, 0),
4992 TREE_OPERAND (t, 1));
4993 TREE_OPERAND (t, constopnum) = newconst;
4994 return t;
4999 /* Change X >= CST to X > (CST - 1) if CST is positive. */
5000 if (TREE_CODE (arg1) == INTEGER_CST
5001 && TREE_CODE (arg0) != INTEGER_CST
5002 && tree_int_cst_sgn (arg1) > 0)
5004 switch (TREE_CODE (t))
5006 case GE_EXPR:
5007 code = GT_EXPR;
5008 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5009 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5010 break;
5012 case LT_EXPR:
5013 code = LE_EXPR;
5014 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5015 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5016 break;
5020 /* If this is an EQ or NE comparison with zero and ARG0 is
5021 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
5022 two operations, but the latter can be done in one less insn
5023 on machines that have only two-operand insns or on which a
5024 constant cannot be the first operand. */
5025 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
5026 && TREE_CODE (arg0) == BIT_AND_EXPR)
5028 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
5029 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
5030 return
5031 fold (build (code, type,
5032 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5033 build (RSHIFT_EXPR,
5034 TREE_TYPE (TREE_OPERAND (arg0, 0)),
5035 TREE_OPERAND (arg0, 1),
5036 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
5037 convert (TREE_TYPE (arg0),
5038 integer_one_node)),
5039 arg1));
5040 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
5041 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
5042 return
5043 fold (build (code, type,
5044 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5045 build (RSHIFT_EXPR,
5046 TREE_TYPE (TREE_OPERAND (arg0, 1)),
5047 TREE_OPERAND (arg0, 0),
5048 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
5049 convert (TREE_TYPE (arg0),
5050 integer_one_node)),
5051 arg1));
5054 /* If this is an NE or EQ comparison of zero against the result of a
5055 signed MOD operation whose second operand is a power of 2, make
5056 the MOD operation unsigned since it is simpler and equivalent. */
5057 if ((code == NE_EXPR || code == EQ_EXPR)
5058 && integer_zerop (arg1)
5059 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
5060 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
5061 || TREE_CODE (arg0) == CEIL_MOD_EXPR
5062 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
5063 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
5064 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5066 tree newtype = unsigned_type (TREE_TYPE (arg0));
5067 tree newmod = build (TREE_CODE (arg0), newtype,
5068 convert (newtype, TREE_OPERAND (arg0, 0)),
5069 convert (newtype, TREE_OPERAND (arg0, 1)));
5071 return build (code, type, newmod, convert (newtype, arg1));
5074 /* If this is an NE comparison of zero with an AND of one, remove the
5075 comparison since the AND will give the correct value. */
5076 if (code == NE_EXPR && integer_zerop (arg1)
5077 && TREE_CODE (arg0) == BIT_AND_EXPR
5078 && integer_onep (TREE_OPERAND (arg0, 1)))
5079 return convert (type, arg0);
5081 /* If we have (A & C) == C where C is a power of 2, convert this into
5082 (A & C) != 0. Similarly for NE_EXPR. */
5083 if ((code == EQ_EXPR || code == NE_EXPR)
5084 && TREE_CODE (arg0) == BIT_AND_EXPR
5085 && integer_pow2p (TREE_OPERAND (arg0, 1))
5086 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
5087 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
5088 arg0, integer_zero_node);
5090 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
5091 and similarly for >= into !=. */
5092 if ((code == LT_EXPR || code == GE_EXPR)
5093 && TREE_UNSIGNED (TREE_TYPE (arg0))
5094 && TREE_CODE (arg1) == LSHIFT_EXPR
5095 && integer_onep (TREE_OPERAND (arg1, 0)))
5096 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5097 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5098 TREE_OPERAND (arg1, 1)),
5099 convert (TREE_TYPE (arg0), integer_zero_node));
5101 else if ((code == LT_EXPR || code == GE_EXPR)
5102 && TREE_UNSIGNED (TREE_TYPE (arg0))
5103 && (TREE_CODE (arg1) == NOP_EXPR
5104 || TREE_CODE (arg1) == CONVERT_EXPR)
5105 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
5106 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
5107 return
5108 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5109 convert (TREE_TYPE (arg0),
5110 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5111 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
5112 convert (TREE_TYPE (arg0), integer_zero_node));
5114 /* Simplify comparison of something with itself. (For IEEE
5115 floating-point, we can only do some of these simplifications.) */
5116 if (operand_equal_p (arg0, arg1, 0))
5118 switch (code)
5120 case EQ_EXPR:
5121 case GE_EXPR:
5122 case LE_EXPR:
5123 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5125 if (type == integer_type_node)
5126 return integer_one_node;
5128 t = build_int_2 (1, 0);
5129 TREE_TYPE (t) = type;
5130 return t;
5132 code = EQ_EXPR;
5133 TREE_SET_CODE (t, code);
5134 break;
5136 case NE_EXPR:
5137 /* For NE, we can only do this simplification if integer. */
5138 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5139 break;
5140 /* ... fall through ... */
5141 case GT_EXPR:
5142 case LT_EXPR:
5143 if (type == integer_type_node)
5144 return integer_zero_node;
5146 t = build_int_2 (0, 0);
5147 TREE_TYPE (t) = type;
5148 return t;
5152 /* An unsigned comparison against 0 can be simplified. */
5153 if (integer_zerop (arg1)
5154 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5155 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
5156 && TREE_UNSIGNED (TREE_TYPE (arg1)))
5158 switch (TREE_CODE (t))
5160 case GT_EXPR:
5161 code = NE_EXPR;
5162 TREE_SET_CODE (t, NE_EXPR);
5163 break;
5164 case LE_EXPR:
5165 code = EQ_EXPR;
5166 TREE_SET_CODE (t, EQ_EXPR);
5167 break;
5168 case GE_EXPR:
5169 return omit_one_operand (type,
5170 convert (type, integer_one_node),
5171 arg0);
5172 case LT_EXPR:
5173 return omit_one_operand (type,
5174 convert (type, integer_zero_node),
5175 arg0);
5179 /* If we are comparing an expression that just has comparisons
5180 of two integer values, arithmetic expressions of those comparisons,
5181 and constants, we can simplify it. There are only three cases
5182 to check: the two values can either be equal, the first can be
5183 greater, or the second can be greater. Fold the expression for
5184 those three values. Since each value must be 0 or 1, we have
5185 eight possibilities, each of which corresponds to the constant 0
5186 or 1 or one of the six possible comparisons.
5188 This handles common cases like (a > b) == 0 but also handles
5189 expressions like ((x > y) - (y > x)) > 0, which supposedly
5190 occur in macroized code. */
5192 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
5194 tree cval1 = 0, cval2 = 0;
5195 int save_p = 0;
5197 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
5198 /* Don't handle degenerate cases here; they should already
5199 have been handled anyway. */
5200 && cval1 != 0 && cval2 != 0
5201 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
5202 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
5203 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
5204 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
5205 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
5207 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
5208 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
5210 /* We can't just pass T to eval_subst in case cval1 or cval2
5211 was the same as ARG1. */
5213 tree high_result
5214 = fold (build (code, type,
5215 eval_subst (arg0, cval1, maxval, cval2, minval),
5216 arg1));
5217 tree equal_result
5218 = fold (build (code, type,
5219 eval_subst (arg0, cval1, maxval, cval2, maxval),
5220 arg1));
5221 tree low_result
5222 = fold (build (code, type,
5223 eval_subst (arg0, cval1, minval, cval2, maxval),
5224 arg1));
5226 /* All three of these results should be 0 or 1. Confirm they
5227 are. Then use those values to select the proper code
5228 to use. */
5230 if ((integer_zerop (high_result)
5231 || integer_onep (high_result))
5232 && (integer_zerop (equal_result)
5233 || integer_onep (equal_result))
5234 && (integer_zerop (low_result)
5235 || integer_onep (low_result)))
5237 /* Make a 3-bit mask with the high-order bit being the
5238 value for `>', the next for '=', and the low for '<'. */
5239 switch ((integer_onep (high_result) * 4)
5240 + (integer_onep (equal_result) * 2)
5241 + integer_onep (low_result))
5243 case 0:
5244 /* Always false. */
5245 return omit_one_operand (type, integer_zero_node, arg0);
5246 case 1:
5247 code = LT_EXPR;
5248 break;
5249 case 2:
5250 code = EQ_EXPR;
5251 break;
5252 case 3:
5253 code = LE_EXPR;
5254 break;
5255 case 4:
5256 code = GT_EXPR;
5257 break;
5258 case 5:
5259 code = NE_EXPR;
5260 break;
5261 case 6:
5262 code = GE_EXPR;
5263 break;
5264 case 7:
5265 /* Always true. */
5266 return omit_one_operand (type, integer_one_node, arg0);
5269 t = build (code, type, cval1, cval2);
5270 if (save_p)
5271 return save_expr (t);
5272 else
5273 return fold (t);
5278 /* If this is a comparison of a field, we may be able to simplify it. */
5279 if ((TREE_CODE (arg0) == COMPONENT_REF
5280 || TREE_CODE (arg0) == BIT_FIELD_REF)
5281 && (code == EQ_EXPR || code == NE_EXPR)
5282 /* Handle the constant case even without -O
5283 to make sure the warnings are given. */
5284 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
5286 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
5287 return t1 ? t1 : t;
5290 /* If this is a comparison of complex values and either or both
5291 sizes are a COMPLEX_EXPR, it is best to split up the comparisons
5292 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
5293 may prevent needless evaluations. */
5294 if ((code == EQ_EXPR || code == NE_EXPR)
5295 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
5296 && (TREE_CODE (arg0) == COMPLEX_EXPR
5297 || TREE_CODE (arg1) == COMPLEX_EXPR))
5299 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
5300 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
5301 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
5302 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
5303 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
5305 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
5306 : TRUTH_ORIF_EXPR),
5307 type,
5308 fold (build (code, type, real0, real1)),
5309 fold (build (code, type, imag0, imag1))));
5312 /* From here on, the only cases we handle are when the result is
5313 known to be a constant.
5315 To compute GT, swap the arguments and do LT.
5316 To compute GE, do LT and invert the result.
5317 To compute LE, swap the arguments, do LT and invert the result.
5318 To compute NE, do EQ and invert the result.
5320 Therefore, the code below must handle only EQ and LT. */
5322 if (code == LE_EXPR || code == GT_EXPR)
5324 tem = arg0, arg0 = arg1, arg1 = tem;
5325 code = swap_tree_comparison (code);
5328 /* Note that it is safe to invert for real values here because we
5329 will check below in the one case that it matters. */
5331 invert = 0;
5332 if (code == NE_EXPR || code == GE_EXPR)
5334 invert = 1;
5335 code = invert_tree_comparison (code);
5338 /* Compute a result for LT or EQ if args permit;
5339 otherwise return T. */
5340 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
5342 if (code == EQ_EXPR)
5343 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
5344 == TREE_INT_CST_LOW (arg1))
5345 && (TREE_INT_CST_HIGH (arg0)
5346 == TREE_INT_CST_HIGH (arg1)),
5348 else
5349 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
5350 ? INT_CST_LT_UNSIGNED (arg0, arg1)
5351 : INT_CST_LT (arg0, arg1)),
5355 #if 0 /* This is no longer useful, but breaks some real code. */
5356 /* Assume a nonexplicit constant cannot equal an explicit one,
5357 since such code would be undefined anyway.
5358 Exception: on sysvr4, using #pragma weak,
5359 a label can come out as 0. */
5360 else if (TREE_CODE (arg1) == INTEGER_CST
5361 && !integer_zerop (arg1)
5362 && TREE_CONSTANT (arg0)
5363 && TREE_CODE (arg0) == ADDR_EXPR
5364 && code == EQ_EXPR)
5365 t1 = build_int_2 (0, 0);
5366 #endif
5367 /* Two real constants can be compared explicitly. */
5368 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
5370 /* If either operand is a NaN, the result is false with two
5371 exceptions: First, an NE_EXPR is true on NaNs, but that case
5372 is already handled correctly since we will be inverting the
5373 result for NE_EXPR. Second, if we had inverted a LE_EXPR
5374 or a GE_EXPR into a LT_EXPR, we must return true so that it
5375 will be inverted into false. */
5377 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
5378 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
5379 t1 = build_int_2 (invert && code == LT_EXPR, 0);
5381 else if (code == EQ_EXPR)
5382 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
5383 TREE_REAL_CST (arg1)),
5385 else
5386 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
5387 TREE_REAL_CST (arg1)),
5391 if (t1 == NULL_TREE)
5392 return t;
5394 if (invert)
5395 TREE_INT_CST_LOW (t1) ^= 1;
5397 TREE_TYPE (t1) = type;
5398 return t1;
5400 case COND_EXPR:
5401 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
5402 so all simple results must be passed through pedantic_non_lvalue. */
5403 if (TREE_CODE (arg0) == INTEGER_CST)
5404 return pedantic_non_lvalue
5405 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
5406 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
5407 return pedantic_omit_one_operand (type, arg1, arg0);
5409 /* If the second operand is zero, invert the comparison and swap
5410 the second and third operands. Likewise if the second operand
5411 is constant and the third is not or if the third operand is
5412 equivalent to the first operand of the comparison. */
5414 if (integer_zerop (arg1)
5415 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
5416 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5417 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5418 TREE_OPERAND (t, 2),
5419 TREE_OPERAND (arg0, 1))))
5421 /* See if this can be inverted. If it can't, possibly because
5422 it was a floating-point inequality comparison, don't do
5423 anything. */
5424 tem = invert_truthvalue (arg0);
5426 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5428 t = build (code, type, tem,
5429 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5430 arg0 = tem;
5431 arg1 = TREE_OPERAND (t, 2);
5432 STRIP_NOPS (arg1);
5436 /* If we have A op B ? A : C, we may be able to convert this to a
5437 simpler expression, depending on the operation and the values
5438 of B and C. IEEE floating point prevents this though,
5439 because A or B might be -0.0 or a NaN. */
5441 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5442 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5443 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
5444 || flag_fast_math)
5445 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5446 arg1, TREE_OPERAND (arg0, 1)))
5448 tree arg2 = TREE_OPERAND (t, 2);
5449 enum tree_code comp_code = TREE_CODE (arg0);
5451 STRIP_NOPS (arg2);
5453 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
5454 depending on the comparison operation. */
5455 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
5456 ? real_zerop (TREE_OPERAND (arg0, 1))
5457 : integer_zerop (TREE_OPERAND (arg0, 1)))
5458 && TREE_CODE (arg2) == NEGATE_EXPR
5459 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5460 switch (comp_code)
5462 case EQ_EXPR:
5463 return pedantic_non_lvalue
5464 (fold (build1 (NEGATE_EXPR, type, arg1)));
5465 case NE_EXPR:
5466 return pedantic_non_lvalue (convert (type, arg1));
5467 case GE_EXPR:
5468 case GT_EXPR:
5469 return pedantic_non_lvalue
5470 (convert (type, fold (build1 (ABS_EXPR,
5471 TREE_TYPE (arg1), arg1))));
5472 case LE_EXPR:
5473 case LT_EXPR:
5474 return pedantic_non_lvalue
5475 (fold (build1 (NEGATE_EXPR, type,
5476 convert (type,
5477 fold (build1 (ABS_EXPR,
5478 TREE_TYPE (arg1),
5479 arg1))))));
5482 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
5483 always zero. */
5485 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
5487 if (comp_code == NE_EXPR)
5488 return pedantic_non_lvalue (convert (type, arg1));
5489 else if (comp_code == EQ_EXPR)
5490 return pedantic_non_lvalue (convert (type, integer_zero_node));
5493 /* If this is A op B ? A : B, this is either A, B, min (A, B),
5494 or max (A, B), depending on the operation. */
5496 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
5497 arg2, TREE_OPERAND (arg0, 0)))
5499 tree comp_op0 = TREE_OPERAND (arg0, 0);
5500 tree comp_op1 = TREE_OPERAND (arg0, 1);
5501 tree comp_type = TREE_TYPE (comp_op0);
5503 switch (comp_code)
5505 case EQ_EXPR:
5506 return pedantic_non_lvalue (convert (type, arg2));
5507 case NE_EXPR:
5508 return pedantic_non_lvalue (convert (type, arg1));
5509 case LE_EXPR:
5510 case LT_EXPR:
5511 /* In C++ a ?: expression can be an lvalue, so we can't
5512 do this; we would lose the distinction between
5513 LT and LE. */
5514 if (pedantic_lvalues)
5515 return pedantic_non_lvalue
5516 (convert (type, (fold (build (MIN_EXPR, comp_type,
5517 comp_op0, comp_op1)))));
5518 break;
5519 case GE_EXPR:
5520 case GT_EXPR:
5521 if (pedantic_lvalues)
5522 return pedantic_non_lvalue
5523 (convert (type, fold (build (MAX_EXPR, comp_type,
5524 comp_op0, comp_op1))));
5525 break;
5529 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5530 we might still be able to simplify this. For example,
5531 if C1 is one less or one more than C2, this might have started
5532 out as a MIN or MAX and been transformed by this function.
5533 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5535 if (INTEGRAL_TYPE_P (type)
5536 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5537 && TREE_CODE (arg2) == INTEGER_CST)
5538 switch (comp_code)
5540 case EQ_EXPR:
5541 /* We can replace A with C1 in this case. */
5542 arg1 = convert (type, TREE_OPERAND (arg0, 1));
5543 t = build (code, type, TREE_OPERAND (t, 0), arg1,
5544 TREE_OPERAND (t, 2));
5545 break;
5547 case LT_EXPR:
5548 /* If C1 is C2 + 1, this is min(A, C2). */
5549 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5550 && operand_equal_p (TREE_OPERAND (arg0, 1),
5551 const_binop (PLUS_EXPR, arg2,
5552 integer_one_node, 0), 1))
5553 return pedantic_non_lvalue
5554 (fold (build (MIN_EXPR, type, arg1, arg2)));
5555 break;
5557 case LE_EXPR:
5558 /* If C1 is C2 - 1, this is min(A, C2). */
5559 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5560 && operand_equal_p (TREE_OPERAND (arg0, 1),
5561 const_binop (MINUS_EXPR, arg2,
5562 integer_one_node, 0), 1))
5563 return pedantic_non_lvalue
5564 (fold (build (MIN_EXPR, type, arg1, arg2)));
5565 break;
5567 case GT_EXPR:
5568 /* If C1 is C2 - 1, this is max(A, C2). */
5569 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5570 && operand_equal_p (TREE_OPERAND (arg0, 1),
5571 const_binop (MINUS_EXPR, arg2,
5572 integer_one_node, 0), 1))
5573 return pedantic_non_lvalue
5574 (fold (build (MAX_EXPR, type, arg1, arg2)));
5575 break;
5577 case GE_EXPR:
5578 /* If C1 is C2 + 1, this is max(A, C2). */
5579 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5580 && operand_equal_p (TREE_OPERAND (arg0, 1),
5581 const_binop (PLUS_EXPR, arg2,
5582 integer_one_node, 0), 1))
5583 return pedantic_non_lvalue
5584 (fold (build (MAX_EXPR, type, arg1, arg2)));
5585 break;
5589 /* If the second operand is simpler than the third, swap them
5590 since that produces better jump optimization results. */
5591 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
5592 || TREE_CODE (arg1) == SAVE_EXPR)
5593 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
5594 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
5595 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
5597 /* See if this can be inverted. If it can't, possibly because
5598 it was a floating-point inequality comparison, don't do
5599 anything. */
5600 tem = invert_truthvalue (arg0);
5602 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5604 t = build (code, type, tem,
5605 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5606 arg0 = tem;
5607 arg1 = TREE_OPERAND (t, 2);
5608 STRIP_NOPS (arg1);
5612 /* Convert A ? 1 : 0 to simply A. */
5613 if (integer_onep (TREE_OPERAND (t, 1))
5614 && integer_zerop (TREE_OPERAND (t, 2))
5615 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
5616 call to fold will try to move the conversion inside
5617 a COND, which will recurse. In that case, the COND_EXPR
5618 is probably the best choice, so leave it alone. */
5619 && type == TREE_TYPE (arg0))
5620 return pedantic_non_lvalue (arg0);
5622 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5623 operation is simply A & 2. */
5625 if (integer_zerop (TREE_OPERAND (t, 2))
5626 && TREE_CODE (arg0) == NE_EXPR
5627 && integer_zerop (TREE_OPERAND (arg0, 1))
5628 && integer_pow2p (arg1)
5629 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
5630 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
5631 arg1, 1))
5632 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
5634 return t;
5636 case COMPOUND_EXPR:
5637 /* When pedantic, a compound expression can be neither an lvalue
5638 nor an integer constant expression. */
5639 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
5640 return t;
5641 /* Don't let (0, 0) be null pointer constant. */
5642 if (integer_zerop (arg1))
5643 return non_lvalue (arg1);
5644 return arg1;
5646 case COMPLEX_EXPR:
5647 if (wins)
5648 return build_complex (type, arg0, arg1);
5649 return t;
5651 case REALPART_EXPR:
5652 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5653 return t;
5654 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5655 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
5656 TREE_OPERAND (arg0, 1));
5657 else if (TREE_CODE (arg0) == COMPLEX_CST)
5658 return TREE_REALPART (arg0);
5659 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5660 return fold (build (TREE_CODE (arg0), type,
5661 fold (build1 (REALPART_EXPR, type,
5662 TREE_OPERAND (arg0, 0))),
5663 fold (build1 (REALPART_EXPR,
5664 type, TREE_OPERAND (arg0, 1)))));
5665 return t;
5667 case IMAGPART_EXPR:
5668 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5669 return convert (type, integer_zero_node);
5670 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5671 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
5672 TREE_OPERAND (arg0, 0));
5673 else if (TREE_CODE (arg0) == COMPLEX_CST)
5674 return TREE_IMAGPART (arg0);
5675 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5676 return fold (build (TREE_CODE (arg0), type,
5677 fold (build1 (IMAGPART_EXPR, type,
5678 TREE_OPERAND (arg0, 0))),
5679 fold (build1 (IMAGPART_EXPR, type,
5680 TREE_OPERAND (arg0, 1)))));
5681 return t;
5683 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
5684 appropriate. */
5685 case CLEANUP_POINT_EXPR:
5686 if (! TREE_SIDE_EFFECTS (arg0))
5687 return TREE_OPERAND (t, 0);
5690 enum tree_code code0 = TREE_CODE (arg0);
5691 int kind0 = TREE_CODE_CLASS (code0);
5692 tree arg00 = TREE_OPERAND (arg0, 0);
5693 tree arg01;
5695 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
5696 return fold (build1 (code0, type,
5697 fold (build1 (CLEANUP_POINT_EXPR,
5698 TREE_TYPE (arg00), arg00))));
5700 if (kind0 == '<' || kind0 == '2'
5701 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
5702 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
5703 || code0 == TRUTH_XOR_EXPR)
5705 arg01 = TREE_OPERAND (arg0, 1);
5707 if (! TREE_SIDE_EFFECTS (arg00))
5708 return fold (build (code0, type, arg00,
5709 fold (build1 (CLEANUP_POINT_EXPR,
5710 TREE_TYPE (arg01), arg01))));
5712 if (! TREE_SIDE_EFFECTS (arg01))
5713 return fold (build (code0, type,
5714 fold (build1 (CLEANUP_POINT_EXPR,
5715 TREE_TYPE (arg00), arg00)),
5716 arg01));
5719 return t;
5722 default:
5723 return t;
5724 } /* switch (code) */