/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "config.h"
#include "system.h"
#include <setjmp.h>
#include "flags.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
static void encode              PARAMS ((HOST_WIDE_INT *,
                                         unsigned HOST_WIDE_INT,
                                         HOST_WIDE_INT));
static void decode              PARAMS ((HOST_WIDE_INT *,
                                         unsigned HOST_WIDE_INT *,
                                         HOST_WIDE_INT *));
static tree negate_expr         PARAMS ((tree));
static tree split_tree          PARAMS ((tree, enum tree_code, tree *, tree *,
                                         int));
static tree associate_trees     PARAMS ((tree, tree, enum tree_code, tree));
static tree int_const_binop     PARAMS ((enum tree_code, tree, tree, int, int));
static void const_binop_1       PARAMS ((PTR));
static tree const_binop         PARAMS ((enum tree_code, tree, tree, int));
static void fold_convert_1      PARAMS ((PTR));
static tree fold_convert        PARAMS ((tree, tree));
static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
static int truth_value_p        PARAMS ((enum tree_code));
static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
static int twoval_comparison_p  PARAMS ((tree, tree *, tree *, int *));
static tree eval_subst          PARAMS ((tree, tree, tree, tree, tree));
static tree omit_one_operand    PARAMS ((tree, tree, tree));
static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref  PARAMS ((tree, tree, int, int, int));
static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
                                                tree, tree));
static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
                                            HOST_WIDE_INT *,
                                            enum machine_mode *, int *,
                                            int *, tree *, tree *));
static int all_ones_mask_p      PARAMS ((tree, int));
static int simple_operand_p     PARAMS ((tree));
static tree range_binop         PARAMS ((enum tree_code, tree, tree, int,
                                         tree, int));
static tree make_range          PARAMS ((tree, int *, tree *, tree *));
static tree build_range_check   PARAMS ((tree, tree, int, tree, tree));
static int merge_ranges         PARAMS ((int *, tree *, tree *, int, tree, tree,
                                         int, tree, tree));
static tree fold_range_test     PARAMS ((tree));
static tree unextend            PARAMS ((tree, int, int, tree));
static tree fold_truthop        PARAMS ((enum tree_code, tree, tree, tree));
static tree optimize_minmax_comparison PARAMS ((tree));
static tree extract_muldiv      PARAMS ((tree, tree, enum tree_code, tree));
static tree strip_compound_expr PARAMS ((tree, tree));
static int multiple_of_p        PARAMS ((tree, tree, tree));
static tree constant_boolean_node PARAMS ((int, tree));
static int count_cond           PARAMS ((tree, int));
#ifndef BRANCH_COST
#define BRANCH_COST 1
#endif

#if defined(HOST_EBCDIC)
/* bit 8 is significant in EBCDIC */
#define CHARMASK 0xff
#else
#define CHARMASK 0x7f
#endif
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
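
/* For example, with 4-bit two's complement words, a = 0111 (7) and
   b = 0001 (1) wrap to sum = 1000 (-8); then ~(a ^ b) = 1001 and
   a ^ sum = 1111, so the AND is 1001, whose sign bit is set and
   overflow is reported.  With operands of opposite sign, ~(a ^ b) has
   a clear sign bit and the test can never fire, matching the fact that
   such additions cannot overflow.  */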
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
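
/* On a host with 32-bit HOST_WIDE_INT, BASE is 0x10000 and the value
   x = 0x12345678 splits into LOWPART (x) = 0x5678 and
   HIGHPART (x) = 0x1234, with 0x5678 + 0x1234 * 0x10000 = 0x12345678.
   Half-width digits guarantee that a digit product plus carries still
   fits in one HOST_WIDE_INT in the arithmetic routines below.  */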
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT low;
     HOST_WIDE_INT hi;
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT *low;
     HOST_WIDE_INT *hi;
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
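
/* encode and decode are inverses.  On a 32-bit host, for instance,
   encode (words, 0x89abcdef, 0x01234567) produces
   words = { 0xcdef, 0x89ab, 0x4567, 0x0123 }, and decode rebuilds
   low = 0xcdef + 0x89ab * BASE and hi = 0x4567 + 0x0123 * BASE.  */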
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.

   Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
   if it exists.  */

int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
                         overflow);
#endif
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow.  */
  if (TREE_UNSIGNED (TREE_TYPE (t)))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
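
/* Note how the carry out of the low words is obtained without a wider
   type: the unsigned sum l = l1 + l2 wraps modulo 2**HOST_BITS, so
   l < l1 holds exactly when a carry occurred, and that 0-or-1 value is
   added into the high-word sum.  */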
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (l1, h1, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = - l1;
      *hv = ~ h1;
      return 0;
    }
}
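
/* Overflow is only possible when negating the most negative value,
   whose low word is 0 and whose high word is the minimum
   HOST_WIDE_INT; negation leaves it unchanged, so *hv and h1 are then
   both negative and (*hv & h1) < 0 reports it.  When l1 != 0, the
   two's complement identity -x == ~x + 1 makes the high word simply
   ~h1 (the +1 is absorbed by the low word), and no overflow can
   occur.  */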
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  register unsigned HOST_WIDE_INT carry;
  register int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  bzero ((char *) prod, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
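
/* The nested loop is schoolbook multiplication in base BASE: digit I
   of ARG1 times digit J of ARG2 contributes to digit I + J of the
   product, with the carry rippling upward.  The overflow test then
   verifies that the discarded top half is all zero bits for a
   nonnegative result, or all one bits for a negative one, after
   compensating for negative operands having been treated as large
   unsigned numbers.  */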
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  if (count < 0)
    {
      rshift_double (l1, h1, - count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec ATTRIBUTE_UNUSED;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = signmask;
      *lv = signmask;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = signmask;
      *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
             | ((unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT)));
    }
  else
    {
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
      *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
             | ((unsigned HOST_WIDE_INT) h1 >> count));
    }
}
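
/* The split shifts such as `x >> (HOST_BITS_PER_WIDE_INT - count - 1)
   >> 1' are deliberate: for count == 0 a single shift by the full word
   width would itself be undefined, so the shift is done in two
   well-defined steps.  SIGNMASK is all ones exactly for an arithmetic
   shift of a negative value, letting one code path serve both shift
   flavors.  */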
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
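
/* A rotate within PREC bits is the OR of two opposite logical shifts
   that cover disjoint bit positions: e.g. rotating the 8-bit value
   10110001 left by 3 combines (10110001 << 3) = 10001000 with
   (10110001 >> 5) = 00000101 to give 10001101.  */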
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */
int
div_and_round_double (code, uns,
                      lnum_orig, hnum_orig, lden_orig, hden_orig,
                      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
     HOST_WIDE_INT hnum_orig;
     unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
     HOST_WIDE_INT hden_orig;
     unsigned HOST_WIDE_INT *lquo, *lrem;
     HOST_WIDE_INT *hquo, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  register int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  bzero ((char *) quo, sizeof quo);

  bzero ((char *) num, sizeof num);     /* to zero 9th element */
  bzero ((char *) den, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest non-zero divisor digit.  */
      for (i = 4 - 1; ; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }
      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */
      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);
 finish_up:
  /* if result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
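
/* As an example of the rounding variants, take -7 / 2: the trial
   quotient is -3 with remainder -1.  TRUNC keeps -3; FLOOR sees a
   negative quotient with nonzero remainder and adjusts to -4; CEIL
   keeps -3; ROUND finds 2 * |rem| >= |den| and moves away from zero to
   -4.  The true remainder is recomputed afterward as num - quo * den,
   so it stays consistent with the adjusted quotient.  */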
#ifndef REAL_ARITHMETIC
/* Effectively truncate a real value to represent the nearest possible value
   in a narrower mode.  The result is actually represented in the same data
   type as the argument, but its value is usually different.

   A trap may occur during the FP operations and it is the responsibility
   of the calling function to have a handler established.  */

REAL_VALUE_TYPE
real_value_truncate (mode, arg)
     enum machine_mode mode;
     REAL_VALUE_TYPE arg;
{
  return REAL_VALUE_TRUNCATE (mode, arg);
}
#if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT

/* Check for infinity in an IEEE double precision number.  */

int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
              && u.big_endian.mantissa1 == 0
              && u.big_endian.mantissa2 == 0);
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && u.little_endian.mantissa1 == 0
              && u.little_endian.mantissa2 == 0);
    }
}
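
/* Assigning dconstm1 (-1.0) first and looking at the sign bit through
   the big_endian view is a runtime probe of the host's bit-field
   layout: if that view reads the sign as 1, it matches the host and is
   used, otherwise the little_endian view is.  An IEEE double is
   infinite exactly when the exponent field is all ones (2047) and the
   mantissa is zero; the same exponent with a nonzero mantissa is a
   NaN, which is what target_isnan below checks.  */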
/* Check whether an IEEE double precision number is a NaN.  */

int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
              && (u.big_endian.mantissa1 != 0
                  || u.big_endian.mantissa2 != 0));
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && (u.little_endian.mantissa1 != 0
                  || u.little_endian.mantissa2 != 0));
    }
}
/* Check for a negative IEEE double precision number.  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return u.big_endian.sign;
    }
  else
    {
      u.d = x;
      return u.little_endian.sign;
    }
}
#else /* Target not IEEE */
/* Let's assume other float formats don't have infinity.
   (This can be overridden by redefining REAL_VALUE_ISINF.)  */

int
target_isinf (x)
     REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
{
  return 0;
}

/* Let's assume other float formats don't have NaNs.
   (This can be overridden by redefining REAL_VALUE_ISNAN.)  */

int
target_isnan (x)
     REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
{
  return 0;
}

/* Let's assume other float formats don't have minus zero.
   (This can be overridden by redefining REAL_VALUE_NEGATIVE.)  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  return x < 0;
}
#endif /* Target not IEEE */
/* Try to change R into its exact multiplicative inverse in machine mode
   MODE.  Return nonzero function value if successful.  */

int
exact_real_inverse (mode, r)
     enum machine_mode mode;
     REAL_VALUE_TYPE *r;
{
  jmp_buf float_error;
  union
    {
      double d;
      unsigned short i[4];
    } x, t, y;
#ifdef CHECK_FLOAT_VALUE
  int i;
#endif

  /* Usually disable if bounds checks are not reliable.  */
  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
    return 0;

  /* Set array index to the less significant bits in the unions, depending
     on the endian-ness of the host doubles.
     Disable if insufficient information on the data structure.  */
#if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
  return 0;
#else
#if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
#define K 2
#else
#if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
#define K 2
#else
#define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
#endif
#endif
#endif

  if (setjmp (float_error))
    {
      /* Don't do the optimization if there was an arithmetic error.  */
    fail:
      set_float_handler (NULL_PTR);
      return 0;
    }
  set_float_handler (float_error);

  /* Domain check the argument.  */
  x.d = *r;
  if (x.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
    goto fail;
#endif

  /* Compute the reciprocal and check for numerical exactness.
     It is unnecessary to check all the significand bits to determine
     whether X is a power of 2.  If X is not, then it is impossible for
     the bottom half significand of both X and 1/X to be all zero bits.
     Hence we ignore the data structure of the top half and examine only
     the low order bits of the two significands.  */
  t.d = 1.0 / x.d;
  if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
    goto fail;

  /* Truncate to the required mode and range-check the result.  */
  y.d = REAL_VALUE_TRUNCATE (mode, t.d);
#ifdef CHECK_FLOAT_VALUE
  i = 0;
  if (CHECK_FLOAT_VALUE (mode, y.d, i))
    goto fail;
#endif

  /* Fail if truncation changed the value.  */
  if (y.d != t.d || y.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
    goto fail;
#endif

  /* Output the reciprocal and return success flag.  */
  set_float_handler (NULL_PTR);
  *r = y.d;
  return 1;
}
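
/* Only powers of two can pass the exactness test: 1/x has a one-bit
   significand exactly when x does, so for instance x = 4.0 yields the
   exact inverse 0.25, while for x = 3.0 the repeating binary fraction
   1/3 leaves nonzero low-order significand bits and the routine fails,
   leaving the division alone.  */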
/* Convert C9X hexadecimal floating point string constant S.  Return
   real value type in mode MODE.  This function uses the host computer's
   floating point arithmetic when there is no REAL_ARITHMETIC.  */

REAL_VALUE_TYPE
real_hex_to_f (s, mode)
     char *s;
     enum machine_mode mode;
{
  REAL_VALUE_TYPE ip;
  char *p = s;
  unsigned HOST_WIDE_INT low, high;
  int shcount, nrmcount, k;
  int sign, expsign, isfloat;
  int lost = 0;     /* Nonzero low order bits shifted out and discarded.  */
  int frexpon = 0;  /* Bits after the decimal point.  */
  int expon = 0;    /* Value of exponent.  */
  int decpt = 0;    /* How many decimal points.  */
  int gotp = 0;     /* How many P's.  */
  char c;

  isfloat = 0;
  expsign = 1;
  ip = 0.0;

  while (*p == ' ' || *p == '\t')
    ++p;

  /* Sign, if any, comes first.  */
  sign = 1;
  if (*p == '-')
    {
      sign = -1;
      ++p;
    }

  /* The string is supposed to start with 0x or 0X.  */
  if (*p == '0')
    {
      ++p;
      if (*p == 'x' || *p == 'X')
        ++p;
      else
        abort ();
    }
  else
    abort ();

  while (*p == '0')
    ++p;

  high = 0;
  low = 0;
  shcount = 0;
  while ((c = *p) != '\0')
    {
      if ((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F')
          || (c >= 'a' && c <= 'f'))
        {
          k = c & CHARMASK;
          if (k >= 'a' && k <= 'f')
            k = k - 'a' + 10;
          else if (k >= 'A')
            k = k - 'A' + 10;
          else
            k = k - '0';

          if ((high & 0xf0000000) == 0)
            {
              high = (high << 4) + ((low >> 28) & 15);
              low = (low << 4) + k;
              shcount += 4;
              if (decpt)
                frexpon += 4;
            }
          else
            {
              /* Record nonzero lost bits.  */
              lost |= k;
              if (! decpt)
                frexpon -= 4;
            }
          ++p;
        }
      else if (c == '.')
        {
          ++decpt;
          ++p;
        }

      else if (c == 'p' || c == 'P')
        {
          ++gotp;
          ++p;
          /* Sign of exponent.  */
          if (*p == '-')
            {
              expsign = -1;
              ++p;
            }

          /* Value of exponent.
             The exponent field is a decimal integer.  */
          while (ISDIGIT (*p))
            {
              k = (*p++ & CHARMASK) - '0';
              expon = 10 * expon + k;
            }

          expon *= expsign;
          /* F suffix is ambiguous in the significand part
             so it must appear after the decimal exponent field.  */
          if (*p == 'f' || *p == 'F')
            {
              isfloat = 1;
              ++p;
              break;
            }
        }

      else if (c == 'l' || c == 'L')
        {
          ++p;
          break;
        }
      else
        break;
    }
  /* Abort if last character read was not legitimate.  */
  c = *p;
  if ((c != '\0' && c != ' ' && c != '\n' && c != '\r') || (decpt > 1))
    abort ();

  /* There must be either one decimal point or one p.  */
  if (decpt == 0 && gotp == 0)
    abort ();

  shcount -= 4;
  if (high == 0 && low == 0)
    return dconst0;

  /* Normalize.  */
  nrmcount = 0;
  if (high == 0)
    {
      high = low;
      low = 0;
      nrmcount += 32;
    }

  /* Leave a high guard bit for carry-out.  */
  if ((high & 0x80000000) != 0)
    {
      lost |= low & 1;
      low = (low >> 1) | (high << 31);
      high = high >> 1;
      nrmcount -= 1;
    }

  if ((high & 0xffff8000) == 0)
    {
      high = (high << 16) + ((low >> 16) & 0xffff);
      low = low << 16;
      nrmcount += 16;
    }

  while ((high & 0xc0000000) == 0)
    {
      high = (high << 1) + ((low >> 31) & 1);
      low = low << 1;
      nrmcount += 1;
    }

  if (isfloat || GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    {
      /* Keep 24 bits precision, bits 0x7fffff80.
         Rounding bit is 0x40.  */
      lost = lost | low | (high & 0x3f);
      low = 0;
      if (high & 0x40)
        {
          if ((high & 0x80) || lost)
            high += 0x40;
        }
      high &= 0xffffff80;
    }
  else
    {
      /* We need real.c to do long double formats, so here default
         to double precision.  */
#if HOST_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      /* IEEE double.
         Keep 53 bits precision, bits 0x7fffffff fffffc00.
         Rounding bit is low word 0x200.  */
      lost = lost | (low & 0x1ff);
      if (low & 0x200)
        {
          if ((low & 0x400) || lost)
            {
              low = (low + 0x200) & 0xfffffc00;
              if (low == 0)
                high += 1;
            }
        }
      low &= 0xfffffc00;
#else
      /* Assume it's a VAX with 56-bit significand,
         bits 0x7fffffff ffffff80.  */
      lost = lost | (low & 0x7f);
      if (low & 0x40)
        {
          if ((low & 0x80) || lost)
            {
              low = (low + 0x40) & 0xffffff80;
              if (low == 0)
                high += 1;
            }
        }
      low &= 0xffffff80;
#endif
    }

  ip = (double) high;
  ip = REAL_VALUE_LDEXP (ip, 32) + (double) low;
  /* Apply shifts and exponent value as power of 2.  */
  ip = REAL_VALUE_LDEXP (ip, expon - (nrmcount + frexpon));

  if (sign < 0)
    ip = -ip;
  return ip;
}
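
/* For instance, parsing "0x1.8p3" accumulates the hex digits 1 and 8,
   counts frexpon = 4 fractional bits for the digit after the point,
   and reads a binary exponent of 3, giving
   0x18 * 2**(3 - 4) = 24 * 0.5 = 12.0.  */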
#endif /* no REAL_ARITHMETIC */

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (t)
     tree t;
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
      if (! TREE_UNSIGNED (type)
          && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
          && ! TREE_OVERFLOW (tem))
        return tem;
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_fast_math)
        return convert (type,
                        fold (build (MINUS_EXPR, TREE_TYPE (t),
                                     TREE_OPERAND (t, 1),
                                     TREE_OPERAND (t, 0))));
      break;

    default:
      break;
    }

  return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (t), t));
}
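
/* The - (A - B) -> B - A rewrite is fenced off for floating point
   because the two forms differ under IEEE signed zeros: when A == B,
   - (A - B) is -0.0 while B - A is +0.0.  flag_fast_math licenses
   ignoring that distinction.  */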
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  If NEGATE_P is true, we
   are negating all of IN.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (in, code, conp, litp, negate_p)
     tree in;
     enum tree_code code;
     tree *conp, *litp;
     int negate_p;
{
  tree var = 0;

  *conp = 0;
  *litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CONSTANT (in))
    *conp = in;

  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p) *litp = negate_expr (*litp);
      if (neg_conp_p) *conp = negate_expr (*conp);
      if (neg_var_p) var = negate_expr (var);
    }
  else
    var = in;

  if (negate_p)
    {
      var = negate_expr (var);
      *conp = negate_expr (*conp);
      *litp = negate_expr (*litp);
    }

  return var;
}
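
/* For example, with CODE == PLUS_EXPR and IN == x - 4, the MINUS_EXPR
   qualifies for splitting: the literal 4 is found as the subtracted
   operand and negated, so *LITP becomes -4, *CONP stays null, and x is
   returned as the variable part; x + -4 recombines to IN.  */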
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE, except if CODE is a
   MINUS_EXPR, in which case we use PLUS_EXPR since split_tree will already
   have taken care of the negations.  */

static tree
associate_trees (t1, t2, code, type)
     tree t1, t2;
     enum tree_code code;
     tree type;
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  if (code == MINUS_EXPR)
    code = PLUS_EXPR;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (TREE_CODE (t1) == NEGATE_EXPR)
        return build (MINUS_EXPR, type, convert (type, t2),
                      convert (type, TREE_OPERAND (t1, 0)));
      else if (TREE_CODE (t2) == NEGATE_EXPR)
        return build (MINUS_EXPR, type, convert (type, t1),
                      convert (type, TREE_OPERAND (t2, 0)));
      else
        return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.
   If FORSIZE is nonzero, compute overflow for unsigned types.  */

static tree
int_const_binop (code, arg1, arg2, notrunc, forsize)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc, forsize;
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  register tree t;
  int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;
    case RSHIFT_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
                      &low, &hi);
      break;
    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }
  if (forsize && hi == 0 && low < 10000)
    return size_int_type_wide (low, TREE_TYPE (arg1));
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc ? (!uns || forsize) && overflow
        : force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (forsize
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
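
/* In the MIN_EXPR/MAX_EXPR case above, LOW is first reused as a flag
   that is 1 when arg1 < arg2; comparing it against (code == MIN_EXPR)
   selects arg1 exactly when the comparison matches the sense of the
   operation.  E.g. MIN (3, 7): the flag is 1 and code is MIN_EXPR, so
   the result is 3.  */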
/* Define input and output argument for const_binop_1.  */
struct cb_args
{
  enum tree_code code;          /* Input: tree code for operation.  */
  tree type;                    /* Input: tree type for operation.  */
  REAL_VALUE_TYPE d1, d2;       /* Input: floating point operands.  */
  tree t;                       /* Output: constant for result.  */
};

/* Do the real arithmetic for const_binop while protected by a
   float overflow handler.  */

static void
const_binop_1 (data)
     PTR data;
{
  struct cb_args *args = (struct cb_args *) data;
  REAL_VALUE_TYPE value;

#ifdef REAL_ARITHMETIC
  REAL_ARITHMETIC (value, args->code, args->d1, args->d2);
#else
  switch (args->code)
    {
    case PLUS_EXPR:
      value = args->d1 + args->d2;
      break;

    case MINUS_EXPR:
      value = args->d1 - args->d2;
      break;

    case MULT_EXPR:
      value = args->d1 * args->d2;
      break;

    case RDIV_EXPR:
#ifndef REAL_INFINITY
      if (args->d2 == 0)
        abort ();
#endif

      value = args->d1 / args->d2;
      break;

    case MIN_EXPR:
      value = MIN (args->d1, args->d2);
      break;

    case MAX_EXPR:
      value = MAX (args->d1, args->d2);
      break;

    default:
      abort ();
    }
#endif /* no REAL_ARITHMETIC */

  args->t
    = build_real (args->type,
                  real_value_truncate (TYPE_MODE (args->type), value));
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc, 0);

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      int overflow = 0;
      tree t;
      struct cb_args args;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      /* Set up input for const_binop_1.  */
      args.type = TREE_TYPE (arg1);
      args.d1 = d1;
      args.d2 = d2;
      args.code = code;

      if (do_float_handler (const_binop_1, (PTR) &args))
        /* Receive output from const_binop_1.  */
        t = args.t;
      else
        {
          /* We got an exception from const_binop_1.  */
          t = copy_node (arg1);
          overflow = 1;
        }

      TREE_OVERFLOW (t)
        = (force_fit_type (t, overflow)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      register tree type = TREE_TYPE (arg1);
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);
      register tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            register tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
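
/* The COMPLEX_CST cases use the textbook identities
   (a + bi) * (c + di) = (ac - bd) + (ad + bc)i and
   (a + bi) / (c + di) = ((ac + bd) + (bc - ad)i) / (c*c + d*d),
   each component computed by recursive const_binop calls on the real
   and imaginary parts.  */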
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (number, kind)
     HOST_WIDE_INT number;
     enum size_type_kind kind;
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}
/* Likewise, but the desired type is specified explicitly.  */

tree
size_int_type_wide (number, type)
     HOST_WIDE_INT number;
     tree type;
{
  /* Type-size nodes already made for small sizes.  */
  static tree size_table[2048 + 1];
  static int init_p = 0;
  tree t;

  if (ggc_p && ! init_p)
    {
      ggc_add_tree_root ((tree *) size_table,
                         sizeof size_table / sizeof (tree));
      init_p = 1;
    }

  /* If this is a positive number that fits in the table we use to hold
     cached entries, see if it is already in the table and put it there
     if not.  */
  if (number >= 0 && number < (int) (sizeof size_table / sizeof size_table[0]))
    {
      if (size_table[number] != 0)
        for (t = size_table[number]; t != 0; t = TREE_CHAIN (t))
          if (TREE_TYPE (t) == type)
            return t;

      if (! ggc_p)
        {
          /* Make this a permanent node.  */
          push_obstacks_nochange ();
          end_temporary_allocation ();
        }

      t = build_int_2 (number, 0);
      TREE_TYPE (t) = type;
      TREE_CHAIN (t) = size_table[number];
      size_table[number] = t;

      if (! ggc_p)
        pop_obstacks ();

      return t;
    }

  t = build_int_2 (number, number < 0 ? -1 : 0);
  TREE_TYPE (t) = type;
  TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
  return t;
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */
tree
size_binop (code, arg0, arg1)
     enum tree_code code;
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0, 1);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (arg0, arg1)
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
                       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
                       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
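
/* E.g. for unsigned sizetype constants ARG0 = 2 and ARG1 = 5, the
   result is built as 0 - (ssizetype) (5 - 2) = -3: the subtraction
   5 - 2 is done first in the unsigned type, where it cannot overflow,
   and only the small nonnegative difference is converted and
   negated.  */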
/* This structure is used to communicate arguments to fold_convert_1.  */
struct fc_args
{
  tree arg1;                    /* Input: value to convert.  */
  tree type;                    /* Input: type to convert value to.  */
  tree t;                       /* Output: result of conversion.  */
};

/* Function to convert floating-point constants, protected by floating
   point exception handler.  */

static void
fold_convert_1 (data)
     PTR data;
{
  struct fc_args *args = (struct fc_args *) data;

  args->t = build_real (args->type,
                        real_value_truncate (TYPE_MODE (args->type),
                                             TREE_REAL_CST (args->arg1)));
}
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (t, arg1)
     register tree t;
     register tree arg1;
{
  register tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return t;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* Don't initialize these, use assignments.
             Initialized local aggregates don't work on old compilers.  */
          REAL_VALUE_TYPE x;
          REAL_VALUE_TYPE l;
          REAL_VALUE_TYPE u;
          tree type1 = TREE_TYPE (arg1);
          int no_upper_bound;

          x = TREE_REAL_CST (arg1);
          l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

          no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
          if (!no_upper_bound)
            u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */
#ifdef REAL_ARITHMETIC
          REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
          if (!no_upper_bound)
            REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
#else
          l--;
          if (!no_upper_bound)
            u++;
#endif
          /* If X is a NaN, use zero instead and show we have an overflow.
             Otherwise, range check.  */
          if (REAL_VALUE_ISNAN (x))
            overflow = 1, x = dconst0;
          else if (! (REAL_VALUES_LESS (l, x)
                      && !no_upper_bound
                      && REAL_VALUES_LESS (x, u)))
            overflow = 1;

#ifndef REAL_ARITHMETIC
          {
            HOST_WIDE_INT low, high;
            HOST_WIDE_INT half_word
              = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);

            if (x < 0)
              x = -x;

            high = (HOST_WIDE_INT) (x / half_word / half_word);
            x -= (REAL_VALUE_TYPE) high * half_word * half_word;
            if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
              {
                low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
                low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
              }
            else
              low = (HOST_WIDE_INT) x;
            if (TREE_REAL_CST (arg1) < 0)
              neg_double (low, high, &low, &high);
            t = build_int_2 (low, high);
          }
#else
          {
            HOST_WIDE_INT low, high;
            REAL_VALUE_TO_INT (&low, &high, x);
            t = build_int_2 (low, high);
          }
#endif
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2127 TREE_TYPE (t) = type;
2129 else if (TREE_CODE (type) == REAL_TYPE)
2131 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2132 if (TREE_CODE (arg1) == INTEGER_CST)
2133 return build_real_from_int_cst (type, arg1);
2134 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2135 if (TREE_CODE (arg1) == REAL_CST)
2137 struct fc_args args;
2139 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
2141 t = arg1;
2142 TREE_TYPE (arg1) = type;
2143 return t;
2146 /* Set up input for fold_convert_1(). */
2147 args.arg1 = arg1;
2148 args.type = type;
2150 if (do_float_handler (fold_convert_1, (PTR) &args))
2152 /* Receive output from fold_convert_1() */
2153 t = args.t;
2155 else
2157 /* We got an exception from fold_convert_1() */
2158 overflow = 1;
2159 t = copy_node (arg1);
2162 TREE_OVERFLOW (t)
2163 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
2164 TREE_CONSTANT_OVERFLOW (t)
2165 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2166 return t;
2169 TREE_CONSTANT (t) = 1;
2170 return t;
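/* Illustration (hypothetical example, not part of the original file):
   constant conversions of the kind fold_convert performs at compile
   time.  Assumes 8-bit signed chars and two's complement integers.  */
#if 0
static void
fold_convert_examples ()
{
  signed char c = (signed char) 300;	/* folds to 44, overflow noted */
  int i = (int) 3.9;			/* truncated toward zero: 3 */
  double d = (double) 7;		/* folds to the REAL_CST 7.0 */
}
#endif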
2173 /* Return an expr equal to X but certainly not valid as an lvalue. */
2175 tree
2176 non_lvalue (x)
2177 tree x;
2179 tree result;
2181 /* These things are certainly not lvalues. */
2182 if (TREE_CODE (x) == NON_LVALUE_EXPR
2183 || TREE_CODE (x) == INTEGER_CST
2184 || TREE_CODE (x) == REAL_CST
2185 || TREE_CODE (x) == STRING_CST
2186 || TREE_CODE (x) == ADDR_EXPR)
2187 return x;
2189 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2190 TREE_CONSTANT (result) = TREE_CONSTANT (x);
2191 return result;
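/* Illustration (hypothetical example, not part of the original file):
   the effect of the NON_LVALUE_EXPR wrapper.  fold simplifies `x + 0'
   to plain `x', but the simplified expression must not become
   assignable, so non_lvalue wraps it.  */
#if 0
static void
non_lvalue_example ()
{
  int x = 1, y;
  y = x + 0;		/* folds to y = x */
  /* (x + 0) = 5;	   must remain invalid after folding */
}
#endif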
2194 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2195 Zero means allow extended lvalues. */
2197 int pedantic_lvalues;
2199 /* When pedantic, return an expr equal to X but certainly not valid as a
2200 pedantic lvalue. Otherwise, return X. */
2202 tree
2203 pedantic_non_lvalue (x)
2204 tree x;
2206 if (pedantic_lvalues)
2207 return non_lvalue (x);
2208 else
2209 return x;
2212 /* Given a tree comparison code, return the code that is the logical inverse
2213 of the given code. It is not safe to do this for floating-point
2214 comparisons, except for NE_EXPR and EQ_EXPR. */
2216 static enum tree_code
2217 invert_tree_comparison (code)
2218 enum tree_code code;
2220 switch (code)
2222 case EQ_EXPR:
2223 return NE_EXPR;
2224 case NE_EXPR:
2225 return EQ_EXPR;
2226 case GT_EXPR:
2227 return LE_EXPR;
2228 case GE_EXPR:
2229 return LT_EXPR;
2230 case LT_EXPR:
2231 return GE_EXPR;
2232 case LE_EXPR:
2233 return GT_EXPR;
2234 default:
2235 abort ();
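/* Illustration (hypothetical example, not part of the original file):
   why only EQ_EXPR and NE_EXPR invert safely for floating point.
   With IEEE NaNs, `x < y' and `x >= y' are both false on unordered
   operands, so `!(x < y)' is not equivalent to `x >= y'.  */
#if 0
static int
invert_comparison_examples (i, j, x, y)
     int i, j;
     double x, y;
{
  int a = !(i < j);	/* safely folded to i >= j */
  int b = !(x < y);	/* kept as a TRUTH_NOT_EXPR for IEEE */
  return a & b;
}
#endif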
2239 /* Similar, but return the comparison that results if the operands are
2240 swapped. This is safe for floating-point. */
2242 static enum tree_code
2243 swap_tree_comparison (code)
2244 enum tree_code code;
2246 switch (code)
2248 case EQ_EXPR:
2249 case NE_EXPR:
2250 return code;
2251 case GT_EXPR:
2252 return LT_EXPR;
2253 case GE_EXPR:
2254 return LE_EXPR;
2255 case LT_EXPR:
2256 return GT_EXPR;
2257 case LE_EXPR:
2258 return GE_EXPR;
2259 default:
2260 abort ();
2264 /* Return nonzero if CODE is a tree code that represents a truth value. */
2266 static int
2267 truth_value_p (code)
2268 enum tree_code code;
2270 return (TREE_CODE_CLASS (code) == '<'
2271 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2272 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2273 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2276 /* Return nonzero if two operands are necessarily equal.
2277 If ONLY_CONST is non-zero, only return non-zero for constants.
2278 This function tests whether the operands are indistinguishable;
2279 it does not test whether they are equal using C's == operation.
2280 The distinction is important for IEEE floating point, because
2281 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2282 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
2284 int
2285 operand_equal_p (arg0, arg1, only_const)
2286 tree arg0, arg1;
2287 int only_const;
2289 /* If the two types don't have the same signedness, we can't consider
2290 the operands equal. We must check this before the STRIP_NOPS calls
2291 because they may change the signedness of the arguments. */
2292 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2293 return 0;
2295 STRIP_NOPS (arg0);
2296 STRIP_NOPS (arg1);
2298 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2299 /* This is needed for conversions and for COMPONENT_REF.
2300 Might as well play it safe and always test this. */
2301 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2302 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2303 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2304 return 0;
2306 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2307 We don't care about side effects in that case because the SAVE_EXPR
2308 takes care of that for us. In all other cases, two expressions are
2309 equal if they have no side effects. If we have two identical
2310 expressions with side effects that should be treated the same due
2311 to the only side effects being identical SAVE_EXPR's, that will
2312 be detected in the recursive calls below. */
2313 if (arg0 == arg1 && ! only_const
2314 && (TREE_CODE (arg0) == SAVE_EXPR
2315 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2316 return 1;
2318 /* Next handle constant cases, those for which we can return 1 even
2319 if ONLY_CONST is set. */
2320 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2321 switch (TREE_CODE (arg0))
2323 case INTEGER_CST:
2324 return (! TREE_CONSTANT_OVERFLOW (arg0)
2325 && ! TREE_CONSTANT_OVERFLOW (arg1)
2326 && tree_int_cst_equal (arg0, arg1));
2328 case REAL_CST:
2329 return (! TREE_CONSTANT_OVERFLOW (arg0)
2330 && ! TREE_CONSTANT_OVERFLOW (arg1)
2331 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2332 TREE_REAL_CST (arg1)));
2334 case COMPLEX_CST:
2335 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2336 only_const)
2337 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2338 only_const));
2340 case STRING_CST:
2341 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2342 && ! memcmp (TREE_STRING_POINTER (arg0),
2343 TREE_STRING_POINTER (arg1),
2344 TREE_STRING_LENGTH (arg0)));
2346 case ADDR_EXPR:
2347 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2348 0);
2349 default:
2350 break;
2353 if (only_const)
2354 return 0;
2356 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2358 case '1':
2359 /* Two conversions are equal only if signedness and modes match. */
2360 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2361 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2362 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2363 return 0;
2365 return operand_equal_p (TREE_OPERAND (arg0, 0),
2366 TREE_OPERAND (arg1, 0), 0);
2368 case '<':
2369 case '2':
2370 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2371 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2372 0))
2373 return 1;
2375 /* For commutative ops, allow the other order. */
2376 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2377 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2378 || TREE_CODE (arg0) == BIT_IOR_EXPR
2379 || TREE_CODE (arg0) == BIT_XOR_EXPR
2380 || TREE_CODE (arg0) == BIT_AND_EXPR
2381 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2382 && operand_equal_p (TREE_OPERAND (arg0, 0),
2383 TREE_OPERAND (arg1, 1), 0)
2384 && operand_equal_p (TREE_OPERAND (arg0, 1),
2385 TREE_OPERAND (arg1, 0), 0));
2387 case 'r':
2388 /* If either of the pointer (or reference) expressions we are dereferencing
2389 contains a side effect, these cannot be equal. */
2390 if (TREE_SIDE_EFFECTS (arg0)
2391 || TREE_SIDE_EFFECTS (arg1))
2392 return 0;
2394 switch (TREE_CODE (arg0))
2396 case INDIRECT_REF:
2397 return operand_equal_p (TREE_OPERAND (arg0, 0),
2398 TREE_OPERAND (arg1, 0), 0);
2400 case COMPONENT_REF:
2401 case ARRAY_REF:
2402 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2403 TREE_OPERAND (arg1, 0), 0)
2404 && operand_equal_p (TREE_OPERAND (arg0, 1),
2405 TREE_OPERAND (arg1, 1), 0));
2407 case BIT_FIELD_REF:
2408 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2409 TREE_OPERAND (arg1, 0), 0)
2410 && operand_equal_p (TREE_OPERAND (arg0, 1),
2411 TREE_OPERAND (arg1, 1), 0)
2412 && operand_equal_p (TREE_OPERAND (arg0, 2),
2413 TREE_OPERAND (arg1, 2), 0));
2414 default:
2415 return 0;
2418 case 'e':
2419 if (TREE_CODE (arg0) == RTL_EXPR)
2420 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2421 return 0;
2423 default:
2424 return 0;
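/* Illustration (hypothetical example, not part of the original file):
   the IEEE distinctions documented above.  `-0.0 == 0.0' is true,
   yet the two constants are distinguishable (1.0 / 0.0 is +Inf while
   1.0 / -0.0 is -Inf), so the REAL_CST case insists on
   REAL_VALUES_IDENTICAL rather than numeric equality.  */
#if 0
static double pz = 0.0;		/* equal to nz under ==, but ... */
static double nz = -0.0;	/* ... not an identical constant */
#endif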
2428 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2429 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2431 When in doubt, return 0. */
2433 static int
2434 operand_equal_for_comparison_p (arg0, arg1, other)
2435 tree arg0, arg1;
2436 tree other;
2438 int unsignedp1, unsignedpo;
2439 tree primarg0, primarg1, primother;
2440 unsigned int correct_width;
2442 if (operand_equal_p (arg0, arg1, 0))
2443 return 1;
2445 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2446 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2447 return 0;
2449 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2450 and see if the inner values are the same. This removes any
2451 signedness comparison, which doesn't matter here. */
2452 primarg0 = arg0, primarg1 = arg1;
2453 STRIP_NOPS (primarg0); STRIP_NOPS (primarg1);
2454 if (operand_equal_p (primarg0, primarg1, 0))
2455 return 1;
2457 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2458 actual comparison operand, ARG0.
2460 First throw away any conversions to wider types
2461 already present in the operands. */
2463 primarg1 = get_narrower (arg1, &unsignedp1);
2464 primother = get_narrower (other, &unsignedpo);
2466 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2467 if (unsignedp1 == unsignedpo
2468 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2469 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2471 tree type = TREE_TYPE (arg0);
2473 /* Make sure the shorter operand is extended the right way
2474 to match the longer operand. */
2475 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
2476 TREE_TYPE (primarg1)),
2477 primarg1);
2479 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2480 return 1;
2483 return 0;
2486 /* See if ARG is an expression that is either a comparison or is performing
2487 arithmetic on comparisons. The comparisons must only be comparing
2488 two different values, which will be stored in *CVAL1 and *CVAL2; if
2489 they are non-zero it means that some operands have already been found.
2490 No variables may be used anywhere else in the expression except in the
2491 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2492 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2494 If this is true, return 1. Otherwise, return zero. */
2496 static int
2497 twoval_comparison_p (arg, cval1, cval2, save_p)
2498 tree arg;
2499 tree *cval1, *cval2;
2500 int *save_p;
2502 enum tree_code code = TREE_CODE (arg);
2503 char class = TREE_CODE_CLASS (code);
2505 /* We can handle some of the 'e' cases here. */
2506 if (class == 'e' && code == TRUTH_NOT_EXPR)
2507 class = '1';
2508 else if (class == 'e'
2509 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2510 || code == COMPOUND_EXPR))
2511 class = '2';
2513 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2514 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2516 /* If we've already found a CVAL1 or CVAL2, this expression is
2517 too complex to handle. */
2518 if (*cval1 || *cval2)
2519 return 0;
2521 class = '1';
2522 *save_p = 1;
2525 switch (class)
2527 case '1':
2528 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2530 case '2':
2531 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2532 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2533 cval1, cval2, save_p));
2535 case 'c':
2536 return 1;
2538 case 'e':
2539 if (code == COND_EXPR)
2540 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2541 cval1, cval2, save_p)
2542 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2543 cval1, cval2, save_p)
2544 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2545 cval1, cval2, save_p));
2546 return 0;
2548 case '<':
2549 /* First see if we can handle the first operand, then the second. For
2550 the second operand, we know *CVAL1 can't be zero. It must be that
2551 one side of the comparison is each of the values; test for the
2552 case where this isn't true by failing if the two operands
2553 are the same. */
2555 if (operand_equal_p (TREE_OPERAND (arg, 0),
2556 TREE_OPERAND (arg, 1), 0))
2557 return 0;
2559 if (*cval1 == 0)
2560 *cval1 = TREE_OPERAND (arg, 0);
2561 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2562 ;
2563 else if (*cval2 == 0)
2564 *cval2 = TREE_OPERAND (arg, 0);
2565 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2566 ;
2567 else
2568 return 0;
2570 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2571 ;
2572 else if (*cval2 == 0)
2573 *cval2 = TREE_OPERAND (arg, 1);
2574 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2575 ;
2576 else
2577 return 0;
2579 return 1;
2581 default:
2582 return 0;
2586 /* ARG is a tree that is known to contain just arithmetic operations and
2587 comparisons. Evaluate the operations in the tree substituting NEW0 for
2588 any occurrence of OLD0 as an operand of a comparison and likewise for
2589 NEW1 and OLD1. */
2591 static tree
2592 eval_subst (arg, old0, new0, old1, new1)
2593 tree arg;
2594 tree old0, new0, old1, new1;
2596 tree type = TREE_TYPE (arg);
2597 enum tree_code code = TREE_CODE (arg);
2598 char class = TREE_CODE_CLASS (code);
2600 /* We can handle some of the 'e' cases here. */
2601 if (class == 'e' && code == TRUTH_NOT_EXPR)
2602 class = '1';
2603 else if (class == 'e'
2604 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2605 class = '2';
2607 switch (class)
2609 case '1':
2610 return fold (build1 (code, type,
2611 eval_subst (TREE_OPERAND (arg, 0),
2612 old0, new0, old1, new1)));
2614 case '2':
2615 return fold (build (code, type,
2616 eval_subst (TREE_OPERAND (arg, 0),
2617 old0, new0, old1, new1),
2618 eval_subst (TREE_OPERAND (arg, 1),
2619 old0, new0, old1, new1)));
2621 case 'e':
2622 switch (code)
2624 case SAVE_EXPR:
2625 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2627 case COMPOUND_EXPR:
2628 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2630 case COND_EXPR:
2631 return fold (build (code, type,
2632 eval_subst (TREE_OPERAND (arg, 0),
2633 old0, new0, old1, new1),
2634 eval_subst (TREE_OPERAND (arg, 1),
2635 old0, new0, old1, new1),
2636 eval_subst (TREE_OPERAND (arg, 2),
2637 old0, new0, old1, new1)));
2638 default:
2639 break;
2641 /* fall through - ??? */
2643 case '<':
2645 tree arg0 = TREE_OPERAND (arg, 0);
2646 tree arg1 = TREE_OPERAND (arg, 1);
2648 /* We need to check both for exact equality and tree equality. The
2649 former will be true if the operand has a side-effect. In that
2650 case, we know the operand occurred exactly once. */
2652 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2653 arg0 = new0;
2654 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2655 arg0 = new1;
2657 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2658 arg1 = new0;
2659 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2660 arg1 = new1;
2662 return fold (build (code, type, arg0, arg1));
2665 default:
2666 return arg;
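/* Illustration (hypothetical example, not part of the original file):
   what eval_subst computes.  If ARG represents `a < b && b < 10' and
   we substitute 5 for a and 7 for b, the rebuilt tree is
   `5 < 7 && 7 < 10', which the fold calls above collapse to 1.  */
#if 0
static tree
eval_subst_example (arg, a, b)
     tree arg, a, b;
{
  return eval_subst (arg, a, build_int_2 (5, 0), b, build_int_2 (7, 0));
}
#endif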
2670 /* Return a tree for the case when the result of an expression is RESULT
2671 converted to TYPE and OMITTED was previously an operand of the expression
2672 but is now not needed (e.g., we folded OMITTED * 0).
2674 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2675 the conversion of RESULT to TYPE. */
2677 static tree
2678 omit_one_operand (type, result, omitted)
2679 tree type, result, omitted;
2681 tree t = convert (type, result);
2683 if (TREE_SIDE_EFFECTS (omitted))
2684 return build (COMPOUND_EXPR, type, omitted, t);
2686 return non_lvalue (t);
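/* Illustration (hypothetical example, not part of the original file):
   folding `f () * 0'.  The product is known to be 0, but the call
   must still happen, so omit_one_operand yields the COMPOUND_EXPR
   `(f (), 0)' rather than a bare zero.  */
#if 0
extern int f ();
static int
omit_example ()
{
  return f () * 0;	/* folds to (f (), 0) */
}
#endif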
2689 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2691 static tree
2692 pedantic_omit_one_operand (type, result, omitted)
2693 tree type, result, omitted;
2695 tree t = convert (type, result);
2697 if (TREE_SIDE_EFFECTS (omitted))
2698 return build (COMPOUND_EXPR, type, omitted, t);
2700 return pedantic_non_lvalue (t);
2705 /* Return a simplified tree node for the truth-negation of ARG. This
2706 never alters ARG itself. We assume that ARG is an operation that
2707 returns a truth value (0 or 1). */
2709 tree
2710 invert_truthvalue (arg)
2711 tree arg;
2713 tree type = TREE_TYPE (arg);
2714 enum tree_code code = TREE_CODE (arg);
2716 if (code == ERROR_MARK)
2717 return arg;
2719 /* If this is a comparison, we can simply invert it, except for
2720 floating-point non-equality comparisons, in which case we just
2721 enclose a TRUTH_NOT_EXPR around what we have. */
2723 if (TREE_CODE_CLASS (code) == '<')
2725 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2726 && !flag_fast_math && code != NE_EXPR && code != EQ_EXPR)
2727 return build1 (TRUTH_NOT_EXPR, type, arg);
2728 else
2729 return build (invert_tree_comparison (code), type,
2730 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2733 switch (code)
2735 case INTEGER_CST:
2736 return convert (type, build_int_2 (integer_zerop (arg), 0));
2738 case TRUTH_AND_EXPR:
2739 return build (TRUTH_OR_EXPR, type,
2740 invert_truthvalue (TREE_OPERAND (arg, 0)),
2741 invert_truthvalue (TREE_OPERAND (arg, 1)));
2743 case TRUTH_OR_EXPR:
2744 return build (TRUTH_AND_EXPR, type,
2745 invert_truthvalue (TREE_OPERAND (arg, 0)),
2746 invert_truthvalue (TREE_OPERAND (arg, 1)));
2748 case TRUTH_XOR_EXPR:
2749 /* Here we can invert either operand. We invert the first operand
2750 unless the second operand is a TRUTH_NOT_EXPR in which case our
2751 result is the XOR of the first operand with the inside of the
2752 negation of the second operand. */
2754 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2755 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2756 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2757 else
2758 return build (TRUTH_XOR_EXPR, type,
2759 invert_truthvalue (TREE_OPERAND (arg, 0)),
2760 TREE_OPERAND (arg, 1));
2762 case TRUTH_ANDIF_EXPR:
2763 return build (TRUTH_ORIF_EXPR, type,
2764 invert_truthvalue (TREE_OPERAND (arg, 0)),
2765 invert_truthvalue (TREE_OPERAND (arg, 1)));
2767 case TRUTH_ORIF_EXPR:
2768 return build (TRUTH_ANDIF_EXPR, type,
2769 invert_truthvalue (TREE_OPERAND (arg, 0)),
2770 invert_truthvalue (TREE_OPERAND (arg, 1)));
2772 case TRUTH_NOT_EXPR:
2773 return TREE_OPERAND (arg, 0);
2775 case COND_EXPR:
2776 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2777 invert_truthvalue (TREE_OPERAND (arg, 1)),
2778 invert_truthvalue (TREE_OPERAND (arg, 2)));
2780 case COMPOUND_EXPR:
2781 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2782 invert_truthvalue (TREE_OPERAND (arg, 1)));
2784 case WITH_RECORD_EXPR:
2785 return build (WITH_RECORD_EXPR, type,
2786 invert_truthvalue (TREE_OPERAND (arg, 0)),
2787 TREE_OPERAND (arg, 1));
2789 case NON_LVALUE_EXPR:
2790 return invert_truthvalue (TREE_OPERAND (arg, 0));
2792 case NOP_EXPR:
2793 case CONVERT_EXPR:
2794 case FLOAT_EXPR:
2795 return build1 (TREE_CODE (arg), type,
2796 invert_truthvalue (TREE_OPERAND (arg, 0)));
2798 case BIT_AND_EXPR:
2799 if (!integer_onep (TREE_OPERAND (arg, 1)))
2800 break;
2801 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2803 case SAVE_EXPR:
2804 return build1 (TRUTH_NOT_EXPR, type, arg);
2806 case CLEANUP_POINT_EXPR:
2807 return build1 (CLEANUP_POINT_EXPR, type,
2808 invert_truthvalue (TREE_OPERAND (arg, 0)));
2810 default:
2811 break;
2813 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2814 abort ();
2815 return build1 (TRUTH_NOT_EXPR, type, arg);
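/* Illustration (hypothetical example, not part of the original file):
   the recursive De Morgan rewrites above at the source level.  */
#if 0
static int
invert_truthvalue_example (a, b, c)
     int a, b, c;
{
  /* !(a < b && c != 0) is rewritten as (a >= b || c == 0);
     the TRUTH_NOT_EXPR is pushed down instead of being kept.  */
  return !(a < b && c != 0);
}
#endif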
2818 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2819 operands are another bit-wise operation with a common input. If so,
2820 distribute the bit operations to save an operation and possibly two if
2821 constants are involved. For example, convert
2822 (A | B) & (A | C) into A | (B & C)
2823 Further simplification will occur if B and C are constants.
2825 If this optimization cannot be done, 0 will be returned. */
2827 static tree
2828 distribute_bit_expr (code, type, arg0, arg1)
2829 enum tree_code code;
2830 tree type;
2831 tree arg0, arg1;
2833 tree common;
2834 tree left, right;
2836 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2837 || TREE_CODE (arg0) == code
2838 || (TREE_CODE (arg0) != BIT_AND_EXPR
2839 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2840 return 0;
2842 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2844 common = TREE_OPERAND (arg0, 0);
2845 left = TREE_OPERAND (arg0, 1);
2846 right = TREE_OPERAND (arg1, 1);
2848 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2850 common = TREE_OPERAND (arg0, 0);
2851 left = TREE_OPERAND (arg0, 1);
2852 right = TREE_OPERAND (arg1, 0);
2854 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2856 common = TREE_OPERAND (arg0, 1);
2857 left = TREE_OPERAND (arg0, 0);
2858 right = TREE_OPERAND (arg1, 1);
2860 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2862 common = TREE_OPERAND (arg0, 1);
2863 left = TREE_OPERAND (arg0, 0);
2864 right = TREE_OPERAND (arg1, 0);
2866 else
2867 return 0;
2869 return fold (build (TREE_CODE (arg0), type, common,
2870 fold (build (code, type, left, right))));
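/* Illustration (hypothetical example, not part of the original file):
   the distribution above.  (a | b) & (a | c) becomes a | (b & c),
   and when b and c are constants the inner BIT_AND_EXPR folds too.  */
#if 0
static int
distribute_example (a, b, c)
     int a, b, c;
{
  return (a | b) & (a | c);	/* folds to a | (b & c) */
}
#endif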
2873 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2874 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2876 static tree
2877 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2878 tree inner;
2879 tree type;
2880 int bitsize, bitpos;
2881 int unsignedp;
2883 tree result = build (BIT_FIELD_REF, type, inner,
2884 size_int (bitsize), bitsize_int (bitpos));
2886 TREE_UNSIGNED (result) = unsignedp;
2888 return result;
2891 /* Optimize a bit-field compare.
2893 There are two cases: First is a compare against a constant and the
2894 second is a comparison of two items where the fields are at the same
2895 bit position relative to the start of a chunk (byte, halfword, word)
2896 large enough to contain it. In these cases we can avoid the shift
2897 implicit in bitfield extractions.
2899 For constants, we emit a compare of the shifted constant with the
2900 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2901 compared. For two fields at the same position, we do the ANDs with the
2902 similar mask and compare the result of the ANDs.
2904 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2905 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2906 are the left and right operands of the comparison, respectively.
2908 If the optimization described above can be done, we return the resulting
2909 tree. Otherwise we return zero. */
2911 static tree
2912 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2913 enum tree_code code;
2914 tree compare_type;
2915 tree lhs, rhs;
2917 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2918 tree type = TREE_TYPE (lhs);
2919 tree signed_type, unsigned_type;
2920 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2921 enum machine_mode lmode, rmode, nmode;
2922 int lunsignedp, runsignedp;
2923 int lvolatilep = 0, rvolatilep = 0;
2924 unsigned int alignment;
2925 tree linner, rinner = NULL_TREE;
2926 tree mask;
2927 tree offset;
2929 /* Get all the information about the extractions being done. If the bit size
2930 is the same as the size of the underlying object, we aren't doing an
2931 extraction at all and so can do nothing. We also don't want to
2932 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2933 then will no longer be able to replace it. */
2934 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2935 &lunsignedp, &lvolatilep, &alignment);
2936 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2937 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2938 return 0;
2940 if (!const_p)
2942 /* If this is not a constant, we can only do something if bit positions,
2943 sizes, and signedness are the same. */
2944 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2945 &runsignedp, &rvolatilep, &alignment);
2947 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2948 || lunsignedp != runsignedp || offset != 0
2949 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2950 return 0;
2953 /* See if we can find a mode to refer to this field. We should be able to,
2954 but fail if we can't. */
2955 nmode = get_best_mode (lbitsize, lbitpos,
2956 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2957 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2958 TYPE_ALIGN (TREE_TYPE (rinner))),
2959 word_mode, lvolatilep || rvolatilep);
2960 if (nmode == VOIDmode)
2961 return 0;
2963 /* Set signed and unsigned types of the precision of this mode for the
2964 shifts below. */
2965 signed_type = type_for_mode (nmode, 0);
2966 unsigned_type = type_for_mode (nmode, 1);
2968 /* Compute the bit position and size for the new reference and our offset
2969 within it. If the new reference is the same size as the original, we
2970 won't optimize anything, so return zero. */
2971 nbitsize = GET_MODE_BITSIZE (nmode);
2972 nbitpos = lbitpos & ~ (nbitsize - 1);
2973 lbitpos -= nbitpos;
2974 if (nbitsize == lbitsize)
2975 return 0;
2977 if (BYTES_BIG_ENDIAN)
2978 lbitpos = nbitsize - lbitsize - lbitpos;
2980 /* Make the mask to be used against the extracted field. */
2981 mask = build_int_2 (~0, ~0);
2982 TREE_TYPE (mask) = unsigned_type;
2983 force_fit_type (mask, 0);
2984 mask = convert (unsigned_type, mask);
2985 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2986 mask = const_binop (RSHIFT_EXPR, mask,
2987 size_int (nbitsize - lbitsize - lbitpos), 0);
2989 if (! const_p)
2990 /* If not comparing with constant, just rework the comparison
2991 and return. */
2992 return build (code, compare_type,
2993 build (BIT_AND_EXPR, unsigned_type,
2994 make_bit_field_ref (linner, unsigned_type,
2995 nbitsize, nbitpos, 1),
2996 mask),
2997 build (BIT_AND_EXPR, unsigned_type,
2998 make_bit_field_ref (rinner, unsigned_type,
2999 nbitsize, nbitpos, 1),
3000 mask));
3002 /* Otherwise, we are handling the constant case. See if the constant is too
3003 big for the field. Warn and return a tree for 0 (false) if so. We do
3004 this not only for its own sake, but to avoid having to test for this
3005 error case below. If we didn't, we might generate wrong code.
3007 For unsigned fields, the constant shifted right by the field length should
3008 be all zero. For signed fields, the high-order bits should agree with
3009 the sign bit. */
3011 if (lunsignedp)
3013 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3014 convert (unsigned_type, rhs),
3015 size_int (lbitsize), 0)))
3017 warning ("comparison is always %d due to width of bitfield",
3018 code == NE_EXPR);
3019 return convert (compare_type,
3020 (code == NE_EXPR
3021 ? integer_one_node : integer_zero_node));
3024 else
3026 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
3027 size_int (lbitsize - 1), 0);
3028 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3030 warning ("comparison is always %d due to width of bitfield",
3031 code == NE_EXPR);
3032 return convert (compare_type,
3033 (code == NE_EXPR
3034 ? integer_one_node : integer_zero_node));
3038 /* Single-bit compares should always be against zero. */
3039 if (lbitsize == 1 && ! integer_zerop (rhs))
3041 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3042 rhs = convert (type, integer_zero_node);
3045 /* Make a new bitfield reference, shift the constant over the
3046 appropriate number of bits and mask it with the computed mask
3047 (in case this was a signed field). If we changed it, make a new one. */
3048 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3049 if (lvolatilep)
3051 TREE_SIDE_EFFECTS (lhs) = 1;
3052 TREE_THIS_VOLATILE (lhs) = 1;
3055 rhs = fold (const_binop (BIT_AND_EXPR,
3056 const_binop (LSHIFT_EXPR,
3057 convert (unsigned_type, rhs),
3058 size_int (lbitpos), 0),
3059 mask, 0));
3061 return build (code, compare_type,
3062 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
3063 rhs);
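/* Illustration (hypothetical example, not part of the original file):
   the bit-field optimization above.  The test of X.A below can be
   done as one masked load and compare of the containing unit, with
   no extraction shift; the mask and the shifted constant depend on
   the target's endianness.  */
#if 0
struct bf { unsigned a : 3; unsigned b : 5; };
static int
bit_field_compare_example (x)
     struct bf x;
{
  return x.a == 2;	/* one masked compare, no field extraction */
}
#endif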
3066 /* Subroutine for fold_truthop: decode a field reference.
3068 If EXP is a comparison reference, we return the innermost reference.
3070 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3071 set to the starting bit number.
3073 If the innermost field can be completely contained in a mode-sized
3074 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3076 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3077 otherwise it is not changed.
3079 *PUNSIGNEDP is set to the signedness of the field.
3081 *PMASK is set to the mask used. This is either contained in a
3082 BIT_AND_EXPR or derived from the width of the field.
3084 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3086 Return 0 if this is not a component reference or is one that we can't
3087 do anything with. */
3089 static tree
3090 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
3091 pvolatilep, pmask, pand_mask)
3092 tree exp;
3093 HOST_WIDE_INT *pbitsize, *pbitpos;
3094 enum machine_mode *pmode;
3095 int *punsignedp, *pvolatilep;
3096 tree *pmask;
3097 tree *pand_mask;
3099 tree and_mask = 0;
3100 tree mask, inner, offset;
3101 tree unsigned_type;
3102 unsigned int precision;
3103 unsigned int alignment;
3105 /* All the optimizations using this function assume integer fields.
3106 There are problems with FP fields since the type_for_size call
3107 below can fail for, e.g., XFmode. */
3108 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3109 return 0;
3111 STRIP_NOPS (exp);
3113 if (TREE_CODE (exp) == BIT_AND_EXPR)
3115 and_mask = TREE_OPERAND (exp, 1);
3116 exp = TREE_OPERAND (exp, 0);
3117 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3118 if (TREE_CODE (and_mask) != INTEGER_CST)
3119 return 0;
3123 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3124 punsignedp, pvolatilep, &alignment);
3125 if ((inner == exp && and_mask == 0)
3126 || *pbitsize < 0 || offset != 0
3127 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3128 return 0;
3130 /* Compute the mask to access the bitfield. */
3131 unsigned_type = type_for_size (*pbitsize, 1);
3132 precision = TYPE_PRECISION (unsigned_type);
3134 mask = build_int_2 (~0, ~0);
3135 TREE_TYPE (mask) = unsigned_type;
3136 force_fit_type (mask, 0);
3137 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3138 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3140 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3141 if (and_mask != 0)
3142 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3143 convert (unsigned_type, and_mask), mask));
3145 *pmask = mask;
3146 *pand_mask = and_mask;
3147 return inner;
3150 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
3151 bit positions. */
3153 static int
3154 all_ones_mask_p (mask, size)
3155 tree mask;
3156 int size;
3158 tree type = TREE_TYPE (mask);
3159 unsigned int precision = TYPE_PRECISION (type);
3160 tree tmask;
3162 tmask = build_int_2 (~0, ~0);
3163 TREE_TYPE (tmask) = signed_type (type);
3164 force_fit_type (tmask, 0);
3165 return
3166 tree_int_cst_equal (mask,
3167 const_binop (RSHIFT_EXPR,
3168 const_binop (LSHIFT_EXPR, tmask,
3169 size_int (precision - size),
3170 0),
3171 size_int (precision - size), 0));
3174 /* Subroutine for fold_truthop: determine if an operand is simple enough
3175 to be evaluated unconditionally. */
3177 static int
3178 simple_operand_p (exp)
3179 tree exp;
3181 /* Strip any conversions that don't change the machine mode. */
3182 while ((TREE_CODE (exp) == NOP_EXPR
3183 || TREE_CODE (exp) == CONVERT_EXPR)
3184 && (TYPE_MODE (TREE_TYPE (exp))
3185 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3186 exp = TREE_OPERAND (exp, 0);
3188 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3189 || (DECL_P (exp)
3190 && ! TREE_ADDRESSABLE (exp)
3191 && ! TREE_THIS_VOLATILE (exp)
3192 && ! DECL_NONLOCAL (exp)
3193 /* Don't regard global variables as simple. They may be
3194 allocated in ways unknown to the compiler (shared memory,
3195 #pragma weak, etc). */
3196 && ! TREE_PUBLIC (exp)
3197 && ! DECL_EXTERNAL (exp)
3198 /* Loading a static variable is unduly expensive, but global
3199 registers aren't expensive. */
3200 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3203 /* The following functions are subroutines to fold_range_test and allow it to
3204 try to change a logical combination of comparisons into a range test.
3206 For example, both
3207 X == 2 || X == 3 || X == 4 || X == 5
3208 and
3209 X >= 2 && X <= 5
3210 are converted to
3211 (unsigned) (X - 2) <= 3
3213 We describe each set of comparisons as being either inside or outside
3214 a range, using a variable named like IN_P, and then describe the
3215 range with a lower and upper bound. If one of the bounds is omitted,
3216 it represents either the highest or lowest value of the type.
3218 In the comments below, we represent a range by two numbers in brackets
3219 preceded by a "+" to designate being inside that range, or a "-" to
3220 designate being outside that range, so the condition can be inverted by
3221 flipping the prefix. An omitted bound is represented by a "-". For
3222 example, "- [-, 10]" means being outside the range starting at the lowest
3223 possible value and ending at 10, in other words, being greater than 10.
3224 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3225 always false.
3227 We set up things so that the missing bounds are handled in a consistent
3228 manner so neither a missing bound nor "true" and "false" need to be
3229 handled using a special case. */
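/* Illustration (hypothetical example, not part of the original file):
   the range-test rewrite described above.  Both functions compute
   the same value; fold turns the first form into the second.  */
#if 0
static int
digit_p_before (ch)
     int ch;
{
  return ch >= '0' && ch <= '9';
}

static int
digit_p_after (ch)
     int ch;
{
  return (unsigned) (ch - '0') <= 9;	/* one unsigned comparison */
}
#endif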
3231 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3232 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3233 and UPPER1_P are nonzero if the respective argument is an upper bound
3234 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3235 must be specified for a comparison. ARG1 will be converted to ARG0's
3236 type if both are specified. */
3238 static tree
3239 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
3240 enum tree_code code;
3241 tree type;
3242 tree arg0, arg1;
3243 int upper0_p, upper1_p;
3245 tree tem;
3246 int result;
3247 int sgn0, sgn1;
3249 /* If neither arg represents infinity, do the normal operation.
3250 Else, if not a comparison, return infinity. Else handle the special
3251 comparison rules. Note that most of the cases below won't occur, but
3252 are handled for consistency. */
3254 if (arg0 != 0 && arg1 != 0)
3256 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3257 arg0, convert (TREE_TYPE (arg0), arg1)));
3258 STRIP_NOPS (tem);
3259 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3262 if (TREE_CODE_CLASS (code) != '<')
3263 return 0;
3265 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3266 for neither. In real mathematics, we cannot assume open-ended ranges are
3267 the same. But, this is computer arithmetic, where numbers are finite.
3268 We can therefore make the transformation of any unbounded range with
3269 the value Z, Z being greater than any representable number. This permits
3270 us to treat unbounded ranges as equal. */
3271 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3272 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3273 switch (code)
3275 case EQ_EXPR:
3276 result = sgn0 == sgn1;
3277 break;
3278 case NE_EXPR:
3279 result = sgn0 != sgn1;
3280 break;
3281 case LT_EXPR:
3282 result = sgn0 < sgn1;
3283 break;
3284 case LE_EXPR:
3285 result = sgn0 <= sgn1;
3286 break;
3287 case GT_EXPR:
3288 result = sgn0 > sgn1;
3289 break;
3290 case GE_EXPR:
3291 result = sgn0 >= sgn1;
3292 break;
3293 default:
3294 abort ();
3297 return convert (type, result ? integer_one_node : integer_zero_node);
3300 /* Given EXP, a logical expression, set the range it is testing into
3301 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3302 actually being tested. *PLOW and *PHIGH will be made the same type
3303 as the returned expression. If EXP is not a comparison, we will most
3304 likely not be returning a useful value and range. */
3306 static tree
3307 make_range (exp, pin_p, plow, phigh)
3308 tree exp;
3309 int *pin_p;
3310 tree *plow, *phigh;
3312 enum tree_code code;
3313 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3314 tree orig_type = NULL_TREE;
3315 int in_p, n_in_p;
3316 tree low, high, n_low, n_high;
3318 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3319 and see if we can refine the range. Some of the cases below may not
3320 happen, but it doesn't seem worth worrying about this. We "continue"
3321 the outer loop when we've changed something; otherwise we "break"
3322 the switch, which will "break" the while. */
3324 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
3326 while (1)
3328 code = TREE_CODE (exp);
3330 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3332 arg0 = TREE_OPERAND (exp, 0);
3333 if (TREE_CODE_CLASS (code) == '<'
3334 || TREE_CODE_CLASS (code) == '1'
3335 || TREE_CODE_CLASS (code) == '2')
3336 type = TREE_TYPE (arg0);
3337 if (TREE_CODE_CLASS (code) == '2'
3338 || TREE_CODE_CLASS (code) == '<'
3339 || (TREE_CODE_CLASS (code) == 'e'
3340 && TREE_CODE_LENGTH (code) > 1))
3341 arg1 = TREE_OPERAND (exp, 1);
3344 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3345 lose a cast by accident. */
3346 if (type != NULL_TREE && orig_type == NULL_TREE)
3347 orig_type = type;
3349 switch (code)
3351 case TRUTH_NOT_EXPR:
3352 in_p = ! in_p, exp = arg0;
3353 continue;
3355 case EQ_EXPR: case NE_EXPR:
3356 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3357 /* We can only do something if the range is testing for zero
3358 and if the second operand is an integer constant. Note that
3359 saying something is "in" the range we make is done by
3360 complementing IN_P, since it was set in the initial case of
3361 being not equal to zero; "out" is leaving it alone. */
3362 if (low == 0 || high == 0
3363 || ! integer_zerop (low) || ! integer_zerop (high)
3364 || TREE_CODE (arg1) != INTEGER_CST)
3365 break;
3367 switch (code)
3369 case NE_EXPR: /* - [c, c] */
3370 low = high = arg1;
3371 break;
3372 case EQ_EXPR: /* + [c, c] */
3373 in_p = ! in_p, low = high = arg1;
3374 break;
3375 case GT_EXPR: /* - [-, c] */
3376 low = 0, high = arg1;
3377 break;
3378 case GE_EXPR: /* + [c, -] */
3379 in_p = ! in_p, low = arg1, high = 0;
3380 break;
3381 case LT_EXPR: /* - [c, -] */
3382 low = arg1, high = 0;
3383 break;
3384 case LE_EXPR: /* + [-, c] */
3385 in_p = ! in_p, low = 0, high = arg1;
3386 break;
3387 default:
3388 abort ();
3391 exp = arg0;
3393 /* If this is an unsigned comparison, we also know that EXP is
3394 greater than or equal to zero. We base the range tests we make
3395 on that fact, so we record it here so we can parse existing
3396 range tests. */
3397 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3399 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3400 1, convert (type, integer_zero_node),
3401 NULL_TREE))
3402 break;
3404 in_p = n_in_p, low = n_low, high = n_high;
3406 /* If the high bound is missing, but we
3407 have a low bound, reverse the range so
3408 it goes from zero to the low bound minus 1. */
3409 if (high == 0 && low)
3411 in_p = ! in_p;
3412 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3413 integer_one_node, 0);
3414 low = convert (type, integer_zero_node);
3417 continue;
3419 case NEGATE_EXPR:
3420 /* (-x) IN [a,b] -> x in [-b, -a] */
3421 n_low = range_binop (MINUS_EXPR, type,
3422 convert (type, integer_zero_node), 0, high, 1);
3423 n_high = range_binop (MINUS_EXPR, type,
3424 convert (type, integer_zero_node), 0, low, 0);
3425 low = n_low, high = n_high;
3426 exp = arg0;
3427 continue;
3429 case BIT_NOT_EXPR:
3430 /* ~ X -> -X - 1 */
3431 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3432 convert (type, integer_one_node));
3433 continue;
3435 case PLUS_EXPR: case MINUS_EXPR:
3436 if (TREE_CODE (arg1) != INTEGER_CST)
3437 break;
3439 /* If EXP is signed, any overflow in the computation is undefined,
3440 so we don't worry about it so long as our computations on
3441 the bounds don't overflow. For unsigned, overflow is defined
3442 and this is exactly the right thing. */
3443 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3444 type, low, 0, arg1, 0);
3445 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3446 type, high, 1, arg1, 0);
3447 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3448 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3449 break;
3451 /* Check for an unsigned range which has wrapped around the maximum
3452 value thus making n_high < n_low, and normalize it. */
3453 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3455 low = range_binop (PLUS_EXPR, type, n_high, 0,
3456 integer_one_node, 0);
3457 high = range_binop (MINUS_EXPR, type, n_low, 0,
3458 integer_one_node, 0);
3460 /* If the range is of the form +/- [ x+1, x ], we won't
3461 be able to normalize it. But then, it represents the
3462 whole range or the empty set, so make it
3463 +/- [ -, - ]. */
3464 if (tree_int_cst_equal (n_low, low)
3465 && tree_int_cst_equal (n_high, high))
3466 low = high = 0;
3467 else
3468 in_p = ! in_p;
3470 else
3471 low = n_low, high = n_high;
3473 exp = arg0;
3474 continue;
3476 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3477 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3478 break;
3480 if (! INTEGRAL_TYPE_P (type)
3481 || (low != 0 && ! int_fits_type_p (low, type))
3482 || (high != 0 && ! int_fits_type_p (high, type)))
3483 break;
3485 n_low = low, n_high = high;
3487 if (n_low != 0)
3488 n_low = convert (type, n_low);
3490 if (n_high != 0)
3491 n_high = convert (type, n_high);
3493 /* If we're converting from an unsigned to a signed type,
3494 we will be doing the comparison as unsigned. The tests above
3495 have already verified that LOW and HIGH are both positive.
3497 So we have to make sure that the original unsigned value will
3498 be interpreted as positive. */
3499 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3501 tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
3502 tree high_positive;
3504 /* A range without an upper bound is, naturally, unbounded.
3505 Since convert would have cropped a very large value, use
3506 the max value for the destination type. */
3507 high_positive
3508 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3509 : TYPE_MAX_VALUE (type);
3511 high_positive = fold (build (RSHIFT_EXPR, type,
3512 convert (type, high_positive),
3513 convert (type, integer_one_node)));
3515 /* If the low bound is specified, "and" the range with the
3516 range for which the original unsigned value will be
3517 positive. */
3518 if (low != 0)
3520 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3521 1, n_low, n_high,
3522 1, convert (type, integer_zero_node),
3523 high_positive))
3524 break;
3526 in_p = (n_in_p == in_p);
3528 else
3530 /* Otherwise, "or" the range with the range of the input
3531 that will be interpreted as negative. */
3532 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3533 0, n_low, n_high,
3534 1, convert (type, integer_zero_node),
3535 high_positive))
3536 break;
3538 in_p = (in_p != n_in_p);
3542 exp = arg0;
3543 low = n_low, high = n_high;
3544 continue;
3546 default:
3547 break;
3550 break;
3553 /* If EXP is a constant, we can evaluate whether this is true or false. */
3554 if (TREE_CODE (exp) == INTEGER_CST)
3556 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3557 exp, 0, low, 0))
3558 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3559 exp, 1, high, 1)));
3560 low = high = 0;
3561 exp = 0;
3564 *pin_p = in_p, *plow = low, *phigh = high;
3565 return exp;
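/* Illustration (hypothetical example, not part of the original file):
   how the loop above peels an expression down to a range.  For
   `x + 2 > 5' it starts with "x + 2 outside [-, 5]", and the
   PLUS_EXPR case shifts both bounds, leaving "x outside [-, 3]",
   i.e. the same test as `x > 3'.  */
#if 0
static int
make_range_example (x)
     int x;
{
  return x + 2 > 5;	/* same range as x > 3 */
}
#endif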
3568 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3569 type, TYPE, return an expression to test if EXP is in (or out of, depending
3570 on IN_P) the range. */
3572 static tree
3573 build_range_check (type, exp, in_p, low, high)
3574 tree type;
3575 tree exp;
3576 int in_p;
3577 tree low, high;
3579 tree etype = TREE_TYPE (exp);
3580 tree utype, value;
3582 if (! in_p
3583 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3584 return invert_truthvalue (value);
3586 else if (low == 0 && high == 0)
3587 return convert (type, integer_one_node);
3589 else if (low == 0)
3590 return fold (build (LE_EXPR, type, exp, high));
3592 else if (high == 0)
3593 return fold (build (GE_EXPR, type, exp, low));
3595 else if (operand_equal_p (low, high, 0))
3596 return fold (build (EQ_EXPR, type, exp, low));
3598 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3599 return build_range_check (type, exp, 1, 0, high);
3601 else if (integer_zerop (low))
3603 utype = unsigned_type (etype);
3604 return build_range_check (type, convert (utype, exp), 1, 0,
3605 convert (utype, high));
3608 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3609 && ! TREE_OVERFLOW (value))
3610 return build_range_check (type,
3611 fold (build (MINUS_EXPR, etype, exp, low)),
3612 1, convert (etype, integer_zero_node), value);
3613 else
3614 return 0;
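/* Illustration (hypothetical example, not part of the original file):
   the check built above for a bounded range.  "X in [low, high]"
   becomes the single unsigned comparison
   `(unsigned) (X - low) <= high - low'.  */
#if 0
static int
range_check_example (x)
     int x;
{
  return x >= 10 && x <= 14;	/* folds to (unsigned) (x - 10) <= 4 */
}
#endif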
3617 /* Given two ranges, see if we can merge them into one. Return 1 if we
3618 can, 0 if we can't. Set the output range into the specified parameters. */
3620 static int
3621 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3622 int *pin_p;
3623 tree *plow, *phigh;
3624 int in0_p, in1_p;
3625 tree low0, high0, low1, high1;
3627 int no_overlap;
3628 int subset;
3629 int temp;
3630 tree tem;
3631 int in_p;
3632 tree low, high;
3633 int lowequal = ((low0 == 0 && low1 == 0)
3634 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3635 low0, 0, low1, 0)));
3636 int highequal = ((high0 == 0 && high1 == 0)
3637 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3638 high0, 1, high1, 1)));
3640 /* Make range 0 be the range that starts first, or ends last if they
3641 start at the same value. Swap them if that is not the case. */
3642 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3643 low0, 0, low1, 0))
3644 || (lowequal
3645 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3646 high1, 1, high0, 1))))
3648 temp = in0_p, in0_p = in1_p, in1_p = temp;
3649 tem = low0, low0 = low1, low1 = tem;
3650 tem = high0, high0 = high1, high1 = tem;
3653 /* Now flag two cases, whether the ranges are disjoint or whether the
3654 second range is totally subsumed in the first. Note that the tests
3655 below are simplified by the ones above. */
3656 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3657 high0, 1, low1, 0));
3658 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3659 high1, 1, high0, 1));
3661 /* We now have four cases, depending on whether we are including or
3662 excluding the two ranges. */
3663 if (in0_p && in1_p)
3665 /* If they don't overlap, the result is false. If the second range
3666 is a subset it is the result. Otherwise, the range is from the start
3667 of the second to the end of the first. */
3668 if (no_overlap)
3669 in_p = 0, low = high = 0;
3670 else if (subset)
3671 in_p = 1, low = low1, high = high1;
3672 else
3673 in_p = 1, low = low1, high = high0;
3676 else if (in0_p && ! in1_p)
3678 /* If they don't overlap, the result is the first range. If they are
3679 equal, the result is false. If the second range is a subset of the
3680 first, and the ranges begin at the same place, we go from just after
3681 the end of the first range to the end of the second. If the second
3682 range is not a subset of the first, or if it is a subset and both
3683 ranges end at the same place, the range starts at the start of the
3684 first range and ends just before the second range.
3685 Otherwise, we can't describe this as a single range. */
3686 if (no_overlap)
3687 in_p = 1, low = low0, high = high0;
3688 else if (lowequal && highequal)
3689 in_p = 0, low = high = 0;
3690 else if (subset && lowequal)
3692 in_p = 1, high = high0;
3693 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3694 integer_one_node, 0);
3696 else if (! subset || highequal)
3698 in_p = 1, low = low0;
3699 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3700 integer_one_node, 0);
3702 else
3703 return 0;
3706 else if (! in0_p && in1_p)
3708 /* If they don't overlap, the result is the second range. If the second
3709 is a subset of the first, the result is false. Otherwise,
3710 the range starts just after the first range and ends at the
3711 end of the second. */
3712 if (no_overlap)
3713 in_p = 1, low = low1, high = high1;
3714 else if (subset || highequal)
3715 in_p = 0, low = high = 0;
3716 else
3718 in_p = 1, high = high1;
3719 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3720 integer_one_node, 0);
3724 else
3726 /* The case where we are excluding both ranges. Here the complex case
3727 is if they don't overlap. In that case, the only time we have a
3728 range is if they are adjacent. If the second is a subset of the
3729 first, the result is the first. Otherwise, the range to exclude
3730 starts at the beginning of the first range and ends at the end of the
3731 second. */
3732 if (no_overlap)
3734 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3735 range_binop (PLUS_EXPR, NULL_TREE,
3736 high0, 1,
3737 integer_one_node, 1),
3738 1, low1, 0)))
3739 in_p = 0, low = low0, high = high1;
3740 else
3741 return 0;
3743 else if (subset)
3744 in_p = 0, low = low0, high = high0;
3745 else
3746 in_p = 0, low = low0, high = high1;
3749 *pin_p = in_p, *plow = low, *phigh = high;
3750 return 1;
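/* Illustration (hypothetical example, not part of the original file):
   the in0_p && in1_p case above.  "x in [2, 10]" and "x in [5, 20]"
   overlap without either containing the other, so the merged range
   runs from the start of the second to the end of the first:
   "x in [5, 10]".  */
#if 0
static int
merge_ranges_example (x)
     int x;
{
  return x >= 2 && x <= 10 && x >= 5 && x <= 20;	/* x in [5, 10] */
}
#endif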
3753 /* EXP is some logical combination of boolean tests. See if we can
3754 merge it into some range test. Return the new tree if so. */
3756 static tree
3757 fold_range_test (exp)
3758 tree exp;
3760 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3761 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3762 int in0_p, in1_p, in_p;
3763 tree low0, low1, low, high0, high1, high;
3764 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3765 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3766 tree tem;
3768 /* If this is an OR operation, invert both sides; we will invert
3769 again at the end. */
3770 if (or_op)
3771 in0_p = ! in0_p, in1_p = ! in1_p;
3773 /* If both expressions are the same, if we can merge the ranges, and we
3774 can build the range test, return it or it inverted. If one of the
3775 ranges is always true or always false, consider it to be the same
3776 expression as the other. */
3777 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3778 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3779 in1_p, low1, high1)
3780 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3781 lhs != 0 ? lhs
3782 : rhs != 0 ? rhs : integer_zero_node,
3783 in_p, low, high))))
3784 return or_op ? invert_truthvalue (tem) : tem;
3786 /* On machines where the branch cost is high, if this is a
3787 short-circuited branch and the underlying object on both sides
3788 is the same, make a non-short-circuit operation. */
3789 else if (BRANCH_COST >= 2
3790 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3791 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3792 && operand_equal_p (lhs, rhs, 0))
3794 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3795 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3796 which cases we can't do this. */
3797 if (simple_operand_p (lhs))
3798 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3799 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3800 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3801 TREE_OPERAND (exp, 1));
3803 else if (global_bindings_p () == 0
3804 && ! contains_placeholder_p (lhs))
3806 tree common = save_expr (lhs);
3808 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3809 or_op ? ! in0_p : in0_p,
3810 low0, high0))
3811 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3812 or_op ? ! in1_p : in1_p,
3813 low1, high1))))
3814 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3815 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3816 TREE_TYPE (exp), lhs, rhs);
3820 return 0;
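/* Illustration (hypothetical example, not part of the original file):
   the branch-cost rewrite above.  The ranges [2, 2] and [7, 7]
   cannot be merged, but both sides test the same simple operand, so
   on a BRANCH_COST >= 2 target the short-circuit || becomes a
   non-short-circuit OR with a single branch.  */
#if 0
static int
fold_range_test_example (x)
     int x;
{
  return x == 2 || x == 7;	/* becomes (x == 2) | (x == 7) */
}
#endif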
3823 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3824 bit value. Arrange things so the extra bits will be set to zero if and
3825 only if C is sign-extended to its full width. If MASK is nonzero,
3826 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3828 static tree
3829 unextend (c, p, unsignedp, mask)
3830 tree c;
3831 int p;
3832 int unsignedp;
3833 tree mask;
3835 tree type = TREE_TYPE (c);
3836 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3837 tree temp;
3839 if (p == modesize || unsignedp)
3840 return c;
3842 /* We work by getting just the sign bit into the low-order bit, then
3843 into the high-order bit, then sign-extend. We then XOR that value
3844 with C. */
3845 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3846 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3848 /* We must use a signed type in order to get an arithmetic right shift.
3849 However, we must also avoid introducing accidental overflows, so that
3850 a subsequent call to integer_zerop will work. Hence we must
3851 do the type conversion here. At this point, the constant is either
3852 zero or one, and the conversion to a signed type can never overflow.
3853 We could get an overflow if this conversion is done anywhere else. */
3854 if (TREE_UNSIGNED (type))
3855 temp = convert (signed_type (type), temp);
3857 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3858 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3859 if (mask != 0)
3860 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3861 /* If necessary, convert the type back to match the type of C. */
3862 if (TREE_UNSIGNED (type))
3863 temp = convert (type, temp);
3865 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
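/* Illustration (hypothetical example, not part of the original file):
   unextend arithmetic for an assumed 32-bit mode with P = 4.  For
   C = 0xa (field sign bit set) the computed TEMP is 0xfffffff0, so
   C ^ TEMP = 0xfffffffa; had C arrived already sign-extended as
   0xfffffffa, the same XOR would clear the extra bits, giving 0xa.  */
#if 0
static unsigned int u1 = 0x0000000a ^ 0xfffffff0;	/* 0xfffffffa */
static unsigned int u2 = 0xfffffffa ^ 0xfffffff0;	/* 0x0000000a */
#endif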
3868 /* Find ways of folding logical expressions of LHS and RHS:
3869 Try to merge two comparisons to the same innermost item.
3870 Look for range tests like "ch >= '0' && ch <= '9'".
3871 Look for combinations of simple terms on machines with expensive branches
3872 and evaluate the RHS unconditionally.
3874 For example, if we have p->a == 2 && p->b == 4 and we can make an
3875 object large enough to span both A and B, we can do this with a comparison
3876 against the object ANDed with a mask.
3878 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3879 operations to do this with one comparison.
3881 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3882 function and the one above.
3884 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3885 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3887 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3888 two operands.
3890 We return the simplified tree or 0 if no optimization is possible. */
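/* Illustration (hypothetical example, not part of the original file):
   the adjacent-field merge described above.  With a layout that puts
   A and B in one addressable unit, both byte tests below can fold
   into a single wider load, mask and compare.  */
#if 0
struct two_bytes { unsigned char a, b; };
static int
fold_truthop_example (p)
     struct two_bytes *p;
{
  return p->a == 2 && p->b == 4;	/* one wide compare */
}
#endif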
3892 static tree
3893 fold_truthop (code, truth_type, lhs, rhs)
3894 enum tree_code code;
3895 tree truth_type, lhs, rhs;
3897 /* If this is the "or" of two comparisons, we can do something if
3898 the comparisons are NE_EXPR. If this is the "and", we can do something
3899 if the comparisons are EQ_EXPR. I.e.,
3900 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3902 WANTED_CODE is this operation code. For single bit fields, we can
3903 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3904 comparison for one-bit fields. */
3906 enum tree_code wanted_code;
3907 enum tree_code lcode, rcode;
3908 tree ll_arg, lr_arg, rl_arg, rr_arg;
3909 tree ll_inner, lr_inner, rl_inner, rr_inner;
3910 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3911 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3912 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3913 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3914 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3915 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3916 enum machine_mode lnmode, rnmode;
3917 tree ll_mask, lr_mask, rl_mask, rr_mask;
3918 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3919 tree l_const, r_const;
3920 tree lntype, rntype, result;
3921 int first_bit, end_bit;
3922 int volatilep;
3924 /* Start by getting the comparison codes. Fail if anything is volatile.
3925 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3926 it were surrounded with a NE_EXPR. */
3928 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3929 return 0;
3931 lcode = TREE_CODE (lhs);
3932 rcode = TREE_CODE (rhs);
3934 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3935 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3937 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3938 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3940 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3941 return 0;
3943 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3944 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3946 ll_arg = TREE_OPERAND (lhs, 0);
3947 lr_arg = TREE_OPERAND (lhs, 1);
3948 rl_arg = TREE_OPERAND (rhs, 0);
3949 rr_arg = TREE_OPERAND (rhs, 1);
3951 /* If the RHS can be evaluated unconditionally and its operands are
3952 simple, it wins to evaluate the RHS unconditionally on machines
3953 with expensive branches. In this case, this isn't a comparison
3954 that can be merged. Avoid doing this if the RHS is a floating-point
3955 comparison since those can trap. */
3957 if (BRANCH_COST >= 2
3958 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3959 && simple_operand_p (rl_arg)
3960 && simple_operand_p (rr_arg))
3961 return build (code, truth_type, lhs, rhs);
3963 /* See if the comparisons can be merged. Then get all the parameters for
3964 each side. */
3966 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3967 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3968 return 0;
3970 volatilep = 0;
3971 ll_inner = decode_field_reference (ll_arg,
3972 &ll_bitsize, &ll_bitpos, &ll_mode,
3973 &ll_unsignedp, &volatilep, &ll_mask,
3974 &ll_and_mask);
3975 lr_inner = decode_field_reference (lr_arg,
3976 &lr_bitsize, &lr_bitpos, &lr_mode,
3977 &lr_unsignedp, &volatilep, &lr_mask,
3978 &lr_and_mask);
3979 rl_inner = decode_field_reference (rl_arg,
3980 &rl_bitsize, &rl_bitpos, &rl_mode,
3981 &rl_unsignedp, &volatilep, &rl_mask,
3982 &rl_and_mask);
3983 rr_inner = decode_field_reference (rr_arg,
3984 &rr_bitsize, &rr_bitpos, &rr_mode,
3985 &rr_unsignedp, &volatilep, &rr_mask,
3986 &rr_and_mask);
3988 /* The inner operation on the lhs of each comparison must be the same
3989 if we are to be able to do anything.
3990 Then see if we have constants. If not, the same must be true for
3991 the rhs's. */
3992 if (volatilep || ll_inner == 0 || rl_inner == 0
3993 || ! operand_equal_p (ll_inner, rl_inner, 0))
3994 return 0;
3996 if (TREE_CODE (lr_arg) == INTEGER_CST
3997 && TREE_CODE (rr_arg) == INTEGER_CST)
3998 l_const = lr_arg, r_const = rr_arg;
3999 else if (lr_inner == 0 || rr_inner == 0
4000 || ! operand_equal_p (lr_inner, rr_inner, 0))
4001 return 0;
4002 else
4003 l_const = r_const = 0;
4005 /* If either comparison code is not correct for our logical operation,
4006 fail. However, we can convert a one-bit comparison against zero into
4007 the opposite comparison against that bit being set in the field. */
4009 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4010 if (lcode != wanted_code)
4012 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4014 /* Make the left operand unsigned, since we are only interested
4015 in the value of one bit. Otherwise we are doing the wrong
4016 thing below. */
4017 ll_unsignedp = 1;
4018 l_const = ll_mask;
4020 else
4021 return 0;
4024 /* This is analogous to the code for l_const above. */
4025 if (rcode != wanted_code)
4027 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4029 rl_unsignedp = 1;
4030 r_const = rl_mask;
4032 else
4033 return 0;
4036 /* See if we can find a mode that contains both fields being compared on
4037 the left. If we can't, fail. Otherwise, update all constants and masks
4038 to be relative to a field of that size. */
4039 first_bit = MIN (ll_bitpos, rl_bitpos);
4040 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4041 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4042 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4043 volatilep);
4044 if (lnmode == VOIDmode)
4045 return 0;
4047 lnbitsize = GET_MODE_BITSIZE (lnmode);
4048 lnbitpos = first_bit & ~ (lnbitsize - 1);
4049 lntype = type_for_size (lnbitsize, 1);
4050 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4052 if (BYTES_BIG_ENDIAN)
4054 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4055 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4058 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
4059 size_int (xll_bitpos), 0);
4060 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
4061 size_int (xrl_bitpos), 0);
4063 if (l_const)
4065 l_const = convert (lntype, l_const);
4066 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4067 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4068 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4069 fold (build1 (BIT_NOT_EXPR,
4070 lntype, ll_mask)),
4071 0)))
4073 warning ("comparison is always %d", wanted_code == NE_EXPR);
4075 return convert (truth_type,
4076 wanted_code == NE_EXPR
4077 ? integer_one_node : integer_zero_node);
4080 if (r_const)
4082 r_const = convert (lntype, r_const);
4083 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4084 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4085 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4086 fold (build1 (BIT_NOT_EXPR,
4087 lntype, rl_mask)),
4088 0)))
4090 warning ("comparison is always %d", wanted_code == NE_EXPR);
4092 return convert (truth_type,
4093 wanted_code == NE_EXPR
4094 ? integer_one_node : integer_zero_node);
4098 /* If the right sides are not constant, do the same for them. Also,
4099 disallow this optimization if a size or signedness mismatch occurs
4100 between the left and right sides. */
4101 if (l_const == 0)
4103 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4104 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4105 /* Make sure the two fields on the right
4106 correspond to the left without being swapped. */
4107 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4108 return 0;
4110 first_bit = MIN (lr_bitpos, rr_bitpos);
4111 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4112 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4113 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4114 volatilep);
4115 if (rnmode == VOIDmode)
4116 return 0;
4118 rnbitsize = GET_MODE_BITSIZE (rnmode);
4119 rnbitpos = first_bit & ~ (rnbitsize - 1);
4120 rntype = type_for_size (rnbitsize, 1);
4121 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4123 if (BYTES_BIG_ENDIAN)
4125 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4126 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4129 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
4130 size_int (xlr_bitpos), 0);
4131 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
4132 size_int (xrr_bitpos), 0);
4134 /* Make a mask that corresponds to both fields being compared.
4135 Do this for both items being compared. If the operands are the
4136 same size and the bits being compared are in the same position
4137 then we can do this by masking both and comparing the masked
4138 results. */
4139 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4140 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4141 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4143 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4144 ll_unsignedp || rl_unsignedp);
4145 if (! all_ones_mask_p (ll_mask, lnbitsize))
4146 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4148 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4149 lr_unsignedp || rr_unsignedp);
4150 if (! all_ones_mask_p (lr_mask, rnbitsize))
4151 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4153 return build (wanted_code, truth_type, lhs, rhs);
4156 /* There is still another way we can do something: If both pairs of
4157 fields being compared are adjacent, we may be able to make a wider
4158 field containing them both.
4160 Note that we still must mask the lhs/rhs expressions. Furthermore,
4161 the mask must be shifted to account for the shift done by
4162 make_bit_field_ref. */
4163 if ((ll_bitsize + ll_bitpos == rl_bitpos
4164 && lr_bitsize + lr_bitpos == rr_bitpos)
4165 || (ll_bitpos == rl_bitpos + rl_bitsize
4166 && lr_bitpos == rr_bitpos + rr_bitsize))
4168 tree type;
4170 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4171 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4172 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4173 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4175 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4176 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4177 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4178 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4180 /* Convert to the smaller type before masking out unwanted bits. */
4181 type = lntype;
4182 if (lntype != rntype)
4184 if (lnbitsize > rnbitsize)
4186 lhs = convert (rntype, lhs);
4187 ll_mask = convert (rntype, ll_mask);
4188 type = rntype;
4190 else if (lnbitsize < rnbitsize)
4192 rhs = convert (lntype, rhs);
4193 lr_mask = convert (lntype, lr_mask);
4194 type = lntype;
4198 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4199 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4201 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4202 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4204 return build (wanted_code, truth_type, lhs, rhs);
4207 return 0;
4210 /* Handle the case of comparisons with constants. If there is something in
4211 common between the masks, those bits of the constants must be the same.
4212 If not, the condition is always false. Test for this to avoid generating
4213 incorrect code below. */
4214 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4215 if (! integer_zerop (result)
4216 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4217 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4219 if (wanted_code == NE_EXPR)
4221 warning ("`or' of unmatched not-equal tests is always 1");
4222 return convert (truth_type, integer_one_node);
4224 else
4226 warning ("`and' of mutually exclusive equal-tests is always 0");
4227 return convert (truth_type, integer_zero_node);
4231 /* Construct the expression we will return. First get the component
4232 reference we will make. Unless the mask is all ones the width of
4233 that field, perform the mask operation. Then compare with the
4234 merged constant. */
4235 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4236 ll_unsignedp || rl_unsignedp);
4238 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4239 if (! all_ones_mask_p (ll_mask, lnbitsize))
4240 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4242 return build (wanted_code, truth_type, result,
4243 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
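/* Illustrative sketch, disabled; not part of the original code.  The
   merged-comparison idea on a concrete struct: the 16-bit access and the
   constant 0x0402 assume a little-endian target with no padding, and the
   sketch ignores aliasing rules.  */
#if 0
struct two_fields { unsigned char a, b; };

static int
compare_separately (const struct two_fields *p)
{
  return p->a == 2 && p->b == 4;	/* two loads, two compares */
}

static int
compare_merged (const struct two_fields *p)
{
  return *(const unsigned short *) p == 0x0402;	/* one load, one compare */
}
#endif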
4246 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4247 constant. */
4249 static tree
4250 optimize_minmax_comparison (t)
4251 tree t;
4253 tree type = TREE_TYPE (t);
4254 tree arg0 = TREE_OPERAND (t, 0);
4255 enum tree_code op_code;
4256 tree comp_const = TREE_OPERAND (t, 1);
4257 tree minmax_const;
4258 int consts_equal, consts_lt;
4259 tree inner;
4261 STRIP_SIGN_NOPS (arg0);
4263 op_code = TREE_CODE (arg0);
4264 minmax_const = TREE_OPERAND (arg0, 1);
4265 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4266 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4267 inner = TREE_OPERAND (arg0, 0);
4269 /* If something does not permit us to optimize, return the original tree. */
4270 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4271 || TREE_CODE (comp_const) != INTEGER_CST
4272 || TREE_CONSTANT_OVERFLOW (comp_const)
4273 || TREE_CODE (minmax_const) != INTEGER_CST
4274 || TREE_CONSTANT_OVERFLOW (minmax_const))
4275 return t;
4277 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4278 and GT_EXPR, doing the rest with recursive calls using logical
4279 simplifications. */
4280 switch (TREE_CODE (t))
4282 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4283 return
4284 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4286 case GE_EXPR:
4287 return
4288 fold (build (TRUTH_ORIF_EXPR, type,
4289 optimize_minmax_comparison
4290 (build (EQ_EXPR, type, arg0, comp_const)),
4291 optimize_minmax_comparison
4292 (build (GT_EXPR, type, arg0, comp_const))));
4294 case EQ_EXPR:
4295 if (op_code == MAX_EXPR && consts_equal)
4296 /* MAX (X, 0) == 0 -> X <= 0 */
4297 return fold (build (LE_EXPR, type, inner, comp_const));
4299 else if (op_code == MAX_EXPR && consts_lt)
4300 /* MAX (X, 0) == 5 -> X == 5 */
4301 return fold (build (EQ_EXPR, type, inner, comp_const));
4303 else if (op_code == MAX_EXPR)
4304 /* MAX (X, 0) == -1 -> false */
4305 return omit_one_operand (type, integer_zero_node, inner);
4307 else if (consts_equal)
4308 /* MIN (X, 0) == 0 -> X >= 0 */
4309 return fold (build (GE_EXPR, type, inner, comp_const));
4311 else if (consts_lt)
4312 /* MIN (X, 0) == 5 -> false */
4313 return omit_one_operand (type, integer_zero_node, inner);
4315 else
4316 /* MIN (X, 0) == -1 -> X == -1 */
4317 return fold (build (EQ_EXPR, type, inner, comp_const));
4319 case GT_EXPR:
4320 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4321 /* MAX (X, 0) > 0 -> X > 0
4322 MAX (X, 0) > 5 -> X > 5 */
4323 return fold (build (GT_EXPR, type, inner, comp_const));
4325 else if (op_code == MAX_EXPR)
4326 /* MAX (X, 0) > -1 -> true */
4327 return omit_one_operand (type, integer_one_node, inner);
4329 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4330 /* MIN (X, 0) > 0 -> false
4331 MIN (X, 0) > 5 -> false */
4332 return omit_one_operand (type, integer_zero_node, inner);
4334 else
4335 /* MIN (X, 0) > -1 -> X > -1 */
4336 return fold (build (GT_EXPR, type, inner, comp_const));
4338 default:
4339 return t;
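/* For illustration (not in the original source), the GE_EXPR
   decomposition above in action:

	MAX (X, 3) >= 5
	  -> (MAX (X, 3) == 5) || (MAX (X, 3) > 5)
	  -> (X == 5) || (X > 5)

   since in both subcases the constant in the MAX (3) is less than the
   comparison constant (5).  */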
4343 /* T is an integer expression that is being multiplied, divided, or taken a
4344 modulus (CODE says which and what kind of divide or modulus) by a
4345 constant C. See if we can eliminate that operation by folding it with
4346 other operations already in T. WIDE_TYPE, if non-null, is a type that
4347 should be used for the computation if wider than our type.
4349 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4350 (X * 2) + (Y * 4). We must, however, be assured that either the original
4351 expression would not overflow or that overflow is undefined for the type
4352 in the language in question.
4354 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4355 the machine has a multiply-accumulate insn or that this is part of an
4356 addressing calculation.
4358 If we return a non-null expression, it is an equivalent form of the
4359 original computation, but need not be in the original type. */
4361 static tree
4362 extract_muldiv (t, c, code, wide_type)
4363 tree t;
4364 tree c;
4365 enum tree_code code;
4366 tree wide_type;
4368 tree type = TREE_TYPE (t);
4369 enum tree_code tcode = TREE_CODE (t);
4370 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4371 > GET_MODE_SIZE (TYPE_MODE (type)))
4372 ? wide_type : type);
4373 tree t1, t2;
4374 int same_p = tcode == code;
4375 tree op0 = NULL_TREE, op1 = NULL_TREE;
4377 /* Don't deal with constants of zero here; they confuse the code below. */
4378 if (integer_zerop (c))
4379 return NULL_TREE;
4381 if (TREE_CODE_CLASS (tcode) == '1')
4382 op0 = TREE_OPERAND (t, 0);
4384 if (TREE_CODE_CLASS (tcode) == '2')
4385 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4387 /* Note that we need not handle conditional operations here since fold
4388 already handles those cases. So just do arithmetic here. */
4389 switch (tcode)
4391 case INTEGER_CST:
4392 /* For a constant, we can always simplify if we are a multiply
4393 or (for divide and modulus) if it is a multiple of our constant. */
4394 if (code == MULT_EXPR
4395 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4396 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4397 break;
4399 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4400 /* Pass the constant down and see if we can make a simplification. If
4401 we can, replace this expression with the inner simplification for
4402 possible later conversion to our type or some other type.
4403 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
4404 code == MULT_EXPR ? ctype : NULL_TREE)))
4405 return t1;
4406 break;
4408 case NEGATE_EXPR: case ABS_EXPR:
4409 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4410 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4411 break;
4413 case MIN_EXPR: case MAX_EXPR:
4414 /* If widening the type changes the signedness, then we can't perform
4415 this optimization as that changes the result. */
4416 if (ctype != type && TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4417 break;
4419 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4420 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4421 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4423 if (tree_int_cst_sgn (c) < 0)
4424 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4426 return fold (build (tcode, ctype, convert (ctype, t1),
4427 convert (ctype, t2)));
4429 break;
4431 case WITH_RECORD_EXPR:
4432 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4433 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4434 TREE_OPERAND (t, 1));
4435 break;
4437 case SAVE_EXPR:
4438 /* If this has not been evaluated and the operand has no side effects,
4439 we can see if we can do something inside it and make a new one.
4440 Note that this test is overly conservative since we can do this
4441 if the only reason it had side effects is that it was another
4442 similar SAVE_EXPR, but that isn't worth bothering with. */
4443 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4444 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4445 wide_type)))
4446 return save_expr (t1);
4447 break;
4449 case LSHIFT_EXPR: case RSHIFT_EXPR:
4450 /* If the second operand is constant, this is a multiplication
4451 or floor division by a power of two, so we can treat it that
4452 way unless the multiplier or divisor overflows. */
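/* E.g. (illustration, not in the original source): X << 3 is handled
   as X * 8, and X >> 2 as X / 4 with floor semantics, so the MULT/DIV
   cases below apply unchanged.  */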
4453 if (TREE_CODE (op1) == INTEGER_CST
4454 /* const_binop may not detect overflow correctly,
4455 so check for it explicitly here. */
4456 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4457 && TREE_INT_CST_HIGH (op1) == 0
4458 && 0 != (t1 = convert (ctype,
4459 const_binop (LSHIFT_EXPR, size_one_node,
4460 op1, 0)))
4461 && ! TREE_OVERFLOW (t1))
4462 return extract_muldiv (build (tcode == LSHIFT_EXPR
4463 ? MULT_EXPR : FLOOR_DIV_EXPR,
4464 ctype, convert (ctype, op0), t1),
4465 c, code, wide_type);
4466 break;
4468 case PLUS_EXPR: case MINUS_EXPR:
4469 /* See if we can eliminate the operation on both sides. If we can, we
4470 can return a new PLUS or MINUS. If we can't, the only remaining
4471 cases where we can do anything are if the second operand is a
4472 constant. */
4473 t1 = extract_muldiv (op0, c, code, wide_type);
4474 t2 = extract_muldiv (op1, c, code, wide_type);
4475 if (t1 != 0 && t2 != 0)
4476 return fold (build (tcode, ctype, convert (ctype, t1),
4477 convert (ctype, t2)));
4479 /* If this was a subtraction, negate OP1 and set it to be an addition.
4480 This simplifies the logic below. */
4481 if (tcode == MINUS_EXPR)
4482 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4484 if (TREE_CODE (op1) != INTEGER_CST)
4485 break;
4487 /* If either OP1 or C is negative, this optimization is not safe for
4488 some of the division and remainder types while for others we need
4489 to change the code. */
4490 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4492 if (code == CEIL_DIV_EXPR)
4493 code = FLOOR_DIV_EXPR;
4494 else if (code == CEIL_MOD_EXPR)
4495 code = FLOOR_MOD_EXPR;
4496 else if (code == FLOOR_DIV_EXPR)
4497 code = CEIL_DIV_EXPR;
4498 else if (code == FLOOR_MOD_EXPR)
4499 code = CEIL_MOD_EXPR;
4500 else if (code != MULT_EXPR)
4501 break;
4504 /* Now do the operation and verify it doesn't overflow. */
4505 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4506 if (op1 == 0 || TREE_OVERFLOW (op1))
4507 break;
4509 /* If we have an unsigned type that is not a sizetype, we cannot widen
4510 the operation since it will change the result if the original
4511 computation overflowed. */
4512 if (TREE_UNSIGNED (ctype)
4513 && ! TYPE_IS_SIZETYPE (ctype)
4514 && ctype != type)
4515 break;
4517 /* If we were able to eliminate our operation from the first side,
4518 apply our operation to the second side and reform the PLUS. */
4519 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4520 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4522 /* The last case is if we are a multiply. In that case, we can
4523 apply the distributive law to commute the multiply and addition
4524 if the multiplication of the constants doesn't overflow. */
4525 if (code == MULT_EXPR)
4526 return fold (build (tcode, ctype, fold (build (code, ctype,
4527 convert (ctype, op0),
4528 convert (ctype, c))),
4529 op1));
4531 break;
4533 case MULT_EXPR:
4534 /* We have a special case here if we are doing something like
4535 (C * 8) % 4 since we know that's zero. */
4536 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4537 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4538 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4539 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4540 return omit_one_operand (type, integer_zero_node, op0);
4542 /* ... fall through ... */
4544 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4545 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4546 /* If we can extract our operation from the LHS, do so and return a
4547 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4548 do something only if the second operand is a constant. */
4549 if (same_p
4550 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4551 return fold (build (tcode, ctype, convert (ctype, t1),
4552 convert (ctype, op1)));
4553 else if (tcode == MULT_EXPR && code == MULT_EXPR
4554 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4555 return fold (build (tcode, ctype, convert (ctype, op0),
4556 convert (ctype, t1)));
4557 else if (TREE_CODE (op1) != INTEGER_CST)
4558 return 0;
4560 /* If these are the same operation types, we can associate them
4561 assuming no overflow. */
4562 if (tcode == code
4563 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4564 convert (ctype, c), 0))
4565 && ! TREE_OVERFLOW (t1))
4566 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4568 /* If these operations "cancel" each other, we have the main
4569 optimizations of this pass, which occur when either constant is a
4570 multiple of the other, in which case we replace this with either an
4571 operation of CODE or TCODE.
4573 If we have an unsigned type that is not a sizetype, we cannot do
4574 this since it will change the result if the original computation
4575 overflowed. */
4576 if ((! TREE_UNSIGNED (ctype)
4577 || (TREE_CODE (ctype) == INTEGER_TYPE
4578 && TYPE_IS_SIZETYPE (ctype)))
4579 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4580 || (tcode == MULT_EXPR
4581 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4582 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4584 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4585 return fold (build (tcode, ctype, convert (ctype, op0),
4586 convert (ctype,
4587 const_binop (TRUNC_DIV_EXPR,
4588 op1, c, 0))));
4589 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4590 return fold (build (code, ctype, convert (ctype, op0),
4591 convert (ctype,
4592 const_binop (TRUNC_DIV_EXPR,
4593 c, op1, 0))));
4595 break;
4597 default:
4598 break;
4601 return 0;
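/* Illustrative sketch, disabled; not part of the original code.  The
   example from the comment above extract_muldiv, written on plain
   integers with invented function names; only valid under the
   no-overflow assumption stated there.  */
#if 0
static long
muldiv_before (long x, long y)
{
  return (x * 8 + y * 16) / 4;
}

static long
muldiv_after (long x, long y)
{
  return x * 2 + y * 4;	/* each term divided exactly by 4 */
}
#endif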
4604 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4605 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4606 that we may sometimes modify the tree. */
4608 static tree
4609 strip_compound_expr (t, s)
4610 tree t;
4611 tree s;
4613 enum tree_code code = TREE_CODE (t);
4615 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4616 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4617 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4618 return TREE_OPERAND (t, 1);
4620 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4621 don't bother handling any other types. */
4622 else if (code == COND_EXPR)
4624 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4625 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4626 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4628 else if (TREE_CODE_CLASS (code) == '1')
4629 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4630 else if (TREE_CODE_CLASS (code) == '<'
4631 || TREE_CODE_CLASS (code) == '2')
4633 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4634 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4637 return t;
4640 /* Return a node which has the indicated constant VALUE (either 0 or
4641 1), and is of the indicated TYPE. */
4643 static tree
4644 constant_boolean_node (value, type)
4645 int value;
4646 tree type;
4648 if (type == integer_type_node)
4649 return value ? integer_one_node : integer_zero_node;
4650 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4651 return truthvalue_conversion (value ? integer_one_node :
4652 integer_zero_node);
4653 else
4655 tree t = build_int_2 (value, 0);
4657 TREE_TYPE (t) = type;
4658 return t;
4662 /* Utility function for the following routine, to see how complex a nesting of
4663 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4664 we don't care (to avoid spending too much time on complex expressions). */
4666 static int
4667 count_cond (expr, lim)
4668 tree expr;
4669 int lim;
4671 int true, false;
4673 if (TREE_CODE (expr) != COND_EXPR)
4674 return 0;
4675 else if (lim <= 0)
4676 return 0;
4678 true = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4679 false = count_cond (TREE_OPERAND (expr, 2), lim - 1 - true);
4680 return MIN (lim, 1 + true + false);
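/* Illustrative fragment, disabled; not part of the original code.  It
   shows the kind of identity described in the comment on fold below;
   `type', `x' and `tem' are stand-ins.  */
#if 0
  /* x * 1 => x */
  tem = fold (build (MULT_EXPR, type, x, convert (type, integer_one_node)));
  /* x + 0 => x */
  tem = fold (build (PLUS_EXPR, type, x, convert (type, integer_zero_node)));
#endif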
4683 /* Perform constant folding and related simplification of EXPR.
4684 The related simplifications include x*1 => x, x*0 => 0, etc.,
4685 and application of the associative law.
4686 NOP_EXPR conversions may be removed freely (as long as we
4687 are careful not to change the C type of the overall expression).
4688 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4689 but we can constant-fold them if they have constant operands. */
4691 tree
4692 fold (expr)
4693 tree expr;
4695 register tree t = expr;
4696 tree t1 = NULL_TREE;
4697 tree tem;
4698 tree type = TREE_TYPE (expr);
4699 register tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4700 register enum tree_code code = TREE_CODE (t);
4701 register int kind;
4702 int invert;
4703 /* WINS will be nonzero when the switch is done
4704 if all operands are constant. */
4705 int wins = 1;
4707 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4708 Likewise for a SAVE_EXPR that's already been evaluated. */
4709 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4710 return t;
4712 /* Return right away if already constant. */
4713 if (TREE_CONSTANT (t))
4715 if (code == CONST_DECL)
4716 return DECL_INITIAL (t);
4717 return t;
4720 #ifdef MAX_INTEGER_COMPUTATION_MODE
4721 check_max_integer_computation_mode (expr);
4722 #endif
4724 kind = TREE_CODE_CLASS (code);
4725 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4727 tree subop;
4729 /* Special case for conversion ops that can have fixed point args. */
4730 arg0 = TREE_OPERAND (t, 0);
4732 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4733 if (arg0 != 0)
4734 STRIP_SIGN_NOPS (arg0);
4736 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4737 subop = TREE_REALPART (arg0);
4738 else
4739 subop = arg0;
4741 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4742 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4743 && TREE_CODE (subop) != REAL_CST
4744 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4746 /* Note that TREE_CONSTANT isn't enough:
4747 static var addresses are constant but we can't
4748 do arithmetic on them. */
4749 wins = 0;
4751 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4753 register int len = TREE_CODE_LENGTH (code);
4754 register int i;
4755 for (i = 0; i < len; i++)
4757 tree op = TREE_OPERAND (t, i);
4758 tree subop;
4760 if (op == 0)
4761 continue; /* Valid for CALL_EXPR, at least. */
4763 if (kind == '<' || code == RSHIFT_EXPR)
4765 /* Signedness matters here. Perhaps we can refine this
4766 later. */
4767 STRIP_SIGN_NOPS (op);
4769 else
4770 /* Strip any conversions that don't change the mode. */
4771 STRIP_NOPS (op);
4773 if (TREE_CODE (op) == COMPLEX_CST)
4774 subop = TREE_REALPART (op);
4775 else
4776 subop = op;
4778 if (TREE_CODE (subop) != INTEGER_CST
4779 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4780 && TREE_CODE (subop) != REAL_CST
4781 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4783 /* Note that TREE_CONSTANT isn't enough:
4784 static var addresses are constant but we can't
4785 do arithmetic on them. */
4786 wins = 0;
4788 if (i == 0)
4789 arg0 = op;
4790 else if (i == 1)
4791 arg1 = op;
4795 /* If this is a commutative operation, and ARG0 is a constant, move it
4796 to ARG1 to reduce the number of tests below. */
4797 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4798 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4799 || code == BIT_AND_EXPR)
4800 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4802 tem = arg0; arg0 = arg1; arg1 = tem;
4804 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4805 TREE_OPERAND (t, 1) = tem;
4808 /* Now WINS is set as described above,
4809 ARG0 is the first operand of EXPR,
4810 and ARG1 is the second operand (if it has more than one operand).
4812 First check for cases where an arithmetic operation is applied to a
4813 compound, conditional, or comparison operation. Push the arithmetic
4814 operation inside the compound or conditional to see if any folding
4815 can then be done. Convert comparison to conditional for this purpose.
4816 This also optimizes non-constant cases that used to be done in
4817 expand_expr.
4819 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
4820 one of the operands is a comparison and the other is a comparison, a
4821 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4822 code below would make the expression more complex. Change it to a
4823 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4824 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4826 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4827 || code == EQ_EXPR || code == NE_EXPR)
4828 && ((truth_value_p (TREE_CODE (arg0))
4829 && (truth_value_p (TREE_CODE (arg1))
4830 || (TREE_CODE (arg1) == BIT_AND_EXPR
4831 && integer_onep (TREE_OPERAND (arg1, 1)))))
4832 || (truth_value_p (TREE_CODE (arg1))
4833 && (truth_value_p (TREE_CODE (arg0))
4834 || (TREE_CODE (arg0) == BIT_AND_EXPR
4835 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4837 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4838 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4839 : TRUTH_XOR_EXPR,
4840 type, arg0, arg1));
4842 if (code == EQ_EXPR)
4843 t = invert_truthvalue (t);
4845 return t;
4848 if (TREE_CODE_CLASS (code) == '1')
4850 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4851 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4852 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4853 else if (TREE_CODE (arg0) == COND_EXPR)
4855 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4856 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4857 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4859 /* If this was a conversion, and all we did was to move into
4860 inside the COND_EXPR, bring it back out. But leave it if
4861 it is a conversion from integer to integer and the
4862 result precision is no wider than a word since such a
4863 conversion is cheap and may be optimized away by combine,
4864 while it couldn't if it were outside the COND_EXPR. Then return
4865 so we don't get into an infinite recursion loop taking the
4866 conversion out and then back in. */
4868 if ((code == NOP_EXPR || code == CONVERT_EXPR
4869 || code == NON_LVALUE_EXPR)
4870 && TREE_CODE (t) == COND_EXPR
4871 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4872 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4873 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4874 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4875 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4876 && (INTEGRAL_TYPE_P
4877 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4878 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4879 t = build1 (code, type,
4880 build (COND_EXPR,
4881 TREE_TYPE (TREE_OPERAND
4882 (TREE_OPERAND (t, 1), 0)),
4883 TREE_OPERAND (t, 0),
4884 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4885 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4886 return t;
4888 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4889 return fold (build (COND_EXPR, type, arg0,
4890 fold (build1 (code, type, integer_one_node)),
4891 fold (build1 (code, type, integer_zero_node))));
4893 else if (TREE_CODE_CLASS (code) == '2'
4894 || TREE_CODE_CLASS (code) == '<')
4896 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4897 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4898 fold (build (code, type,
4899 arg0, TREE_OPERAND (arg1, 1))));
4900 else if ((TREE_CODE (arg1) == COND_EXPR
4901 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4902 && TREE_CODE_CLASS (code) != '<'))
4903 && (TREE_CODE (arg0) != COND_EXPR
4904 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4905 && (! TREE_SIDE_EFFECTS (arg0)
4906 || (global_bindings_p () == 0
4907 && ! contains_placeholder_p (arg0))))
4909 tree test, true_value, false_value;
4910 tree lhs = 0, rhs = 0;
4912 if (TREE_CODE (arg1) == COND_EXPR)
4914 test = TREE_OPERAND (arg1, 0);
4915 true_value = TREE_OPERAND (arg1, 1);
4916 false_value = TREE_OPERAND (arg1, 2);
4918 else
4920 tree testtype = TREE_TYPE (arg1);
4921 test = arg1;
4922 true_value = convert (testtype, integer_one_node);
4923 false_value = convert (testtype, integer_zero_node);
4926 /* If ARG0 is complex we want to make sure we only evaluate
4927 it once. Though this is only required if it is volatile, it
4928 might be more efficient even if it is not. However, if we
4929 succeed in folding one part to a constant, we do not need
4930 to make this SAVE_EXPR. Since we do this optimization
4931 primarily to see if we do end up with constant and this
4932 SAVE_EXPR interferes with later optimizations, suppressing
4933 it when we can is important.
4935 If we are not in a function, we can't make a SAVE_EXPR, so don't
4936 try to do so. Don't try to see if the result is a constant
4937 if an arm is a COND_EXPR since we get exponential behavior
4938 in that case. */
4940 if (TREE_CODE (arg0) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4941 && global_bindings_p () == 0
4942 && ((TREE_CODE (arg0) != VAR_DECL
4943 && TREE_CODE (arg0) != PARM_DECL)
4944 || TREE_SIDE_EFFECTS (arg0)))
4946 if (TREE_CODE (true_value) != COND_EXPR)
4947 lhs = fold (build (code, type, arg0, true_value));
4949 if (TREE_CODE (false_value) != COND_EXPR)
4950 rhs = fold (build (code, type, arg0, false_value));
4952 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4953 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4954 arg0 = save_expr (arg0), lhs = rhs = 0;
4957 if (lhs == 0)
4958 lhs = fold (build (code, type, arg0, true_value));
4959 if (rhs == 0)
4960 rhs = fold (build (code, type, arg0, false_value));
4962 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4964 if (TREE_CODE (arg0) == SAVE_EXPR)
4965 return build (COMPOUND_EXPR, type,
4966 convert (void_type_node, arg0),
4967 strip_compound_expr (test, arg0));
4968 else
4969 return convert (type, test);
4972 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4973 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4974 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4975 else if ((TREE_CODE (arg0) == COND_EXPR
4976 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4977 && TREE_CODE_CLASS (code) != '<'))
4978 && (TREE_CODE (arg1) != COND_EXPR
4979 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4980 && (! TREE_SIDE_EFFECTS (arg1)
4981 || (global_bindings_p () == 0
4982 && ! contains_placeholder_p (arg1))))
4984 tree test, true_value, false_value;
4985 tree lhs = 0, rhs = 0;
4987 if (TREE_CODE (arg0) == COND_EXPR)
4989 test = TREE_OPERAND (arg0, 0);
4990 true_value = TREE_OPERAND (arg0, 1);
4991 false_value = TREE_OPERAND (arg0, 2);
4993 else
4995 tree testtype = TREE_TYPE (arg0);
4996 test = arg0;
4997 true_value = convert (testtype, integer_one_node);
4998 false_value = convert (testtype, integer_zero_node);
5001 if (TREE_CODE (arg1) != SAVE_EXPR && ! TREE_CONSTANT (arg1)
5002 && global_bindings_p () == 0
5003 && ((TREE_CODE (arg1) != VAR_DECL
5004 && TREE_CODE (arg1) != PARM_DECL)
5005 || TREE_SIDE_EFFECTS (arg1)))
5007 if (TREE_CODE (true_value) != COND_EXPR)
5008 lhs = fold (build (code, type, true_value, arg1));
5010 if (TREE_CODE (false_value) != COND_EXPR)
5011 rhs = fold (build (code, type, false_value, arg1));
5013 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
5014 && (rhs == 0 || !TREE_CONSTANT (rhs)))
5015 arg1 = save_expr (arg1), lhs = rhs = 0;
5018 if (lhs == 0)
5019 lhs = fold (build (code, type, true_value, arg1));
5021 if (rhs == 0)
5022 rhs = fold (build (code, type, false_value, arg1));
5024 test = fold (build (COND_EXPR, type, test, lhs, rhs));
5025 if (TREE_CODE (arg1) == SAVE_EXPR)
5026 return build (COMPOUND_EXPR, type,
5027 convert (void_type_node, arg1),
5028 strip_compound_expr (test, arg1));
5029 else
5030 return convert (type, test);
5033 else if (TREE_CODE_CLASS (code) == '<'
5034 && TREE_CODE (arg0) == COMPOUND_EXPR)
5035 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5036 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5037 else if (TREE_CODE_CLASS (code) == '<'
5038 && TREE_CODE (arg1) == COMPOUND_EXPR)
5039 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5040 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5042 switch (code)
5044 case INTEGER_CST:
5045 case REAL_CST:
5046 case STRING_CST:
5047 case COMPLEX_CST:
5048 case CONSTRUCTOR:
5049 return t;
5051 case CONST_DECL:
5052 return fold (DECL_INITIAL (t));
5054 case NOP_EXPR:
5055 case FLOAT_EXPR:
5056 case CONVERT_EXPR:
5057 case FIX_TRUNC_EXPR:
5058 /* Other kinds of FIX are not handled properly by fold_convert. */
5060 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5061 return TREE_OPERAND (t, 0);
5063 /* Handle cases of two conversions in a row. */
5064 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5065 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5067 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5068 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5069 tree final_type = TREE_TYPE (t);
5070 int inside_int = INTEGRAL_TYPE_P (inside_type);
5071 int inside_ptr = POINTER_TYPE_P (inside_type);
5072 int inside_float = FLOAT_TYPE_P (inside_type);
5073 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5074 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5075 int inter_int = INTEGRAL_TYPE_P (inter_type);
5076 int inter_ptr = POINTER_TYPE_P (inter_type);
5077 int inter_float = FLOAT_TYPE_P (inter_type);
5078 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5079 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5080 int final_int = INTEGRAL_TYPE_P (final_type);
5081 int final_ptr = POINTER_TYPE_P (final_type);
5082 int final_float = FLOAT_TYPE_P (final_type);
5083 unsigned int final_prec = TYPE_PRECISION (final_type);
5084 int final_unsignedp = TREE_UNSIGNED (final_type);
5086 /* In addition to the cases of two conversions in a row
5087 handled below, if we are converting something to its own
5088 type via an object of identical or wider precision, neither
5089 conversion is needed. */
5090 if (inside_type == final_type
5091 && ((inter_int && final_int) || (inter_float && final_float))
5092 && inter_prec >= final_prec)
5093 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
5095 /* Likewise, if the intermediate and final types are either both
5096 float or both integer, we don't need the middle conversion if
5097 it is wider than the final type and doesn't change the signedness
5098 (for integers). Avoid this if the final type is a pointer
5099 since then we sometimes need the inner conversion. Likewise if
5100 the outer has a precision not equal to the size of its mode. */
5101 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5102 || (inter_float && inside_float))
5103 && inter_prec >= inside_prec
5104 && (inter_float || inter_unsignedp == inside_unsignedp)
5105 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5106 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5107 && ! final_ptr)
5108 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5110 /* If we have a sign-extension of a zero-extended value, we can
5111 replace that by a single zero-extension. */
5112 if (inside_int && inter_int && final_int
5113 && inside_prec < inter_prec && inter_prec < final_prec
5114 && inside_unsignedp && !inter_unsignedp)
5115 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5117 /* Two conversions in a row are not needed unless:
5118 - some conversion is floating-point (overstrict for now), or
5119 - the intermediate type is narrower than both initial and
5120 final, or
5121 - the intermediate type and innermost type differ in signedness,
5122 and the outermost type is wider than the intermediate, or
5123 - the initial type is a pointer type and the precisions of the
5124 intermediate and final types differ, or
5125 - the final type is a pointer type and the precisions of the
5126 initial and intermediate types differ. */
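/* E.g. (illustration, not in the original source): for `short s',
   (short) (int) s needs no conversion at all, (char) (int) s keeps
   only the final narrowing, and (float) (int) s is left alone
   because floating conversions are handled conservatively here.  */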
5127 if (! inside_float && ! inter_float && ! final_float
5128 && (inter_prec > inside_prec || inter_prec > final_prec)
5129 && ! (inside_int && inter_int
5130 && inter_unsignedp != inside_unsignedp
5131 && inter_prec < final_prec)
5132 && ((inter_unsignedp && inter_prec > inside_prec)
5133 == (final_unsignedp && final_prec > inter_prec))
5134 && ! (inside_ptr && inter_prec != final_prec)
5135 && ! (final_ptr && inside_prec != inter_prec)
5136 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5137 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5138 && ! final_ptr)
5139 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5142 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5143 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5144 /* Detect assigning a bitfield. */
5145 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5146 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5148 /* Don't leave an assignment inside a conversion
5149 unless assigning a bitfield. */
5150 tree prev = TREE_OPERAND (t, 0);
5151 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5152 /* First do the assignment, then return converted constant. */
5153 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5154 TREE_USED (t) = 1;
5155 return t;
5157 if (!wins)
5159 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5160 return t;
5162 return fold_convert (t, arg0);
5164 #if 0 /* This loses on &"foo"[0]. */
5165 case ARRAY_REF:
5167 int i;
5169 /* Fold an expression like: "foo"[2] */
5170 if (TREE_CODE (arg0) == STRING_CST
5171 && TREE_CODE (arg1) == INTEGER_CST
5172 && compare_tree_int (arg1, TREE_STRING_LENGTH (arg0)) < 0)
5174 t = build_int_2 (TREE_STRING_POINTER (arg0)[TREE_INT_CST_LOW (arg1)], 0);
5175 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
5176 force_fit_type (t, 0);
5179 return t;
5180 #endif /* 0 */
5182 case COMPONENT_REF:
5183 if (TREE_CODE (arg0) == CONSTRUCTOR)
5185 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5186 if (m)
5187 t = TREE_VALUE (m);
5189 return t;
5191 case RANGE_EXPR:
5192 TREE_CONSTANT (t) = wins;
5193 return t;
5195 case NEGATE_EXPR:
5196 if (wins)
5198 if (TREE_CODE (arg0) == INTEGER_CST)
5200 unsigned HOST_WIDE_INT low;
5201 HOST_WIDE_INT high;
5202 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5203 TREE_INT_CST_HIGH (arg0),
5204 &low, &high);
5205 t = build_int_2 (low, high);
5206 TREE_TYPE (t) = type;
5207 TREE_OVERFLOW (t)
5208 = (TREE_OVERFLOW (arg0)
5209 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5210 TREE_CONSTANT_OVERFLOW (t)
5211 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5213 else if (TREE_CODE (arg0) == REAL_CST)
5214 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5216 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5217 return TREE_OPERAND (arg0, 0);
5219 /* Convert - (a - b) to (b - a) for non-floating-point. */
5220 else if (TREE_CODE (arg0) == MINUS_EXPR
5221 && (! FLOAT_TYPE_P (type) || flag_fast_math))
5222 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5223 TREE_OPERAND (arg0, 0));
5225 return t;
5227 case ABS_EXPR:
5228 if (wins)
5230 if (TREE_CODE (arg0) == INTEGER_CST)
5232 if (! TREE_UNSIGNED (type)
5233 && TREE_INT_CST_HIGH (arg0) < 0)
5235 unsigned HOST_WIDE_INT low;
5236 HOST_WIDE_INT high;
5237 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5238 TREE_INT_CST_HIGH (arg0),
5239 &low, &high);
5240 t = build_int_2 (low, high);
5241 TREE_TYPE (t) = type;
5242 TREE_OVERFLOW (t)
5243 = (TREE_OVERFLOW (arg0)
5244 | force_fit_type (t, overflow));
5245 TREE_CONSTANT_OVERFLOW (t)
5246 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5249 else if (TREE_CODE (arg0) == REAL_CST)
5251 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5252 t = build_real (type,
5253 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5256 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5257 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5258 return t;
5260 case CONJ_EXPR:
5261 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5262 return convert (type, arg0);
5263 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5264 return build (COMPLEX_EXPR, type,
5265 TREE_OPERAND (arg0, 0),
5266 negate_expr (TREE_OPERAND (arg0, 1)));
5267 else if (TREE_CODE (arg0) == COMPLEX_CST)
5268 return build_complex (type, TREE_OPERAND (arg0, 0),
5269 negate_expr (TREE_OPERAND (arg0, 1)));
5270 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5271 return fold (build (TREE_CODE (arg0), type,
5272 fold (build1 (CONJ_EXPR, type,
5273 TREE_OPERAND (arg0, 0))),
5274 fold (build1 (CONJ_EXPR,
5275 type, TREE_OPERAND (arg0, 1)))));
5276 else if (TREE_CODE (arg0) == CONJ_EXPR)
5277 return TREE_OPERAND (arg0, 0);
5278 return t;
5280 case BIT_NOT_EXPR:
5281 if (wins)
5283 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5284 ~ TREE_INT_CST_HIGH (arg0));
5285 TREE_TYPE (t) = type;
5286 force_fit_type (t, 0);
5287 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5288 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5290 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5291 return TREE_OPERAND (arg0, 0);
5292 return t;
5294 case PLUS_EXPR:
5295 /* A + (-B) -> A - B */
5296 if (TREE_CODE (arg1) == NEGATE_EXPR)
5297 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5298 /* (-A) + B -> B - A */
5299 if (TREE_CODE (arg0) == NEGATE_EXPR)
5300 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5301 else if (! FLOAT_TYPE_P (type))
5303 if (integer_zerop (arg1))
5304 return non_lvalue (convert (type, arg0));
5306 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5307 with a constant, and the two constants have no bits in common,
5308 we should treat this as a BIT_IOR_EXPR since this may produce more
5309 simplifications. */
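/* E.g. (illustration, not in the original source):
   (a & 0xF0) + (b & 0x0F) can produce no carry out of either field,
   so it is treated as (a & 0xF0) | (b & 0x0F).  */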
5310 if (TREE_CODE (arg0) == BIT_AND_EXPR
5311 && TREE_CODE (arg1) == BIT_AND_EXPR
5312 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5313 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5314 && integer_zerop (const_binop (BIT_AND_EXPR,
5315 TREE_OPERAND (arg0, 1),
5316 TREE_OPERAND (arg1, 1), 0)))
5318 code = BIT_IOR_EXPR;
5319 goto bit_ior;
5322 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5323 (plus (plus (mult) (mult)) (foo)) so that we can
5324 take advantage of the factoring cases below. */
5325 if ((TREE_CODE (arg0) == PLUS_EXPR
5326 && TREE_CODE (arg1) == MULT_EXPR)
5327 || (TREE_CODE (arg1) == PLUS_EXPR
5328 && TREE_CODE (arg0) == MULT_EXPR))
5330 tree parg0, parg1, parg, marg;
5332 if (TREE_CODE (arg0) == PLUS_EXPR)
5333 parg = arg0, marg = arg1;
5334 else
5335 parg = arg1, marg = arg0;
5336 parg0 = TREE_OPERAND (parg, 0);
5337 parg1 = TREE_OPERAND (parg, 1);
5338 STRIP_NOPS (parg0);
5339 STRIP_NOPS (parg1);
5341 if (TREE_CODE (parg0) == MULT_EXPR
5342 && TREE_CODE (parg1) != MULT_EXPR)
5343 return fold (build (PLUS_EXPR, type,
5344 fold (build (PLUS_EXPR, type, parg0, marg)),
5345 parg1));
5346 if (TREE_CODE (parg0) != MULT_EXPR
5347 && TREE_CODE (parg1) == MULT_EXPR)
5348 return fold (build (PLUS_EXPR, type,
5349 fold (build (PLUS_EXPR, type, parg1, marg)),
5350 parg0));
5353 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5355 tree arg00, arg01, arg10, arg11;
5356 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5358 /* (A * C) + (B * C) -> (A+B) * C.
5359 We are most concerned about the case where C is a constant,
5360 but other combinations show up during loop reduction. Since
5361 it is not difficult, try all four possibilities. */
5363 arg00 = TREE_OPERAND (arg0, 0);
5364 arg01 = TREE_OPERAND (arg0, 1);
5365 arg10 = TREE_OPERAND (arg1, 0);
5366 arg11 = TREE_OPERAND (arg1, 1);
5367 same = NULL_TREE;
5369 if (operand_equal_p (arg01, arg11, 0))
5370 same = arg01, alt0 = arg00, alt1 = arg10;
5371 else if (operand_equal_p (arg00, arg10, 0))
5372 same = arg00, alt0 = arg01, alt1 = arg11;
5373 else if (operand_equal_p (arg00, arg11, 0))
5374 same = arg00, alt0 = arg01, alt1 = arg10;
5375 else if (operand_equal_p (arg01, arg10, 0))
5376 same = arg01, alt0 = arg00, alt1 = arg11;
5378 /* No identical multiplicands; see if we can find a common
5379 power-of-two factor in non-power-of-two multiplies. This
5380 can help in multi-dimensional array access. */
5381 else if (TREE_CODE (arg01) == INTEGER_CST
5382 && TREE_CODE (arg11) == INTEGER_CST
5383 && TREE_INT_CST_HIGH (arg01) == 0
5384 && TREE_INT_CST_HIGH (arg11) == 0)
5386 HOST_WIDE_INT int01, int11, tmp;
5387 int01 = TREE_INT_CST_LOW (arg01);
5388 int11 = TREE_INT_CST_LOW (arg11);
5390 /* Move min of absolute values to int11. */
5391 if ((int01 >= 0 ? int01 : -int01)
5392 < (int11 >= 0 ? int11 : -int11))
5394 tmp = int01, int01 = int11, int11 = tmp;
5395 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5396 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5399 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5401 alt0 = fold (build (MULT_EXPR, type, arg00,
5402 build_int_2 (int01 / int11, 0)));
5403 alt1 = arg10;
5404 same = arg11;
5408 if (same)
5409 return fold (build (MULT_EXPR, type,
5410 fold (build (PLUS_EXPR, type, alt0, alt1)),
5411 same));
5414 /* In IEEE floating point, x+0 may not equal x. */
5415 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5416 || flag_fast_math)
5417 && real_zerop (arg1))
5418 return non_lvalue (convert (type, arg0));
5419 /* x+(-0) equals x, even for IEEE. */
5420 else if (TREE_CODE (arg1) == REAL_CST
5421 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5422 return non_lvalue (convert (type, arg0));
5424 bit_rotate:
5425 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5426 is a rotate of A by C1 bits. */
5427 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5428 is a rotate of A by B bits. */
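/* E.g. (illustration, not in the original source): with A a 32-bit
   unsigned value, both (A << 3) + (A >> 29) and
   (A << B) + (A >> (32 - B)) are recognized here as left rotates
   of A.  */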
5430 register enum tree_code code0, code1;
5431 code0 = TREE_CODE (arg0);
5432 code1 = TREE_CODE (arg1);
5433 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5434 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5435 && operand_equal_p (TREE_OPERAND (arg0, 0),
5436 TREE_OPERAND (arg1,0), 0)
5437 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5439 register tree tree01, tree11;
5440 register enum tree_code code01, code11;
5442 tree01 = TREE_OPERAND (arg0, 1);
5443 tree11 = TREE_OPERAND (arg1, 1);
5444 STRIP_NOPS (tree01);
5445 STRIP_NOPS (tree11);
5446 code01 = TREE_CODE (tree01);
5447 code11 = TREE_CODE (tree11);
5448 if (code01 == INTEGER_CST
5449 && code11 == INTEGER_CST
5450 && TREE_INT_CST_HIGH (tree01) == 0
5451 && TREE_INT_CST_HIGH (tree11) == 0
5452 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5453 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5454 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5455 code0 == LSHIFT_EXPR ? tree01 : tree11);
5456 else if (code11 == MINUS_EXPR)
5458 tree tree110, tree111;
5459 tree110 = TREE_OPERAND (tree11, 0);
5460 tree111 = TREE_OPERAND (tree11, 1);
5461 STRIP_NOPS (tree110);
5462 STRIP_NOPS (tree111);
5463 if (TREE_CODE (tree110) == INTEGER_CST
5464 && 0 == compare_tree_int (tree110,
5465 TYPE_PRECISION
5466 (TREE_TYPE (TREE_OPERAND
5467 (arg0, 0))))
5468 && operand_equal_p (tree01, tree111, 0))
5469 return build ((code0 == LSHIFT_EXPR
5470 ? LROTATE_EXPR
5471 : RROTATE_EXPR),
5472 type, TREE_OPERAND (arg0, 0), tree01);
5474 else if (code01 == MINUS_EXPR)
5476 tree tree010, tree011;
5477 tree010 = TREE_OPERAND (tree01, 0);
5478 tree011 = TREE_OPERAND (tree01, 1);
5479 STRIP_NOPS (tree010);
5480 STRIP_NOPS (tree011);
5481 if (TREE_CODE (tree010) == INTEGER_CST
5482 && 0 == compare_tree_int (tree010,
5483 TYPE_PRECISION
5484 (TREE_TYPE (TREE_OPERAND
5485 (arg0, 0))))
5486 && operand_equal_p (tree11, tree011, 0))
5487 return build ((code0 != LSHIFT_EXPR
5488 ? LROTATE_EXPR
5489 : RROTATE_EXPR),
5490 type, TREE_OPERAND (arg0, 0), tree11);
5496 associate:
5497 /* In most languages, we can't associate operations on floats through
5498 parentheses. Rather than remember where the parentheses were, we
5499 don't associate floats at all. It shouldn't matter much. However,
5500 associating multiplications is only very slightly inaccurate, so do
5501 that if -ffast-math is specified. */
5503 if (! wins
5504 && (! FLOAT_TYPE_P (type)
5505 || (flag_fast_math && code != MULT_EXPR)))
5507 tree var0, con0, lit0, var1, con1, lit1;
5509 /* Split both trees into variables, constants, and literals. Then
5510 associate each group together, the constants with literals,
5511 then the result with variables. This increases the chances of
5512 literals being recombined later and of generating relocatable
5513 expressions for the sum of a constant and literal. */
5514 var0 = split_tree (arg0, code, &con0, &lit0, 0);
5515 var1 = split_tree (arg1, code, &con1, &lit1, code == MINUS_EXPR);
5517 /* Only do something if we found more than two objects. Otherwise,
5518 nothing has changed and we risk infinite recursion. */
5519 if (2 < ((var0 != 0) + (var1 != 0) + (con0 != 0) + (con1 != 0)
5520 + (lit0 != 0) + (lit1 != 0)))
5522 var0 = associate_trees (var0, var1, code, type);
5523 con0 = associate_trees (con0, con1, code, type);
5524 lit0 = associate_trees (lit0, lit1, code, type);
5525 con0 = associate_trees (con0, lit0, code, type);
5526 return convert (type, associate_trees (var0, con0, code, type));
5530 binary:
5531 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
5532 if (TREE_CODE (arg1) == REAL_CST)
5533 return t;
5534 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
5535 if (wins)
5536 t1 = const_binop (code, arg0, arg1, 0);
5537 if (t1 != NULL_TREE)
5539 /* The return value should always have
5540 the same type as the original expression. */
5541 if (TREE_TYPE (t1) != TREE_TYPE (t))
5542 t1 = convert (TREE_TYPE (t), t1);
5544 return t1;
5546 return t;
5548 case MINUS_EXPR:
5549 /* A - (-B) -> A + B */
5550 if (TREE_CODE (arg1) == NEGATE_EXPR)
5551 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5552 /* (-A) - CST -> (-CST) - A for floating point (what about ints?) */
5553 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5554 return
5555 fold (build (MINUS_EXPR, type,
5556 build_real (TREE_TYPE (arg1),
5557 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5558 TREE_OPERAND (arg0, 0)));
5560 if (! FLOAT_TYPE_P (type))
5562 if (! wins && integer_zerop (arg0))
5563 return convert (type, negate_expr (arg1));
5564 if (integer_zerop (arg1))
5565 return non_lvalue (convert (type, arg0));
5567 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5568 about the case where C is a constant, just try one of the
5569 four possibilities. */
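/* For example, (a * 4) - (b * 4) becomes (a - b) * 4 when the second
   operands of the two multiplications compare equal. */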
5571 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5572 && operand_equal_p (TREE_OPERAND (arg0, 1),
5573 TREE_OPERAND (arg1, 1), 0))
5574 return fold (build (MULT_EXPR, type,
5575 fold (build (MINUS_EXPR, type,
5576 TREE_OPERAND (arg0, 0),
5577 TREE_OPERAND (arg1, 0))),
5578 TREE_OPERAND (arg0, 1)));
5581 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5582 || flag_fast_math)
5584 /* Except with IEEE floating point, 0-x equals -x. */
5585 if (! wins && real_zerop (arg0))
5586 return convert (type, negate_expr (arg1));
5587 /* Except with IEEE floating point, x-0 equals x. */
5588 if (real_zerop (arg1))
5589 return non_lvalue (convert (type, arg0));
5592 /* Fold &x - &x. This can happen from &x.foo - &x.
5593 This is unsafe for certain floats even in non-IEEE formats.
5594 In IEEE, it is unsafe because it does wrong for NaNs.
5595 Also note that operand_equal_p is always false if an operand
5596 is volatile. */
5598 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
5599 && operand_equal_p (arg0, arg1, 0))
5600 return convert (type, integer_zero_node);
5602 goto associate;
5604 case MULT_EXPR:
5605 /* (-A) * (-B) -> A * B */
5606 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5607 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5608 TREE_OPERAND (arg1, 0)));
5610 if (! FLOAT_TYPE_P (type))
5612 if (integer_zerop (arg1))
5613 return omit_one_operand (type, arg1, arg0);
5614 if (integer_onep (arg1))
5615 return non_lvalue (convert (type, arg0));
5617 /* (a * (1 << b)) is (a << b) */
5618 if (TREE_CODE (arg1) == LSHIFT_EXPR
5619 && integer_onep (TREE_OPERAND (arg1, 0)))
5620 return fold (build (LSHIFT_EXPR, type, arg0,
5621 TREE_OPERAND (arg1, 1)));
5622 if (TREE_CODE (arg0) == LSHIFT_EXPR
5623 && integer_onep (TREE_OPERAND (arg0, 0)))
5624 return fold (build (LSHIFT_EXPR, type, arg1,
5625 TREE_OPERAND (arg0, 1)));
5627 if (TREE_CODE (arg1) == INTEGER_CST
5628 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5629 code, NULL_TREE)))
5630 return convert (type, tem);
5633 else
5635 /* x*0 is 0, except for IEEE floating point. */
5636 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5637 || flag_fast_math)
5638 && real_zerop (arg1))
5639 return omit_one_operand (type, arg1, arg0);
5640 /* In IEEE floating point, x*1 is not equivalent to x for snans.
5641 However, ANSI says we can drop signals,
5642 so we can do this anyway. */
5643 if (real_onep (arg1))
5644 return non_lvalue (convert (type, arg0));
5645 /* x*2 is x+x */
5646 if (! wins && real_twop (arg1) && global_bindings_p () == 0
5647 && ! contains_placeholder_p (arg0))
5649 tree arg = save_expr (arg0);
5650 return build (PLUS_EXPR, type, arg, arg);
5653 goto associate;
5655 case BIT_IOR_EXPR:
5656 bit_ior:
5657 if (integer_all_onesp (arg1))
5658 return omit_one_operand (type, arg1, arg0);
5659 if (integer_zerop (arg1))
5660 return non_lvalue (convert (type, arg0));
5661 t1 = distribute_bit_expr (code, type, arg0, arg1);
5662 if (t1 != NULL_TREE)
5663 return t1;
5665 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5667 This results in more efficient code for machines without a NAND
5668 instruction. Combine will canonicalize to the first form
5669 which will allow use of NAND instructions provided by the
5670 backend if they exist. */
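/* For example, ~a | ~b becomes ~(a & b), by De Morgan's laws. */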
5671 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5672 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5674 return fold (build1 (BIT_NOT_EXPR, type,
5675 build (BIT_AND_EXPR, type,
5676 TREE_OPERAND (arg0, 0),
5677 TREE_OPERAND (arg1, 0))));
5680 /* See if this can be simplified into a rotate first. If that
5681 is unsuccessful continue in the association code. */
5682 goto bit_rotate;
5684 case BIT_XOR_EXPR:
5685 if (integer_zerop (arg1))
5686 return non_lvalue (convert (type, arg0));
5687 if (integer_all_onesp (arg1))
5688 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5690 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5691 with a constant, and the two constants have no bits in common,
5692 we should treat this as a BIT_IOR_EXPR since this may produce more
5693 simplifications. */
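/* For example, (a & 4) ^ (b & 3) has no constant bits in common, so it
   is treated as (a & 4) | (b & 3). */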
5694 if (TREE_CODE (arg0) == BIT_AND_EXPR
5695 && TREE_CODE (arg1) == BIT_AND_EXPR
5696 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5697 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5698 && integer_zerop (const_binop (BIT_AND_EXPR,
5699 TREE_OPERAND (arg0, 1),
5700 TREE_OPERAND (arg1, 1), 0)))
5702 code = BIT_IOR_EXPR;
5703 goto bit_ior;
5706 /* See if this can be simplified into a rotate first. If that
5707 is unsuccessful continue in the association code. */
5708 goto bit_rotate;
5710 case BIT_AND_EXPR:
5711 bit_and:
5712 if (integer_all_onesp (arg1))
5713 return non_lvalue (convert (type, arg0));
5714 if (integer_zerop (arg1))
5715 return omit_one_operand (type, arg1, arg0);
5716 t1 = distribute_bit_expr (code, type, arg0, arg1);
5717 if (t1 != NULL_TREE)
5718 return t1;
5719 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
5720 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
5721 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5723 unsigned int prec
5724 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
5726 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5727 && (~TREE_INT_CST_LOW (arg0)
5728 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5729 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
5731 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5732 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5734 unsigned int prec
5735 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5737 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5738 && (~TREE_INT_CST_LOW (arg1)
5739 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5740 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5743 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5745 This results in more efficient code for machines without a NOR
5746 instruction. Combine will canonicalize to the first form
5747 which will allow use of NOR instructions provided by the
5748 backend if they exist. */
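/* For example, ~a & ~b becomes ~(a | b), by De Morgan's laws. */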
5749 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5750 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5752 return fold (build1 (BIT_NOT_EXPR, type,
5753 build (BIT_IOR_EXPR, type,
5754 TREE_OPERAND (arg0, 0),
5755 TREE_OPERAND (arg1, 0))));
5758 goto associate;
5760 case BIT_ANDTC_EXPR:
5761 if (integer_all_onesp (arg0))
5762 return non_lvalue (convert (type, arg1));
5763 if (integer_zerop (arg0))
5764 return omit_one_operand (type, arg0, arg1);
5765 if (TREE_CODE (arg1) == INTEGER_CST)
5767 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5768 code = BIT_AND_EXPR;
5769 goto bit_and;
5771 goto binary;
5773 case RDIV_EXPR:
5774 /* In most cases, do nothing with a divide by zero. */
5775 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
5776 #ifndef REAL_INFINITY
5777 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
5778 return t;
5779 #endif
5780 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
5782 /* (-A) / (-B) -> A / B */
5783 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5784 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5785 TREE_OPERAND (arg1, 0)));
5787 /* In IEEE floating point, x/1 is not equivalent to x for snans.
5788 However, ANSI says we can drop signals, so we can do this anyway. */
5789 if (real_onep (arg1))
5790 return non_lvalue (convert (type, arg0));
5792 /* If ARG1 is a constant, we can convert this to a multiply by the
5793 reciprocal. This does not have the same rounding properties,
5794 so only do this if -ffast-math. We can actually always safely
5795 do it if ARG1 is a power of two, but it's hard to tell if it is
5796 or not in a portable manner. */
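/* For example, x / 5.0 becomes x * 0.2 only under -ffast-math, since
   0.2 is inexact in binary; x / 4.0 becomes x * 0.25 when optimizing,
   since the reciprocal of 4.0 is exact. */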
5797 if (TREE_CODE (arg1) == REAL_CST)
5799 if (flag_fast_math
5800 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5801 arg1, 0)))
5802 return fold (build (MULT_EXPR, type, arg0, tem));
5803 /* Find the reciprocal if optimizing and the result is exact. */
5804 else if (optimize)
5806 REAL_VALUE_TYPE r;
5807 r = TREE_REAL_CST (arg1);
5808 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
5810 tem = build_real (type, r);
5811 return fold (build (MULT_EXPR, type, arg0, tem));
5815 goto binary;
5817 case TRUNC_DIV_EXPR:
5818 case ROUND_DIV_EXPR:
5819 case FLOOR_DIV_EXPR:
5820 case CEIL_DIV_EXPR:
5821 case EXACT_DIV_EXPR:
5822 if (integer_onep (arg1))
5823 return non_lvalue (convert (type, arg0));
5824 if (integer_zerop (arg1))
5825 return t;
5827 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5828 operation, EXACT_DIV_EXPR.
5830 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5831 At one time others generated faster code; it's not clear if they do
5832 after the last round of changes to the DIV code in expmed.c. */
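/* For example, a CEIL_DIV_EXPR dividing i * 8 by 8 can have no
   remainder, so it becomes an EXACT_DIV_EXPR. */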
5833 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5834 && multiple_of_p (type, arg0, arg1))
5835 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5837 if (TREE_CODE (arg1) == INTEGER_CST
5838 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5839 code, NULL_TREE)))
5840 return convert (type, tem);
5842 goto binary;
5844 case CEIL_MOD_EXPR:
5845 case FLOOR_MOD_EXPR:
5846 case ROUND_MOD_EXPR:
5847 case TRUNC_MOD_EXPR:
5848 if (integer_onep (arg1))
5849 return omit_one_operand (type, integer_zero_node, arg0);
5850 if (integer_zerop (arg1))
5851 return t;
5853 if (TREE_CODE (arg1) == INTEGER_CST
5854 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5855 code, NULL_TREE)))
5856 return convert (type, tem);
5858 goto binary;
5860 case LSHIFT_EXPR:
5861 case RSHIFT_EXPR:
5862 case LROTATE_EXPR:
5863 case RROTATE_EXPR:
5864 if (integer_zerop (arg1))
5865 return non_lvalue (convert (type, arg0));
5866 /* Since a negative shift count is not well-defined,
5867 don't try to compute it in the compiler. */
5868 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5869 return t;
5870 /* Rewrite an LROTATE_EXPR by a constant into an
5871 RROTATE_EXPR by a new constant. */
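/* For example, rotating a 32-bit value left by 3 is rewritten as
   rotating it right by 29. */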
5872 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5874 TREE_SET_CODE (t, RROTATE_EXPR);
5875 code = RROTATE_EXPR;
5876 TREE_OPERAND (t, 1) = arg1
5877 = const_binop
5878 (MINUS_EXPR,
5879 convert (TREE_TYPE (arg1),
5880 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5881 arg1, 0);
5882 if (tree_int_cst_sgn (arg1) < 0)
5883 return t;
5886 /* If we have a rotate of a bit operation with the rotate count and
5887 the second operand of the bit operation both constant,
5888 permute the two operations. */
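/* For example, with 16-bit operands, (a & 0xF0F0) rotated right by 4
   becomes (a rotated right by 4) & 0x0F0F. */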
5889 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5890 && (TREE_CODE (arg0) == BIT_AND_EXPR
5891 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5892 || TREE_CODE (arg0) == BIT_IOR_EXPR
5893 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5894 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5895 return fold (build (TREE_CODE (arg0), type,
5896 fold (build (code, type,
5897 TREE_OPERAND (arg0, 0), arg1)),
5898 fold (build (code, type,
5899 TREE_OPERAND (arg0, 1), arg1))));
5901 /* Two consecutive rotates adding up to the width of the mode can
5902 be ignored. */
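/* For example, rotating a 32-bit value right by 10 and then right by
   22 leaves it unchanged. */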
5903 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5904 && TREE_CODE (arg0) == RROTATE_EXPR
5905 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5906 && TREE_INT_CST_HIGH (arg1) == 0
5907 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5908 && ((TREE_INT_CST_LOW (arg1)
5909 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5910 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5911 return TREE_OPERAND (arg0, 0);
5913 goto binary;
5915 case MIN_EXPR:
5916 if (operand_equal_p (arg0, arg1, 0))
5917 return omit_one_operand (type, arg0, arg1);
5918 if (INTEGRAL_TYPE_P (type)
5919 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5920 return omit_one_operand (type, arg1, arg0);
5921 goto associate;
5923 case MAX_EXPR:
5924 if (operand_equal_p (arg0, arg1, 0))
5925 return omit_one_operand (type, arg0, arg1);
5926 if (INTEGRAL_TYPE_P (type)
5927 && TYPE_MAX_VALUE (type)
5928 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5929 return omit_one_operand (type, arg1, arg0);
5930 goto associate;
5932 case TRUTH_NOT_EXPR:
5933 /* Note that the operand of this must be an int
5934 and its values must be 0 or 1.
5935 ("true" is a fixed value perhaps depending on the language,
5936 but we don't handle values other than 1 correctly yet.) */
5937 tem = invert_truthvalue (arg0);
5938 /* Avoid infinite recursion. */
5939 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5940 return t;
5941 return convert (type, tem);
5943 case TRUTH_ANDIF_EXPR:
5944 /* Note that the operands of this must be ints
5945 and their values must be 0 or 1.
5946 ("true" is a fixed value perhaps depending on the language.) */
5947 /* If first arg is constant zero, return it. */
5948 if (integer_zerop (arg0))
5949 return convert (type, arg0);
5950 case TRUTH_AND_EXPR:
5951 /* If either arg is constant true, drop it. */
5952 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5953 return non_lvalue (convert (type, arg1));
5954 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5955 return non_lvalue (convert (type, arg0));
5956 /* If second arg is constant zero, result is zero, but first arg
5957 must be evaluated. */
5958 if (integer_zerop (arg1))
5959 return omit_one_operand (type, arg1, arg0);
5960 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5961 case will be handled here. */
5962 if (integer_zerop (arg0))
5963 return omit_one_operand (type, arg0, arg1);
5965 truth_andor:
5966 /* We only do these simplifications if we are optimizing. */
5967 if (!optimize)
5968 return t;
5970 /* Check for things like (A || B) && (A || C). We can convert this
5971 to A || (B && C). Note that either operator can be any of the four
5972 truth and/or operations and the transformation will still be
5973 valid. Also note that we only care about order for the
5974 ANDIF and ORIF operators. If B contains side effects, this
5975 might change the truth-value of A. */
5976 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5977 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5978 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5979 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5980 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5981 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5983 tree a00 = TREE_OPERAND (arg0, 0);
5984 tree a01 = TREE_OPERAND (arg0, 1);
5985 tree a10 = TREE_OPERAND (arg1, 0);
5986 tree a11 = TREE_OPERAND (arg1, 1);
5987 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5988 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5989 && (code == TRUTH_AND_EXPR
5990 || code == TRUTH_OR_EXPR));
5992 if (operand_equal_p (a00, a10, 0))
5993 return fold (build (TREE_CODE (arg0), type, a00,
5994 fold (build (code, type, a01, a11))));
5995 else if (commutative && operand_equal_p (a00, a11, 0))
5996 return fold (build (TREE_CODE (arg0), type, a00,
5997 fold (build (code, type, a01, a10))));
5998 else if (commutative && operand_equal_p (a01, a10, 0))
5999 return fold (build (TREE_CODE (arg0), type, a01,
6000 fold (build (code, type, a00, a11))));
6002 /* This case is tricky because we must either have commutative
6003 operators or else A10 must not have side-effects. */
6005 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6006 && operand_equal_p (a01, a11, 0))
6007 return fold (build (TREE_CODE (arg0), type,
6008 fold (build (code, type, a00, a10)),
6009 a01));
6012 /* See if we can build a range comparison. */
6013 if (0 != (tem = fold_range_test (t)))
6014 return tem;
6016 /* Check for the possibility of merging component references. If our
6017 lhs is another similar operation, try to merge its rhs with our
6018 rhs. Then try to merge our lhs and rhs. */
6019 if (TREE_CODE (arg0) == code
6020 && 0 != (tem = fold_truthop (code, type,
6021 TREE_OPERAND (arg0, 1), arg1)))
6022 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6024 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6025 return tem;
6027 return t;
6029 case TRUTH_ORIF_EXPR:
6030 /* Note that the operands of this must be ints
6031 and their values must be 0 or 1.
6032 ("true" is a fixed value perhaps depending on the language.) */
6033 /* If first arg is constant true, return it. */
6034 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6035 return convert (type, arg0);
6036 case TRUTH_OR_EXPR:
6037 /* If either arg is constant zero, drop it. */
6038 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6039 return non_lvalue (convert (type, arg1));
6040 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
6041 return non_lvalue (convert (type, arg0));
6042 /* If second arg is constant true, result is true, but we must
6043 evaluate first arg. */
6044 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6045 return omit_one_operand (type, arg1, arg0);
6046 /* Likewise for first arg, but note this only occurs here for
6047 TRUTH_OR_EXPR. */
6048 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6049 return omit_one_operand (type, arg0, arg1);
6050 goto truth_andor;
6052 case TRUTH_XOR_EXPR:
6053 /* If either arg is constant zero, drop it. */
6054 if (integer_zerop (arg0))
6055 return non_lvalue (convert (type, arg1));
6056 if (integer_zerop (arg1))
6057 return non_lvalue (convert (type, arg0));
6058 /* If either arg is constant true, this is a logical inversion. */
6059 if (integer_onep (arg0))
6060 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6061 if (integer_onep (arg1))
6062 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6063 return t;
6065 case EQ_EXPR:
6066 case NE_EXPR:
6067 case LT_EXPR:
6068 case GT_EXPR:
6069 case LE_EXPR:
6070 case GE_EXPR:
6071 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6073 /* (-a) CMP (-b) -> b CMP a */
6074 if (TREE_CODE (arg0) == NEGATE_EXPR
6075 && TREE_CODE (arg1) == NEGATE_EXPR)
6076 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6077 TREE_OPERAND (arg0, 0)));
6078 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6079 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
6080 return
6081 fold (build
6082 (swap_tree_comparison (code), type,
6083 TREE_OPERAND (arg0, 0),
6084 build_real (TREE_TYPE (arg1),
6085 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
6086 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6087 /* a CMP (-0) -> a CMP 0 */
6088 if (TREE_CODE (arg1) == REAL_CST
6089 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
6090 return fold (build (code, type, arg0,
6091 build_real (TREE_TYPE (arg1), dconst0)));
6095 /* If one arg is a constant integer, put it last. */
6096 if (TREE_CODE (arg0) == INTEGER_CST
6097 && TREE_CODE (arg1) != INTEGER_CST)
6099 TREE_OPERAND (t, 0) = arg1;
6100 TREE_OPERAND (t, 1) = arg0;
6101 arg0 = TREE_OPERAND (t, 0);
6102 arg1 = TREE_OPERAND (t, 1);
6103 code = swap_tree_comparison (code);
6104 TREE_SET_CODE (t, code);
6107 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6108 First, see if one arg is constant; find the constant arg
6109 and the other one. */
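/* For example, i++ == 5 becomes ++i == 6, which compares the value the
   variable has after the increment. */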
6111 tree constop = 0, varop = NULL_TREE;
6112 int constopnum = -1;
6114 if (TREE_CONSTANT (arg1))
6115 constopnum = 1, constop = arg1, varop = arg0;
6116 if (TREE_CONSTANT (arg0))
6117 constopnum = 0, constop = arg0, varop = arg1;
6119 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6121 /* This optimization is invalid for ordered comparisons
6122 if CONST+INCR overflows or if foo+incr might overflow.
6123 This optimization is invalid for floating point due to rounding.
6124 For pointer types we assume overflow doesn't happen. */
6125 if (POINTER_TYPE_P (TREE_TYPE (varop))
6126 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6127 && (code == EQ_EXPR || code == NE_EXPR)))
6129 tree newconst
6130 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6131 constop, TREE_OPERAND (varop, 1)));
6132 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
6134 /* If VAROP is a reference to a bitfield, we must mask
6135 the constant by the width of the field. */
6136 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6137 && DECL_BIT_FIELD (TREE_OPERAND
6138 (TREE_OPERAND (varop, 0), 1)))
6140 int size
6141 = TREE_INT_CST_LOW (DECL_SIZE
6142 (TREE_OPERAND
6143 (TREE_OPERAND (varop, 0), 1)));
6144 tree mask, unsigned_type;
6145 unsigned int precision;
6146 tree folded_compare;
6148 /* First check whether the comparison would come out
6149 always the same. If we don't do that we would
6150 change the meaning with the masking. */
6151 if (constopnum == 0)
6152 folded_compare = fold (build (code, type, constop,
6153 TREE_OPERAND (varop, 0)));
6154 else
6155 folded_compare = fold (build (code, type,
6156 TREE_OPERAND (varop, 0),
6157 constop));
6158 if (integer_zerop (folded_compare)
6159 || integer_onep (folded_compare))
6160 return omit_one_operand (type, folded_compare, varop);
6162 unsigned_type = type_for_size (size, 1);
6163 precision = TYPE_PRECISION (unsigned_type);
6164 mask = build_int_2 (~0, ~0);
6165 TREE_TYPE (mask) = unsigned_type;
6166 force_fit_type (mask, 0);
6167 mask = const_binop (RSHIFT_EXPR, mask,
6168 size_int (precision - size), 0);
6169 newconst = fold (build (BIT_AND_EXPR,
6170 TREE_TYPE (varop), newconst,
6171 convert (TREE_TYPE (varop),
6172 mask)));
6176 t = build (code, type, TREE_OPERAND (t, 0),
6177 TREE_OPERAND (t, 1));
6178 TREE_OPERAND (t, constopnum) = newconst;
6179 return t;
6182 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6184 if (POINTER_TYPE_P (TREE_TYPE (varop))
6185 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6186 && (code == EQ_EXPR || code == NE_EXPR)))
6188 tree newconst
6189 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6190 constop, TREE_OPERAND (varop, 1)));
6191 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
6193 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6194 && DECL_BIT_FIELD (TREE_OPERAND
6195 (TREE_OPERAND (varop, 0), 1)))
6197 int size
6198 = TREE_INT_CST_LOW (DECL_SIZE
6199 (TREE_OPERAND
6200 (TREE_OPERAND (varop, 0), 1)));
6201 tree mask, unsigned_type;
6202 unsigned int precision;
6203 tree folded_compare;
6205 if (constopnum == 0)
6206 folded_compare = fold (build (code, type, constop,
6207 TREE_OPERAND (varop, 0)));
6208 else
6209 folded_compare = fold (build (code, type,
6210 TREE_OPERAND (varop, 0),
6211 constop));
6212 if (integer_zerop (folded_compare)
6213 || integer_onep (folded_compare))
6214 return omit_one_operand (type, folded_compare, varop);
6216 unsigned_type = type_for_size (size, 1);
6217 precision = TYPE_PRECISION (unsigned_type);
6218 mask = build_int_2 (~0, ~0);
6219 TREE_TYPE (mask) = unsigned_type;
6220 force_fit_type (mask, 0);
6221 mask = const_binop (RSHIFT_EXPR, mask,
6222 size_int (precision - size), 0);
6223 newconst = fold (build (BIT_AND_EXPR,
6224 TREE_TYPE (varop), newconst,
6225 convert (TREE_TYPE (varop),
6226 mask)));
6230 t = build (code, type, TREE_OPERAND (t, 0),
6231 TREE_OPERAND (t, 1));
6232 TREE_OPERAND (t, constopnum) = newconst;
6233 return t;
6238 /* Change X >= CST to X > (CST - 1) if CST is positive. */
6239 if (TREE_CODE (arg1) == INTEGER_CST
6240 && TREE_CODE (arg0) != INTEGER_CST
6241 && tree_int_cst_sgn (arg1) > 0)
6243 switch (TREE_CODE (t))
6245 case GE_EXPR:
6246 code = GT_EXPR;
6247 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6248 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6249 break;
6251 case LT_EXPR:
6252 code = LE_EXPR;
6253 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6254 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6255 break;
6257 default:
6258 break;
6262 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6263 a MINUS_EXPR of a constant, we can convert it into a comparison with
6264 a revised constant as long as no overflow occurs. */
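/* For example, x + 9 == 12 becomes x == 3, provided computing 12 - 9
   does not overflow. */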
6265 if ((code == EQ_EXPR || code == NE_EXPR)
6266 && TREE_CODE (arg1) == INTEGER_CST
6267 && (TREE_CODE (arg0) == PLUS_EXPR
6268 || TREE_CODE (arg0) == MINUS_EXPR)
6269 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6270 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6271 ? MINUS_EXPR : PLUS_EXPR,
6272 arg1, TREE_OPERAND (arg0, 1), 0))
6273 && ! TREE_CONSTANT_OVERFLOW (tem))
6274 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6276 /* Similarly for a NEGATE_EXPR. */
6277 else if ((code == EQ_EXPR || code == NE_EXPR)
6278 && TREE_CODE (arg0) == NEGATE_EXPR
6279 && TREE_CODE (arg1) == INTEGER_CST
6280 && 0 != (tem = negate_expr (arg1))
6281 && TREE_CODE (tem) == INTEGER_CST
6282 && ! TREE_CONSTANT_OVERFLOW (tem))
6283 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6285 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6286 for !=. Don't do this for ordered comparisons due to overflow. */
6287 else if ((code == NE_EXPR || code == EQ_EXPR)
6288 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6289 return fold (build (code, type,
6290 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6292 /* If we are widening one operand of an integer comparison,
6293 see if the other operand is similarly being widened. Perhaps we
6294 can do the comparison in the narrower type. */
6295 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6296 && TREE_CODE (arg0) == NOP_EXPR
6297 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6298 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6299 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6300 || (TREE_CODE (t1) == INTEGER_CST
6301 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6302 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6304 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6305 constant, we can simplify it. */
6306 else if (TREE_CODE (arg1) == INTEGER_CST
6307 && (TREE_CODE (arg0) == MIN_EXPR
6308 || TREE_CODE (arg0) == MAX_EXPR)
6309 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6310 return optimize_minmax_comparison (t);
6312 /* If we are comparing an ABS_EXPR with a constant, we can
6313 convert all the cases into explicit comparisons, but they may
6314 well not be faster than doing the ABS and one comparison.
6315 But ABS (X) <= C is a range comparison, which becomes a subtraction
6316 and a comparison, and is probably faster. */
6317 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6318 && TREE_CODE (arg0) == ABS_EXPR
6319 && ! TREE_SIDE_EFFECTS (arg0)
6320 && (0 != (tem = negate_expr (arg1)))
6321 && TREE_CODE (tem) == INTEGER_CST
6322 && ! TREE_CONSTANT_OVERFLOW (tem))
6323 return fold (build (TRUTH_ANDIF_EXPR, type,
6324 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6325 build (LE_EXPR, type,
6326 TREE_OPERAND (arg0, 0), arg1)));
6328 /* If this is an EQ or NE comparison with zero and ARG0 is
6329 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6330 two operations, but the latter can be done in one less insn
6331 on machines that have only two-operand insns or on which a
6332 constant cannot be the first operand. */
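/* For example, ((1 << n) & b) != 0 becomes ((b >> n) & 1) != 0, testing
   bit n of b directly. */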
6333 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6334 && TREE_CODE (arg0) == BIT_AND_EXPR)
6336 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6337 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6338 return
6339 fold (build (code, type,
6340 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6341 build (RSHIFT_EXPR,
6342 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6343 TREE_OPERAND (arg0, 1),
6344 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6345 convert (TREE_TYPE (arg0),
6346 integer_one_node)),
6347 arg1));
6348 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6349 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6350 return
6351 fold (build (code, type,
6352 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6353 build (RSHIFT_EXPR,
6354 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6355 TREE_OPERAND (arg0, 0),
6356 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6357 convert (TREE_TYPE (arg0),
6358 integer_one_node)),
6359 arg1));
6362 /* If this is an NE or EQ comparison of zero against the result of a
6363 signed MOD operation whose second operand is a power of 2, make
6364 the MOD operation unsigned since it is simpler and equivalent. */
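/* For example, with int x, x % 4 == 0 is computed as
   (unsigned int) x % 4 == 0. */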
6365 if ((code == NE_EXPR || code == EQ_EXPR)
6366 && integer_zerop (arg1)
6367 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6368 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6369 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6370 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6371 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6372 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6374 tree newtype = unsigned_type (TREE_TYPE (arg0));
6375 tree newmod = build (TREE_CODE (arg0), newtype,
6376 convert (newtype, TREE_OPERAND (arg0, 0)),
6377 convert (newtype, TREE_OPERAND (arg0, 1)));
6379 return build (code, type, newmod, convert (newtype, arg1));
6382 /* If this is an NE comparison of zero with an AND of one, remove the
6383 comparison since the AND will give the correct value. */
6384 if (code == NE_EXPR && integer_zerop (arg1)
6385 && TREE_CODE (arg0) == BIT_AND_EXPR
6386 && integer_onep (TREE_OPERAND (arg0, 1)))
6387 return convert (type, arg0);
6389 /* If we have (A & C) == C where C is a power of 2, convert this into
6390 (A & C) != 0. Similarly for NE_EXPR. */
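/* For example, (a & 8) == 8 becomes (a & 8) != 0; since a & 8 can only
   be 0 or 8, equality with 8 is the same as being nonzero. */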
6391 if ((code == EQ_EXPR || code == NE_EXPR)
6392 && TREE_CODE (arg0) == BIT_AND_EXPR
6393 && integer_pow2p (TREE_OPERAND (arg0, 1))
6394 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6395 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6396 arg0, integer_zero_node);
6398 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6399 and similarly for >= into !=. */
6400 if ((code == LT_EXPR || code == GE_EXPR)
6401 && TREE_UNSIGNED (TREE_TYPE (arg0))
6402 && TREE_CODE (arg1) == LSHIFT_EXPR
6403 && integer_onep (TREE_OPERAND (arg1, 0)))
6404 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6405 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6406 TREE_OPERAND (arg1, 1)),
6407 convert (TREE_TYPE (arg0), integer_zero_node));
6409 else if ((code == LT_EXPR || code == GE_EXPR)
6410 && TREE_UNSIGNED (TREE_TYPE (arg0))
6411 && (TREE_CODE (arg1) == NOP_EXPR
6412 || TREE_CODE (arg1) == CONVERT_EXPR)
6413 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6414 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6415 return
6416 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6417 convert (TREE_TYPE (arg0),
6418 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6419 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6420 convert (TREE_TYPE (arg0), integer_zero_node));
6422 /* Simplify comparison of something with itself. (For IEEE
6423 floating-point, we can only do some of these simplifications.) */
6424 if (operand_equal_p (arg0, arg1, 0))
6426 switch (code)
6428 case EQ_EXPR:
6429 case GE_EXPR:
6430 case LE_EXPR:
6431 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6432 return constant_boolean_node (1, type);
6433 code = EQ_EXPR;
6434 TREE_SET_CODE (t, code);
6435 break;
6437 case NE_EXPR:
6438 /* For NE, we can only do this simplification if integer. */
6439 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6440 break;
6441 /* ... fall through ... */
6442 case GT_EXPR:
6443 case LT_EXPR:
6444 return constant_boolean_node (0, type);
6445 default:
6446 abort ();
6450 /* An unsigned comparison against 0 can be simplified. */
6451 if (integer_zerop (arg1)
6452 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6453 || POINTER_TYPE_P (TREE_TYPE (arg1)))
6454 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6456 switch (TREE_CODE (t))
6458 case GT_EXPR:
6459 code = NE_EXPR;
6460 TREE_SET_CODE (t, NE_EXPR);
6461 break;
6462 case LE_EXPR:
6463 code = EQ_EXPR;
6464 TREE_SET_CODE (t, EQ_EXPR);
6465 break;
6466 case GE_EXPR:
6467 return omit_one_operand (type,
6468 convert (type, integer_one_node),
6469 arg0);
6470 case LT_EXPR:
6471 return omit_one_operand (type,
6472 convert (type, integer_zero_node),
6473 arg0);
6474 default:
6475 break;
6479 /* Comparisons with the highest or lowest possible integer of
6480 the specified size will have known values, and an unsigned
6481 comparison <= 0x7fffffff can be simplified. */
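/* For example, an unsigned x <= 0x7fffffff on a 32-bit type becomes the
   sign test (int) x >= 0. */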
6483 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6485 if (TREE_CODE (arg1) == INTEGER_CST
6486 && ! TREE_CONSTANT_OVERFLOW (arg1)
6487 && width <= HOST_BITS_PER_WIDE_INT
6488 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6489 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6491 if (TREE_INT_CST_HIGH (arg1) == 0
6492 && (TREE_INT_CST_LOW (arg1)
6493 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6494 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6495 switch (TREE_CODE (t))
6497 case GT_EXPR:
6498 return omit_one_operand (type,
6499 convert (type, integer_zero_node),
6500 arg0);
6501 case GE_EXPR:
6502 TREE_SET_CODE (t, EQ_EXPR);
6503 break;
6505 case LE_EXPR:
6506 return omit_one_operand (type,
6507 convert (type, integer_one_node),
6508 arg0);
6509 case LT_EXPR:
6510 TREE_SET_CODE (t, NE_EXPR);
6511 break;
6513 default:
6514 break;
6517 else if (TREE_INT_CST_HIGH (arg1) == -1
6518 && (- TREE_INT_CST_LOW (arg1)
6519 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
6520 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6521 switch (TREE_CODE (t))
6523 case LT_EXPR:
6524 return omit_one_operand (type,
6525 convert (type, integer_zero_node),
6526 arg0);
6527 case LE_EXPR:
6528 TREE_SET_CODE (t, EQ_EXPR);
6529 break;
6531 case GE_EXPR:
6532 return omit_one_operand (type,
6533 convert (type, integer_one_node),
6534 arg0);
6535 case GT_EXPR:
6536 TREE_SET_CODE (t, NE_EXPR);
6537 break;
6539 default:
6540 break;
6543 else if (TREE_INT_CST_HIGH (arg1) == 0
6544 && (TREE_INT_CST_LOW (arg1)
6545 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6546 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6548 switch (TREE_CODE (t))
6550 case LE_EXPR:
6551 return fold (build (GE_EXPR, type,
6552 convert (signed_type (TREE_TYPE (arg0)),
6553 arg0),
6554 convert (signed_type (TREE_TYPE (arg1)),
6555 integer_zero_node)));
6556 case GT_EXPR:
6557 return fold (build (LT_EXPR, type,
6558 convert (signed_type (TREE_TYPE (arg0)),
6559 arg0),
6560 convert (signed_type (TREE_TYPE (arg1)),
6561 integer_zero_node)));
6563 default:
6564 break;
6569 /* If we are comparing an expression that just has comparisons
6570 of two integer values, arithmetic expressions of those comparisons,
6571 and constants, we can simplify it. There are only three cases
6572 to check: the two values can either be equal, the first can be
6573 greater, or the second can be greater. Fold the expression for
6574 those three values. Since each value must be 0 or 1, we have
6575 eight possibilities, each of which corresponds to the constant 0
6576 or 1 or one of the six possible comparisons.
6578 This handles common cases like (a > b) == 0 but also handles
6579 expressions like ((x > y) - (y > x)) > 0, which supposedly
6580 occur in macroized code. */
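/* For example, (a > b) == 0 folds to a <= b: the three trial
   evaluations yield 0, 1 and 1 for a > b, a == b and a < b, and that
   3-bit pattern selects LE_EXPR. */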
6582 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6584 tree cval1 = 0, cval2 = 0;
6585 int save_p = 0;
6587 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6588 /* Don't handle degenerate cases here; they should already
6589 have been handled anyway. */
6590 && cval1 != 0 && cval2 != 0
6591 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6592 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6593 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6594 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6595 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6596 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6597 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6599 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6600 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6602 /* We can't just pass T to eval_subst in case cval1 or cval2
6603 was the same as ARG1. */
6605 tree high_result
6606 = fold (build (code, type,
6607 eval_subst (arg0, cval1, maxval, cval2, minval),
6608 arg1));
6609 tree equal_result
6610 = fold (build (code, type,
6611 eval_subst (arg0, cval1, maxval, cval2, maxval),
6612 arg1));
6613 tree low_result
6614 = fold (build (code, type,
6615 eval_subst (arg0, cval1, minval, cval2, maxval),
6616 arg1));
6618 /* All three of these results should be 0 or 1. Confirm they
6619 are. Then use those values to select the proper code
6620 to use. */
6622 if ((integer_zerop (high_result)
6623 || integer_onep (high_result))
6624 && (integer_zerop (equal_result)
6625 || integer_onep (equal_result))
6626 && (integer_zerop (low_result)
6627 || integer_onep (low_result)))
6629 /* Make a 3-bit mask with the high-order bit being the
6630 value for `>', the next for `=', and the low for `<'. */
6631 switch ((integer_onep (high_result) * 4)
6632 + (integer_onep (equal_result) * 2)
6633 + integer_onep (low_result))
6635 case 0:
6636 /* Always false. */
6637 return omit_one_operand (type, integer_zero_node, arg0);
6638 case 1:
6639 code = LT_EXPR;
6640 break;
6641 case 2:
6642 code = EQ_EXPR;
6643 break;
6644 case 3:
6645 code = LE_EXPR;
6646 break;
6647 case 4:
6648 code = GT_EXPR;
6649 break;
6650 case 5:
6651 code = NE_EXPR;
6652 break;
6653 case 6:
6654 code = GE_EXPR;
6655 break;
6656 case 7:
6657 /* Always true. */
6658 return omit_one_operand (type, integer_one_node, arg0);
6661 t = build (code, type, cval1, cval2);
6662 if (save_p)
6663 return save_expr (t);
6664 else
6665 return fold (t);
6670 /* If this is a comparison of a field, we may be able to simplify it. */
6671 if ((TREE_CODE (arg0) == COMPONENT_REF
6672 || TREE_CODE (arg0) == BIT_FIELD_REF)
6673 && (code == EQ_EXPR || code == NE_EXPR)
6674 /* Handle the constant case even without -O
6675 to make sure the warnings are given. */
6676 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6678 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6679 return t1 ? t1 : t;
6682 /* If this is a comparison of complex values and either or both sides
6683 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6684 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6685 This may prevent needless evaluations. */
6686 if ((code == EQ_EXPR || code == NE_EXPR)
6687 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6688 && (TREE_CODE (arg0) == COMPLEX_EXPR
6689 || TREE_CODE (arg1) == COMPLEX_EXPR
6690 || TREE_CODE (arg0) == COMPLEX_CST
6691 || TREE_CODE (arg1) == COMPLEX_CST))
6693 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6694 tree real0, imag0, real1, imag1;
6696 arg0 = save_expr (arg0);
6697 arg1 = save_expr (arg1);
6698 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6699 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6700 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6701 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6703 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6704 : TRUTH_ORIF_EXPR),
6705 type,
6706 fold (build (code, type, real0, real1)),
6707 fold (build (code, type, imag0, imag1))));
6710 /* From here on, the only cases we handle are when the result is
6711 known to be a constant.
6713 To compute GT, swap the arguments and do LT.
6714 To compute GE, do LT and invert the result.
6715 To compute LE, swap the arguments, do LT and invert the result.
6716 To compute NE, do EQ and invert the result.
6718 Therefore, the code below must handle only EQ and LT. */
6720 if (code == LE_EXPR || code == GT_EXPR)
6722 tem = arg0, arg0 = arg1, arg1 = tem;
6723 code = swap_tree_comparison (code);
6726 /* Note that it is safe to invert for real values here because we
6727 will check below in the one case that it matters. */
6729 t1 = NULL_TREE;
6730 invert = 0;
6731 if (code == NE_EXPR || code == GE_EXPR)
6733 invert = 1;
6734 code = invert_tree_comparison (code);
6737 /* Compute a result for LT or EQ if args permit;
6738 otherwise return T. */
6739 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6741 if (code == EQ_EXPR)
6742 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6743 else
6744 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6745 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6746 : INT_CST_LT (arg0, arg1)), 0);
6750 #if 0 /* This is no longer useful, and it breaks some real code. */
6751 /* Assume a nonexplicit constant cannot equal an explicit one,
6752 since such code would be undefined anyway.
6753 Exception: on sysvr4, using #pragma weak,
6754 a label can come out as 0. */
6755 else if (TREE_CODE (arg1) == INTEGER_CST
6756 && !integer_zerop (arg1)
6757 && TREE_CONSTANT (arg0)
6758 && TREE_CODE (arg0) == ADDR_EXPR
6759 && code == EQ_EXPR)
6760 t1 = build_int_2 (0, 0);
6761 #endif
6762 /* Two real constants can be compared explicitly. */
6763 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6765 /* If either operand is a NaN, the result is false with two
6766 exceptions: First, an NE_EXPR is true on NaNs, but that case
6767 is already handled correctly since we will be inverting the
6768 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6769 or a GE_EXPR into a LT_EXPR, we must return true so that it
6770 will be inverted into false. */
6772 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6773 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6774 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6776 else if (code == EQ_EXPR)
6777 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6778 TREE_REAL_CST (arg1)), 0);
6780 else
6781 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6782 TREE_REAL_CST (arg1)), 0);
6786 if (t1 == NULL_TREE)
6787 return t;
6789 if (invert)
6790 TREE_INT_CST_LOW (t1) ^= 1;
6792 TREE_TYPE (t1) = type;
6793 if (TREE_CODE (type) == BOOLEAN_TYPE)
6794 return truthvalue_conversion (t1);
6795 return t1;
6797 case COND_EXPR:
6798 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6799 so all simple results must be passed through pedantic_non_lvalue. */
6800 if (TREE_CODE (arg0) == INTEGER_CST)
6801 return pedantic_non_lvalue
6802 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6803 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6804 return pedantic_omit_one_operand (type, arg1, arg0);
6806 /* If the second operand is zero, invert the comparison and swap
6807 the second and third operands. Likewise if the second operand
6808 is constant and the third is not, or if the third operand is
6809 equivalent to the first operand of the comparison. */
6811 if (integer_zerop (arg1)
6812 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6813 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6814 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6815 TREE_OPERAND (t, 2),
6816 TREE_OPERAND (arg0, 1))))
6818 /* See if this can be inverted. If it can't, possibly because
6819 it was a floating-point inequality comparison, don't do
6820 anything. */
6821 tem = invert_truthvalue (arg0);
6823 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6825 t = build (code, type, tem,
6826 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6827 arg0 = tem;
6828 /* arg1 should be the first argument of the new T. */
6829 arg1 = TREE_OPERAND (t, 1);
6830 STRIP_NOPS (arg1);
6834 /* If we have A op B ? A : C, we may be able to convert this to a
6835 simpler expression, depending on the operation and the values
6836 of B and C. IEEE floating point prevents this though,
6837 because A or B might be -0.0 or a NaN. */
6839 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6840 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
6841 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
6842 || flag_fast_math)
6843 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6844 arg1, TREE_OPERAND (arg0, 1)))
6846 tree arg2 = TREE_OPERAND (t, 2);
6847 enum tree_code comp_code = TREE_CODE (arg0);
6849 STRIP_NOPS (arg2);
6851 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
6852 depending on the comparison operation. */
6853 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6854 ? real_zerop (TREE_OPERAND (arg0, 1))
6855 : integer_zerop (TREE_OPERAND (arg0, 1)))
6856 && TREE_CODE (arg2) == NEGATE_EXPR
6857 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6858 switch (comp_code)
6860 case EQ_EXPR:
6861 return
6862 pedantic_non_lvalue (convert (type, negate_expr (arg1)));
6863 case NE_EXPR:
6864 return pedantic_non_lvalue (convert (type, arg1));
6865 case GE_EXPR:
6866 case GT_EXPR:
6867 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6868 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6869 return pedantic_non_lvalue
6870 (convert (type, fold (build1 (ABS_EXPR,
6871 TREE_TYPE (arg1), arg1))));
6872 case LE_EXPR:
6873 case LT_EXPR:
6874 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6875 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6876 return pedantic_non_lvalue
6877 (negate_expr (convert (type,
6878 fold (build1 (ABS_EXPR,
6879 TREE_TYPE (arg1),
6880 arg1)))));
6881 default:
6882 abort ();
6885 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
6886 always zero. */
6888 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6890 if (comp_code == NE_EXPR)
6891 return pedantic_non_lvalue (convert (type, arg1));
6892 else if (comp_code == EQ_EXPR)
6893 return pedantic_non_lvalue (convert (type, integer_zero_node));
6896 /* If this is A op B ? A : B, this is either A, B, min (A, B),
6897 or max (A, B), depending on the operation. */
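/* For example, a < b ? a : b becomes MIN_EXPR (a, b), and
   a > b ? a : b becomes MAX_EXPR (a, b). */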
6899 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6900 arg2, TREE_OPERAND (arg0, 0)))
6902 tree comp_op0 = TREE_OPERAND (arg0, 0);
6903 tree comp_op1 = TREE_OPERAND (arg0, 1);
6904 tree comp_type = TREE_TYPE (comp_op0);
6906 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
6907 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
6908 comp_type = type;
6910 switch (comp_code)
6912 case EQ_EXPR:
6913 return pedantic_non_lvalue (convert (type, arg2));
6914 case NE_EXPR:
6915 return pedantic_non_lvalue (convert (type, arg1));
6916 case LE_EXPR:
6917 case LT_EXPR:
6918 /* In C++ a ?: expression can be an lvalue, so put the
6919 operand which will be used if they are equal first
6920 so that we can convert this back to the
6921 corresponding COND_EXPR. */
6922 return pedantic_non_lvalue
6923 (convert (type, (fold (build (MIN_EXPR, comp_type,
6924 (comp_code == LE_EXPR
6925 ? comp_op0 : comp_op1),
6926 (comp_code == LE_EXPR
6927 ? comp_op1 : comp_op0))))));
6928 break;
6929 case GE_EXPR:
6930 case GT_EXPR:
6931 return pedantic_non_lvalue
6932 (convert (type, fold (build (MAX_EXPR, comp_type,
6933 (comp_code == GE_EXPR
6934 ? comp_op0 : comp_op1),
6935 (comp_code == GE_EXPR
6936 ? comp_op1 : comp_op0)))));
6937 break;
6938 default:
6939 abort ();
6943 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6944 we might still be able to simplify this. For example,
6945 if C1 is one less or one more than C2, this might have started
6946 out as a MIN or MAX and been transformed by this function.
6947 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
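/* For example, a < 4 ? a : 3 becomes MIN_EXPR (a, 3), since with
   C1 == C2 + 1 both forms select the same value. */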
6949 if (INTEGRAL_TYPE_P (type)
6950 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6951 && TREE_CODE (arg2) == INTEGER_CST)
6952 switch (comp_code)
6954 case EQ_EXPR:
6955 /* We can replace A with C1 in this case. */
6956 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6957 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6958 TREE_OPERAND (t, 2));
6959 break;
6961 case LT_EXPR:
6962 /* If C1 is C2 + 1, this is min(A, C2). */
6963 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6964 && operand_equal_p (TREE_OPERAND (arg0, 1),
6965 const_binop (PLUS_EXPR, arg2,
6966 integer_one_node, 0), 1))
6967 return pedantic_non_lvalue
6968 (fold (build (MIN_EXPR, type, arg1, arg2)));
6969 break;
6971 case LE_EXPR:
6972 /* If C1 is C2 - 1, this is min(A, C2). */
6973 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6974 && operand_equal_p (TREE_OPERAND (arg0, 1),
6975 const_binop (MINUS_EXPR, arg2,
6976 integer_one_node, 0), 1))
6977 return pedantic_non_lvalue
6978 (fold (build (MIN_EXPR, type, arg1, arg2)));
6979 break;
6981 case GT_EXPR:
6982 /* If C1 is C2 - 1, this is max(A, C2). */
6983 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6984 && operand_equal_p (TREE_OPERAND (arg0, 1),
6985 const_binop (MINUS_EXPR, arg2,
6986 integer_one_node, 0), 1))
6987 return pedantic_non_lvalue
6988 (fold (build (MAX_EXPR, type, arg1, arg2)));
6989 break;
6991 case GE_EXPR:
6992 /* If C1 is C2 + 1, this is max(A, C2). */
6993 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6994 && operand_equal_p (TREE_OPERAND (arg0, 1),
6995 const_binop (PLUS_EXPR, arg2,
6996 integer_one_node, 0), 1))
6997 return pedantic_non_lvalue
6998 (fold (build (MAX_EXPR, type, arg1, arg2)));
6999 break;
7000 case NE_EXPR:
7001 break;
7002 default:
7003 abort ();
7007 /* If the second operand is simpler than the third, swap them
7008 since that produces better jump optimization results. */
7009 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7010 || TREE_CODE (arg1) == SAVE_EXPR)
7011 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7012 || DECL_P (TREE_OPERAND (t, 2))
7013 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7015 /* See if this can be inverted. If it can't, possibly because
7016 it was a floating-point inequality comparison, don't do
7017 anything. */
7018 tem = invert_truthvalue (arg0);
7020 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7022 t = build (code, type, tem,
7023 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7024 arg0 = tem;
7025 /* arg1 should be the first argument of the new T. */
7026 arg1 = TREE_OPERAND (t, 1);
7027 STRIP_NOPS (arg1);
7031 /* Convert A ? 1 : 0 to simply A. */
7032 if (integer_onep (TREE_OPERAND (t, 1))
7033 && integer_zerop (TREE_OPERAND (t, 2))
7034 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7035 call to fold will try to move the conversion inside
7036 a COND, which will recurse. In that case, the COND_EXPR
7037 is probably the best choice, so leave it alone. */
7038 && type == TREE_TYPE (arg0))
7039 return pedantic_non_lvalue (arg0);
7041 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7042 operation is simply A & 2. */
7044 if (integer_zerop (TREE_OPERAND (t, 2))
7045 && TREE_CODE (arg0) == NE_EXPR
7046 && integer_zerop (TREE_OPERAND (arg0, 1))
7047 && integer_pow2p (arg1)
7048 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7049 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7050 arg1, 1))
7051 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7053 return t;
7055 case COMPOUND_EXPR:
7056 /* When pedantic, a compound expression can be neither an lvalue
7057 nor an integer constant expression. */
7058 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7059 return t;
7060 /* Don't let (0, 0) be a null pointer constant. */
7061 if (integer_zerop (arg1))
7062 return build1 (NOP_EXPR, type, arg1);
7063 return convert (type, arg1);
7065 case COMPLEX_EXPR:
7066 if (wins)
7067 return build_complex (type, arg0, arg1);
7068 return t;
7070 case REALPART_EXPR:
7071 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7072 return t;
7073 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7074 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7075 TREE_OPERAND (arg0, 1));
7076 else if (TREE_CODE (arg0) == COMPLEX_CST)
7077 return TREE_REALPART (arg0);
7078 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7079 return fold (build (TREE_CODE (arg0), type,
7080 fold (build1 (REALPART_EXPR, type,
7081 TREE_OPERAND (arg0, 0))),
7082 fold (build1 (REALPART_EXPR,
7083 type, TREE_OPERAND (arg0, 1)))));
7084 return t;
7086 case IMAGPART_EXPR:
7087 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7088 return convert (type, integer_zero_node);
7089 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7090 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7091 TREE_OPERAND (arg0, 0));
7092 else if (TREE_CODE (arg0) == COMPLEX_CST)
7093 return TREE_IMAGPART (arg0);
7094 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7095 return fold (build (TREE_CODE (arg0), type,
7096 fold (build1 (IMAGPART_EXPR, type,
7097 TREE_OPERAND (arg0, 0))),
7098 fold (build1 (IMAGPART_EXPR, type,
7099 TREE_OPERAND (arg0, 1)))));
7100 return t;
7102 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7103 appropriate. */
7104 case CLEANUP_POINT_EXPR:
7105 if (! has_cleanups (arg0))
7106 return TREE_OPERAND (t, 0);
7109 enum tree_code code0 = TREE_CODE (arg0);
7110 int kind0 = TREE_CODE_CLASS (code0);
7111 tree arg00 = TREE_OPERAND (arg0, 0);
7112 tree arg01;
7114 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7115 return fold (build1 (code0, type,
7116 fold (build1 (CLEANUP_POINT_EXPR,
7117 TREE_TYPE (arg00), arg00))));
7119 if (kind0 == '<' || kind0 == '2'
7120 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7121 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7122 || code0 == TRUTH_XOR_EXPR)
7124 arg01 = TREE_OPERAND (arg0, 1);
7126 if (TREE_CONSTANT (arg00)
7127 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7128 && ! has_cleanups (arg00)))
7129 return fold (build (code0, type, arg00,
7130 fold (build1 (CLEANUP_POINT_EXPR,
7131 TREE_TYPE (arg01), arg01))));
7133 if (TREE_CONSTANT (arg01))
7134 return fold (build (code0, type,
7135 fold (build1 (CLEANUP_POINT_EXPR,
7136 TREE_TYPE (arg00), arg00)),
7137 arg01));
7140 return t;
7143 default:
7144 return t;
7145 } /* switch (code) */
7148 /* Determine if first argument is a multiple of second argument. Return 0 if
7149 it is not, or if we cannot easily determine it to be.
7151 An example of the sort of thing we care about (at this point; this routine
7152 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7153 fold cases do now) is discovering that
7155 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7157 is a multiple of
7159 SAVE_EXPR (J * 8)
7161 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7163 This code also handles discovering that
7165 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7167 is a multiple of 8 so we don't have to worry about dealing with a
7168 possible remainder.
7170 Note that we *look* inside a SAVE_EXPR only to determine how it was
7171 calculated; it is not safe for fold to do much of anything else with the
7172 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7173 at run time. For example, the latter example above *cannot* be implemented
7174 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7175 evaluation time of the original SAVE_EXPR is not necessarily the same at
7176 the time the new expression is evaluated. The only optimization of this
7177 sort that would be valid is changing
7179 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7181 divided by 8 to
7183 SAVE_EXPR (I) * SAVE_EXPR (J)
7185 (where the same SAVE_EXPR (J) is used in the original and the
7186 transformed version). */
7188 static int
7189 multiple_of_p (type, top, bottom)
7190 tree type;
7191 tree top;
7192 tree bottom;
7194 if (operand_equal_p (top, bottom, 0))
7195 return 1;
7197 if (TREE_CODE (type) != INTEGER_TYPE)
7198 return 0;
7200 switch (TREE_CODE (top))
7202 case MULT_EXPR:
7203 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7204 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7206 case PLUS_EXPR:
7207 case MINUS_EXPR:
7208 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7209 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7211 case NOP_EXPR:
7212 /* Can't handle conversions from a non-integral or wider integral type. */
7213 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7214 || (TYPE_PRECISION (type)
7215 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7216 return 0;
7218 /* ... fall through ... */
7220 case SAVE_EXPR:
7221 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7223 case INTEGER_CST:
7224 if ((TREE_CODE (bottom) != INTEGER_CST)
7225 || (tree_int_cst_sgn (top) < 0)
7226 || (tree_int_cst_sgn (bottom) < 0))
7227 return 0;
7228 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7229 top, bottom, 0));
7231 default:
7232 return 0;