2002-05-04  David S. Miller  <davem@redhat.com>
[official-gcc.git] / gcc / fold-const.c
blob 084308ec9fa502077144884eca117c86480fc3ea

/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include "flags.h"
#include "tree.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"

static void encode              PARAMS ((HOST_WIDE_INT *,
                                         unsigned HOST_WIDE_INT,
                                         HOST_WIDE_INT));
static void decode              PARAMS ((HOST_WIDE_INT *,
                                         unsigned HOST_WIDE_INT *,
                                         HOST_WIDE_INT *));
static tree negate_expr         PARAMS ((tree));
static tree split_tree          PARAMS ((tree, enum tree_code, tree *, tree *,
                                         tree *, int));
static tree associate_trees     PARAMS ((tree, tree, enum tree_code, tree));
static tree int_const_binop     PARAMS ((enum tree_code, tree, tree, int));
static tree const_binop         PARAMS ((enum tree_code, tree, tree, int));
static hashval_t size_htab_hash PARAMS ((const void *));
static int size_htab_eq         PARAMS ((const void *, const void *));
static tree fold_convert        PARAMS ((tree, tree));
static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
static int truth_value_p        PARAMS ((enum tree_code));
static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
static int twoval_comparison_p  PARAMS ((tree, tree *, tree *, int *));
static tree eval_subst          PARAMS ((tree, tree, tree, tree, tree));
static tree omit_one_operand    PARAMS ((tree, tree, tree));
static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref  PARAMS ((tree, tree, int, int, int));
static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
                                                tree, tree));
static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
                                            HOST_WIDE_INT *,
                                            enum machine_mode *, int *,
                                            int *, tree *, tree *));
static int all_ones_mask_p      PARAMS ((tree, int));
static int simple_operand_p     PARAMS ((tree));
static tree range_binop         PARAMS ((enum tree_code, tree, tree, int,
                                         tree, int));
static tree make_range          PARAMS ((tree, int *, tree *, tree *));
static tree build_range_check   PARAMS ((tree, tree, int, tree, tree));
static int merge_ranges         PARAMS ((int *, tree *, tree *, int, tree, tree,
                                         int, tree, tree));
static tree fold_range_test     PARAMS ((tree));
static tree unextend            PARAMS ((tree, int, int, tree));
static tree fold_truthop        PARAMS ((enum tree_code, tree, tree, tree));
static tree optimize_minmax_comparison PARAMS ((tree));
static tree extract_muldiv      PARAMS ((tree, tree, enum tree_code, tree));
static tree strip_compound_expr PARAMS ((tree, tree));
static int multiple_of_p        PARAMS ((tree, tree, tree));
static tree constant_boolean_node PARAMS ((int, tree));
static int count_cond           PARAMS ((tree, int));
static tree fold_binary_op_with_conditional_arg
  PARAMS ((enum tree_code, tree, tree, tree, int));
static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));

#if defined(HOST_EBCDIC)
/* bit 8 is significant in EBCDIC */
#define CHARMASK 0xff
#else
#define CHARMASK 0x7f
#endif

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
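
/* Worked example (illustrative, using 8-bit values for brevity): with
   a == 0x70 (+112), b == 0x30 (+48) and sum == 0xA0 (-96 in two's
   complement), a ^ b has a clear sign bit, so ~(a ^ b) has it set; and
   a ^ sum has it set as well, since A and SUM differ in sign.  The AND
   therefore keeps the sign bit, the whole expression compares less than
   zero, and the macro reports the overflow of 112 + 48 in signed 8-bit
   arithmetic.  */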

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
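
/* For example, assuming HOST_BITS_PER_WIDE_INT == 32 (so BASE == 0x10000):
   for x == 0xDEADBEEF, LOWPART (x) == 0xBEEF and HIGHPART (x) == 0xDEAD,
   and 0xBEEF + 0xDEAD * 0x10000 reconstructs the original value.  */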

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT low;
     HOST_WIDE_INT hi;
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT *low;
     HOST_WIDE_INT *hi;
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
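
/* Round-trip example (illustrative, with HOST_BITS_PER_WIDE_INT == 32):
   encode splits low == 0x12345678, hi == 0x0000ABCD into
   words == { 0x5678, 0x1234, 0xABCD, 0x0000 }; decode then recombines
   each pair as words[n] + words[n + 1] * BASE, recovering LOW and HI
   exactly.  */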

/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.

   Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
   if it exists.  */

int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
                         overflow);
#endif
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}

/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
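
/* Illustrative use (with HOST_BITS_PER_WIDE_INT == 32): for
   l1 == 0xFFFFFFFF, h1 == 0, l2 == 1, h2 == 0, the low sum wraps to
   l == 0, the test (l < l1) supplies the carry, and h becomes 1.
   OVERFLOW_SUM_SIGN (0, 0, 1) is 0, so this is not a signed overflow:
   the mathematical result 2^32 still fits in the signed doubleword.  */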

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (l1, h1, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset ((char *) prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
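
/* A note on the overflow check above: PROD holds the full unsigned
   quadword product, and the signed result is exact only if the top half
   equals the sign extension of the low half.  When an operand is
   negative, the top half is first corrected by adding the negation of
   the other operand (the standard unsigned-to-signed product fixup).
   The final test then demands that TOPLOW and TOPHIGH be all one bits
   when the low half is negative (so ~(toplow & tophigh) must be zero)
   or all zero bits when it is non-negative (so toplow | tophigh must be
   zero); anything else is reported as overflow.  */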

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? (*hv >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
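
/* Illustrative example of the shift-and-OR identity used above, on a
   plain 8-bit value for brevity: rotating 0xB4 left by 3 computes
   (0xB4 << 3) | (0xB4 >> 5) within 8 bits, i.e. 0xA0 | 0x05 == 0xA5,
   which is exactly the OR of the left shift by COUNT and the logical
   right shift by PREC - COUNT.  */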

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (code, uns,
                      lnum_orig, hnum_orig, lden_orig, hden_orig,
                      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
     HOST_WIDE_INT hnum_orig;
     unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
     HOST_WIDE_INT hden_orig;
     unsigned HOST_WIDE_INT *lquo, *lrem;
     HOST_WIDE_INT *hquo, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset ((char *) quo, 0, sizeof quo);

  memset ((char *) num, 0, sizeof num); /* to zero the extra scaling element */
  memset ((char *) den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest non-zero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* if result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1; */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
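
/* Worked example of the rounding adjustments (illustrative): dividing
   -8 by 3 gives a trial quotient of -2 with remainder -2.
   TRUNC_DIV_EXPR keeps -2; FLOOR_DIV_EXPR sees QUO_NEG and a nonzero
   remainder and decrements to -3; CEIL_DIV_EXPR leaves -2, since the
   quotient is negative; and ROUND_DIV_EXPR compares 2 * |rem| == 4
   against |den| == 3 and, finding it larger, moves the quotient away
   from zero to -3.  In every case the true remainder is then recomputed
   as num - quo * den.  */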

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (t)
     tree t;
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
      if (! TREE_UNSIGNED (type)
          && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
          && ! TREE_OVERFLOW (tem))
        return tem;
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        return convert (type,
                        fold (build (MINUS_EXPR, TREE_TYPE (t),
                                     TREE_OPERAND (t, 1),
                                     TREE_OPERAND (t, 0))));
      break;

    default:
      break;
    }

  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (in, code, conp, litp, minus_litp, negate_p)
     tree in;
     enum tree_code code;
     tree *conp, *litp, *minus_litp;
     int negate_p;
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
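
/* Illustrative decomposition: splitting IN == x - 3 with CODE ==
   PLUS_EXPR finds the literal 3 as the subtracted operand, so it is
   stored through *MINUS_LITP rather than *LITP, *CONP stays null, and
   x is returned as the variable part.  Recombining the parts (adding
   *LITP and *CONP, subtracting *MINUS_LITP) reconstructs IN.  */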

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (t1, t2, code, type)
     tree t1, t2;
     enum tree_code code;
     tree type;
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (TREE_CODE (t1) == NEGATE_EXPR)
        return build (MINUS_EXPR, type, convert (type, t2),
                      convert (type, TREE_OPERAND (t1, 0)));
      else if (TREE_CODE (t2) == NEGATE_EXPR)
        return build (MINUS_EXPR, type, convert (type, t1),
                      convert (type, TREE_OPERAND (t2, 0)));
      else
        return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     tree arg1, arg2;
     int notrunc;
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
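
/* Example of the CEIL_DIV_EXPR shortcut above (illustrative): for
   7 / 2 with single-word non-negative operands, INT1L is biased to
   7 + (2 - 1) == 8 before the truncating division, yielding the
   ceiling 4 without a call to div_and_round_double.  */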

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (TREE_TYPE (arg1),
                      real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
                                           value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (x)
     const void *x;
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ (hashval_t) ((long) TREE_TYPE (t) >> 3)
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return non-zero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST.  */

static int
size_htab_eq (x, y)
     const void *x;
     const void *y;
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}

/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (number, kind)
     HOST_WIDE_INT number;
     enum size_type_kind kind;
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

tree
size_int_type_wide (number, type)
     HOST_WIDE_INT number;
     tree type;
{
  static htab_t size_htab = 0;
  static tree new_const = 0;
  PTR *slot;

  if (size_htab == 0)
    {
      size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
      ggc_add_deletable_htab (size_htab, NULL, NULL);
      new_const = make_node (INTEGER_CST);
      ggc_add_tree_root (&new_const, 1);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = (PTR) new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
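
/* Usage note (illustrative): size_int_type_wide hash-conses sizetype
   constants, so two requests for, say, the value 16 in the same type
   return the identical INTEGER_CST node.  NEW_CONST serves as a scratch
   key: it is filled in and looked up, and only promoted to a permanent
   table entry (with a fresh scratch node allocated to replace it) when
   the lookup misses.  */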

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (code, arg0, arg1)
     enum tree_code code;
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (arg0, arg1)
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
                       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
                       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
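
/* Illustrative behavior: with constant sizetype operands ARG0 == 4 and
   ARG1 == 12, the difference 12 - 4 == 8 is computed in the unsigned
   type first and then negated in the signed CTYPE, returning -8;
   subtracting directly as 4 - 12 would have wrapped in the unsigned
   sizetype.  */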

/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (t, arg1)
     tree t;
     tree arg1;
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return t;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* Don't initialize these, use assignments.
             Initialized local aggregates don't work on old compilers.  */
          REAL_VALUE_TYPE x;
          REAL_VALUE_TYPE l;
          REAL_VALUE_TYPE u;
          tree type1 = TREE_TYPE (arg1);
          int no_upper_bound;

          x = TREE_REAL_CST (arg1);
          l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

          no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
          if (!no_upper_bound)
            u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */
          REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
          if (!no_upper_bound)
            REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
          /* If X is a NaN, use zero instead and show we have an overflow.
             Otherwise, range check.  */
          if (REAL_VALUE_ISNAN (x))
            overflow = 1, x = dconst0;
          else if (! (REAL_VALUES_LESS (l, x)
                      && !no_upper_bound
                      && REAL_VALUES_LESS (x, u)))
            overflow = 1;

          {
            HOST_WIDE_INT low, high;
            REAL_VALUE_TO_INT (&low, &high, x);
            t = build_int_2 (low, high);
          }
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              t = arg1;
              TREE_TYPE (arg1) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  TREE_CONSTANT (t) = 1;
  return t;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (x)
     tree x;
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (x)
     tree x;
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}

/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (code)
     enum tree_code code;
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}

/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is non-zero, only return non-zero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (v1, v2, only_const))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 only_const)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    only_const));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
          && (TREE_UNSIGNED (TREE_TYPE (arg0))
              != TREE_UNSIGNED (TREE_TYPE (arg1))))
        return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
          && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
                              0))
        return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
               || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
               || TREE_CODE (arg0) == BIT_IOR_EXPR
               || TREE_CODE (arg0) == BIT_XOR_EXPR
               || TREE_CODE (arg0) == BIT_AND_EXPR
               || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
         dereferencing contains a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case COMPONENT_REF:
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0));

        case BIT_FIELD_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), 0));
        default:
          return 0;
        }

    case 'e':
      if (TREE_CODE (arg0) == RTL_EXPR)
        return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
      return 0;

    default:
      return 0;
    }
}

/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
                          (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
1951 /* See if ARG is an expression that is either a comparison or is performing
1952 arithmetic on comparisons. The comparisons must only be comparing
1953 two different values, which will be stored in *CVAL1 and *CVAL2; if
1954 they are non-zero it means that some operands have already been found.
1955 No variables may be used anywhere else in the expression except in the
1956 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1957 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1959 If this is true, return 1. Otherwise, return zero. */
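/* An illustrative sketch (hypothetical variables): for

       (a < b) && (a == b)

   this returns 1 with *CVAL1 = A and *CVAL2 = B, since every
   comparison in the expression mentions only those two values.  For
   (a < b) && (a < c) it returns 0, because a third value C appears.  */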
1961 static int
1962 twoval_comparison_p (arg, cval1, cval2, save_p)
1963 tree arg;
1964 tree *cval1, *cval2;
1965 int *save_p;
1967 enum tree_code code = TREE_CODE (arg);
1968 char class = TREE_CODE_CLASS (code);
1970 /* We can handle some of the 'e' cases here. */
1971 if (class == 'e' && code == TRUTH_NOT_EXPR)
1972 class = '1';
1973 else if (class == 'e'
1974 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1975 || code == COMPOUND_EXPR))
1976 class = '2';
1978 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
1979 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
1981 /* If we've already found a CVAL1 or CVAL2, this expression is
1982 too complex to handle. */
1983 if (*cval1 || *cval2)
1984 return 0;
1986 class = '1';
1987 *save_p = 1;
1990 switch (class)
1992 case '1':
1993 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
1995 case '2':
1996 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1997 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1998 cval1, cval2, save_p));
2000 case 'c':
2001 return 1;
2003 case 'e':
2004 if (code == COND_EXPR)
2005 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2006 cval1, cval2, save_p)
2007 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2008 cval1, cval2, save_p)
2009 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2010 cval1, cval2, save_p));
2011 return 0;
2013 case '<':
2014 /* First see if we can handle the first operand, then the second. For
2015 the second operand, we know *CVAL1 can't be zero. It must be that
2016 one side of the comparison is each of the values; test for the
2017 case where this isn't true by failing if the two operands
2018 are the same. */
2020 if (operand_equal_p (TREE_OPERAND (arg, 0),
2021 TREE_OPERAND (arg, 1), 0))
2022 return 0;
2024 if (*cval1 == 0)
2025 *cval1 = TREE_OPERAND (arg, 0);
2026 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2027 ; /* Already recorded; nothing to do. */
2028 else if (*cval2 == 0)
2029 *cval2 = TREE_OPERAND (arg, 0);
2030 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2031 ;
2032 else
2033 return 0;
2035 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2036 ;
2037 else if (*cval2 == 0)
2038 *cval2 = TREE_OPERAND (arg, 1);
2039 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2040 ;
2041 else
2042 return 0;
2044 return 1;
2046 default:
2047 return 0;
2051 /* ARG is a tree that is known to contain just arithmetic operations and
2052 comparisons. Evaluate the operations in the tree substituting NEW0 for
2053 any occurrence of OLD0 as an operand of a comparison and likewise for
2054 NEW1 and OLD1. */
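/* An illustrative sketch (hypothetical values): with OLD0 = A,
   NEW0 = 3, OLD1 = B and NEW1 = 5,

       eval_subst ((a < b) && (a == b), a, 3, b, 5)

   builds (3 < 5) && (3 == 5), which the calls to fold below collapse
   to a constant.  This is how fold can evaluate an expression of the
   kind accepted by twoval_comparison_p at chosen operand values.  */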
2056 static tree
2057 eval_subst (arg, old0, new0, old1, new1)
2058 tree arg;
2059 tree old0, new0, old1, new1;
2061 tree type = TREE_TYPE (arg);
2062 enum tree_code code = TREE_CODE (arg);
2063 char class = TREE_CODE_CLASS (code);
2065 /* We can handle some of the 'e' cases here. */
2066 if (class == 'e' && code == TRUTH_NOT_EXPR)
2067 class = '1';
2068 else if (class == 'e'
2069 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2070 class = '2';
2072 switch (class)
2074 case '1':
2075 return fold (build1 (code, type,
2076 eval_subst (TREE_OPERAND (arg, 0),
2077 old0, new0, old1, new1)));
2079 case '2':
2080 return fold (build (code, type,
2081 eval_subst (TREE_OPERAND (arg, 0),
2082 old0, new0, old1, new1),
2083 eval_subst (TREE_OPERAND (arg, 1),
2084 old0, new0, old1, new1)));
2086 case 'e':
2087 switch (code)
2089 case SAVE_EXPR:
2090 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2092 case COMPOUND_EXPR:
2093 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2095 case COND_EXPR:
2096 return fold (build (code, type,
2097 eval_subst (TREE_OPERAND (arg, 0),
2098 old0, new0, old1, new1),
2099 eval_subst (TREE_OPERAND (arg, 1),
2100 old0, new0, old1, new1),
2101 eval_subst (TREE_OPERAND (arg, 2),
2102 old0, new0, old1, new1)));
2103 default:
2104 break;
2106 /* fall through - ??? */
2108 case '<':
2110 tree arg0 = TREE_OPERAND (arg, 0);
2111 tree arg1 = TREE_OPERAND (arg, 1);
2113 /* We need to check both for exact equality and tree equality. The
2114 former will be true if the operand has a side-effect. In that
2115 case, we know the operand occurred exactly once. */
2117 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2118 arg0 = new0;
2119 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2120 arg0 = new1;
2122 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2123 arg1 = new0;
2124 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2125 arg1 = new1;
2127 return fold (build (code, type, arg0, arg1));
2130 default:
2131 return arg;
2135 /* Return a tree for the case when the result of an expression is RESULT
2136 converted to TYPE and OMITTED was previously an operand of the expression
2137 but is now not needed (e.g., we folded OMITTED * 0).
2139 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2140 the conversion of RESULT to TYPE. */
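/* An illustrative sketch (hypothetical call): when fold turns
   `f () * 0' into 0 and the call has side effects,

       omit_one_operand (type, integer_zero_node, <tree for f ()>)

   produces the equivalent of the C expression `(f (), 0)': the
   omitted operand is still evaluated, and the result is 0.  */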
2142 static tree
2143 omit_one_operand (type, result, omitted)
2144 tree type, result, omitted;
2146 tree t = convert (type, result);
2148 if (TREE_SIDE_EFFECTS (omitted))
2149 return build (COMPOUND_EXPR, type, omitted, t);
2151 return non_lvalue (t);
2154 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2156 static tree
2157 pedantic_omit_one_operand (type, result, omitted)
2158 tree type, result, omitted;
2160 tree t = convert (type, result);
2162 if (TREE_SIDE_EFFECTS (omitted))
2163 return build (COMPOUND_EXPR, type, omitted, t);
2165 return pedantic_non_lvalue (t);
2168 /* Return a simplified tree node for the truth-negation of ARG. This
2169 never alters ARG itself. We assume that ARG is an operation that
2170 returns a truth value (0 or 1). */
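/* The cases handled below are the usual boolean identities, e.g.

       !(a && b)  ==>  !a || !b
       !(a < b)   ==>  a >= b     (integer operands)

   The second rewrite would be wrong for floating point when NaNs are
   possible, which is why non-equality FP comparisons below are merely
   wrapped in TRUTH_NOT_EXPR unless -funsafe-math-optimizations is on. */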
2172 tree
2173 invert_truthvalue (arg)
2174 tree arg;
2176 tree type = TREE_TYPE (arg);
2177 enum tree_code code = TREE_CODE (arg);
2179 if (code == ERROR_MARK)
2180 return arg;
2182 /* If this is a comparison, we can simply invert it, except for
2183 floating-point non-equality comparisons, in which case we just
2184 enclose a TRUTH_NOT_EXPR around what we have. */
2186 if (TREE_CODE_CLASS (code) == '<')
2188 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2189 && !flag_unsafe_math_optimizations
2190 && code != NE_EXPR
2191 && code != EQ_EXPR)
2192 return build1 (TRUTH_NOT_EXPR, type, arg);
2193 else
2194 return build (invert_tree_comparison (code), type,
2195 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2198 switch (code)
2200 case INTEGER_CST:
2201 return convert (type, build_int_2 (integer_zerop (arg), 0));
2203 case TRUTH_AND_EXPR:
2204 return build (TRUTH_OR_EXPR, type,
2205 invert_truthvalue (TREE_OPERAND (arg, 0)),
2206 invert_truthvalue (TREE_OPERAND (arg, 1)));
2208 case TRUTH_OR_EXPR:
2209 return build (TRUTH_AND_EXPR, type,
2210 invert_truthvalue (TREE_OPERAND (arg, 0)),
2211 invert_truthvalue (TREE_OPERAND (arg, 1)));
2213 case TRUTH_XOR_EXPR:
2214 /* Here we can invert either operand. We invert the first operand
2215 unless the second operand is a TRUTH_NOT_EXPR in which case our
2216 result is the XOR of the first operand with the inside of the
2217 negation of the second operand. */
2219 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2220 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2221 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2222 else
2223 return build (TRUTH_XOR_EXPR, type,
2224 invert_truthvalue (TREE_OPERAND (arg, 0)),
2225 TREE_OPERAND (arg, 1));
2227 case TRUTH_ANDIF_EXPR:
2228 return build (TRUTH_ORIF_EXPR, type,
2229 invert_truthvalue (TREE_OPERAND (arg, 0)),
2230 invert_truthvalue (TREE_OPERAND (arg, 1)));
2232 case TRUTH_ORIF_EXPR:
2233 return build (TRUTH_ANDIF_EXPR, type,
2234 invert_truthvalue (TREE_OPERAND (arg, 0)),
2235 invert_truthvalue (TREE_OPERAND (arg, 1)));
2237 case TRUTH_NOT_EXPR:
2238 return TREE_OPERAND (arg, 0);
2240 case COND_EXPR:
2241 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2242 invert_truthvalue (TREE_OPERAND (arg, 1)),
2243 invert_truthvalue (TREE_OPERAND (arg, 2)));
2245 case COMPOUND_EXPR:
2246 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2247 invert_truthvalue (TREE_OPERAND (arg, 1)));
2249 case WITH_RECORD_EXPR:
2250 return build (WITH_RECORD_EXPR, type,
2251 invert_truthvalue (TREE_OPERAND (arg, 0)),
2252 TREE_OPERAND (arg, 1));
2254 case NON_LVALUE_EXPR:
2255 return invert_truthvalue (TREE_OPERAND (arg, 0));
2257 case NOP_EXPR:
2258 case CONVERT_EXPR:
2259 case FLOAT_EXPR:
2260 return build1 (TREE_CODE (arg), type,
2261 invert_truthvalue (TREE_OPERAND (arg, 0)));
2263 case BIT_AND_EXPR:
2264 if (!integer_onep (TREE_OPERAND (arg, 1)))
2265 break;
2266 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2268 case SAVE_EXPR:
2269 return build1 (TRUTH_NOT_EXPR, type, arg);
2271 case CLEANUP_POINT_EXPR:
2272 return build1 (CLEANUP_POINT_EXPR, type,
2273 invert_truthvalue (TREE_OPERAND (arg, 0)));
2275 default:
2276 break;
2278 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2279 abort ();
2280 return build1 (TRUTH_NOT_EXPR, type, arg);
2283 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2284 operands are another bit-wise operation with a common input. If so,
2285 distribute the bit operations to save an operation and possibly two if
2286 constants are involved. For example, convert
2287 (A | B) & (A | C) into A | (B & C)
2288 Further simplification will occur if B and C are constants.
2290 If this optimization cannot be done, 0 will be returned. */
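/* An illustrative sketch (hypothetical operands): with constants the
   saving is visible directly, since

       (x | 3) & (x | 5)  ==>  x | (3 & 5)  ==>  x | 1

   replaces three runtime bit operations with one.  */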
2292 static tree
2293 distribute_bit_expr (code, type, arg0, arg1)
2294 enum tree_code code;
2295 tree type;
2296 tree arg0, arg1;
2298 tree common;
2299 tree left, right;
2301 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2302 || TREE_CODE (arg0) == code
2303 || (TREE_CODE (arg0) != BIT_AND_EXPR
2304 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2305 return 0;
2307 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2309 common = TREE_OPERAND (arg0, 0);
2310 left = TREE_OPERAND (arg0, 1);
2311 right = TREE_OPERAND (arg1, 1);
2313 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2315 common = TREE_OPERAND (arg0, 0);
2316 left = TREE_OPERAND (arg0, 1);
2317 right = TREE_OPERAND (arg1, 0);
2319 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2321 common = TREE_OPERAND (arg0, 1);
2322 left = TREE_OPERAND (arg0, 0);
2323 right = TREE_OPERAND (arg1, 1);
2325 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2327 common = TREE_OPERAND (arg0, 1);
2328 left = TREE_OPERAND (arg0, 0);
2329 right = TREE_OPERAND (arg1, 0);
2331 else
2332 return 0;
2334 return fold (build (TREE_CODE (arg0), type, common,
2335 fold (build (code, type, left, right))));
2338 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2339 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2341 static tree
2342 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2343 tree inner;
2344 tree type;
2345 int bitsize, bitpos;
2346 int unsignedp;
2348 tree result = build (BIT_FIELD_REF, type, inner,
2349 size_int (bitsize), bitsize_int (bitpos));
2351 TREE_UNSIGNED (result) = unsignedp;
2353 return result;
2356 /* Optimize a bit-field compare.
2358 There are two cases: First is a compare against a constant and the
2359 second is a comparison of two items where the fields are at the same
2360 bit position relative to the start of a chunk (byte, halfword, word)
2361 large enough to contain it. In these cases we can avoid the shift
2362 implicit in bitfield extractions.
2364 For constants, we emit a compare of the shifted constant with the
2365 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2366 compared. For two fields at the same position, we do the ANDs with the
2367 similar mask and compare the result of the ANDs.
2369 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2370 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2371 are the left and right operands of the comparison, respectively.
2373 If the optimization described above can be done, we return the resulting
2374 tree. Otherwise we return zero. */
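/* An illustrative sketch (hypothetical struct; the exact layout is
   target-dependent): given

       struct s { unsigned a : 3; unsigned b : 5; } x;
       ...
       x.b == 7

   rather than extracting X.B (load, shift, mask), the test can be
   done on a whole containing byte, roughly

       (*(unsigned char *) &x & 0xf8) == (7 << 3)

   on a little-endian target; the mask and the shifted constant come
   from the computations below.  */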
2376 static tree
2377 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2378 enum tree_code code;
2379 tree compare_type;
2380 tree lhs, rhs;
2382 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2383 tree type = TREE_TYPE (lhs);
2384 tree signed_type, unsigned_type;
2385 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2386 enum machine_mode lmode, rmode, nmode;
2387 int lunsignedp, runsignedp;
2388 int lvolatilep = 0, rvolatilep = 0;
2389 tree linner, rinner = NULL_TREE;
2390 tree mask;
2391 tree offset;
2393 /* Get all the information about the extractions being done. If the bit size
2394 is the same as the size of the underlying object, we aren't doing an
2395 extraction at all and so can do nothing. We also don't want to
2396 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2397 then will no longer be able to replace it. */
2398 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2399 &lunsignedp, &lvolatilep);
2400 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2401 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2402 return 0;
2404 if (!const_p)
2406 /* If this is not a constant, we can only do something if bit positions,
2407 sizes, and signedness are the same. */
2408 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2409 &runsignedp, &rvolatilep);
2411 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2412 || lunsignedp != runsignedp || offset != 0
2413 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2414 return 0;
2417 /* See if we can find a mode to refer to this field. We should be able to,
2418 but fail if we can't. */
2419 nmode = get_best_mode (lbitsize, lbitpos,
2420 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2421 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2422 TYPE_ALIGN (TREE_TYPE (rinner))),
2423 word_mode, lvolatilep || rvolatilep);
2424 if (nmode == VOIDmode)
2425 return 0;
2427 /* Set signed and unsigned types of the precision of this mode for the
2428 shifts below. */
2429 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2430 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2432 /* Compute the bit position and size for the new reference and our offset
2433 within it. If the new reference is the same size as the original, we
2434 won't optimize anything, so return zero. */
2435 nbitsize = GET_MODE_BITSIZE (nmode);
2436 nbitpos = lbitpos & ~ (nbitsize - 1);
2437 lbitpos -= nbitpos;
2438 if (nbitsize == lbitsize)
2439 return 0;
2441 if (BYTES_BIG_ENDIAN)
2442 lbitpos = nbitsize - lbitsize - lbitpos;
2444 /* Make the mask to be used against the extracted field. */
2445 mask = build_int_2 (~0, ~0);
2446 TREE_TYPE (mask) = unsigned_type;
2447 force_fit_type (mask, 0);
2448 mask = convert (unsigned_type, mask);
2449 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2450 mask = const_binop (RSHIFT_EXPR, mask,
2451 size_int (nbitsize - lbitsize - lbitpos), 0);
2453 if (! const_p)
2454 /* If not comparing with constant, just rework the comparison
2455 and return. */
2456 return build (code, compare_type,
2457 build (BIT_AND_EXPR, unsigned_type,
2458 make_bit_field_ref (linner, unsigned_type,
2459 nbitsize, nbitpos, 1),
2460 mask),
2461 build (BIT_AND_EXPR, unsigned_type,
2462 make_bit_field_ref (rinner, unsigned_type,
2463 nbitsize, nbitpos, 1),
2464 mask));
2466 /* Otherwise, we are handling the constant case. See if the constant is too
2467 big for the field. Warn and return a tree for 0 (false) if so. We do
2468 this not only for its own sake, but to avoid having to test for this
2469 error case below. If we didn't, we might generate wrong code.
2471 For unsigned fields, the constant shifted right by the field length should
2472 be all zero. For signed fields, the high-order bits should agree with
2473 the sign bit. */
2475 if (lunsignedp)
2477 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2478 convert (unsigned_type, rhs),
2479 size_int (lbitsize), 0)))
2481 warning ("comparison is always %d due to width of bit-field",
2482 code == NE_EXPR);
2483 return convert (compare_type,
2484 (code == NE_EXPR
2485 ? integer_one_node : integer_zero_node));
2488 else
2490 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2491 size_int (lbitsize - 1), 0);
2492 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2494 warning ("comparison is always %d due to width of bit-field",
2495 code == NE_EXPR);
2496 return convert (compare_type,
2497 (code == NE_EXPR
2498 ? integer_one_node : integer_zero_node));
2502 /* Single-bit compares should always be against zero. */
2503 if (lbitsize == 1 && ! integer_zerop (rhs))
2505 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2506 rhs = convert (type, integer_zero_node);
2509 /* Make a new bitfield reference, shift the constant over the
2510 appropriate number of bits and mask it with the computed mask
2511 (in case this was a signed field). If we changed it, make a new one. */
2512 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2513 if (lvolatilep)
2515 TREE_SIDE_EFFECTS (lhs) = 1;
2516 TREE_THIS_VOLATILE (lhs) = 1;
2519 rhs = fold (const_binop (BIT_AND_EXPR,
2520 const_binop (LSHIFT_EXPR,
2521 convert (unsigned_type, rhs),
2522 size_int (lbitpos), 0),
2523 mask, 0));
2525 return build (code, compare_type,
2526 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2527 rhs);
2530 /* Subroutine for fold_truthop: decode a field reference.
2532 If EXP is a comparison reference, we return the innermost reference.
2534 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2535 set to the starting bit number.
2537 If the innermost field can be completely contained in a mode-sized
2538 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2540 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2541 otherwise it is not changed.
2543 *PUNSIGNEDP is set to the signedness of the field.
2545 *PMASK is set to the mask used. This is either contained in a
2546 BIT_AND_EXPR or derived from the width of the field.
2548 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2550 Return 0 if this is not a component reference or is one that we can't
2551 do anything with. */
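/* An illustrative sketch (hypothetical field): decoding

       s.b & 3

   where B is a 5-bit unsigned field, returns the containing object
   and sets *PBITSIZE = 5, *PBITPOS to B's bit offset, *PAND_MASK = 3,
   and *PMASK = 3 & 0x1f = 3, i.e. the BIT_AND_EXPR mask merged with
   the five-ones mask derived from the field width.  */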
2553 static tree
2554 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2555 pvolatilep, pmask, pand_mask)
2556 tree exp;
2557 HOST_WIDE_INT *pbitsize, *pbitpos;
2558 enum machine_mode *pmode;
2559 int *punsignedp, *pvolatilep;
2560 tree *pmask;
2561 tree *pand_mask;
2563 tree and_mask = 0;
2564 tree mask, inner, offset;
2565 tree unsigned_type;
2566 unsigned int precision;
2568 /* All the optimizations using this function assume integer fields.
2569 There are problems with FP fields since the type_for_size call
2570 below can fail for, e.g., XFmode. */
2571 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2572 return 0;
2574 STRIP_NOPS (exp);
2576 if (TREE_CODE (exp) == BIT_AND_EXPR)
2578 and_mask = TREE_OPERAND (exp, 1);
2579 exp = TREE_OPERAND (exp, 0);
2580 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2581 if (TREE_CODE (and_mask) != INTEGER_CST)
2582 return 0;
2585 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2586 punsignedp, pvolatilep);
2587 if ((inner == exp && and_mask == 0)
2588 || *pbitsize < 0 || offset != 0
2589 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2590 return 0;
2592 /* Compute the mask to access the bitfield. */
2593 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2594 precision = TYPE_PRECISION (unsigned_type);
2596 mask = build_int_2 (~0, ~0);
2597 TREE_TYPE (mask) = unsigned_type;
2598 force_fit_type (mask, 0);
2599 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2600 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2602 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2603 if (and_mask != 0)
2604 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2605 convert (unsigned_type, and_mask), mask));
2607 *pmask = mask;
2608 *pand_mask = and_mask;
2609 return inner;
2612 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2613 bit positions. */
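/* Equivalently, MASK must be (1 << SIZE) - 1 in its type: with an
   8-bit precision and SIZE == 3, the only accepted value is 0x07.  */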
2615 static int
2616 all_ones_mask_p (mask, size)
2617 tree mask;
2618 int size;
2620 tree type = TREE_TYPE (mask);
2621 unsigned int precision = TYPE_PRECISION (type);
2622 tree tmask;
2624 tmask = build_int_2 (~0, ~0);
2625 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2626 force_fit_type (tmask, 0);
2627 return
2628 tree_int_cst_equal (mask,
2629 const_binop (RSHIFT_EXPR,
2630 const_binop (LSHIFT_EXPR, tmask,
2631 size_int (precision - size),
2632 0),
2633 size_int (precision - size), 0));
2636 /* Subroutine for fold_truthop: determine if an operand is simple enough
2637 to be evaluated unconditionally. */
2639 static int
2640 simple_operand_p (exp)
2641 tree exp;
2643 /* Strip any conversions that don't change the machine mode. */
2644 while ((TREE_CODE (exp) == NOP_EXPR
2645 || TREE_CODE (exp) == CONVERT_EXPR)
2646 && (TYPE_MODE (TREE_TYPE (exp))
2647 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2648 exp = TREE_OPERAND (exp, 0);
2650 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2651 || (DECL_P (exp)
2652 && ! TREE_ADDRESSABLE (exp)
2653 && ! TREE_THIS_VOLATILE (exp)
2654 && ! DECL_NONLOCAL (exp)
2655 /* Don't regard global variables as simple. They may be
2656 allocated in ways unknown to the compiler (shared memory,
2657 #pragma weak, etc). */
2658 && ! TREE_PUBLIC (exp)
2659 && ! DECL_EXTERNAL (exp)
2660 /* Loading a static variable is unduly expensive, but global
2661 registers aren't expensive. */
2662 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2665 /* The following functions are subroutines to fold_range_test and allow it to
2666 try to change a logical combination of comparisons into a range test.
2668 For example, both
2669 X == 2 || X == 3 || X == 4 || X == 5
2670 and
2671 X >= 2 && X <= 5
2672 are converted to
2673 (unsigned) (X - 2) <= 3
2675 We describe each set of comparisons as being either inside or outside
2676 a range, using a variable named like IN_P, and then describe the
2677 range with a lower and upper bound. If one of the bounds is omitted,
2678 it represents either the highest or lowest value of the type.
2680 In the comments below, we represent a range by two numbers in brackets
2681 preceded by a "+" to designate being inside that range, or a "-" to
2682 designate being outside that range, so the condition can be inverted by
2683 flipping the prefix. An omitted bound is represented by a "-". For
2684 example, "- [-, 10]" means being outside the range starting at the lowest
2685 possible value and ending at 10, in other words, being greater than 10.
2686 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2687 always false.
2689 We set up things so that the missing bounds are handled in a consistent
2690 manner so neither a missing bound nor "true" and "false" need to be
2691 handled using a special case. */
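/* An illustrative sketch of the equivalence (hypothetical functions;
   fold performs the arithmetic internally, where wraparound is well
   defined):

       int slow (int x) { return x >= 2 && x <= 5; }
       int fast (int x) { return (unsigned) (x - 2) <= 3u; }

   For X below 2 the subtraction wraps to a huge unsigned value, so
   the single unsigned comparison rejects both sides of the range.  */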
2693 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2694 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2695 and UPPER1_P are nonzero if the respective argument is an upper bound
2696 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2697 must be specified for a comparison. ARG1 will be converted to ARG0's
2698 type if both are specified. */
2700 static tree
2701 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2702 enum tree_code code;
2703 tree type;
2704 tree arg0, arg1;
2705 int upper0_p, upper1_p;
2707 tree tem;
2708 int result;
2709 int sgn0, sgn1;
2711 /* If neither arg represents infinity, do the normal operation.
2712 Else, if not a comparison, return infinity. Else handle the special
2713 comparison rules. Note that most of the cases below won't occur, but
2714 are handled for consistency. */
2716 if (arg0 != 0 && arg1 != 0)
2718 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2719 arg0, convert (TREE_TYPE (arg0), arg1)));
2720 STRIP_NOPS (tem);
2721 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2724 if (TREE_CODE_CLASS (code) != '<')
2725 return 0;
2727 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2728 for neither. In real maths, we cannot assume open ended ranges are
2729 the same. But, this is computer arithmetic, where numbers are finite.
2730 We can therefore make the transformation of any unbounded range with
2731 the value Z, Z being greater than any representable number. This permits
2732 us to treat unbounded ranges as equal. */
2733 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2734 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2735 switch (code)
2737 case EQ_EXPR:
2738 result = sgn0 == sgn1;
2739 break;
2740 case NE_EXPR:
2741 result = sgn0 != sgn1;
2742 break;
2743 case LT_EXPR:
2744 result = sgn0 < sgn1;
2745 break;
2746 case LE_EXPR:
2747 result = sgn0 <= sgn1;
2748 break;
2749 case GT_EXPR:
2750 result = sgn0 > sgn1;
2751 break;
2752 case GE_EXPR:
2753 result = sgn0 >= sgn1;
2754 break;
2755 default:
2756 abort ();
2759 return convert (type, result ? integer_one_node : integer_zero_node);
2762 /* Given EXP, a logical expression, set the range it is testing into
2763 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2764 actually being tested. *PLOW and *PHIGH will be made of the same type
2765 as the returned expression. If EXP is not a comparison, we will most
2766 likely not be returning a useful value and range. */
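/* An illustrative sketch (hypothetical variable): for EXP = `x > 5',
   this returns the tree for X with *PIN_P = 0 and bounds [-, 5]; in
   the notation above, "- [-, 5]", i.e. X lies outside the range that
   ends at 5.  Wrapping EXP in TRUTH_NOT_EXPR simply flips *PIN_P.  */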
2768 static tree
2769 make_range (exp, pin_p, plow, phigh)
2770 tree exp;
2771 int *pin_p;
2772 tree *plow, *phigh;
2774 enum tree_code code;
2775 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2776 tree orig_type = NULL_TREE;
2777 int in_p, n_in_p;
2778 tree low, high, n_low, n_high;
2780 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2781 and see if we can refine the range. Some of the cases below may not
2782 happen, but it doesn't seem worth worrying about this. We "continue"
2783 the outer loop when we've changed something; otherwise we "break"
2784 the switch, which will "break" the while. */
2786 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2788 while (1)
2790 code = TREE_CODE (exp);
2792 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2794 arg0 = TREE_OPERAND (exp, 0);
2795 if (TREE_CODE_CLASS (code) == '<'
2796 || TREE_CODE_CLASS (code) == '1'
2797 || TREE_CODE_CLASS (code) == '2')
2798 type = TREE_TYPE (arg0);
2799 if (TREE_CODE_CLASS (code) == '2'
2800 || TREE_CODE_CLASS (code) == '<'
2801 || (TREE_CODE_CLASS (code) == 'e'
2802 && TREE_CODE_LENGTH (code) > 1))
2803 arg1 = TREE_OPERAND (exp, 1);
2806 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2807 lose a cast by accident. */
2808 if (type != NULL_TREE && orig_type == NULL_TREE)
2809 orig_type = type;
2811 switch (code)
2813 case TRUTH_NOT_EXPR:
2814 in_p = ! in_p, exp = arg0;
2815 continue;
2817 case EQ_EXPR: case NE_EXPR:
2818 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2819 /* We can only do something if the range is testing for zero
2820 and if the second operand is an integer constant. Note that
2821 saying something is "in" the range we make is done by
2822 complementing IN_P, since the initial state set up above represents
2823 being not equal to zero; "out" is leaving it alone. */
2824 if (low == 0 || high == 0
2825 || ! integer_zerop (low) || ! integer_zerop (high)
2826 || TREE_CODE (arg1) != INTEGER_CST)
2827 break;
2829 switch (code)
2831 case NE_EXPR: /* - [c, c] */
2832 low = high = arg1;
2833 break;
2834 case EQ_EXPR: /* + [c, c] */
2835 in_p = ! in_p, low = high = arg1;
2836 break;
2837 case GT_EXPR: /* - [-, c] */
2838 low = 0, high = arg1;
2839 break;
2840 case GE_EXPR: /* + [c, -] */
2841 in_p = ! in_p, low = arg1, high = 0;
2842 break;
2843 case LT_EXPR: /* - [c, -] */
2844 low = arg1, high = 0;
2845 break;
2846 case LE_EXPR: /* + [-, c] */
2847 in_p = ! in_p, low = 0, high = arg1;
2848 break;
2849 default:
2850 abort ();
2853 exp = arg0;
2855 /* If this is an unsigned comparison, we also know that EXP is
2856 greater than or equal to zero. We base the range tests we make
2857 on that fact, so we record it here so we can parse existing
2858 range tests. */
2859 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2861 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2862 1, convert (type, integer_zero_node),
2863 NULL_TREE))
2864 break;
2866 in_p = n_in_p, low = n_low, high = n_high;
2868 /* If the high bound is missing, but we
2869 have a low bound, reverse the range so
2870 it goes from zero to the low bound minus 1. */
2871 if (high == 0 && low)
2873 in_p = ! in_p;
2874 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2875 integer_one_node, 0);
2876 low = convert (type, integer_zero_node);
2879 continue;
2881 case NEGATE_EXPR:
2882 /* (-x) IN [a,b] -> x in [-b, -a] */
2883 n_low = range_binop (MINUS_EXPR, type,
2884 convert (type, integer_zero_node), 0, high, 1);
2885 n_high = range_binop (MINUS_EXPR, type,
2886 convert (type, integer_zero_node), 0, low, 0);
2887 low = n_low, high = n_high;
2888 exp = arg0;
2889 continue;
2891 case BIT_NOT_EXPR:
2892 /* ~ X -> -X - 1 */
2893 exp = build (MINUS_EXPR, type, negate_expr (arg0),
2894 convert (type, integer_one_node));
2895 continue;
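/* This relies on the two's complement identity ~X == -X - 1
   (e.g. ~5 == -6); the MINUS_EXPR and NEGATE_EXPR cases then
   adjust the bounds for the rewritten expression.  */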
2897 case PLUS_EXPR: case MINUS_EXPR:
2898 if (TREE_CODE (arg1) != INTEGER_CST)
2899 break;
2901 /* If EXP is signed, any overflow in the computation is undefined,
2902 so we don't worry about it so long as our computations on
2903 the bounds don't overflow. For unsigned, overflow is defined
2904 and this is exactly the right thing. */
2905 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2906 type, low, 0, arg1, 0);
2907 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2908 type, high, 1, arg1, 0);
2909 if ((n_low != 0 && TREE_OVERFLOW (n_low))
2910 || (n_high != 0 && TREE_OVERFLOW (n_high)))
2911 break;
2913 /* Check for an unsigned range which has wrapped around the maximum
2914 value thus making n_high < n_low, and normalize it. */
2915 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
2917 low = range_binop (PLUS_EXPR, type, n_high, 0,
2918 integer_one_node, 0);
2919 high = range_binop (MINUS_EXPR, type, n_low, 0,
2920 integer_one_node, 0);
2922 /* If the range is of the form +/- [ x+1, x ], we won't
2923 be able to normalize it. But then, it represents the
2924 whole range or the empty set, so make it
2925 +/- [ -, - ]. */
2926 if (tree_int_cst_equal (n_low, low)
2927 && tree_int_cst_equal (n_high, high))
2928 low = high = 0;
2929 else
2930 in_p = ! in_p;
2932 else
2933 low = n_low, high = n_high;
2935 exp = arg0;
2936 continue;
2938 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
2939 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
2940 break;
2942 if (! INTEGRAL_TYPE_P (type)
2943 || (low != 0 && ! int_fits_type_p (low, type))
2944 || (high != 0 && ! int_fits_type_p (high, type)))
2945 break;
2947 n_low = low, n_high = high;
2949 if (n_low != 0)
2950 n_low = convert (type, n_low);
2952 if (n_high != 0)
2953 n_high = convert (type, n_high);
2955 /* If we're converting from an unsigned to a signed type,
2956 we will be doing the comparison as unsigned. The tests above
2957 have already verified that LOW and HIGH are both positive.
2959 So we have to make sure that the original unsigned value will
2960 be interpreted as positive. */
2961 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
2963 tree equiv_type = (*lang_hooks.types.type_for_mode)
2964 (TYPE_MODE (type), 1);
2965 tree high_positive;
2967 /* A range without an upper bound is, naturally, unbounded.
2968 Since convert would have cropped a very large value, use
2969 the max value for the destination type. */
2970 high_positive
2971 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
2972 : TYPE_MAX_VALUE (type);
2974 high_positive = fold (build (RSHIFT_EXPR, type,
2975 convert (type, high_positive),
2976 convert (type, integer_one_node)));
2978 /* If the low bound is specified, "and" the range with the
2979 range for which the original unsigned value will be
2980 positive. */
2981 if (low != 0)
2983 if (! merge_ranges (&n_in_p, &n_low, &n_high,
2984 1, n_low, n_high,
2985 1, convert (type, integer_zero_node),
2986 high_positive))
2987 break;
2989 in_p = (n_in_p == in_p);
2991 else
2993 /* Otherwise, "or" the range with the range of the input
2994 that will be interpreted as negative. */
2995 if (! merge_ranges (&n_in_p, &n_low, &n_high,
2996 0, n_low, n_high,
2997 1, convert (type, integer_zero_node),
2998 high_positive))
2999 break;
3001 in_p = (in_p != n_in_p);
3005 exp = arg0;
3006 low = n_low, high = n_high;
3007 continue;
3009 default:
3010 break;
3013 break;
3016 /* If EXP is a constant, we can evaluate whether this is true or false. */
3017 if (TREE_CODE (exp) == INTEGER_CST)
3019 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3020 exp, 0, low, 0))
3021 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3022 exp, 1, high, 1)));
3023 low = high = 0;
3024 exp = 0;
3027 *pin_p = in_p, *plow = low, *phigh = high;
3028 return exp;
3031 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3032 type, TYPE, return an expression to test if EXP is in (or out of, depending
3033 on IN_P) the range. */
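/* An illustrative sketch (hypothetical variable): for a signed int X,
   build_range_check (type, x, 1, 2, 5) first rewrites the test as
   X - 2 in [0, 3] (the last branch below), and the recursion then
   turns a range starting at zero into an unsigned comparison, giving
   in effect

       (unsigned) (x - 2) <= 3

   the single-comparison form shown earlier.  */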
3035 static tree
3036 build_range_check (type, exp, in_p, low, high)
3037 tree type;
3038 tree exp;
3039 int in_p;
3040 tree low, high;
3042 tree etype = TREE_TYPE (exp);
3043 tree utype, value;
3045 if (! in_p
3046 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3047 return invert_truthvalue (value);
3049 else if (low == 0 && high == 0)
3050 return convert (type, integer_one_node);
3052 else if (low == 0)
3053 return fold (build (LE_EXPR, type, exp, high));
3055 else if (high == 0)
3056 return fold (build (GE_EXPR, type, exp, low));
3058 else if (operand_equal_p (low, high, 0))
3059 return fold (build (EQ_EXPR, type, exp, low));
3061 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3062 return build_range_check (type, exp, 1, 0, high);
3064 else if (integer_zerop (low))
3066 utype = (*lang_hooks.types.unsigned_type) (etype);
3067 return build_range_check (type, convert (utype, exp), 1, 0,
3068 convert (utype, high));
3071 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3072 && ! TREE_OVERFLOW (value))
3073 return build_range_check (type,
3074 fold (build (MINUS_EXPR, etype, exp, low)),
3075 1, convert (etype, integer_zero_node), value);
3076 else
3077 return 0;
3080 /* Given two ranges, see if we can merge them into one. Return 1 if we
3081 can, 0 if we can't. Set the output range into the specified parameters. */
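/* An illustrative sketch: merging "+ [2, 5]" with "+ [4, 9]" (both
   included, overlapping) yields "+ [4, 5]"; merging "+ [2, 3]" with
   "+ [8, 9]" finds no overlap and yields "- [-, -]", which is always
   false.  */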
3083 static int
3084 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3085 int *pin_p;
3086 tree *plow, *phigh;
3087 int in0_p, in1_p;
3088 tree low0, high0, low1, high1;
3090 int no_overlap;
3091 int subset;
3092 int temp;
3093 tree tem;
3094 int in_p;
3095 tree low, high;
3096 int lowequal = ((low0 == 0 && low1 == 0)
3097 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3098 low0, 0, low1, 0)));
3099 int highequal = ((high0 == 0 && high1 == 0)
3100 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3101 high0, 1, high1, 1)));
3103 /* Make range 0 be the range that starts first, or ends last if they
3104 start at the same value. Swap them if it isn't. */
3105 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3106 low0, 0, low1, 0))
3107 || (lowequal
3108 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3109 high1, 1, high0, 1))))
3111 temp = in0_p, in0_p = in1_p, in1_p = temp;
3112 tem = low0, low0 = low1, low1 = tem;
3113 tem = high0, high0 = high1, high1 = tem;
3116 /* Now flag two cases, whether the ranges are disjoint or whether the
3117 second range is totally subsumed in the first. Note that the tests
3118 below are simplified by the ones above. */
3119 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3120 high0, 1, low1, 0));
3121 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3122 high1, 1, high0, 1));
3124 /* We now have four cases, depending on whether we are including or
3125 excluding the two ranges. */
3126 if (in0_p && in1_p)
3128 /* If they don't overlap, the result is false. If the second range
3129 is a subset it is the result. Otherwise, the range is from the start
3130 of the second to the end of the first. */
3131 if (no_overlap)
3132 in_p = 0, low = high = 0;
3133 else if (subset)
3134 in_p = 1, low = low1, high = high1;
3135 else
3136 in_p = 1, low = low1, high = high0;
3139 else if (in0_p && ! in1_p)
3141 /* If they don't overlap, the result is the first range. If they are
3142 equal, the result is false. If the second range is a subset of the
3143 first, and the ranges begin at the same place, we go from just after
3144 the end of the first range to the end of the second. If the second
3145 range is not a subset of the first, or if it is a subset and both
3146 ranges end at the same place, the range starts at the start of the
3147 first range and ends just before the second range.
3148 Otherwise, we can't describe this as a single range. */
3149 if (no_overlap)
3150 in_p = 1, low = low0, high = high0;
3151 else if (lowequal && highequal)
3152 in_p = 0, low = high = 0;
3153 else if (subset && lowequal)
3155 in_p = 1, high = high0;
3156 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3157 integer_one_node, 0);
3159 else if (! subset || highequal)
3161 in_p = 1, low = low0;
3162 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3163 integer_one_node, 0);
3165 else
3166 return 0;
3169 else if (! in0_p && in1_p)
3171 /* If they don't overlap, the result is the second range. If the second
3172 is a subset of the first, the result is false. Otherwise,
3173 the range starts just after the first range and ends at the
3174 end of the second. */
3175 if (no_overlap)
3176 in_p = 1, low = low1, high = high1;
3177 else if (subset || highequal)
3178 in_p = 0, low = high = 0;
3179 else
3181 in_p = 1, high = high1;
3182 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3183 integer_one_node, 0);
3187 else
3189 /* The case where we are excluding both ranges. Here the complex case
3190 is if they don't overlap. In that case, the only time we have a
3191 range is if they are adjacent. If the second is a subset of the
3192 first, the result is the first. Otherwise, the range to exclude
3193 starts at the beginning of the first range and ends at the end of the
3194 second. */
3195 if (no_overlap)
3197 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3198 range_binop (PLUS_EXPR, NULL_TREE,
3199 high0, 1,
3200 integer_one_node, 1),
3201 1, low1, 0)))
3202 in_p = 0, low = low0, high = high1;
3203 else
3204 return 0;
3206 else if (subset)
3207 in_p = 0, low = low0, high = high0;
3208 else
3209 in_p = 0, low = low0, high = high1;
3212 *pin_p = in_p, *plow = low, *phigh = high;
3213 return 1;
3216 /* EXP is some logical combination of boolean tests. See if we can
3217 merge it into some range test. Return the new tree if so. */
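/* An illustrative sketch (hypothetical variable): for
   EXP = `x == 2 || x == 3', make_range gives "+ [2, 2]" and
   "+ [3, 3]".  Both are inverted for the OR, merged as adjacent
   excluded ranges into "- [2, 3]", and the final inversion makes the
   whole test, in effect, (unsigned) (x - 2) <= 1.  */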
3219 static tree
3220 fold_range_test (exp)
3221 tree exp;
3223 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3224 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3225 int in0_p, in1_p, in_p;
3226 tree low0, low1, low, high0, high1, high;
3227 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3228 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3229 tree tem;
3231 /* If this is an OR operation, invert both sides; we will invert
3232 again at the end. */
3233 if (or_op)
3234 in0_p = ! in0_p, in1_p = ! in1_p;
3236 /* If both expressions are the same, if we can merge the ranges, and we
3237 can build the range test, return it or it inverted. If one of the
3238 ranges is always true or always false, consider it to be the same
3239 expression as the other. */
3240 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3241 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3242 in1_p, low1, high1)
3243 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3244 lhs != 0 ? lhs
3245 : rhs != 0 ? rhs : integer_zero_node,
3246 in_p, low, high))))
3247 return or_op ? invert_truthvalue (tem) : tem;
3249 /* On machines where the branch cost is expensive, if this is a
3250 short-circuited branch and the underlying object on both sides
3251 is the same, make a non-short-circuit operation. */
3252 else if (BRANCH_COST >= 2
3253 && lhs != 0 && rhs != 0
3254 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3255 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3256 && operand_equal_p (lhs, rhs, 0))
3258 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3259 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3260 which cases we can't do this. */
3261 if (simple_operand_p (lhs))
3262 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3263 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3264 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3265 TREE_OPERAND (exp, 1));
3267 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3268 && ! contains_placeholder_p (lhs))
3270 tree common = save_expr (lhs);
3272 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3273 or_op ? ! in0_p : in0_p,
3274 low0, high0))
3275 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3276 or_op ? ! in1_p : in1_p,
3277 low1, high1))))
3278 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3279 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3280 TREE_TYPE (exp), lhs, rhs);
3284 return 0;
3287 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3288 bit value. Arrange things so the extra bits will be set to zero if and
3289 only if C is sign-extended to its full width. If MASK is nonzero,
3290 it is an INTEGER_CST that should be AND'ed with the extra bits. */
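/* A worked instance (hypothetical 8-bit mode, P == 4, so the sign
   bit of the field is bit 3):

       c = 0xfa  (field value -6, correctly sign-extended)
           temp ends up 0xf0,  c ^ temp = 0x0a  (extra bits clear)
       c = 0x0a  (same low bits, not sign-extended)
           temp ends up 0xf0,  c ^ temp = 0xfa  (extra bits set)

   so after the XOR the bits above the field are zero exactly when C
   was sign-extended, which is what the callers test.  */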
3292 static tree
3293 unextend (c, p, unsignedp, mask)
3294 tree c;
3295 int p;
3296 int unsignedp;
3297 tree mask;
3299 tree type = TREE_TYPE (c);
3300 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3301 tree temp;
3303 if (p == modesize || unsignedp)
3304 return c;
3306 /* We work by getting just the sign bit into the low-order bit, then
3307 into the high-order bit, then sign-extend. We then XOR that value
3308 with C. */
3309 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3310 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3312 /* We must use a signed type in order to get an arithmetic right shift.
3313 However, we must also avoid introducing accidental overflows, so that
3314 a subsequent call to integer_zerop will work. Hence we must
3315 do the type conversion here. At this point, the constant is either
3316 zero or one, and the conversion to a signed type can never overflow.
3317 We could get an overflow if this conversion is done anywhere else. */
3318 if (TREE_UNSIGNED (type))
3319 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3321 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3322 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3323 if (mask != 0)
3324 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3325 /* If necessary, convert the type back to match the type of C. */
3326 if (TREE_UNSIGNED (type))
3327 temp = convert (type, temp);
3329 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3332 /* Find ways of folding logical expressions of LHS and RHS:
3333 Try to merge two comparisons to the same innermost item.
3334 Look for range tests like "ch >= '0' && ch <= '9'".
3335 Look for combinations of simple terms on machines with expensive branches
3336 and evaluate the RHS unconditionally.
3338 For example, if we have p->a == 2 && p->b == 4 and we can make an
3339 object large enough to span both A and B, we can do this with a comparison
3340 against the object ANDed with a mask.
3342 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3343 operations to do this with one comparison.
3345 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3346 function and the one above.
3348 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3349 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3351 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3352 two operands.
3354 We return the simplified tree or 0 if no optimization is possible. */
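/* An illustrative sketch (hypothetical struct; layout is
   target-dependent): for

       struct s { unsigned char a, b; } *p;
       ...
       p->a == 2 && p->b == 4

   the two byte compares can merge into one halfword compare, roughly

       *(unsigned short *) p == ((4 << 8) | 2)

   on a little-endian target, the mask step being dropped because the
   two masks together cover every bit of the halfword.  */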
3356 static tree
3357 fold_truthop (code, truth_type, lhs, rhs)
3358 enum tree_code code;
3359 tree truth_type, lhs, rhs;
3361 /* If this is the "or" of two comparisons, we can do something if
3362 the comparisons are NE_EXPR. If this is the "and", we can do something
3363 if the comparisons are EQ_EXPR. I.e.,
3364 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3366 WANTED_CODE is this operation code. For single bit fields, we can
3367 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3368 comparison for one-bit fields. */
3370 enum tree_code wanted_code;
3371 enum tree_code lcode, rcode;
3372 tree ll_arg, lr_arg, rl_arg, rr_arg;
3373 tree ll_inner, lr_inner, rl_inner, rr_inner;
3374 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3375 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3376 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3377 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3378 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3379 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3380 enum machine_mode lnmode, rnmode;
3381 tree ll_mask, lr_mask, rl_mask, rr_mask;
3382 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3383 tree l_const, r_const;
3384 tree lntype, rntype, result;
3385 int first_bit, end_bit;
3386 int volatilep;
3388 /* Start by getting the comparison codes. Fail if anything is volatile.
3389 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3390 it were surrounded with a NE_EXPR. */
3392 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3393 return 0;
3395 lcode = TREE_CODE (lhs);
3396 rcode = TREE_CODE (rhs);
3398 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3399 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3401 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3402 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3404 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3405 return 0;
3407 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3408 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3410 ll_arg = TREE_OPERAND (lhs, 0);
3411 lr_arg = TREE_OPERAND (lhs, 1);
3412 rl_arg = TREE_OPERAND (rhs, 0);
3413 rr_arg = TREE_OPERAND (rhs, 1);
3415 /* If the RHS can be evaluated unconditionally and its operands are
3416 simple, it wins to evaluate the RHS unconditionally on machines
3417 with expensive branches. In this case, this isn't a comparison
3418 that can be merged. Avoid doing this if the RHS is a floating-point
3419 comparison since those can trap. */
3421 if (BRANCH_COST >= 2
3422 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3423 && simple_operand_p (rl_arg)
3424 && simple_operand_p (rr_arg))
3425 return build (code, truth_type, lhs, rhs);
3427 /* See if the comparisons can be merged. Then get all the parameters for
3428 each side. */
3430 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3431 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3432 return 0;
3434 volatilep = 0;
3435 ll_inner = decode_field_reference (ll_arg,
3436 &ll_bitsize, &ll_bitpos, &ll_mode,
3437 &ll_unsignedp, &volatilep, &ll_mask,
3438 &ll_and_mask);
3439 lr_inner = decode_field_reference (lr_arg,
3440 &lr_bitsize, &lr_bitpos, &lr_mode,
3441 &lr_unsignedp, &volatilep, &lr_mask,
3442 &lr_and_mask);
3443 rl_inner = decode_field_reference (rl_arg,
3444 &rl_bitsize, &rl_bitpos, &rl_mode,
3445 &rl_unsignedp, &volatilep, &rl_mask,
3446 &rl_and_mask);
3447 rr_inner = decode_field_reference (rr_arg,
3448 &rr_bitsize, &rr_bitpos, &rr_mode,
3449 &rr_unsignedp, &volatilep, &rr_mask,
3450 &rr_and_mask);
3452 /* The inner operation on the lhs of each comparison must be the
3453 same if we are to be able to do anything.
3454 Then see if we have constants. If not, the same must be true for
3455 the rhs's. */
3456 if (volatilep || ll_inner == 0 || rl_inner == 0
3457 || ! operand_equal_p (ll_inner, rl_inner, 0))
3458 return 0;
3460 if (TREE_CODE (lr_arg) == INTEGER_CST
3461 && TREE_CODE (rr_arg) == INTEGER_CST)
3462 l_const = lr_arg, r_const = rr_arg;
3463 else if (lr_inner == 0 || rr_inner == 0
3464 || ! operand_equal_p (lr_inner, rr_inner, 0))
3465 return 0;
3466 else
3467 l_const = r_const = 0;
3469 /* If either comparison code is not correct for our logical operation,
3470 fail. However, we can convert a one-bit comparison against zero into
3471 the opposite comparison against that bit being set in the field. */
3473 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3474 if (lcode != wanted_code)
3476 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3478 /* Make the left operand unsigned, since we are only interested
3479 in the value of one bit. Otherwise we are doing the wrong
3480 thing below. */
3481 ll_unsignedp = 1;
3482 l_const = ll_mask;
3484 else
3485 return 0;
3488 /* This is analogous to the code for l_const above. */
3489 if (rcode != wanted_code)
3491 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3493 rl_unsignedp = 1;
3494 r_const = rl_mask;
3496 else
3497 return 0;
3500 /* See if we can find a mode that contains both fields being compared on
3501 the left. If we can't, fail. Otherwise, update all constants and masks
3502 to be relative to a field of that size. */
3503 first_bit = MIN (ll_bitpos, rl_bitpos);
3504 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3505 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3506 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3507 volatilep);
3508 if (lnmode == VOIDmode)
3509 return 0;
3511 lnbitsize = GET_MODE_BITSIZE (lnmode);
3512 lnbitpos = first_bit & ~ (lnbitsize - 1);
3513 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3514 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3516 if (BYTES_BIG_ENDIAN)
3518 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3519 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3522 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3523 size_int (xll_bitpos), 0);
3524 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3525 size_int (xrl_bitpos), 0);
3527 if (l_const)
3529 l_const = convert (lntype, l_const);
3530 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3531 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3532 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3533 fold (build1 (BIT_NOT_EXPR,
3534 lntype, ll_mask)),
3535 0)))
3537 warning ("comparison is always %d", wanted_code == NE_EXPR);
3539 return convert (truth_type,
3540 wanted_code == NE_EXPR
3541 ? integer_one_node : integer_zero_node);
3544 if (r_const)
3546 r_const = convert (lntype, r_const);
3547 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3548 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3549 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3550 fold (build1 (BIT_NOT_EXPR,
3551 lntype, rl_mask)),
3552 0)))
3554 warning ("comparison is always %d", wanted_code == NE_EXPR);
3556 return convert (truth_type,
3557 wanted_code == NE_EXPR
3558 ? integer_one_node : integer_zero_node);
3562 /* If the right sides are not constant, do the same for them. Also,
3563 disallow this optimization if a size or signedness mismatch occurs
3564 between the left and right sides. */
3565 if (l_const == 0)
3567 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3568 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3569 /* Make sure the two fields on the right
3570 correspond to the left without being swapped. */
3571 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3572 return 0;
3574 first_bit = MIN (lr_bitpos, rr_bitpos);
3575 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3576 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3577 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3578 volatilep);
3579 if (rnmode == VOIDmode)
3580 return 0;
3582 rnbitsize = GET_MODE_BITSIZE (rnmode);
3583 rnbitpos = first_bit & ~ (rnbitsize - 1);
3584 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3585 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3587 if (BYTES_BIG_ENDIAN)
3589 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3590 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3593 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3594 size_int (xlr_bitpos), 0);
3595 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3596 size_int (xrr_bitpos), 0);
3598 /* Make a mask that corresponds to both fields being compared.
3599 Do this for both items being compared. If the operands are the
3600 same size and the bits being compared are in the same position
3601 then we can do this by masking both and comparing the masked
3602 results. */
3603 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3604 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3605 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3607 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3608 ll_unsignedp || rl_unsignedp);
3609 if (! all_ones_mask_p (ll_mask, lnbitsize))
3610 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3612 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3613 lr_unsignedp || rr_unsignedp);
3614 if (! all_ones_mask_p (lr_mask, rnbitsize))
3615 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3617 return build (wanted_code, truth_type, lhs, rhs);
3620 /* There is still another way we can do something: If both pairs of
3621 fields being compared are adjacent, we may be able to make a wider
3622 field containing them both.
3624 Note that we still must mask the lhs/rhs expressions. Furthermore,
3625 the mask must be shifted to account for the shift done by
3626 make_bit_field_ref. */
3627 if ((ll_bitsize + ll_bitpos == rl_bitpos
3628 && lr_bitsize + lr_bitpos == rr_bitpos)
3629 || (ll_bitpos == rl_bitpos + rl_bitsize
3630 && lr_bitpos == rr_bitpos + rr_bitsize))
3632 tree type;
3634 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3635 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3636 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3637 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3639 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3640 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3641 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3642 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3644 /* Convert to the smaller type before masking out unwanted bits. */
3645 type = lntype;
3646 if (lntype != rntype)
3648 if (lnbitsize > rnbitsize)
3650 lhs = convert (rntype, lhs);
3651 ll_mask = convert (rntype, ll_mask);
3652 type = rntype;
3654 else if (lnbitsize < rnbitsize)
3656 rhs = convert (lntype, rhs);
3657 lr_mask = convert (lntype, lr_mask);
3658 type = lntype;
3662 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3663 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3665 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3666 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3668 return build (wanted_code, truth_type, lhs, rhs);
3671 return 0;
3674 /* Handle the case of comparisons with constants. If there is something in
3675 common between the masks, those bits of the constants must be the same.
3676 If not, the condition is always false. Test for this to avoid generating
3677 incorrect code below. */
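/* For example (illustrative): `(x & 3) == 3 && (x & 5) == 0' requires
   bit 0 of X to be both 1 and 0; the shared mask bit exposes the
   contradiction and the whole conjunction folds to 0.  */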
3678 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3679 if (! integer_zerop (result)
3680 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3681 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3682 {
3683 if (wanted_code == NE_EXPR)
3684 {
3685 warning ("`or' of unmatched not-equal tests is always 1");
3686 return convert (truth_type, integer_one_node);
3687 }
3688 else
3689 {
3690 warning ("`and' of mutually exclusive equal-tests is always 0");
3691 return convert (truth_type, integer_zero_node);
3692 }
3693 }
3695 /* Construct the expression we will return. First get the component
3696 reference we will make. Unless the mask is all ones the width of
3697 that field, perform the mask operation. Then compare with the
3698 merged constant. */
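/* E.g. (illustrative): `(x & 0xf0) == 0x30 && (x & 0x0f) == 0x02'
   merges into the single test `(x & 0xff) == 0x32'.  */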
3699 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3700 ll_unsignedp || rl_unsignedp);
3702 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3703 if (! all_ones_mask_p (ll_mask, lnbitsize))
3704 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3706 return build (wanted_code, truth_type, result,
3707 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3710 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3711 constant. */
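/* For instance (illustrative): `MIN (X, 4) > 7' can never hold and
   folds to 0, while `MAX (X, 4) > 3' always holds and folds to 1;
   the remaining cases reduce to a comparison against X alone.  */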
3713 static tree
3714 optimize_minmax_comparison (t)
3715 tree t;
3716 {
3717 tree type = TREE_TYPE (t);
3718 tree arg0 = TREE_OPERAND (t, 0);
3719 enum tree_code op_code;
3720 tree comp_const = TREE_OPERAND (t, 1);
3721 tree minmax_const;
3722 int consts_equal, consts_lt;
3723 tree inner;
3725 STRIP_SIGN_NOPS (arg0);
3727 op_code = TREE_CODE (arg0);
3728 minmax_const = TREE_OPERAND (arg0, 1);
3729 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3730 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3731 inner = TREE_OPERAND (arg0, 0);
3733 /* If something does not permit us to optimize, return the original tree. */
3734 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3735 || TREE_CODE (comp_const) != INTEGER_CST
3736 || TREE_CONSTANT_OVERFLOW (comp_const)
3737 || TREE_CODE (minmax_const) != INTEGER_CST
3738 || TREE_CONSTANT_OVERFLOW (minmax_const))
3739 return t;
3741 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3742 and GT_EXPR, doing the rest with recursive calls using logical
3743 simplifications. */
3744 switch (TREE_CODE (t))
3745 {
3746 case NE_EXPR: case LT_EXPR: case LE_EXPR:
3747 return
3748 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3750 case GE_EXPR:
3751 return
3752 fold (build (TRUTH_ORIF_EXPR, type,
3753 optimize_minmax_comparison
3754 (build (EQ_EXPR, type, arg0, comp_const)),
3755 optimize_minmax_comparison
3756 (build (GT_EXPR, type, arg0, comp_const))));
3758 case EQ_EXPR:
3759 if (op_code == MAX_EXPR && consts_equal)
3760 /* MAX (X, 0) == 0 -> X <= 0 */
3761 return fold (build (LE_EXPR, type, inner, comp_const));
3763 else if (op_code == MAX_EXPR && consts_lt)
3764 /* MAX (X, 0) == 5 -> X == 5 */
3765 return fold (build (EQ_EXPR, type, inner, comp_const));
3767 else if (op_code == MAX_EXPR)
3768 /* MAX (X, 0) == -1 -> false */
3769 return omit_one_operand (type, integer_zero_node, inner);
3771 else if (consts_equal)
3772 /* MIN (X, 0) == 0 -> X >= 0 */
3773 return fold (build (GE_EXPR, type, inner, comp_const));
3775 else if (consts_lt)
3776 /* MIN (X, 0) == 5 -> false */
3777 return omit_one_operand (type, integer_zero_node, inner);
3779 else
3780 /* MIN (X, 0) == -1 -> X == -1 */
3781 return fold (build (EQ_EXPR, type, inner, comp_const));
3783 case GT_EXPR:
3784 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
3785 /* MAX (X, 0) > 0 -> X > 0
3786 MAX (X, 0) > 5 -> X > 5 */
3787 return fold (build (GT_EXPR, type, inner, comp_const));
3789 else if (op_code == MAX_EXPR)
3790 /* MAX (X, 0) > -1 -> true */
3791 return omit_one_operand (type, integer_one_node, inner);
3793 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
3794 /* MIN (X, 0) > 0 -> false
3795 MIN (X, 0) > 5 -> false */
3796 return omit_one_operand (type, integer_zero_node, inner);
3798 else
3799 /* MIN (X, 0) > -1 -> X > -1 */
3800 return fold (build (GT_EXPR, type, inner, comp_const));
3802 default:
3803 return t;
3804 }
3805 }
3807 /* T is an integer expression that is being multiplied, divided, or taken
3808 modulo a constant C (CODE says which operation, and what kind of divide
3809 or modulus). See if we can eliminate that operation by folding it with
3810 other operations already in T. WIDE_TYPE, if non-null, is a type that
3811 should be used for the computation if wider than our type.
3813 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
3814 (X * 2) + (Y * 4). We must, however, be assured that either the original
3815 expression would not overflow or that overflow is undefined for the type
3816 in the language in question.
3818 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
3819 the machine has a multiply-accumulate insn or that this is part of an
3820 addressing calculation.
3822 If we return a non-null expression, it is an equivalent form of the
3823 original computation, but need not be in the original type. */
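/* A sketch of the recursion (illustrative): for ((X * 8) + 4) / 2 the
   PLUS_EXPR case below recurses into both operands, rewriting X * 8
   to X * 4 and the constant 4 to 2, yielding X * 4 + 2 with no
   division left.  */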
3825 static tree
3826 extract_muldiv (t, c, code, wide_type)
3827 tree t;
3828 tree c;
3829 enum tree_code code;
3830 tree wide_type;
3831 {
3832 tree type = TREE_TYPE (t);
3833 enum tree_code tcode = TREE_CODE (t);
3834 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
3835 > GET_MODE_SIZE (TYPE_MODE (type)))
3836 ? wide_type : type);
3837 tree t1, t2;
3838 int same_p = tcode == code;
3839 tree op0 = NULL_TREE, op1 = NULL_TREE;
3841 /* Don't deal with constants of zero here; they confuse the code below. */
3842 if (integer_zerop (c))
3843 return NULL_TREE;
3845 if (TREE_CODE_CLASS (tcode) == '1')
3846 op0 = TREE_OPERAND (t, 0);
3848 if (TREE_CODE_CLASS (tcode) == '2')
3849 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
3851 /* Note that we need not handle conditional operations here since fold
3852 already handles those cases. So just do arithmetic here. */
3853 switch (tcode)
3854 {
3855 case INTEGER_CST:
3856 /* For a constant, we can always simplify if we are a multiply
3857 or (for divide and modulus) if it is a multiple of our constant. */
3858 if (code == MULT_EXPR
3859 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
3860 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
3861 break;
3863 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
3864 /* If op0 is an expression, is unsigned, and its type is smaller
3865 than ctype, then we cannot widen the expression. */
3866 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
3867 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
3868 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
3869 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
3870 && TREE_UNSIGNED (TREE_TYPE (op0))
3871 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3872 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
3873 && (GET_MODE_SIZE (TYPE_MODE (ctype))
3874 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
3875 break;
3877 /* Pass the constant down and see if we can make a simplification. If
3878 we can, replace this expression with the inner simplification for
3879 possible later conversion to our or some other type. */
3880 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
3881 code == MULT_EXPR ? ctype : NULL_TREE)))
3882 return t1;
3883 break;
3885 case NEGATE_EXPR: case ABS_EXPR:
3886 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
3887 return fold (build1 (tcode, ctype, convert (ctype, t1)));
3888 break;
3890 case MIN_EXPR: case MAX_EXPR:
3891 /* If widening the type changes the signedness, then we can't perform
3892 this optimization as that changes the result. */
3893 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
3894 break;
3896 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
3897 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
3898 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
3899 {
3900 if (tree_int_cst_sgn (c) < 0)
3901 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
3903 return fold (build (tcode, ctype, convert (ctype, t1),
3904 convert (ctype, t2)));
3905 }
3906 break;
3908 case WITH_RECORD_EXPR:
3909 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
3910 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
3911 TREE_OPERAND (t, 1));
3912 break;
3914 case SAVE_EXPR:
3915 /* If this has not been evaluated and the operand has no side effects,
3916 we can see if we can do something inside it and make a new one.
3917 Note that this test is overly conservative since we can do this
3918 if the only reason it had side effects is that it was another
3919 similar SAVE_EXPR, but that isn't worth bothering with. */
3920 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
3921 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
3922 wide_type)))
3923 {
3924 t1 = save_expr (t1);
3925 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
3926 SAVE_EXPR_PERSISTENT_P (t1) = 1;
3927 if (is_pending_size (t))
3928 put_pending_size (t1);
3929 return t1;
3930 }
3931 break;
3933 case LSHIFT_EXPR: case RSHIFT_EXPR:
3934 /* If the second operand is constant, this is a multiplication
3935 or floor division by a power of two, so we can treat it that
3936 way unless the multiplier or divisor overflows. */
3937 if (TREE_CODE (op1) == INTEGER_CST
3938 /* const_binop may not detect overflow correctly,
3939 so check for it explicitly here. */
3940 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
3941 && TREE_INT_CST_HIGH (op1) == 0
3942 && 0 != (t1 = convert (ctype,
3943 const_binop (LSHIFT_EXPR, size_one_node,
3944 op1, 0)))
3945 && ! TREE_OVERFLOW (t1))
3946 return extract_muldiv (build (tcode == LSHIFT_EXPR
3947 ? MULT_EXPR : FLOOR_DIV_EXPR,
3948 ctype, convert (ctype, op0), t1),
3949 c, code, wide_type);
3950 break;
3952 case PLUS_EXPR: case MINUS_EXPR:
3953 /* See if we can eliminate the operation on both sides. If we can, we
3954 can return a new PLUS or MINUS. If we can't, the only remaining
3955 cases where we can do anything are if the second operand is a
3956 constant. */
3957 t1 = extract_muldiv (op0, c, code, wide_type);
3958 t2 = extract_muldiv (op1, c, code, wide_type);
3959 if (t1 != 0 && t2 != 0
3960 && (code == MULT_EXPR
3961 /* If not multiplication, we can only do this if either operand
3962 is divisible by c. */
3963 || multiple_of_p (ctype, op0, c)
3964 || multiple_of_p (ctype, op1, c)))
3965 return fold (build (tcode, ctype, convert (ctype, t1),
3966 convert (ctype, t2)));
3968 /* If this was a subtraction, negate OP1 and set it to be an addition.
3969 This simplifies the logic below. */
3970 if (tcode == MINUS_EXPR)
3971 tcode = PLUS_EXPR, op1 = negate_expr (op1);
3973 if (TREE_CODE (op1) != INTEGER_CST)
3974 break;
3976 /* If either OP1 or C are negative, this optimization is not safe for
3977 some of the division and remainder types while for others we need
3978 to change the code. */
3979 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
3980 {
3981 if (code == CEIL_DIV_EXPR)
3982 code = FLOOR_DIV_EXPR;
3983 else if (code == FLOOR_DIV_EXPR)
3984 code = CEIL_DIV_EXPR;
3985 else if (code != MULT_EXPR
3986 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
3987 break;
3988 }
3990 /* If it's a multiply, or if OP1 is a multiple of our constant, apply
3991 the operation to OP1 and verify it doesn't overflow. */
3992 if (code == MULT_EXPR
3993 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
3994 {
3995 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
3996 if (op1 == 0 || TREE_OVERFLOW (op1))
3997 break;
3998 }
3999 else
4000 break;
4002 /* If we have an unsigned type that is not a sizetype, we cannot widen
4003 the operation since it will change the result if the original
4004 computation overflowed. */
4005 if (TREE_UNSIGNED (ctype)
4006 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4007 && ctype != type)
4008 break;
4010 /* If we were able to eliminate our operation from the first side,
4011 apply our operation to the second side and reform the PLUS. */
4012 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4013 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4015 /* The last case is if we are a multiply. In that case, we can
4016 apply the distributive law to commute the multiply and addition
4017 if the multiplication of the constants doesn't overflow. */
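/* E.g. (illustrative): for (X + 2) * 3, OP1 was already scaled to 6
   above, so this returns X * 3 + 6, matching the (X + 7) * 4 ->
   X * 4 + 28 canonicalization described in the function comment.  */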
4018 if (code == MULT_EXPR)
4019 return fold (build (tcode, ctype, fold (build (code, ctype,
4020 convert (ctype, op0),
4021 convert (ctype, c))),
4022 op1));
4024 break;
4026 case MULT_EXPR:
4027 /* We have a special case here if we are doing something like
4028 (C * 8) % 4 since we know that's zero. */
4029 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4030 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4031 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4032 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4033 return omit_one_operand (type, integer_zero_node, op0);
4035 /* ... fall through ... */
4037 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4038 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4039 /* If we can extract our operation from the LHS, do so and return a
4040 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4041 do something only if the second operand is a constant. */
4042 if (same_p
4043 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4044 return fold (build (tcode, ctype, convert (ctype, t1),
4045 convert (ctype, op1)));
4046 else if (tcode == MULT_EXPR && code == MULT_EXPR
4047 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4048 return fold (build (tcode, ctype, convert (ctype, op0),
4049 convert (ctype, t1)));
4050 else if (TREE_CODE (op1) != INTEGER_CST)
4051 return 0;
4053 /* If these are the same operation types, we can associate them
4054 assuming no overflow. */
4055 if (tcode == code
4056 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4057 convert (ctype, c), 0))
4058 && ! TREE_OVERFLOW (t1))
4059 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4061 /* If these operations "cancel" each other, we have the main
4062 optimizations of this pass, which occur when either constant is a
4063 multiple of the other, in which case we replace this with either an
4064 operation of CODE or TCODE.
4066 If we have an unsigned type that is not a sizetype, we cannot do
4067 this since it will change the result if the original computation
4068 overflowed. */
4069 if ((! TREE_UNSIGNED (ctype)
4070 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4071 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4072 || (tcode == MULT_EXPR
4073 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4074 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4075 {
4076 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4077 return fold (build (tcode, ctype, convert (ctype, op0),
4078 convert (ctype,
4079 const_binop (TRUNC_DIV_EXPR,
4080 op1, c, 0))));
4081 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4082 return fold (build (code, ctype, convert (ctype, op0),
4083 convert (ctype,
4084 const_binop (TRUNC_DIV_EXPR,
4085 c, op1, 0))));
4086 }
4087 break;
4089 default:
4090 break;
4091 }
4093 return 0;
4094 }
4096 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4097 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4098 that we may sometimes modify the tree. */
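/* Illustrative example: if S is SAVE_EXPR (a) and T represents
   `((void) SAVE_EXPR (a), b)', the COMPOUND_EXPR exists only to force
   evaluation of S, so the tree for `b' is returned.  */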
4100 static tree
4101 strip_compound_expr (t, s)
4102 tree t;
4103 tree s;
4104 {
4105 enum tree_code code = TREE_CODE (t);
4107 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4108 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4109 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4110 return TREE_OPERAND (t, 1);
4112 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4113 don't bother handling any other types. */
4114 else if (code == COND_EXPR)
4115 {
4116 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4117 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4118 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4119 }
4120 else if (TREE_CODE_CLASS (code) == '1')
4121 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4122 else if (TREE_CODE_CLASS (code) == '<'
4123 || TREE_CODE_CLASS (code) == '2')
4124 {
4125 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4126 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4127 }
4129 return t;
4130 }
4132 /* Return a node which has the indicated constant VALUE (either 0 or
4133 1), and is of the indicated TYPE. */
4135 static tree
4136 constant_boolean_node (value, type)
4137 int value;
4138 tree type;
4139 {
4140 if (type == integer_type_node)
4141 return value ? integer_one_node : integer_zero_node;
4142 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4143 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4144 integer_zero_node);
4145 else
4146 {
4147 tree t = build_int_2 (value, 0);
4149 TREE_TYPE (t) = type;
4150 return t;
4151 }
4152 }
4154 /* Utility function for the following routine, to see how complex a nesting of
4155 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4156 we don't care (to avoid spending too much time on complex expressions). */
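/* E.g. (illustrative): for `a ? (b ? x : y) : z' this returns 2, one
   for the outer COND_EXPR and one for the nested one.  */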
4158 static int
4159 count_cond (expr, lim)
4160 tree expr;
4161 int lim;
4162 {
4163 int ctrue, cfalse;
4165 if (TREE_CODE (expr) != COND_EXPR)
4166 return 0;
4167 else if (lim <= 0)
4168 return 0;
4170 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4171 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4172 return MIN (lim, 1 + ctrue + cfalse);
4173 }
4175 /* Transform `a + (b ? x : y)' into `x ? (a + b) : (a + y)'.
4176 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4177 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4178 expression, and ARG to `a'. If COND_FIRST_P is non-zero, then the
4179 COND is the first argument to CODE; otherwise (as in the example
4180 given here), it is the second argument. TYPE is the type of the
4181 original expression. */
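/* Illustrative example: with CODE == PLUS_EXPR, COND == `b ? 2 : 3'
   and ARG == `a', the result folds to `b ? a + 2 : a + 3'; a bare
   comparison such as `x < y' goes through the same path with arms
   1 and 0.  */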
4183 static tree
4184 fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4185 enum tree_code code;
4186 tree type;
4187 tree cond;
4188 tree arg;
4189 int cond_first_p;
4190 {
4191 tree test, true_value, false_value;
4192 tree lhs = NULL_TREE;
4193 tree rhs = NULL_TREE;
4194 /* In the end, we'll produce a COND_EXPR. Both arms of the
4195 conditional expression will be binary operations. The left-hand
4196 side of the expression to be executed if the condition is true
4197 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4198 of the expression to be executed if the condition is true will be
4199 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4200 but apply to the expression to be executed if the conditional is
4201 false. */
4202 tree *true_lhs;
4203 tree *true_rhs;
4204 tree *false_lhs;
4205 tree *false_rhs;
4206 /* These are the codes to use for the left-hand side and right-hand
4207 side of the COND_EXPR. Normally, they are the same as CODE. */
4208 enum tree_code lhs_code = code;
4209 enum tree_code rhs_code = code;
4210 /* And these are the types of the expressions. */
4211 tree lhs_type = type;
4212 tree rhs_type = type;
4214 if (cond_first_p)
4215 {
4216 true_rhs = false_rhs = &arg;
4217 true_lhs = &true_value;
4218 false_lhs = &false_value;
4219 }
4220 else
4221 {
4222 true_lhs = false_lhs = &arg;
4223 true_rhs = &true_value;
4224 false_rhs = &false_value;
4225 }
4227 if (TREE_CODE (cond) == COND_EXPR)
4228 {
4229 test = TREE_OPERAND (cond, 0);
4230 true_value = TREE_OPERAND (cond, 1);
4231 false_value = TREE_OPERAND (cond, 2);
4232 /* If this operand is a void expression (such as a `throw'), it
4233 does not make sense to try to perform a logical or arithmetic
4234 operation involving it. Instead of building `a + throw 3' for
4235 example, we simply build `a, throw 3'. */
4236 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4237 {
4238 lhs_code = COMPOUND_EXPR;
4239 if (!cond_first_p)
4240 lhs_type = void_type_node;
4241 }
4242 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4243 {
4244 rhs_code = COMPOUND_EXPR;
4245 if (!cond_first_p)
4246 rhs_type = void_type_node;
4247 }
4248 }
4249 else
4250 {
4251 tree testtype = TREE_TYPE (cond);
4252 test = cond;
4253 true_value = convert (testtype, integer_one_node);
4254 false_value = convert (testtype, integer_zero_node);
4255 }
4257 /* If ARG is complex we want to make sure we only evaluate
4258 it once. Though this is only required if it is volatile, it
4259 might be more efficient even if it is not. However, if we
4260 succeed in folding one part to a constant, we do not need
4261 to make this SAVE_EXPR. Since we do this optimization
4262 primarily to see if we do end up with constant and this
4263 SAVE_EXPR interferes with later optimizations, suppressing
4264 it when we can is important.
4266 If we are not in a function, we can't make a SAVE_EXPR, so don't
4267 try to do so. Don't try to see if the result is a constant
4268 if an arm is a COND_EXPR since we get exponential behavior
4269 in that case. */
4271 if (TREE_CODE (arg) != SAVE_EXPR && ! TREE_CONSTANT (arg)
4272 && (*lang_hooks.decls.global_bindings_p) () == 0
4273 && ((TREE_CODE (arg) != VAR_DECL
4274 && TREE_CODE (arg) != PARM_DECL)
4275 || TREE_SIDE_EFFECTS (arg)))
4276 {
4277 if (TREE_CODE (true_value) != COND_EXPR)
4278 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4280 if (TREE_CODE (false_value) != COND_EXPR)
4281 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4283 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4284 && (rhs == 0 || ! TREE_CONSTANT (rhs)))
4285 arg = save_expr (arg), lhs = rhs = 0;
4286 }
4288 if (lhs == 0)
4289 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4290 if (rhs == 0)
4291 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4293 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4295 if (TREE_CODE (arg) == SAVE_EXPR)
4296 return build (COMPOUND_EXPR, type,
4297 convert (void_type_node, arg),
4298 strip_compound_expr (test, arg));
4299 else
4300 return convert (type, test);
4301 }
4304 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4306 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4307 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4308 ADDEND is the same as X.
4310 X + 0 and X - 0 both give X when X is NaN, infinite, or non-zero
4311 and finite. The problematic cases are when X is zero, and its mode
4312 has signed zeros. In the case of rounding towards -infinity,
4313 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4314 modes, X + 0 is not the same as X because -0 + 0 is 0. */
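/* Concretely (illustrative): under signed zeros, `x - 0.0' folds
   (except with sign-dependent rounding), but `x - (-0.0)' is treated
   as `x + 0.0' by the sign flip below and must be left alone, since
   (-0.0) + 0.0 is +0.0.  */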
4316 static bool
4317 fold_real_zero_addition_p (type, addend, negate)
4318 tree type, addend;
4319 int negate;
4320 {
4321 if (!real_zerop (addend))
4322 return false;
4324 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4325 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4326 return true;
4328 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4329 if (TREE_CODE (addend) == REAL_CST
4330 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4331 negate = !negate;
4333 /* The mode has signed zeros, and we have to honor their sign.
4334 In this situation, there is only one case we can return true for.
4335 X - 0 is the same as X unless rounding towards -infinity is
4336 supported. */
4337 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4338 }
4341 /* Perform constant folding and related simplification of EXPR.
4342 The related simplifications include x*1 => x, x*0 => 0, etc.,
4343 and application of the associative law.
4344 NOP_EXPR conversions may be removed freely (as long as we
4345 are careful not to change the C type of the overall expression)
4346 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4347 but we can constant-fold them if they have constant operands. */
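/* For example (illustrative): `(x + 1) + 2' becomes `x + 3' through
   the association code below, and `a + (b ? 2 : 3)' is pushed inside
   the conditional by fold_binary_op_with_conditional_arg.  */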
4349 tree
4350 fold (expr)
4351 tree expr;
4352 {
4353 tree t = expr;
4354 tree t1 = NULL_TREE;
4355 tree tem;
4356 tree type = TREE_TYPE (expr);
4357 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4358 enum tree_code code = TREE_CODE (t);
4359 int kind = TREE_CODE_CLASS (code);
4360 int invert;
4361 /* WINS will be nonzero when the switch is done
4362 if all operands are constant. */
4363 int wins = 1;
4365 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4366 Likewise for a SAVE_EXPR that's already been evaluated. */
4367 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4368 return t;
4370 /* Return right away if a constant. */
4371 if (kind == 'c')
4372 return t;
4374 #ifdef MAX_INTEGER_COMPUTATION_MODE
4375 check_max_integer_computation_mode (expr);
4376 #endif
4378 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4379 {
4380 tree subop;
4382 /* Special case for conversion ops that can have fixed point args. */
4383 arg0 = TREE_OPERAND (t, 0);
4385 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4386 if (arg0 != 0)
4387 STRIP_SIGN_NOPS (arg0);
4389 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4390 subop = TREE_REALPART (arg0);
4391 else
4392 subop = arg0;
4394 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4395 && TREE_CODE (subop) != REAL_CST
4396 )
4397 /* Note that TREE_CONSTANT isn't enough:
4398 static var addresses are constant but we can't
4399 do arithmetic on them. */
4400 wins = 0;
4401 }
4402 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4403 {
4404 int len = first_rtl_op (code);
4405 int i;
4406 for (i = 0; i < len; i++)
4407 {
4408 tree op = TREE_OPERAND (t, i);
4409 tree subop;
4411 if (op == 0)
4412 continue; /* Valid for CALL_EXPR, at least. */
4414 if (kind == '<' || code == RSHIFT_EXPR)
4415 {
4416 /* Signedness matters here. Perhaps we can refine this
4417 later. */
4418 STRIP_SIGN_NOPS (op);
4419 }
4420 else
4421 /* Strip any conversions that don't change the mode. */
4422 STRIP_NOPS (op);
4424 if (TREE_CODE (op) == COMPLEX_CST)
4425 subop = TREE_REALPART (op);
4426 else
4427 subop = op;
4429 if (TREE_CODE (subop) != INTEGER_CST
4430 && TREE_CODE (subop) != REAL_CST)
4431 /* Note that TREE_CONSTANT isn't enough:
4432 static var addresses are constant but we can't
4433 do arithmetic on them. */
4434 wins = 0;
4436 if (i == 0)
4437 arg0 = op;
4438 else if (i == 1)
4439 arg1 = op;
4440 }
4441 }
4443 /* If this is a commutative operation, and ARG0 is a constant, move it
4444 to ARG1 to reduce the number of tests below. */
4445 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4446 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4447 || code == BIT_AND_EXPR)
4448 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4449 {
4450 tem = arg0; arg0 = arg1; arg1 = tem;
4452 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4453 TREE_OPERAND (t, 1) = tem;
4454 }
4456 /* Now WINS is set as described above,
4457 ARG0 is the first operand of EXPR,
4458 and ARG1 is the second operand (if it has more than one operand).
4460 First check for cases where an arithmetic operation is applied to a
4461 compound, conditional, or comparison operation. Push the arithmetic
4462 operation inside the compound or conditional to see if any folding
4463 can then be done. Convert comparison to conditional for this purpose.
4464 This also optimizes non-constant cases that used to be done in
4465 expand_expr.
4467 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
4468 where one of the operands is a comparison and the other is a comparison,
4469 a BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4470 code below would make the expression more complex. Change it to a
4471 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4472 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4474 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4475 || code == EQ_EXPR || code == NE_EXPR)
4476 && ((truth_value_p (TREE_CODE (arg0))
4477 && (truth_value_p (TREE_CODE (arg1))
4478 || (TREE_CODE (arg1) == BIT_AND_EXPR
4479 && integer_onep (TREE_OPERAND (arg1, 1)))))
4480 || (truth_value_p (TREE_CODE (arg1))
4481 && (truth_value_p (TREE_CODE (arg0))
4482 || (TREE_CODE (arg0) == BIT_AND_EXPR
4483 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4484 {
4485 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4486 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4487 : TRUTH_XOR_EXPR,
4488 type, arg0, arg1));
4490 if (code == EQ_EXPR)
4491 t = invert_truthvalue (t);
4493 return t;
4494 }
4496 if (TREE_CODE_CLASS (code) == '1')
4497 {
4498 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4499 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4500 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4501 else if (TREE_CODE (arg0) == COND_EXPR)
4502 {
4503 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4504 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4505 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4507 /* If this was a conversion, and all we did was to move into
4508 inside the COND_EXPR, bring it back out. But leave it if
4509 it is a conversion from integer to integer and the
4510 result precision is no wider than a word since such a
4511 conversion is cheap and may be optimized away by combine,
4512 while it couldn't if it were outside the COND_EXPR. Then return
4513 so we don't get into an infinite recursion loop taking the
4514 conversion out and then back in. */
4516 if ((code == NOP_EXPR || code == CONVERT_EXPR
4517 || code == NON_LVALUE_EXPR)
4518 && TREE_CODE (t) == COND_EXPR
4519 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4520 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4521 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4522 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4523 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4524 && (INTEGRAL_TYPE_P
4525 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4526 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4527 t = build1 (code, type,
4528 build (COND_EXPR,
4529 TREE_TYPE (TREE_OPERAND
4530 (TREE_OPERAND (t, 1), 0)),
4531 TREE_OPERAND (t, 0),
4532 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4533 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4534 return t;
4535 }
4536 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4537 return fold (build (COND_EXPR, type, arg0,
4538 fold (build1 (code, type, integer_one_node)),
4539 fold (build1 (code, type, integer_zero_node))));
4540 }
4541 else if (TREE_CODE_CLASS (code) == '2'
4542 || TREE_CODE_CLASS (code) == '<')
4543 {
4544 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4545 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4546 fold (build (code, type,
4547 arg0, TREE_OPERAND (arg1, 1))));
4548 else if ((TREE_CODE (arg1) == COND_EXPR
4549 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4550 && TREE_CODE_CLASS (code) != '<'))
4551 && (TREE_CODE (arg0) != COND_EXPR
4552 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4553 && (! TREE_SIDE_EFFECTS (arg0)
4554 || ((*lang_hooks.decls.global_bindings_p) () == 0
4555 && ! contains_placeholder_p (arg0))))
4556 return
4557 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4558 /*cond_first_p=*/0);
4559 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4560 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4561 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4562 else if ((TREE_CODE (arg0) == COND_EXPR
4563 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4564 && TREE_CODE_CLASS (code) != '<'))
4565 && (TREE_CODE (arg1) != COND_EXPR
4566 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4567 && (! TREE_SIDE_EFFECTS (arg1)
4568 || ((*lang_hooks.decls.global_bindings_p) () == 0
4569 && ! contains_placeholder_p (arg1))))
4570 return
4571 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4572 /*cond_first_p=*/1);
4574 else if (TREE_CODE_CLASS (code) == '<'
4575 && TREE_CODE (arg0) == COMPOUND_EXPR)
4576 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4577 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4578 else if (TREE_CODE_CLASS (code) == '<'
4579 && TREE_CODE (arg1) == COMPOUND_EXPR)
4580 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4581 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4582 }
4583 switch (code)
4584 {
4585 case INTEGER_CST:
4586 case REAL_CST:
4587 case VECTOR_CST:
4588 case STRING_CST:
4589 case COMPLEX_CST:
4590 case CONSTRUCTOR:
4591 return t;
4593 case CONST_DECL:
4594 return fold (DECL_INITIAL (t));
4596 case NOP_EXPR:
4597 case FLOAT_EXPR:
4598 case CONVERT_EXPR:
4599 case FIX_TRUNC_EXPR:
4600 /* Other kinds of FIX are not handled properly by fold_convert. */
4602 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4603 return TREE_OPERAND (t, 0);
4605 /* Handle cases of two conversions in a row. */
4606 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4607 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4608 {
4609 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4610 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4611 tree final_type = TREE_TYPE (t);
4612 int inside_int = INTEGRAL_TYPE_P (inside_type);
4613 int inside_ptr = POINTER_TYPE_P (inside_type);
4614 int inside_float = FLOAT_TYPE_P (inside_type);
4615 unsigned int inside_prec = TYPE_PRECISION (inside_type);
4616 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4617 int inter_int = INTEGRAL_TYPE_P (inter_type);
4618 int inter_ptr = POINTER_TYPE_P (inter_type);
4619 int inter_float = FLOAT_TYPE_P (inter_type);
4620 unsigned int inter_prec = TYPE_PRECISION (inter_type);
4621 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4622 int final_int = INTEGRAL_TYPE_P (final_type);
4623 int final_ptr = POINTER_TYPE_P (final_type);
4624 int final_float = FLOAT_TYPE_P (final_type);
4625 unsigned int final_prec = TYPE_PRECISION (final_type);
4626 int final_unsignedp = TREE_UNSIGNED (final_type);
4628 /* In addition to the cases of two conversions in a row
4629 handled below, if we are converting something to its own
4630 type via an object of identical or wider precision, neither
4631 conversion is needed. */
4632 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4633 && ((inter_int && final_int) || (inter_float && final_float))
4634 && inter_prec >= final_prec)
4635 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4637 /* Likewise, if the intermediate and final types are either both
4638 float or both integer, we don't need the middle conversion if
4639 it is wider than the final type and doesn't change the signedness
4640 (for integers). Avoid this if the final type is a pointer
4641 since then we sometimes need the inner conversion. Likewise if
4642 the outer has a precision not equal to the size of its mode. */
4643 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4644 || (inter_float && inside_float))
4645 && inter_prec >= inside_prec
4646 && (inter_float || inter_unsignedp == inside_unsignedp)
4647 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4648 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4649 && ! final_ptr)
4650 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4652 /* If we have a sign-extension of a zero-extended value, we can
4653 replace that by a single zero-extension. */
4654 if (inside_int && inter_int && final_int
4655 && inside_prec < inter_prec && inter_prec < final_prec
4656 && inside_unsignedp && !inter_unsignedp)
4657 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4659 /* Two conversions in a row are not needed unless:
4660 - some conversion is floating-point (overstrict for now), or
4661 - the intermediate type is narrower than both initial and
4662 final, or
4663 - the intermediate type and innermost type differ in signedness,
4664 and the outermost type is wider than the intermediate, or
4665 - the initial type is a pointer type and the precisions of the
4666 intermediate and final types differ, or
4667 - the final type is a pointer type and the precisions of the
4668 initial and intermediate types differ. */
4669 if (! inside_float && ! inter_float && ! final_float
4670 && (inter_prec > inside_prec || inter_prec > final_prec)
4671 && ! (inside_int && inter_int
4672 && inter_unsignedp != inside_unsignedp
4673 && inter_prec < final_prec)
4674 && ((inter_unsignedp && inter_prec > inside_prec)
4675 == (final_unsignedp && final_prec > inter_prec))
4676 && ! (inside_ptr && inter_prec != final_prec)
4677 && ! (final_ptr && inside_prec != inter_prec)
4678 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4679 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4680 && ! final_ptr)
4681 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4682 }
4684 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4685 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4686 /* Detect assigning a bitfield. */
4687 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4688 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4689 {
4690 /* Don't leave an assignment inside a conversion
4691 unless assigning a bitfield. */
4692 tree prev = TREE_OPERAND (t, 0);
4693 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4694 /* First do the assignment, then return converted constant. */
4695 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4696 TREE_USED (t) = 1;
4697 return t;
4698 }
4700 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
4701 constant (if x has signed type, the sign bit cannot be set
4702 in c). This folds extension into the BIT_AND_EXPR. */
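/* E.g. (illustrative): with an unsigned char Q, (int) (Q & 0x7f) can
   become (int) Q & 0x7f, performing the mask in the wider type so the
   extension can combine with surrounding code.  */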
4703 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
4704 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
4705 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
4706 {
4707 tree and = TREE_OPERAND (t, 0);
4708 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
4709 int change = 0;
4711 if (TREE_UNSIGNED (TREE_TYPE (and))
4712 || (TYPE_PRECISION (TREE_TYPE (t))
4713 <= TYPE_PRECISION (TREE_TYPE (and))))
4714 change = 1;
4715 else if (TYPE_PRECISION (TREE_TYPE (and1))
4716 <= HOST_BITS_PER_WIDE_INT
4717 && host_integerp (and1, 1))
4718 {
4719 unsigned HOST_WIDE_INT cst;
4721 cst = tree_low_cst (and1, 1);
4722 cst &= (HOST_WIDE_INT) -1
4723 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
4724 change = (cst == 0);
4725 #ifdef LOAD_EXTEND_OP
4726 if (change
4727 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
4728 == ZERO_EXTEND))
4729 {
4730 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
4731 and0 = convert (uns, and0);
4732 and1 = convert (uns, and1);
4733 }
4734 #endif
4735 }
4736 if (change)
4737 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
4738 convert (TREE_TYPE (t), and0),
4739 convert (TREE_TYPE (t), and1)));
4740 }
4742 if (!wins)
4743 {
4744 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4745 return t;
4746 }
4747 return fold_convert (t, arg0);
4749 case VIEW_CONVERT_EXPR:
4750 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
4751 return build1 (VIEW_CONVERT_EXPR, type,
4752 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4753 return t;
4755 case COMPONENT_REF:
4756 if (TREE_CODE (arg0) == CONSTRUCTOR)
4757 {
4758 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4759 if (m)
4760 t = TREE_VALUE (m);
4761 }
4762 return t;
4764 case RANGE_EXPR:
4765 TREE_CONSTANT (t) = wins;
4766 return t;
4768 case NEGATE_EXPR:
4769 if (wins)
4770 {
4771 if (TREE_CODE (arg0) == INTEGER_CST)
4772 {
4773 unsigned HOST_WIDE_INT low;
4774 HOST_WIDE_INT high;
4775 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4776 TREE_INT_CST_HIGH (arg0),
4777 &low, &high);
4778 t = build_int_2 (low, high);
4779 TREE_TYPE (t) = type;
4780 TREE_OVERFLOW (t)
4781 = (TREE_OVERFLOW (arg0)
4782 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4783 TREE_CONSTANT_OVERFLOW (t)
4784 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4785 }
4786 else if (TREE_CODE (arg0) == REAL_CST)
4787 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4788 }
4789 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4790 return TREE_OPERAND (arg0, 0);
4792 /* Convert - (a - b) to (b - a) for non-floating-point. */
4793 else if (TREE_CODE (arg0) == MINUS_EXPR
4794 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
4795 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4796 TREE_OPERAND (arg0, 0));
4798 return t;
4800 case ABS_EXPR:
4801 if (wins)
4802 {
4803 if (TREE_CODE (arg0) == INTEGER_CST)
4804 {
4805 /* If the value is unsigned, then the absolute value is
4806 the same as the ordinary value. */
4807 if (TREE_UNSIGNED (type))
4808 return arg0;
4809 /* Similarly, if the value is non-negative. */
4810 else if (INT_CST_LT (integer_minus_one_node, arg0))
4811 return arg0;
4812 /* If the value is negative, then the absolute value is
4813 its negation. */
4814 else
4815 {
4816 unsigned HOST_WIDE_INT low;
4817 HOST_WIDE_INT high;
4818 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4819 TREE_INT_CST_HIGH (arg0),
4820 &low, &high);
4821 t = build_int_2 (low, high);
4822 TREE_TYPE (t) = type;
4823 TREE_OVERFLOW (t)
4824 = (TREE_OVERFLOW (arg0)
4825 | force_fit_type (t, overflow));
4826 TREE_CONSTANT_OVERFLOW (t)
4827 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4828 }
4829 }
4830 else if (TREE_CODE (arg0) == REAL_CST)
4831 {
4832 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4833 t = build_real (type,
4834 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4835 }
4836 }
4837 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4838 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4839 return t;
4841 case CONJ_EXPR:
4842 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4843 return convert (type, arg0);
4844 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4845 return build (COMPLEX_EXPR, type,
4846 TREE_OPERAND (arg0, 0),
4847 negate_expr (TREE_OPERAND (arg0, 1)));
4848 else if (TREE_CODE (arg0) == COMPLEX_CST)
4849 return build_complex (type, TREE_REALPART (arg0),
4850 negate_expr (TREE_IMAGPART (arg0)));
4851 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4852 return fold (build (TREE_CODE (arg0), type,
4853 fold (build1 (CONJ_EXPR, type,
4854 TREE_OPERAND (arg0, 0))),
4855 fold (build1 (CONJ_EXPR,
4856 type, TREE_OPERAND (arg0, 1)))));
4857 else if (TREE_CODE (arg0) == CONJ_EXPR)
4858 return TREE_OPERAND (arg0, 0);
4859 return t;
4861 case BIT_NOT_EXPR:
4862 if (wins)
4863 {
4864 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4865 ~ TREE_INT_CST_HIGH (arg0));
4866 TREE_TYPE (t) = type;
4867 force_fit_type (t, 0);
4868 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4869 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4870 }
4871 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4872 return TREE_OPERAND (arg0, 0);
4873 return t;
4875 case PLUS_EXPR:
4876 /* A + (-B) -> A - B */
4877 if (TREE_CODE (arg1) == NEGATE_EXPR)
4878 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4879 /* (-A) + B -> B - A */
4880 if (TREE_CODE (arg0) == NEGATE_EXPR)
4881 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
4882 else if (! FLOAT_TYPE_P (type))
4883 {
4884 if (integer_zerop (arg1))
4885 return non_lvalue (convert (type, arg0));
4887 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4888 with a constant, and the two constants have no bits in common,
4889 we should treat this as a BIT_IOR_EXPR since this may produce more
4890 simplifications. */
4891 if (TREE_CODE (arg0) == BIT_AND_EXPR
4892 && TREE_CODE (arg1) == BIT_AND_EXPR
4893 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4894 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
4895 && integer_zerop (const_binop (BIT_AND_EXPR,
4896 TREE_OPERAND (arg0, 1),
4897 TREE_OPERAND (arg1, 1), 0)))
4898 {
4899 code = BIT_IOR_EXPR;
4900 goto bit_ior;
4901 }
4903 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
4904 (plus (plus (mult) (mult)) (foo)) so that we can
4905 take advantage of the factoring cases below. */
4906 if ((TREE_CODE (arg0) == PLUS_EXPR
4907 && TREE_CODE (arg1) == MULT_EXPR)
4908 || (TREE_CODE (arg1) == PLUS_EXPR
4909 && TREE_CODE (arg0) == MULT_EXPR))
4910 {
4911 tree parg0, parg1, parg, marg;
4913 if (TREE_CODE (arg0) == PLUS_EXPR)
4914 parg = arg0, marg = arg1;
4915 else
4916 parg = arg1, marg = arg0;
4917 parg0 = TREE_OPERAND (parg, 0);
4918 parg1 = TREE_OPERAND (parg, 1);
4919 STRIP_NOPS (parg0);
4920 STRIP_NOPS (parg1);
4922 if (TREE_CODE (parg0) == MULT_EXPR
4923 && TREE_CODE (parg1) != MULT_EXPR)
4924 return fold (build (PLUS_EXPR, type,
4925 fold (build (PLUS_EXPR, type, parg0, marg)),
4926 parg1));
4927 if (TREE_CODE (parg0) != MULT_EXPR
4928 && TREE_CODE (parg1) == MULT_EXPR)
4929 return fold (build (PLUS_EXPR, type,
4930 fold (build (PLUS_EXPR, type, parg1, marg)),
4931 parg0));
4932 }
4934 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
4935 {
4936 tree arg00, arg01, arg10, arg11;
4937 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
4939 /* (A * C) + (B * C) -> (A+B) * C.
4940 We are most concerned about the case where C is a constant,
4941 but other combinations show up during loop reduction. Since
4942 it is not difficult, try all four possibilities. */
4944 arg00 = TREE_OPERAND (arg0, 0);
4945 arg01 = TREE_OPERAND (arg0, 1);
4946 arg10 = TREE_OPERAND (arg1, 0);
4947 arg11 = TREE_OPERAND (arg1, 1);
4948 same = NULL_TREE;
4950 if (operand_equal_p (arg01, arg11, 0))
4951 same = arg01, alt0 = arg00, alt1 = arg10;
4952 else if (operand_equal_p (arg00, arg10, 0))
4953 same = arg00, alt0 = arg01, alt1 = arg11;
4954 else if (operand_equal_p (arg00, arg11, 0))
4955 same = arg00, alt0 = arg01, alt1 = arg10;
4956 else if (operand_equal_p (arg01, arg10, 0))
4957 same = arg01, alt0 = arg00, alt1 = arg11;
4959 /* No identical multiplicands; see if we can find a common
4960 power-of-two factor in non-power-of-two multiplies. This
4961 can help in multi-dimensional array access. */
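/* E.g. (illustrative): (i * 12) + (j * 4) shares no multiplicand, but
   4 is a power of two dividing 12, so it becomes ((i * 3) + j) * 4,
   the shape of a scaled two-dimensional array index.  */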
4962 else if (TREE_CODE (arg01) == INTEGER_CST
4963 && TREE_CODE (arg11) == INTEGER_CST
4964 && TREE_INT_CST_HIGH (arg01) == 0
4965 && TREE_INT_CST_HIGH (arg11) == 0)
4966 {
4967 HOST_WIDE_INT int01, int11, tmp;
4968 int01 = TREE_INT_CST_LOW (arg01);
4969 int11 = TREE_INT_CST_LOW (arg11);
4971 /* Move min of absolute values to int11. */
4972 if ((int01 >= 0 ? int01 : -int01)
4973 < (int11 >= 0 ? int11 : -int11))
4974 {
4975 tmp = int01, int01 = int11, int11 = tmp;
4976 alt0 = arg00, arg00 = arg10, arg10 = alt0;
4977 alt0 = arg01, arg01 = arg11, arg11 = alt0;
4978 }
4980 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
4981 {
4982 alt0 = fold (build (MULT_EXPR, type, arg00,
4983 build_int_2 (int01 / int11, 0)));
4984 alt1 = arg10;
4985 same = arg11;
4986 }
4987 }
4989 if (same)
4990 return fold (build (MULT_EXPR, type,
4991 fold (build (PLUS_EXPR, type, alt0, alt1)),
4992 same));
4993 }
4994 }
4996 /* See if ARG1 is zero and X + ARG1 reduces to X. */
4997 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
4998 return non_lvalue (convert (type, arg0));
5000 /* Likewise if the operands are reversed. */
5001 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5002 return non_lvalue (convert (type, arg1));
5004 bit_rotate:
5005 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5006 is a rotate of A by C1 bits. */
5007 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5008 is a rotate of A by B bits. */
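/* E.g. (illustrative): for a 32-bit unsigned A, (A << 3) + (A >> 29)
   matches with C1 == 3 and C2 == 29, C1 + C2 == 32, and is rewritten
   as an LROTATE_EXPR of A by 3 bits.  */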
5009 {
5010 enum tree_code code0, code1;
5011 code0 = TREE_CODE (arg0);
5012 code1 = TREE_CODE (arg1);
5013 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5014 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5015 && operand_equal_p (TREE_OPERAND (arg0, 0),
5016 TREE_OPERAND (arg1, 0), 0)
5017 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5018 {
5019 tree tree01, tree11;
5020 enum tree_code code01, code11;
5022 tree01 = TREE_OPERAND (arg0, 1);
5023 tree11 = TREE_OPERAND (arg1, 1);
5024 STRIP_NOPS (tree01);
5025 STRIP_NOPS (tree11);
5026 code01 = TREE_CODE (tree01);
5027 code11 = TREE_CODE (tree11);
5028 if (code01 == INTEGER_CST
5029 && code11 == INTEGER_CST
5030 && TREE_INT_CST_HIGH (tree01) == 0
5031 && TREE_INT_CST_HIGH (tree11) == 0
5032 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5033 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5034 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5035 code0 == LSHIFT_EXPR ? tree01 : tree11);
5036 else if (code11 == MINUS_EXPR)
5037 {
5038 tree tree110, tree111;
5039 tree110 = TREE_OPERAND (tree11, 0);
5040 tree111 = TREE_OPERAND (tree11, 1);
5041 STRIP_NOPS (tree110);
5042 STRIP_NOPS (tree111);
5043 if (TREE_CODE (tree110) == INTEGER_CST
5044 && 0 == compare_tree_int (tree110,
5045 TYPE_PRECISION
5046 (TREE_TYPE (TREE_OPERAND
5047 (arg0, 0))))
5048 && operand_equal_p (tree01, tree111, 0))
5049 return build ((code0 == LSHIFT_EXPR
5050 ? LROTATE_EXPR
5051 : RROTATE_EXPR),
5052 type, TREE_OPERAND (arg0, 0), tree01);
5053 }
5054 else if (code01 == MINUS_EXPR)
5055 {
5056 tree tree010, tree011;
5057 tree010 = TREE_OPERAND (tree01, 0);
5058 tree011 = TREE_OPERAND (tree01, 1);
5059 STRIP_NOPS (tree010);
5060 STRIP_NOPS (tree011);
5061 if (TREE_CODE (tree010) == INTEGER_CST
5062 && 0 == compare_tree_int (tree010,
5063 TYPE_PRECISION
5064 (TREE_TYPE (TREE_OPERAND
5065 (arg0, 0))))
5066 && operand_equal_p (tree11, tree011, 0))
5067 return build ((code0 != LSHIFT_EXPR
5068 ? LROTATE_EXPR
5069 : RROTATE_EXPR),
5070 type, TREE_OPERAND (arg0, 0), tree11);
5071 }
5072 }
5073 }
5075 associate:
5076 /* In most languages, we can't associate operations on floats through
5077 parentheses. Rather than remember where the parentheses were, we
5078 don't associate floats at all. It shouldn't matter much. However,
5079 associating multiplications is only very slightly inaccurate, so do
5080 that if -funsafe-math-optimizations is specified. */
5082 if (! wins
5083 && (! FLOAT_TYPE_P (type)
5084 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5085 {
5086 tree var0, con0, lit0, minus_lit0;
5087 tree var1, con1, lit1, minus_lit1;
5089 /* Split both trees into variables, constants, and literals. Then
5090 associate each group together, the constants with literals,
5091 then the result with variables. This increases the chances of
5092 literals being recombined later and of generating relocatable
5093 expressions for the sum of a constant and literal. */
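/* Illustrative example: for (x + 1) + (y + 2) the splits produce
   variables x and y and literals 1 and 2; re-associating yields
   (x + y) + 3, so the two literals fold into one constant.  */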
5094 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5095 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5096 code == MINUS_EXPR);
5098 /* Only do something if we found more than two objects. Otherwise,
5099 nothing has changed and we risk infinite recursion. */
5100 if (2 < ((var0 != 0) + (var1 != 0)
5101 + (con0 != 0) + (con1 != 0)
5102 + (lit0 != 0) + (lit1 != 0)
5103 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5104 {
5105 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5106 if (code == MINUS_EXPR)
5107 code = PLUS_EXPR;
5109 var0 = associate_trees (var0, var1, code, type);
5110 con0 = associate_trees (con0, con1, code, type);
5111 lit0 = associate_trees (lit0, lit1, code, type);
5112 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5114 /* Preserve the MINUS_EXPR if the negative part of the literal is
5115 greater than the positive part. Otherwise, the multiplicative
5116 folding code (i.e. extract_muldiv) may be fooled in case
5117 unsigned constants are subtracted, like in the following
5118 example: ((X*2 + 4) - 8U)/2. */
5119 if (minus_lit0 && lit0)
5120 {
5121 if (tree_int_cst_lt (lit0, minus_lit0))
5122 {
5123 minus_lit0 = associate_trees (minus_lit0, lit0,
5124 MINUS_EXPR, type);
5125 lit0 = 0;
5126 }
5127 else
5128 {
5129 lit0 = associate_trees (lit0, minus_lit0,
5130 MINUS_EXPR, type);
5131 minus_lit0 = 0;
5132 }
5133 }
5134 if (minus_lit0)
5135 {
5136 if (con0 == 0)
5137 return convert (type, associate_trees (var0, minus_lit0,
5138 MINUS_EXPR, type));
5139 else
5140 {
5141 con0 = associate_trees (con0, minus_lit0,
5142 MINUS_EXPR, type);
5143 return convert (type, associate_trees (var0, con0,
5144 PLUS_EXPR, type));
5145 }
5146 }
5148 con0 = associate_trees (con0, lit0, code, type);
5149 return convert (type, associate_trees (var0, con0, code, type));
5150 }
5151 }
5153 binary:
5154 if (wins)
5155 t1 = const_binop (code, arg0, arg1, 0);
5156 if (t1 != NULL_TREE)
5157 {
5158 /* The return value should always have
5159 the same type as the original expression. */
5160 if (TREE_TYPE (t1) != TREE_TYPE (t))
5161 t1 = convert (TREE_TYPE (t), t1);
5163 return t1;
5164 }
5165 return t;
5167 case MINUS_EXPR:
5168 /* A - (-B) -> A + B */
5169 if (TREE_CODE (arg1) == NEGATE_EXPR)
5170 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5171 /* (-A) - CST -> (-CST) - A for floating point (what about ints?) */
5172 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5173 return
5174 fold (build (MINUS_EXPR, type,
5175 build_real (TREE_TYPE (arg1),
5176 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5177 TREE_OPERAND (arg0, 0)));
5179 if (! FLOAT_TYPE_P (type))
5180 {
5181 if (! wins && integer_zerop (arg0))
5182 return negate_expr (convert (type, arg1));
5183 if (integer_zerop (arg1))
5184 return non_lvalue (convert (type, arg0));
5186 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5187 about the case where C is a constant, just try one of the
5188 four possibilities. */
5190 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5191 && operand_equal_p (TREE_OPERAND (arg0, 1),
5192 TREE_OPERAND (arg1, 1), 0))
5193 return fold (build (MULT_EXPR, type,
5194 fold (build (MINUS_EXPR, type,
5195 TREE_OPERAND (arg0, 0),
5196 TREE_OPERAND (arg1, 0))),
5197 TREE_OPERAND (arg0, 1)));
5198 }
5200 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5201 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5202 return non_lvalue (convert (type, arg0));
5204 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5205 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5206 (-ARG1 + ARG0) reduces to -ARG1. */
5207 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5208 return negate_expr (convert (type, arg1));
5210 /* Fold &x - &x. This can happen from &x.foo - &x.
5211 This is unsafe for certain floats even in non-IEEE formats.
5212 In IEEE, it is unsafe because it gives the wrong result for NaNs.
5213 Also note that operand_equal_p is always false if an operand
5214 is volatile. */
5216 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5217 && operand_equal_p (arg0, arg1, 0))
5218 return convert (type, integer_zero_node);
5220 goto associate;
5222 case MULT_EXPR:
5223 /* (-A) * (-B) -> A * B */
5224 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5225 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5226 TREE_OPERAND (arg1, 0)));
5228 if (! FLOAT_TYPE_P (type))
5229 {
5230 if (integer_zerop (arg1))
5231 return omit_one_operand (type, arg1, arg0);
5232 if (integer_onep (arg1))
5233 return non_lvalue (convert (type, arg0));
5235 /* (a * (1 << b)) is (a << b) */
5236 if (TREE_CODE (arg1) == LSHIFT_EXPR
5237 && integer_onep (TREE_OPERAND (arg1, 0)))
5238 return fold (build (LSHIFT_EXPR, type, arg0,
5239 TREE_OPERAND (arg1, 1)));
5240 if (TREE_CODE (arg0) == LSHIFT_EXPR
5241 && integer_onep (TREE_OPERAND (arg0, 0)))
5242 return fold (build (LSHIFT_EXPR, type, arg1,
5243 TREE_OPERAND (arg0, 1)));
5245 if (TREE_CODE (arg1) == INTEGER_CST
5246 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5247 code, NULL_TREE)))
5248 return convert (type, tem);
5249 }
5251 else
5252 {
5253 /* Maybe fold x * 0 to 0. The expressions aren't the same
5254 when x is NaN, since x * 0 is also NaN. Nor are they the
5255 same in modes with signed zeros, since multiplying a
5256 negative value by 0 gives -0, not +0. */
5257 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5258 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5259 && real_zerop (arg1))
5260 return omit_one_operand (type, arg1, arg0);
5261 /* In IEEE floating point, x*1 is not equivalent to x for snans.
5262 However, ANSI says we can drop signals,
5263 so we can do this anyway. */
5264 if (real_onep (arg1))
5265 return non_lvalue (convert (type, arg0));
5266 /* x*2 is x+x */
5267 if (! wins && real_twop (arg1)
5268 && (*lang_hooks.decls.global_bindings_p) () == 0
5269 && ! contains_placeholder_p (arg0))
5270 {
5271 tree arg = save_expr (arg0);
5272 return build (PLUS_EXPR, type, arg, arg);
5273 }
5274 }
5275 goto associate;
5277 case BIT_IOR_EXPR:
5278 bit_ior:
5279 if (integer_all_onesp (arg1))
5280 return omit_one_operand (type, arg1, arg0);
5281 if (integer_zerop (arg1))
5282 return non_lvalue (convert (type, arg0));
5283 t1 = distribute_bit_expr (code, type, arg0, arg1);
5284 if (t1 != NULL_TREE)
5285 return t1;
5287 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5289 This results in more efficient code for machines without a NAND
5290 instruction. Combine will canonicalize to the first form
5291 which will allow use of NAND instructions provided by the
5292 backend if they exist. */
5293 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5294 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5295 {
5296 return fold (build1 (BIT_NOT_EXPR, type,
5297 build (BIT_AND_EXPR, type,
5298 TREE_OPERAND (arg0, 0),
5299 TREE_OPERAND (arg1, 0))));
5300 }
5302 /* See if this can be simplified into a rotate first. If that
5303 is unsuccessful continue in the association code. */
5304 goto bit_rotate;
5306 case BIT_XOR_EXPR:
5307 if (integer_zerop (arg1))
5308 return non_lvalue (convert (type, arg0));
5309 if (integer_all_onesp (arg1))
5310 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5312 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5313 with a constant, and the two constants have no bits in common,
5314 we should treat this as a BIT_IOR_EXPR since this may produce more
5315 simplifications. */
5316 if (TREE_CODE (arg0) == BIT_AND_EXPR
5317 && TREE_CODE (arg1) == BIT_AND_EXPR
5318 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5319 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5320 && integer_zerop (const_binop (BIT_AND_EXPR,
5321 TREE_OPERAND (arg0, 1),
5322 TREE_OPERAND (arg1, 1), 0)))
5324 code = BIT_IOR_EXPR;
5325 goto bit_ior;
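/* Worked example: in "(x & 0x0f) ^ (y & 0xf0)" the two masks
satisfy 0x0f & 0xf0 == 0, so no bit can be set on both sides
and the XOR behaves exactly like an IOR.  */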
5328 /* See if this can be simplified into a rotate first. If that
5329 is unsuccessful continue in the association code. */
5330 goto bit_rotate;
5332 case BIT_AND_EXPR:
5333 bit_and:
5334 if (integer_all_onesp (arg1))
5335 return non_lvalue (convert (type, arg0));
5336 if (integer_zerop (arg1))
5337 return omit_one_operand (type, arg1, arg0);
5338 t1 = distribute_bit_expr (code, type, arg0, arg1);
5339 if (t1 != NULL_TREE)
5340 return t1;
5341 /* Simplify ((int)c & 0xff) into (int)c, if c is unsigned char. */
5342 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
5343 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5345 unsigned int prec
5346 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
5348 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5349 && (~TREE_INT_CST_LOW (arg0)
5350 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5351 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
5353 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5354 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5356 unsigned int prec
5357 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5359 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5360 && (~TREE_INT_CST_LOW (arg1)
5361 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5362 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
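/* Worked example: for "unsigned char c", the expression
"(int) c & 0xff" keeps all 8 low bits of C (prec == 8), so the
AND is a no-op and the whole thing folds to "(int) c".  */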
5365 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5367 This results in more efficient code for machines without a NOR
5368 instruction. Combine will canonicalize to the first form
5369 which will allow use of NOR instructions provided by the
5370 backend if they exist. */
5371 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5372 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5374 return fold (build1 (BIT_NOT_EXPR, type,
5375 build (BIT_IOR_EXPR, type,
5376 TREE_OPERAND (arg0, 0),
5377 TREE_OPERAND (arg1, 0))));
5380 goto associate;
5382 case BIT_ANDTC_EXPR:
5383 if (integer_all_onesp (arg0))
5384 return non_lvalue (convert (type, arg1));
5385 if (integer_zerop (arg0))
5386 return omit_one_operand (type, arg0, arg1);
5387 if (TREE_CODE (arg1) == INTEGER_CST)
5389 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5390 code = BIT_AND_EXPR;
5391 goto bit_and;
5393 goto binary;
5395 case RDIV_EXPR:
5396 /* Don't touch a floating-point divide by zero unless the mode
5397 of the constant can represent infinity. */
5398 if (TREE_CODE (arg1) == REAL_CST
5399 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
5400 && real_zerop (arg1))
5401 return t;
5403 /* (-A) / (-B) -> A / B */
5404 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5405 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5406 TREE_OPERAND (arg1, 0)));
5408 /* In IEEE floating point, x/1 is not equivalent to x for signaling NaNs.
5409 However, ANSI says we can drop signals, so we can do this anyway. */
5410 if (real_onep (arg1))
5411 return non_lvalue (convert (type, arg0));
5413 /* If ARG1 is a constant, we can convert this to a multiply by the
5414 reciprocal. This does not have the same rounding properties,
5415 so only do this if -funsafe-math-optimizations. We can actually
5416 always safely do it if ARG1 is a power of two, but it's hard to
5417 tell if it is or not in a portable manner. */
5418 if (TREE_CODE (arg1) == REAL_CST)
5420 if (flag_unsafe_math_optimizations
5421 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5422 arg1, 0)))
5423 return fold (build (MULT_EXPR, type, arg0, tem));
5424 /* Find the reciprocal if optimizing and the result is exact. */
5425 else if (optimize)
5427 REAL_VALUE_TYPE r;
5428 r = TREE_REAL_CST (arg1);
5429 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
5431 tem = build_real (type, r);
5432 return fold (build (MULT_EXPR, type, arg0, tem));
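/* Worked example: "x / 4.0" becomes "x * 0.25" whenever optimizing,
since 0.25 is exactly representable; "x / 3.0" becomes
"x * (1.0/3.0)" only under -funsafe-math-optimizations, because
that product rounds differently from the division.  */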
5436 /* Convert A/B/C to A/(B*C). */
5437 if (flag_unsafe_math_optimizations
5438 && TREE_CODE (arg0) == RDIV_EXPR)
5440 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5441 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
5442 arg1)));
5444 /* Convert A/(B/C) to (A/B)*C. */
5445 if (flag_unsafe_math_optimizations
5446 && TREE_CODE (arg1) == RDIV_EXPR)
5448 return fold (build (MULT_EXPR, type,
5449 build (RDIV_EXPR, type, arg0,
5450 TREE_OPERAND (arg1, 0)),
5451 TREE_OPERAND (arg1, 1)));
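/* Worked example: "a / b / c" is rewritten as "a / (b * c)" and
"a / (b / c)" as "(a / b) * c", each saving one division; both
need -funsafe-math-optimizations since the rounding of the
intermediate results changes.  */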
5453 goto binary;
5455 case TRUNC_DIV_EXPR:
5456 case ROUND_DIV_EXPR:
5457 case FLOOR_DIV_EXPR:
5458 case CEIL_DIV_EXPR:
5459 case EXACT_DIV_EXPR:
5460 if (integer_onep (arg1))
5461 return non_lvalue (convert (type, arg0));
5462 if (integer_zerop (arg1))
5463 return t;
5465 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5466 operation, EXACT_DIV_EXPR.
5468 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5469 At one time others generated faster code; it's not clear if they do
5470 after the last round of changes to the DIV code in expmed.c. */
5471 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5472 && multiple_of_p (type, arg0, arg1))
5473 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5475 if (TREE_CODE (arg1) == INTEGER_CST
5476 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5477 code, NULL_TREE)))
5478 return convert (type, tem);
5480 goto binary;
5482 case CEIL_MOD_EXPR:
5483 case FLOOR_MOD_EXPR:
5484 case ROUND_MOD_EXPR:
5485 case TRUNC_MOD_EXPR:
5486 if (integer_onep (arg1))
5487 return omit_one_operand (type, integer_zero_node, arg0);
5488 if (integer_zerop (arg1))
5489 return t;
5491 if (TREE_CODE (arg1) == INTEGER_CST
5492 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5493 code, NULL_TREE)))
5494 return convert (type, tem);
5496 goto binary;
5498 case LSHIFT_EXPR:
5499 case RSHIFT_EXPR:
5500 case LROTATE_EXPR:
5501 case RROTATE_EXPR:
5502 if (integer_zerop (arg1))
5503 return non_lvalue (convert (type, arg0));
5504 /* Since a negative shift count is not well-defined,
5505 don't try to compute it in the compiler. */
5506 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5507 return t;
5508 /* Rewrite an LROTATE_EXPR by a constant into an
5509 RROTATE_EXPR by a new constant. */
5510 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5512 TREE_SET_CODE (t, RROTATE_EXPR);
5513 code = RROTATE_EXPR;
5514 TREE_OPERAND (t, 1) = arg1
5515 = const_binop
5516 (MINUS_EXPR,
5517 convert (TREE_TYPE (arg1),
5518 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5519 arg1, 0);
5520 if (tree_int_cst_sgn (arg1) < 0)
5521 return t;
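/* Worked example: in a 32-bit mode, a left rotate by 5 becomes a
right rotate by 32 - 5 == 27, so only RROTATE_EXPR needs to be
handled from here on.  */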
5524 /* If we have a rotate of a bit operation with the rotate count and
5525 the second operand of the bit operation both constant,
5526 permute the two operations. */
5527 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5528 && (TREE_CODE (arg0) == BIT_AND_EXPR
5529 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5530 || TREE_CODE (arg0) == BIT_IOR_EXPR
5531 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5532 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5533 return fold (build (TREE_CODE (arg0), type,
5534 fold (build (code, type,
5535 TREE_OPERAND (arg0, 0), arg1)),
5536 fold (build (code, type,
5537 TREE_OPERAND (arg0, 1), arg1))));
5539 /* Two consecutive rotates adding up to the width of the mode can
5540 be ignored. */
5541 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5542 && TREE_CODE (arg0) == RROTATE_EXPR
5543 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5544 && TREE_INT_CST_HIGH (arg1) == 0
5545 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5546 && ((TREE_INT_CST_LOW (arg1)
5547 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5548 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5549 return TREE_OPERAND (arg0, 0);
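/* Worked example: for 32-bit X, rotating right by 10 and then by
22 moves every bit 32 positions, i.e. all the way around, so
the nested rotates fold back to X itself.  */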
5551 goto binary;
5553 case MIN_EXPR:
5554 if (operand_equal_p (arg0, arg1, 0))
5555 return omit_one_operand (type, arg0, arg1);
5556 if (INTEGRAL_TYPE_P (type)
5557 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5558 return omit_one_operand (type, arg1, arg0);
5559 goto associate;
5561 case MAX_EXPR:
5562 if (operand_equal_p (arg0, arg1, 0))
5563 return omit_one_operand (type, arg0, arg1);
5564 if (INTEGRAL_TYPE_P (type)
5565 && TYPE_MAX_VALUE (type)
5566 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5567 return omit_one_operand (type, arg1, arg0);
5568 goto associate;
5570 case TRUTH_NOT_EXPR:
5571 /* Note that the operand of this must be an int
5572 and its values must be 0 or 1.
5573 ("true" is a fixed value perhaps depending on the language,
5574 but we don't handle values other than 1 correctly yet.) */
5575 tem = invert_truthvalue (arg0);
5576 /* Avoid infinite recursion. */
5577 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5578 return t;
5579 return convert (type, tem);
5581 case TRUTH_ANDIF_EXPR:
5582 /* Note that the operands of this must be ints
5583 and their values must be 0 or 1.
5584 ("true" is a fixed value perhaps depending on the language.) */
5585 /* If first arg is constant zero, return it. */
5586 if (integer_zerop (arg0))
5587 return convert (type, arg0);
5588 case TRUTH_AND_EXPR:
5589 /* If either arg is constant true, drop it. */
5590 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5591 return non_lvalue (convert (type, arg1));
5592 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
5593 /* Preserve sequence points. */
5594 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5595 return non_lvalue (convert (type, arg0));
5596 /* If second arg is constant zero, result is zero, but first arg
5597 must be evaluated. */
5598 if (integer_zerop (arg1))
5599 return omit_one_operand (type, arg1, arg0);
5600 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5601 case will be handled here. */
5602 if (integer_zerop (arg0))
5603 return omit_one_operand (type, arg0, arg1);
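/* Worked example: "f () && 0" must still call f () for its side
effects, so omit_one_operand yields a tree that evaluates f ()
but whose value is the constant 0.  */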
5605 truth_andor:
5606 /* We only do these simplifications if we are optimizing. */
5607 if (!optimize)
5608 return t;
5610 /* Check for things like (A || B) && (A || C). We can convert this
5611 to A || (B && C). Note that either operator can be any of the four
5612 truth and/or operations and the transformation will still be
5613 valid. Also note that we only care about order for the
5614 ANDIF and ORIF operators. If B contains side effects, this
5615 might change the truth-value of A. */
5616 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5617 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5618 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5619 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5620 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5621 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5623 tree a00 = TREE_OPERAND (arg0, 0);
5624 tree a01 = TREE_OPERAND (arg0, 1);
5625 tree a10 = TREE_OPERAND (arg1, 0);
5626 tree a11 = TREE_OPERAND (arg1, 1);
5627 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5628 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5629 && (code == TRUTH_AND_EXPR
5630 || code == TRUTH_OR_EXPR));
5632 if (operand_equal_p (a00, a10, 0))
5633 return fold (build (TREE_CODE (arg0), type, a00,
5634 fold (build (code, type, a01, a11))));
5635 else if (commutative && operand_equal_p (a00, a11, 0))
5636 return fold (build (TREE_CODE (arg0), type, a00,
5637 fold (build (code, type, a01, a10))));
5638 else if (commutative && operand_equal_p (a01, a10, 0))
5639 return fold (build (TREE_CODE (arg0), type, a01,
5640 fold (build (code, type, a00, a11))));
5642 /* This case is tricky because we must either have commutative
5643 operators or else A10 must not have side-effects. */
5645 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5646 && operand_equal_p (a01, a11, 0))
5647 return fold (build (TREE_CODE (arg0), type,
5648 fold (build (code, type, a00, a10)),
5649 a01));
5652 /* See if we can build a range comparison. */
5653 if (0 != (tem = fold_range_test (t)))
5654 return tem;
5656 /* Check for the possibility of merging component references. If our
5657 lhs is another similar operation, try to merge its rhs with our
5658 rhs. Then try to merge our lhs and rhs. */
5659 if (TREE_CODE (arg0) == code
5660 && 0 != (tem = fold_truthop (code, type,
5661 TREE_OPERAND (arg0, 1), arg1)))
5662 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5664 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5665 return tem;
5667 return t;
5669 case TRUTH_ORIF_EXPR:
5670 /* Note that the operands of this must be ints
5671 and their values must be 0 or 1.
5672 ("true" is a fixed value perhaps depending on the language.) */
5673 /* If first arg is constant true, return it. */
5674 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5675 return convert (type, arg0);
5676 case TRUTH_OR_EXPR:
5677 /* If either arg is constant zero, drop it. */
5678 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5679 return non_lvalue (convert (type, arg1));
5680 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
5681 /* Preserve sequence points. */
5682 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5683 return non_lvalue (convert (type, arg0));
5684 /* If second arg is constant true, result is true, but we must
5685 evaluate first arg. */
5686 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5687 return omit_one_operand (type, arg1, arg0);
5688 /* Likewise for first arg, but note this only occurs here for
5689 TRUTH_OR_EXPR. */
5690 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5691 return omit_one_operand (type, arg0, arg1);
5692 goto truth_andor;
5694 case TRUTH_XOR_EXPR:
5695 /* If either arg is constant zero, drop it. */
5696 if (integer_zerop (arg0))
5697 return non_lvalue (convert (type, arg1));
5698 if (integer_zerop (arg1))
5699 return non_lvalue (convert (type, arg0));
5700 /* If either arg is constant true, this is a logical inversion. */
5701 if (integer_onep (arg0))
5702 return non_lvalue (convert (type, invert_truthvalue (arg1)));
5703 if (integer_onep (arg1))
5704 return non_lvalue (convert (type, invert_truthvalue (arg0)));
5705 return t;
5707 case EQ_EXPR:
5708 case NE_EXPR:
5709 case LT_EXPR:
5710 case GT_EXPR:
5711 case LE_EXPR:
5712 case GE_EXPR:
5713 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
5715 /* (-a) CMP (-b) -> b CMP a */
5716 if (TREE_CODE (arg0) == NEGATE_EXPR
5717 && TREE_CODE (arg1) == NEGATE_EXPR)
5718 return fold (build (code, type, TREE_OPERAND (arg1, 0),
5719 TREE_OPERAND (arg0, 0)));
5720 /* (-a) CMP CST -> a swap(CMP) (-CST) */
5721 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5722 return
5723 fold (build
5724 (swap_tree_comparison (code), type,
5725 TREE_OPERAND (arg0, 0),
5726 build_real (TREE_TYPE (arg1),
5727 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
5728 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
5729 /* a CMP (-0) -> a CMP 0 */
5730 if (TREE_CODE (arg1) == REAL_CST
5731 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5732 return fold (build (code, type, arg0,
5733 build_real (TREE_TYPE (arg1), dconst0)));
5736 /* If one arg is a constant integer, put it last. */
5737 if (TREE_CODE (arg0) == INTEGER_CST
5738 && TREE_CODE (arg1) != INTEGER_CST)
5740 TREE_OPERAND (t, 0) = arg1;
5741 TREE_OPERAND (t, 1) = arg0;
5742 arg0 = TREE_OPERAND (t, 0);
5743 arg1 = TREE_OPERAND (t, 1);
5744 code = swap_tree_comparison (code);
5745 TREE_SET_CODE (t, code);
5748 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5749 First, see if one arg is constant; find the constant arg
5750 and the other one. */
5752 tree constop = 0, varop = NULL_TREE;
5753 int constopnum = -1;
5755 if (TREE_CONSTANT (arg1))
5756 constopnum = 1, constop = arg1, varop = arg0;
5757 if (TREE_CONSTANT (arg0))
5758 constopnum = 0, constop = arg0, varop = arg1;
5760 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5762 /* This optimization is invalid for ordered comparisons
5763 if CONST+INCR overflows or if foo+incr might overflow.
5764 This optimization is invalid for floating point due to rounding.
5765 For pointer types we assume overflow doesn't happen. */
5766 if (POINTER_TYPE_P (TREE_TYPE (varop))
5767 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5768 && (code == EQ_EXPR || code == NE_EXPR)))
5770 tree newconst
5771 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5772 constop, TREE_OPERAND (varop, 1)));
5774 /* Do not overwrite the current varop to be a preincrement;
5775 create a new node so that we won't confuse our caller who
5776 might create trees and throw them away, reusing the
5777 arguments that they passed to build. This shows up in
5778 the THEN or ELSE parts of ?: being postincrements. */
5779 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
5780 TREE_OPERAND (varop, 0),
5781 TREE_OPERAND (varop, 1));
5783 /* If VAROP is a reference to a bitfield, we must mask
5784 the constant by the width of the field. */
5785 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5786 && DECL_BIT_FIELD (TREE_OPERAND
5787 (TREE_OPERAND (varop, 0), 1)))
5789 int size
5790 = TREE_INT_CST_LOW (DECL_SIZE
5791 (TREE_OPERAND
5792 (TREE_OPERAND (varop, 0), 1)));
5793 tree mask, unsigned_type;
5794 unsigned int precision;
5795 tree folded_compare;
5797 /* First check whether the comparison would come out
5798 always the same. If we don't do that we would
5799 change the meaning with the masking. */
5800 if (constopnum == 0)
5801 folded_compare = fold (build (code, type, constop,
5802 TREE_OPERAND (varop, 0)));
5803 else
5804 folded_compare = fold (build (code, type,
5805 TREE_OPERAND (varop, 0),
5806 constop));
5807 if (integer_zerop (folded_compare)
5808 || integer_onep (folded_compare))
5809 return omit_one_operand (type, folded_compare, varop);
5811 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
5812 precision = TYPE_PRECISION (unsigned_type);
5813 mask = build_int_2 (~0, ~0);
5814 TREE_TYPE (mask) = unsigned_type;
5815 force_fit_type (mask, 0);
5816 mask = const_binop (RSHIFT_EXPR, mask,
5817 size_int (precision - size), 0);
5818 newconst = fold (build (BIT_AND_EXPR,
5819 TREE_TYPE (varop), newconst,
5820 convert (TREE_TYPE (varop),
5821 mask)));
5824 t = build (code, type,
5825 (constopnum == 0) ? newconst : varop,
5826 (constopnum == 1) ? newconst : varop);
5827 return t;
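/* Worked example: "i++ == 5" becomes "++i == 6"; the increment is
folded into the constant. This is restricted to EQ/NE (and to
pointers) because for ordered comparisons an overflowing
CONST + INCR could flip the result.  */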
5830 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5832 if (POINTER_TYPE_P (TREE_TYPE (varop))
5833 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5834 && (code == EQ_EXPR || code == NE_EXPR)))
5836 tree newconst
5837 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5838 constop, TREE_OPERAND (varop, 1)));
5840 /* Do not overwrite the current varop to be a predecrement;
5841 create a new node so that we won't confuse our caller who
5842 might create trees and throw them away, reusing the
5843 arguments that they passed to build. This shows up in
5844 the THEN or ELSE parts of ?: being postdecrements. */
5845 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
5846 TREE_OPERAND (varop, 0),
5847 TREE_OPERAND (varop, 1));
5849 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5850 && DECL_BIT_FIELD (TREE_OPERAND
5851 (TREE_OPERAND (varop, 0), 1)))
5853 int size
5854 = TREE_INT_CST_LOW (DECL_SIZE
5855 (TREE_OPERAND
5856 (TREE_OPERAND (varop, 0), 1)));
5857 tree mask, unsigned_type;
5858 unsigned int precision;
5859 tree folded_compare;
5861 if (constopnum == 0)
5862 folded_compare = fold (build (code, type, constop,
5863 TREE_OPERAND (varop, 0)));
5864 else
5865 folded_compare = fold (build (code, type,
5866 TREE_OPERAND (varop, 0),
5867 constop));
5868 if (integer_zerop (folded_compare)
5869 || integer_onep (folded_compare))
5870 return omit_one_operand (type, folded_compare, varop);
5872 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
5873 precision = TYPE_PRECISION (unsigned_type);
5874 mask = build_int_2 (~0, ~0);
5875 TREE_TYPE (mask) = unsigned_type;
5876 force_fit_type (mask, 0);
5877 mask = const_binop (RSHIFT_EXPR, mask,
5878 size_int (precision - size), 0);
5879 newconst = fold (build (BIT_AND_EXPR,
5880 TREE_TYPE (varop), newconst,
5881 convert (TREE_TYPE (varop),
5882 mask)));
5885 t = build (code, type,
5886 (constopnum == 0) ? newconst : varop,
5887 (constopnum == 1) ? newconst : varop);
5888 return t;
5893 /* Change X >= CST to X > (CST - 1) if CST is positive. */
5894 if (TREE_CODE (arg1) == INTEGER_CST
5895 && TREE_CODE (arg0) != INTEGER_CST
5896 && tree_int_cst_sgn (arg1) > 0)
5898 switch (TREE_CODE (t))
5900 case GE_EXPR:
5901 code = GT_EXPR;
5902 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5903 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5904 break;
5906 case LT_EXPR:
5907 code = LE_EXPR;
5908 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5909 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5910 break;
5912 default:
5913 break;
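/* Worked example: "x >= 5" becomes "x > 4" and "x < 5" becomes
"x <= 4"; requiring the constant to be positive ensures that
subtracting 1 from it cannot wrap around.  */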
5917 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
5918 a MINUS_EXPR of a constant, we can convert it into a comparison with
5919 a revised constant as long as no overflow occurs. */
5920 if ((code == EQ_EXPR || code == NE_EXPR)
5921 && TREE_CODE (arg1) == INTEGER_CST
5922 && (TREE_CODE (arg0) == PLUS_EXPR
5923 || TREE_CODE (arg0) == MINUS_EXPR)
5924 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5925 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
5926 ? MINUS_EXPR : PLUS_EXPR,
5927 arg1, TREE_OPERAND (arg0, 1), 0))
5928 && ! TREE_CONSTANT_OVERFLOW (tem))
5929 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5931 /* Similarly for a NEGATE_EXPR. */
5932 else if ((code == EQ_EXPR || code == NE_EXPR)
5933 && TREE_CODE (arg0) == NEGATE_EXPR
5934 && TREE_CODE (arg1) == INTEGER_CST
5935 && 0 != (tem = negate_expr (arg1))
5936 && TREE_CODE (tem) == INTEGER_CST
5937 && ! TREE_CONSTANT_OVERFLOW (tem))
5938 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
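/* Worked examples: "x + 3 == 7" folds to "x == 4", and "-x == 7"
folds to "x == -7"; both are safe for EQ/NE as long as the
adjusted constant does not overflow.  */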
5940 /* If we have X - Y == 0, we can convert that to X == Y and similarly
5941 for !=. Don't do this for ordered comparisons due to overflow. */
5942 else if ((code == NE_EXPR || code == EQ_EXPR)
5943 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
5944 return fold (build (code, type,
5945 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
5947 /* If we are widening one operand of an integer comparison,
5948 see if the other operand is similarly being widened. Perhaps we
5949 can do the comparison in the narrower type. */
5950 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
5951 && TREE_CODE (arg0) == NOP_EXPR
5952 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
5953 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
5954 && (TREE_TYPE (t1) == TREE_TYPE (tem)
5955 || (TREE_CODE (t1) == INTEGER_CST
5956 && int_fits_type_p (t1, TREE_TYPE (tem)))))
5957 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
5959 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
5960 constant, we can simplify it. */
5961 else if (TREE_CODE (arg1) == INTEGER_CST
5962 && (TREE_CODE (arg0) == MIN_EXPR
5963 || TREE_CODE (arg0) == MAX_EXPR)
5964 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5965 return optimize_minmax_comparison (t);
5967 /* If we are comparing an ABS_EXPR with a constant, we can
5968 convert all the cases into explicit comparisons, but they may
5969 well not be faster than doing the ABS and one comparison.
5970 But ABS (X) <= C is a range comparison, which becomes a subtraction
5971 and a comparison, and is probably faster. */
5972 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5973 && TREE_CODE (arg0) == ABS_EXPR
5974 && ! TREE_SIDE_EFFECTS (arg0)
5975 && (0 != (tem = negate_expr (arg1)))
5976 && TREE_CODE (tem) == INTEGER_CST
5977 && ! TREE_CONSTANT_OVERFLOW (tem))
5978 return fold (build (TRUTH_ANDIF_EXPR, type,
5979 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
5980 build (LE_EXPR, type,
5981 TREE_OPERAND (arg0, 0), arg1)));
5983 /* If this is an EQ or NE comparison with zero and ARG0 is
5984 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
5985 two operations, but the latter can be done in one less insn
5986 on machines that have only two-operand insns or on which a
5987 constant cannot be the first operand. */
5988 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
5989 && TREE_CODE (arg0) == BIT_AND_EXPR)
5991 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
5992 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
5993 return
5994 fold (build (code, type,
5995 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5996 build (RSHIFT_EXPR,
5997 TREE_TYPE (TREE_OPERAND (arg0, 0)),
5998 TREE_OPERAND (arg0, 1),
5999 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6000 convert (TREE_TYPE (arg0),
6001 integer_one_node)),
6002 arg1));
6003 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6004 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6005 return
6006 fold (build (code, type,
6007 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6008 build (RSHIFT_EXPR,
6009 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6010 TREE_OPERAND (arg0, 0),
6011 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6012 convert (TREE_TYPE (arg0),
6013 integer_one_node)),
6014 arg1));
6017 /* If this is an NE or EQ comparison of zero against the result of a
6018 signed MOD operation whose second operand is a power of 2, make
6019 the MOD operation unsigned since it is simpler and equivalent. */
6020 if ((code == NE_EXPR || code == EQ_EXPR)
6021 && integer_zerop (arg1)
6022 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6023 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6024 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6025 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6026 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6027 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6029 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
6030 tree newmod = build (TREE_CODE (arg0), newtype,
6031 convert (newtype, TREE_OPERAND (arg0, 0)),
6032 convert (newtype, TREE_OPERAND (arg0, 1)));
6034 return build (code, type, newmod, convert (newtype, arg1));
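/* Worked example: for signed I, "i % 4 == 0" becomes
"(unsigned) i % 4u == 0"; an unsigned modulus by a power of two
is just a bit mask, which is cheaper than the signed form.  */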
6037 /* If this is an NE comparison of zero with an AND of one, remove the
6038 comparison since the AND will give the correct value. */
6039 if (code == NE_EXPR && integer_zerop (arg1)
6040 && TREE_CODE (arg0) == BIT_AND_EXPR
6041 && integer_onep (TREE_OPERAND (arg0, 1)))
6042 return convert (type, arg0);
6044 /* If we have (A & C) == C where C is a power of 2, convert this into
6045 (A & C) != 0. Similarly for NE_EXPR. */
6046 if ((code == EQ_EXPR || code == NE_EXPR)
6047 && TREE_CODE (arg0) == BIT_AND_EXPR
6048 && integer_pow2p (TREE_OPERAND (arg0, 1))
6049 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6050 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6051 arg0, integer_zero_node);
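/* Worked example: "(flags & 8) == 8" becomes "(flags & 8) != 0";
with a power-of-two mask the AND result can only be 0 or the
mask itself, and comparing against zero is cheaper.  */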
6053 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6054 and similarly for >= into !=. */
6055 if ((code == LT_EXPR || code == GE_EXPR)
6056 && TREE_UNSIGNED (TREE_TYPE (arg0))
6057 && TREE_CODE (arg1) == LSHIFT_EXPR
6058 && integer_onep (TREE_OPERAND (arg1, 0)))
6059 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6060 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6061 TREE_OPERAND (arg1, 1)),
6062 convert (TREE_TYPE (arg0), integer_zero_node));
6064 else if ((code == LT_EXPR || code == GE_EXPR)
6065 && TREE_UNSIGNED (TREE_TYPE (arg0))
6066 && (TREE_CODE (arg1) == NOP_EXPR
6067 || TREE_CODE (arg1) == CONVERT_EXPR)
6068 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6069 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6070 return
6071 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6072 convert (TREE_TYPE (arg0),
6073 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6074 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6075 convert (TREE_TYPE (arg0), integer_zero_node));
6077 /* Simplify comparison of something with itself. (For IEEE
6078 floating-point, we can only do some of these simplifications.) */
6079 if (operand_equal_p (arg0, arg1, 0))
6081 switch (code)
6083 case EQ_EXPR:
6084 case GE_EXPR:
6085 case LE_EXPR:
6086 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
6087 return constant_boolean_node (1, type);
6088 code = EQ_EXPR;
6089 TREE_SET_CODE (t, code);
6090 break;
6092 case NE_EXPR:
6093 /* For NE, we can only do this simplification for integral operands. */
6094 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6095 break;
6096 /* ... fall through ... */
6097 case GT_EXPR:
6098 case LT_EXPR:
6099 return constant_boolean_node (0, type);
6100 default:
6101 abort ();
6105 /* An unsigned comparison against 0 can be simplified. */
6106 if (integer_zerop (arg1)
6107 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6108 || POINTER_TYPE_P (TREE_TYPE (arg1)))
6109 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6111 switch (TREE_CODE (t))
6113 case GT_EXPR:
6114 code = NE_EXPR;
6115 TREE_SET_CODE (t, NE_EXPR);
6116 break;
6117 case LE_EXPR:
6118 code = EQ_EXPR;
6119 TREE_SET_CODE (t, EQ_EXPR);
6120 break;
6121 case GE_EXPR:
6122 return omit_one_operand (type,
6123 convert (type, integer_one_node),
6124 arg0);
6125 case LT_EXPR:
6126 return omit_one_operand (type,
6127 convert (type, integer_zero_node),
6128 arg0);
6129 default:
6130 break;
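/* Worked examples for unsigned X: "x > 0" becomes "x != 0",
"x <= 0" becomes "x == 0", "x >= 0" folds to 1, and "x < 0"
folds to 0 (evaluating X only for its side effects).  */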
6134 /* Comparisons with the highest or lowest possible integer of
6135 the specified size will have known values, and an unsigned
6136 <= 0x7fffffff can be simplified. */
6138 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6140 if (TREE_CODE (arg1) == INTEGER_CST
6141 && ! TREE_CONSTANT_OVERFLOW (arg1)
6142 && width <= HOST_BITS_PER_WIDE_INT
6143 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6144 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6146 if (TREE_INT_CST_HIGH (arg1) == 0
6147 && (TREE_INT_CST_LOW (arg1)
6148 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6149 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6150 switch (TREE_CODE (t))
6152 case GT_EXPR:
6153 return omit_one_operand (type,
6154 convert (type, integer_zero_node),
6155 arg0);
6156 case GE_EXPR:
6157 TREE_SET_CODE (t, EQ_EXPR);
6158 break;
6160 case LE_EXPR:
6161 return omit_one_operand (type,
6162 convert (type, integer_one_node),
6163 arg0);
6164 case LT_EXPR:
6165 TREE_SET_CODE (t, NE_EXPR);
6166 break;
6168 default:
6169 break;
6172 else if (TREE_INT_CST_HIGH (arg1) == -1
6173 && (TREE_INT_CST_LOW (arg1)
6174 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
6175 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6176 switch (TREE_CODE (t))
6178 case LT_EXPR:
6179 return omit_one_operand (type,
6180 convert (type, integer_zero_node),
6181 arg0);
6182 case LE_EXPR:
6183 TREE_SET_CODE (t, EQ_EXPR);
6184 break;
6186 case GE_EXPR:
6187 return omit_one_operand (type,
6188 convert (type, integer_one_node),
6189 arg0);
6190 case GT_EXPR:
6191 TREE_SET_CODE (t, NE_EXPR);
6192 break;
6194 default:
6195 break;
6198 else if (TREE_INT_CST_HIGH (arg1) == 0
6199 && (TREE_INT_CST_LOW (arg1)
6200 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6201 && TREE_UNSIGNED (TREE_TYPE (arg1))
6202 /* signed_type does not work on pointer types. */
6203 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6205 if (TREE_CODE (t) == LE_EXPR || TREE_CODE (t) == GT_EXPR)
6207 tree st0, st1;
6208 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6209 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6210 return fold
6211 (build (TREE_CODE (t) == LE_EXPR ? GE_EXPR: LT_EXPR,
6212 type, convert (st0, arg0),
6213 convert (st1, integer_zero_node)));
6216 else if (TREE_INT_CST_HIGH (arg1) == 0
6217 && (TREE_INT_CST_LOW (arg1)
6218 == ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1)
6219 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6220 switch (TREE_CODE (t))
6222 case GT_EXPR:
6223 return omit_one_operand (type,
6224 convert (type, integer_zero_node),
6225 arg0);
6226 case GE_EXPR:
6227 TREE_SET_CODE (t, EQ_EXPR);
6228 break;
6230 case LE_EXPR:
6231 return omit_one_operand (type,
6232 convert (type, integer_one_node),
6233 arg0);
6234 case LT_EXPR:
6235 TREE_SET_CODE (t, NE_EXPR);
6236 break;
6238 default:
6239 break;
6244 /* If we are comparing an expression that just has comparisons
6245 of two integer values, arithmetic expressions of those comparisons,
6246 and constants, we can simplify it. There are only three cases
6247 to check: the two values can either be equal, the first can be
6248 greater, or the second can be greater. Fold the expression for
6249 those three values. Since each value must be 0 or 1, we have
6250 eight possibilities, each of which corresponds to the constant 0
6251 or 1 or one of the six possible comparisons.
6253 This handles common cases like (a > b) == 0 but also handles
6254 expressions like ((x > y) - (y > x)) > 0, which supposedly
6255 occur in macroized code. */
6257 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6259 tree cval1 = 0, cval2 = 0;
6260 int save_p = 0;
6262 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6263 /* Don't handle degenerate cases here; they should already
6264 have been handled anyway. */
6265 && cval1 != 0 && cval2 != 0
6266 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6267 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6268 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6269 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6270 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6271 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6272 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6274 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6275 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6277 /* We can't just pass T to eval_subst in case cval1 or cval2
6278 was the same as ARG1. */
6280 tree high_result
6281 = fold (build (code, type,
6282 eval_subst (arg0, cval1, maxval, cval2, minval),
6283 arg1));
6284 tree equal_result
6285 = fold (build (code, type,
6286 eval_subst (arg0, cval1, maxval, cval2, maxval),
6287 arg1));
6288 tree low_result
6289 = fold (build (code, type,
6290 eval_subst (arg0, cval1, minval, cval2, maxval),
6291 arg1));
6293 /* All three of these results should be 0 or 1. Confirm they
6294 are. Then use those values to select the proper code
6295 to use. */
6297 if ((integer_zerop (high_result)
6298 || integer_onep (high_result))
6299 && (integer_zerop (equal_result)
6300 || integer_onep (equal_result))
6301 && (integer_zerop (low_result)
6302 || integer_onep (low_result)))
6304 /* Make a 3-bit mask with the high-order bit being the
6305 value for `>', the next for '=', and the low for '<'. */
6306 switch ((integer_onep (high_result) * 4)
6307 + (integer_onep (equal_result) * 2)
6308 + integer_onep (low_result))
6310 case 0:
6311 /* Always false. */
6312 return omit_one_operand (type, integer_zero_node, arg0);
6313 case 1:
6314 code = LT_EXPR;
6315 break;
6316 case 2:
6317 code = EQ_EXPR;
6318 break;
6319 case 3:
6320 code = LE_EXPR;
6321 break;
6322 case 4:
6323 code = GT_EXPR;
6324 break;
6325 case 5:
6326 code = NE_EXPR;
6327 break;
6328 case 6:
6329 code = GE_EXPR;
6330 break;
6331 case 7:
6332 /* Always true. */
6333 return omit_one_operand (type, integer_one_node, arg0);
6336 t = build (code, type, cval1, cval2);
6337 if (save_p)
6338 return save_expr (t);
6339 else
6340 return fold (t);
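/* Worked example: for "(a > b) == 0", probing (a,b) at
(max,min), (max,max) and (min,max) yields high_result == 0,
equal_result == 1 and low_result == 1, i.e. mask 3 == LE_EXPR,
so the whole expression folds to "a <= b".  */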
6345 /* If this is a comparison of a field, we may be able to simplify it. */
6346 if ((TREE_CODE (arg0) == COMPONENT_REF
6347 || TREE_CODE (arg0) == BIT_FIELD_REF)
6348 && (code == EQ_EXPR || code == NE_EXPR)
6349 /* Handle the constant case even without -O
6350 to make sure the warnings are given. */
6351 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6353 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6354 return t1 ? t1 : t;
6357 /* If this is a comparison of complex values and either or both sides
6358 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6359 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6360 This may prevent needless evaluations. */
6361 if ((code == EQ_EXPR || code == NE_EXPR)
6362 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6363 && (TREE_CODE (arg0) == COMPLEX_EXPR
6364 || TREE_CODE (arg1) == COMPLEX_EXPR
6365 || TREE_CODE (arg0) == COMPLEX_CST
6366 || TREE_CODE (arg1) == COMPLEX_CST))
6368 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6369 tree real0, imag0, real1, imag1;
6371 arg0 = save_expr (arg0);
6372 arg1 = save_expr (arg1);
6373 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6374 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6375 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6376 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6378 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6379 : TRUTH_ORIF_EXPR),
6380 type,
6381 fold (build (code, type, real0, real1)),
6382 fold (build (code, type, imag0, imag1))));
6385 /* Optimize comparisons of strlen vs zero to a compare of the
6386 first character of the string vs zero. To wit,
6387 strlen(ptr) == 0 => *ptr == 0
6388 strlen(ptr) != 0 => *ptr != 0
6389 Other cases should reduce to one of these two (or a constant)
6390 due to the return value of strlen being unsigned. */
6391 if ((code == EQ_EXPR || code == NE_EXPR)
6392 && integer_zerop (arg1)
6393 && TREE_CODE (arg0) == CALL_EXPR
6394 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
6396 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6397 tree arglist;
6399 if (TREE_CODE (fndecl) == FUNCTION_DECL
6400 && DECL_BUILT_IN (fndecl)
6401 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
6402 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
6403 && (arglist = TREE_OPERAND (arg0, 1))
6404 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
6405 && ! TREE_CHAIN (arglist))
6406 return fold (build (code, type,
6407 build1 (INDIRECT_REF, char_type_node,
6408 TREE_VALUE (arglist)),
6409 integer_zero_node));
6412 /* From here on, the only cases we handle are when the result is
6413 known to be a constant.
6415 To compute GT, swap the arguments and do LT.
6416 To compute GE, do LT and invert the result.
6417 To compute LE, swap the arguments, do LT and invert the result.
6418 To compute NE, do EQ and invert the result.
6420 Therefore, the code below must handle only EQ and LT. */
6422 if (code == LE_EXPR || code == GT_EXPR)
6424 tem = arg0, arg0 = arg1, arg1 = tem;
6425 code = swap_tree_comparison (code);
6428 /* Note that it is safe to invert for real values here because we
6429 will check below in the one case that it matters. */
6431 t1 = NULL_TREE;
6432 invert = 0;
6433 if (code == NE_EXPR || code == GE_EXPR)
6435 invert = 1;
6436 code = invert_tree_comparison (code);
6439 /* Compute a result for LT or EQ if args permit;
6440 otherwise return T. */
6441 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6443 if (code == EQ_EXPR)
6444 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6445 else
6446 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6447 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6448 : INT_CST_LT (arg0, arg1)),
6452 #if 0 /* This is no longer useful, and it breaks some real code. */
6453 /* Assume a nonexplicit constant cannot equal an explicit one,
6454 since such code would be undefined anyway.
6455 Exception: on sysvr4, using #pragma weak,
6456 a label can come out as 0. */
6457 else if (TREE_CODE (arg1) == INTEGER_CST
6458 && !integer_zerop (arg1)
6459 && TREE_CONSTANT (arg0)
6460 && TREE_CODE (arg0) == ADDR_EXPR
6461 && code == EQ_EXPR)
6462 t1 = build_int_2 (0, 0);
6463 #endif
6464 /* Two real constants can be compared explicitly. */
6465 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6467 /* If either operand is a NaN, the result is false with two
6468 exceptions: First, an NE_EXPR is true on NaNs, but that case
6469 is already handled correctly since we will be inverting the
6470 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6471 or a GE_EXPR into a LT_EXPR, we must return true so that it
6472 will be inverted into false. */
6474 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6475 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6476 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6478 else if (code == EQ_EXPR)
6479 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6480 TREE_REAL_CST (arg1)),
6482 else
6483 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6484 TREE_REAL_CST (arg1)),
6488 if (t1 == NULL_TREE)
6489 return t;
6491 if (invert)
6492 TREE_INT_CST_LOW (t1) ^= 1;
6494 TREE_TYPE (t1) = type;
6495 if (TREE_CODE (type) == BOOLEAN_TYPE)
6496 return (*lang_hooks.truthvalue_conversion) (t1);
6497 return t1;
6499 case COND_EXPR:
6500 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6501 so all simple results must be passed through pedantic_non_lvalue. */
6502 if (TREE_CODE (arg0) == INTEGER_CST)
6503 return pedantic_non_lvalue
6504 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6505 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6506 return pedantic_omit_one_operand (type, arg1, arg0);
6508 /* If the second operand is zero, invert the comparison and swap
6509 the second and third operands. Likewise if the second operand
6510 is constant and the third is not, or if the third operand is
6511 equivalent to the first operand of the comparison. */
6513 if (integer_zerop (arg1)
6514 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6515 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6516 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6517 TREE_OPERAND (t, 2),
6518 TREE_OPERAND (arg0, 1))))
6520 /* See if this can be inverted. If it can't, possibly because
6521 it was a floating-point inequality comparison, don't do
6522 anything. */
6523 tem = invert_truthvalue (arg0);
6525 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6527 t = build (code, type, tem,
6528 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6529 arg0 = tem;
6530 /* arg1 should be the first argument of the new T. */
6531 arg1 = TREE_OPERAND (t, 1);
6532 STRIP_NOPS (arg1);
6536 /* If we have A op B ? A : C, we may be able to convert this to a
6537 simpler expression, depending on the operation and the values
6538 of B and C. Signed zeros prevent all of these transformations,
6539 for reasons given above each one. */
6541 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6542 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6543 arg1, TREE_OPERAND (arg0, 1))
6544 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
6546 tree arg2 = TREE_OPERAND (t, 2);
6547 enum tree_code comp_code = TREE_CODE (arg0);
6549 STRIP_NOPS (arg2);
6551 /* If we have A op 0 ? A : -A, consider applying the following
6552 transformations:
6554 A == 0? A : -A same as -A
6555 A != 0? A : -A same as A
6556 A >= 0? A : -A same as abs (A)
6557 A > 0? A : -A same as abs (A)
6558 A <= 0? A : -A same as -abs (A)
6559 A < 0? A : -A same as -abs (A)
6561 None of these transformations work for modes with signed
6562 zeros. If A is +/-0, the first two transformations will
6563 change the sign of the result (from +0 to -0, or vice
6564 versa). The last four will fix the sign of the result,
6565 even though the original expressions could be positive or
6566 negative, depending on the sign of A.
6568 Note that all these transformations are correct if A is
6569 NaN, since the two alternatives (A and -A) are also NaNs. */
6570 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6571 ? real_zerop (TREE_OPERAND (arg0, 1))
6572 : integer_zerop (TREE_OPERAND (arg0, 1)))
6573 && TREE_CODE (arg2) == NEGATE_EXPR
6574 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6575 switch (comp_code)
6577 case EQ_EXPR:
6578 return
6579 pedantic_non_lvalue
6580 (convert (type,
6581 negate_expr
6582 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
6583 arg1))));
6584 case NE_EXPR:
6585 return pedantic_non_lvalue (convert (type, arg1));
6586 case GE_EXPR:
6587 case GT_EXPR:
6588 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6589 arg1 = convert ((*lang_hooks.types.signed_type)
6590 (TREE_TYPE (arg1)), arg1);
6591 return pedantic_non_lvalue
6592 (convert (type, fold (build1 (ABS_EXPR,
6593 TREE_TYPE (arg1), arg1))));
6594 case LE_EXPR:
6595 case LT_EXPR:
6596 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6597 arg1 = convert ((*lang_hooks.types.signed_type)
6598 (TREE_TYPE (arg1)), arg1);
6599 return pedantic_non_lvalue
6600 (negate_expr (convert (type,
6601 fold (build1 (ABS_EXPR,
6602 TREE_TYPE (arg1),
6603 arg1)))));
6604 default:
6605 abort ();
6608 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6609 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6610 both transformations are correct when A is NaN: A != 0
6611 is then true, and A == 0 is false. */
6613 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6615 if (comp_code == NE_EXPR)
6616 return pedantic_non_lvalue (convert (type, arg1));
6617 else if (comp_code == EQ_EXPR)
6618 return pedantic_non_lvalue (convert (type, integer_zero_node));
6621 /* Try some transformations of A op B ? A : B.
6623 A == B? A : B same as B
6624 A != B? A : B same as A
6625 A >= B? A : B same as max (A, B)
6626 A > B? A : B same as max (B, A)
6627 A <= B? A : B same as min (A, B)
6628 A < B? A : B same as min (B, A)
6630 As above, these transformations don't work in the presence
6631 of signed zeros. For example, if A and B are zeros of
6632 opposite sign, the first two transformations will change
6633 the sign of the result. In the last four, the original
6634 expressions give different results for (A=+0, B=-0) and
6635 (A=-0, B=+0), but the transformed expressions do not.
6637 The first two transformations are correct if either A or B
6638 is a NaN. In the first transformation, the condition will
6639 be false, and B will indeed be chosen. In the case of the
6640 second transformation, the condition A != B will be true,
6641 and A will be chosen.
6643 The conversions to max() and min() are not correct if B is
6644 a number and A is not. The conditions in the original
6645 expressions will be false, so all four give B. The min()
6646 and max() versions would give a NaN instead. */
6647 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6648 arg2, TREE_OPERAND (arg0, 0)))
6650 tree comp_op0 = TREE_OPERAND (arg0, 0);
6651 tree comp_op1 = TREE_OPERAND (arg0, 1);
6652 tree comp_type = TREE_TYPE (comp_op0);
6654 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
6655 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
6656 comp_type = type;
6658 switch (comp_code)
6660 case EQ_EXPR:
6661 return pedantic_non_lvalue (convert (type, arg2));
6662 case NE_EXPR:
6663 return pedantic_non_lvalue (convert (type, arg1));
6664 case LE_EXPR:
6665 case LT_EXPR:
6666 /* In C++ a ?: expression can be an lvalue, so put the
6667 operand which will be used if they are equal first
6668 so that we can convert this back to the
6669 corresponding COND_EXPR. */
6670 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6671 return pedantic_non_lvalue
6672 (convert (type, fold (build (MIN_EXPR, comp_type,
6673 (comp_code == LE_EXPR
6674 ? comp_op0 : comp_op1),
6675 (comp_code == LE_EXPR
6676 ? comp_op1 : comp_op0)))));
6677 break;
6678 case GE_EXPR:
6679 case GT_EXPR:
6680 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6681 return pedantic_non_lvalue
6682 (convert (type, fold (build (MAX_EXPR, comp_type,
6683 (comp_code == GE_EXPR
6684 ? comp_op0 : comp_op1),
6685 (comp_code == GE_EXPR
6686 ? comp_op1 : comp_op0)))));
6687 break;
6688 default:
6689 abort ();
6693 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6694 we might still be able to simplify this. For example,
6695 if C1 is one less or one more than C2, this might have started
6696 out as a MIN or MAX and been transformed by this function.
6697 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6699 if (INTEGRAL_TYPE_P (type)
6700 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6701 && TREE_CODE (arg2) == INTEGER_CST)
6702 switch (comp_code)
6704 case EQ_EXPR:
6705 /* We can replace A with C1 in this case. */
6706 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6707 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6708 TREE_OPERAND (t, 2));
6709 break;
6711 case LT_EXPR:
6712 /* If C1 is C2 + 1, this is min(A, C2). */
6713 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6714 && operand_equal_p (TREE_OPERAND (arg0, 1),
6715 const_binop (PLUS_EXPR, arg2,
6716 integer_one_node, 0), 1))
6717 return pedantic_non_lvalue
6718 (fold (build (MIN_EXPR, type, arg1, arg2)));
6719 break;
6721 case LE_EXPR:
6722 /* If C1 is C2 - 1, this is min(A, C2). */
6723 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6724 && operand_equal_p (TREE_OPERAND (arg0, 1),
6725 const_binop (MINUS_EXPR, arg2,
6726 integer_one_node, 0), 1))
6727 return pedantic_non_lvalue
6728 (fold (build (MIN_EXPR, type, arg1, arg2)));
6729 break;
6731 case GT_EXPR:
6732 /* If C1 is C2 - 1, this is max(A, C2). */
6733 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6734 && operand_equal_p (TREE_OPERAND (arg0, 1),
6735 const_binop (MINUS_EXPR, arg2,
6736 integer_one_node, 0), 1))
6737 return pedantic_non_lvalue
6738 (fold (build (MAX_EXPR, type, arg1, arg2)));
6739 break;
6741 case GE_EXPR:
6742 /* If C1 is C2 + 1, this is max(A, C2). */
6743 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6744 && operand_equal_p (TREE_OPERAND (arg0, 1),
6745 const_binop (PLUS_EXPR, arg2,
6746 integer_one_node, 0), 1))
6747 return pedantic_non_lvalue
6748 (fold (build (MAX_EXPR, type, arg1, arg2)));
6749 break;
6750 case NE_EXPR:
6751 break;
6752 default:
6753 abort ();
6757 /* If the second operand is simpler than the third, swap them
6758 since that produces better jump optimization results. */
6759 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
6760 || TREE_CODE (arg1) == SAVE_EXPR)
6761 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6762 || DECL_P (TREE_OPERAND (t, 2))
6763 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6765 /* See if this can be inverted. If it can't, possibly because
6766 it was a floating-point inequality comparison, don't do
6767 anything. */
6768 tem = invert_truthvalue (arg0);
6770 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6772 t = build (code, type, tem,
6773 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6774 arg0 = tem;
6775 /* arg1 should be the first argument of the new T. */
6776 arg1 = TREE_OPERAND (t, 1);
6777 STRIP_NOPS (arg1);
6781 /* Convert A ? 1 : 0 to simply A. */
6782 if (integer_onep (TREE_OPERAND (t, 1))
6783 && integer_zerop (TREE_OPERAND (t, 2))
6784 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
6785 call to fold will try to move the conversion inside
6786 a COND, which will recurse. In that case, the COND_EXPR
6787 is probably the best choice, so leave it alone. */
6788 && type == TREE_TYPE (arg0))
6789 return pedantic_non_lvalue (arg0);
6791 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
6792 operation is simply A & 2. */
6794 if (integer_zerop (TREE_OPERAND (t, 2))
6795 && TREE_CODE (arg0) == NE_EXPR
6796 && integer_zerop (TREE_OPERAND (arg0, 1))
6797 && integer_pow2p (arg1)
6798 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
6799 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
6800 arg1, 1))
6801 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
6803 return t;
6805 case COMPOUND_EXPR:
6806 /* When pedantic, a compound expression can be neither an lvalue
6807 nor an integer constant expression. */
6808 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
6809 return t;
6810 /* Don't let (0, 0) be a null pointer constant. */
6811 if (integer_zerop (arg1))
6812 return build1 (NOP_EXPR, type, arg1);
6813 return convert (type, arg1);
6815 case COMPLEX_EXPR:
6816 if (wins)
6817 return build_complex (type, arg0, arg1);
6818 return t;
6820 case REALPART_EXPR:
6821 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6822 return t;
6823 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6824 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6825 TREE_OPERAND (arg0, 1));
6826 else if (TREE_CODE (arg0) == COMPLEX_CST)
6827 return TREE_REALPART (arg0);
6828 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6829 return fold (build (TREE_CODE (arg0), type,
6830 fold (build1 (REALPART_EXPR, type,
6831 TREE_OPERAND (arg0, 0))),
6832 fold (build1 (REALPART_EXPR,
6833 type, TREE_OPERAND (arg0, 1)))));
6834 return t;
6836 case IMAGPART_EXPR:
6837 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6838 return convert (type, integer_zero_node);
6839 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6840 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6841 TREE_OPERAND (arg0, 0));
6842 else if (TREE_CODE (arg0) == COMPLEX_CST)
6843 return TREE_IMAGPART (arg0);
6844 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6845 return fold (build (TREE_CODE (arg0), type,
6846 fold (build1 (IMAGPART_EXPR, type,
6847 TREE_OPERAND (arg0, 0))),
6848 fold (build1 (IMAGPART_EXPR, type,
6849 TREE_OPERAND (arg0, 1)))));
6850 return t;
6852 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
6853 appropriate. */
6854 case CLEANUP_POINT_EXPR:
6855 if (! has_cleanups (arg0))
6856 return TREE_OPERAND (t, 0);
6859 enum tree_code code0 = TREE_CODE (arg0);
6860 int kind0 = TREE_CODE_CLASS (code0);
6861 tree arg00 = TREE_OPERAND (arg0, 0);
6862 tree arg01;
6864 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
6865 return fold (build1 (code0, type,
6866 fold (build1 (CLEANUP_POINT_EXPR,
6867 TREE_TYPE (arg00), arg00))));
6869 if (kind0 == '<' || kind0 == '2'
6870 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
6871 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
6872 || code0 == TRUTH_XOR_EXPR)
6874 arg01 = TREE_OPERAND (arg0, 1);
6876 if (TREE_CONSTANT (arg00)
6877 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
6878 && ! has_cleanups (arg00)))
6879 return fold (build (code0, type, arg00,
6880 fold (build1 (CLEANUP_POINT_EXPR,
6881 TREE_TYPE (arg01), arg01))));
6883 if (TREE_CONSTANT (arg01))
6884 return fold (build (code0, type,
6885 fold (build1 (CLEANUP_POINT_EXPR,
6886 TREE_TYPE (arg00), arg00)),
6887 arg01));
6890 return t;
6893 case CALL_EXPR:
6894 /* Check for a built-in function. */
6895 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
6896 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
6897 == FUNCTION_DECL)
6898 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
6900 tree tmp = fold_builtin (expr);
6901 if (tmp)
6902 return tmp;
6904 return t;
6906 default:
6907 return t;
6908 } /* switch (code) */
6911 /* Determine if first argument is a multiple of second argument. Return 0 if
6912 it is not, or if we cannot easily determine it to be.
6914 An example of the sort of thing we care about (at this point; this routine
6915 could surely be made more general, and expanded to do what the *_DIV_EXPR's
6916 fold cases do now) is discovering that
6918 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6920 is a multiple of
6922 SAVE_EXPR (J * 8)
6924 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
6926 This code also handles discovering that
6928 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6930 is a multiple of 8, so we don't have to worry about dealing with a
6931 possible remainder.
6933 Note that we *look* inside a SAVE_EXPR only to determine how it was
6934 calculated; it is not safe for fold to do much of anything else with the
6935 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
6936 at run time. For example, the latter example above *cannot* be implemented
6937 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
6938 evaluation time of the original SAVE_EXPR is not necessarily the same at
6939 the time the new expression is evaluated. The only optimization of this
6940 sort that would be valid is changing
6942 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
6944 divided by 8 to
6946 SAVE_EXPR (I) * SAVE_EXPR (J)
6948 (where the same SAVE_EXPR (J) is used in the original and the
6949 transformed version). */
static int
multiple_of_p (type, top, bottom)
     tree type;
     tree top;
     tree bottom;
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      /* A product is a multiple of BOTTOM if either factor is.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* A sum or difference is a multiple only if both operands are.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = convert (type,
				     const_binop (LSHIFT_EXPR, size_one_node,
						  op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* ... fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TREE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
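
/* A minimal sketch (the helper name `build_exact_div' is invented for
   illustration): one way a caller might exploit multiple_of_p, using
   EXACT_DIV_EXPR when no remainder is possible.  E.g. for
   TOP = (J << 3) + 16 and BOTTOM = 8 in a signed integer type, the
   PLUS_EXPR and LSHIFT_EXPR cases above prove divisibility.  */

static tree
build_exact_div (type, top, bottom)
     tree type, top, bottom;
{
  if (multiple_of_p (type, top, bottom))
    /* TOP is a provable multiple of BOTTOM, so the division is exact.  */
    return fold (build (EXACT_DIV_EXPR, type, top, bottom));

  return fold (build (TRUNC_DIV_EXPR, type, top, bottom));
}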
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
    case FFS_EXPR:
      return 1;
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      /* The minimum is no larger than either operand, so both must
	 be nonnegative.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      /* The maximum is at least as large as either operand, so it is
	 nonnegative if either one is.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	|| tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
      else
	/* We don't know the sign of `t', so be conservative and
	   return false.  */
	return 0;
    }
}
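
/* A minimal sketch (the name `fold_abs_if_nonneg' is invented for
   illustration): the typical shape of a consumer of
   tree_expr_nonnegative_p, dropping an ABS_EXPR whose operand is
   already known to be nonnegative.  */

static tree
fold_abs_if_nonneg (t)
     tree t;
{
  if (TREE_CODE (t) == ABS_EXPR
      && tree_expr_nonnegative_p (TREE_OPERAND (t, 0)))
    /* |x| == x when x >= 0.  */
    return TREE_OPERAND (t, 0);

  return t;
}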
/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (r)
     rtx r;
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      /* A VOIDmode CONST_DOUBLE is a wide integer; its sign lives in
	 the high word.  */
      if (GET_MODE (r) == VOIDmode)
	return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
	int units, i;
	rtx elt;

	units = CONST_VECTOR_NUNITS (r);

	/* Every element must be nonnegative.  */
	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (r, i);
	    if (! rtl_expr_nonnegative_p (elt))
	      return 0;
	  }

	return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}
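
/* A minimal usage illustration (the function name is invented):
   rtl_expr_nonnegative_p on simple CONST_INTs.  */

static int
rtl_nonneg_examples ()
{
  int pos = rtl_expr_nonnegative_p (GEN_INT (42));	/* yields 1 */
  int neg = rtl_expr_nonnegative_p (GEN_INT (-1));	/* yields 0 */

  return pos && ! neg;
}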