1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
45 #include "config.h"
46 #include "system.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "rtl.h"
50 #include "expr.h"
51 #include "tm_p.h"
52 #include "toplev.h"
53 #include "ggc.h"
54 #include "hashtab.h"
56 static void encode PARAMS ((HOST_WIDE_INT *,
57 unsigned HOST_WIDE_INT,
58 HOST_WIDE_INT));
59 static void decode PARAMS ((HOST_WIDE_INT *,
60 unsigned HOST_WIDE_INT *,
61 HOST_WIDE_INT *));
62 static tree negate_expr PARAMS ((tree));
63 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
64 int));
65 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
66 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
67 static void const_binop_1 PARAMS ((PTR));
68 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
69 static hashval_t size_htab_hash PARAMS ((const void *));
70 static int size_htab_eq PARAMS ((const void *, const void *));
71 static void fold_convert_1 PARAMS ((PTR));
72 static tree fold_convert PARAMS ((tree, tree));
73 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
74 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
75 static int truth_value_p PARAMS ((enum tree_code));
76 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
77 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
78 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
79 static tree omit_one_operand PARAMS ((tree, tree, tree));
80 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
81 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
82 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
83 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
84 tree, tree));
85 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
86 HOST_WIDE_INT *,
87 enum machine_mode *, int *,
88 int *, tree *, tree *));
89 static int all_ones_mask_p PARAMS ((tree, int));
90 static int simple_operand_p PARAMS ((tree));
91 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
92 tree, int));
93 static tree make_range PARAMS ((tree, int *, tree *, tree *));
94 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
95 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
96 int, tree, tree));
97 static tree fold_range_test PARAMS ((tree));
98 static tree unextend PARAMS ((tree, int, int, tree));
99 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
100 static tree optimize_minmax_comparison PARAMS ((tree));
101 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
102 static tree strip_compound_expr PARAMS ((tree, tree));
103 static int multiple_of_p PARAMS ((tree, tree, tree));
104 static tree constant_boolean_node PARAMS ((int, tree));
105 static int count_cond PARAMS ((tree, int));
106 static tree fold_binary_op_with_conditional_arg
107 PARAMS ((enum tree_code, tree, tree, tree, int));
108 static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));
110 #ifndef BRANCH_COST
111 #define BRANCH_COST 1
112 #endif
114 #if defined(HOST_EBCDIC)
115 /* bit 8 is significant in EBCDIC */
116 #define CHARMASK 0xff
117 #else
118 #define CHARMASK 0x7f
119 #endif
121 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
122 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
123 and SUM1. Then this yields nonzero if overflow occurred during the
124 addition.
126 Overflow occurs if A and B have the same sign, but A and SUM differ in
127 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
128 sign. */
129 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
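/* Illustrative sketch (not part of the original file), kept under #if 0:
   a minimal use of OVERFLOW_SUM_SIGN, assuming wraparound 2's complement
   host arithmetic as the comment above does. Two large positive values
   are added; the wrapped sum is negative, so the macro reports it. */
#if 0
static void
overflow_sum_sign_example ()
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  HOST_WIDE_INT b = a;
  /* Add via unsigned to sidestep signed-overflow pitfalls.  */
  HOST_WIDE_INT sum = (HOST_WIDE_INT)
    ((unsigned HOST_WIDE_INT) a + (unsigned HOST_WIDE_INT) b);

  if (! OVERFLOW_SUM_SIGN (a, b, sum))
    abort ();		/* A and B agree in sign but SUM differs.  */
}
#endif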
131 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
132 We do that by representing the two-word integer in 4 words, with only
133 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
134 number. The value of the word is LOWPART + HIGHPART * BASE. */
136 #define LOWPART(x) \
137 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
138 #define HIGHPART(x) \
139 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
140 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
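/* Illustrative sketch (not part of the original file), kept under #if 0:
   the half-word split is exact, i.e. for any word X we have
   X == LOWPART (X) + HIGHPART (X) * BASE, whatever the host word size
   (BASE is 2^32 with a 64-bit HOST_WIDE_INT, 2^16 with a 32-bit one). */
#if 0
static void
lowpart_highpart_example ()
{
  unsigned HOST_WIDE_INT x = 123456789;	/* any sample value */
  unsigned HOST_WIDE_INT lo = LOWPART (x);
  unsigned HOST_WIDE_INT hi = HIGHPART (x);

  if (lo + hi * BASE != x)
    abort ();
}
#endif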
142 /* Unpack a two-word integer into 4 words.
143 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
144 WORDS points to the array of HOST_WIDE_INTs. */
146 static void
147 encode (words, low, hi)
148 HOST_WIDE_INT *words;
149 unsigned HOST_WIDE_INT low;
150 HOST_WIDE_INT hi;
152 words[0] = LOWPART (low);
153 words[1] = HIGHPART (low);
154 words[2] = LOWPART (hi);
155 words[3] = HIGHPART (hi);
158 /* Pack an array of 4 words into a two-word integer.
159 WORDS points to the array of words.
160 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
162 static void
163 decode (words, low, hi)
164 HOST_WIDE_INT *words;
165 unsigned HOST_WIDE_INT *low;
166 HOST_WIDE_INT *hi;
168 *low = words[0] + words[1] * BASE;
169 *hi = words[2] + words[3] * BASE;
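/* Illustrative sketch (not part of the original file), kept under #if 0:
   encode and decode are inverses, so a doubleword survives a round trip
   through the 4-word representation unchanged, negative values included. */
#if 0
static void
encode_decode_example ()
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low = 12345, low2;
  HOST_WIDE_INT hi = -1, hi2;

  encode (words, low, hi);
  decode (words, &low2, &hi2);
  if (low2 != low || hi2 != hi)
    abort ();
}
#endif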
172 /* Make the integer constant T valid for its type by setting to 0 or 1 all
173 the bits in the constant that don't belong in the type.
175 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
176 nonzero, a signed overflow has already occurred in calculating T, so
177 propagate it.
179 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
180 if it exists. */
182 int
183 force_fit_type (t, overflow)
184 tree t;
185 int overflow;
187 unsigned HOST_WIDE_INT low;
188 HOST_WIDE_INT high;
189 unsigned int prec;
191 if (TREE_CODE (t) == REAL_CST)
193 #ifdef CHECK_FLOAT_VALUE
194 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
195 overflow);
196 #endif
197 return overflow;
200 else if (TREE_CODE (t) != INTEGER_CST)
201 return overflow;
203 low = TREE_INT_CST_LOW (t);
204 high = TREE_INT_CST_HIGH (t);
206 if (POINTER_TYPE_P (TREE_TYPE (t)))
207 prec = POINTER_SIZE;
208 else
209 prec = TYPE_PRECISION (TREE_TYPE (t));
211 /* First clear all bits that are beyond the type's precision. */
213 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
215 else if (prec > HOST_BITS_PER_WIDE_INT)
216 TREE_INT_CST_HIGH (t)
217 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
218 else
220 TREE_INT_CST_HIGH (t) = 0;
221 if (prec < HOST_BITS_PER_WIDE_INT)
222 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
225 /* Unsigned types do not suffer sign extension or overflow unless they
226 are a sizetype. */
227 if (TREE_UNSIGNED (TREE_TYPE (t))
228 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
229 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
230 return overflow;
232 /* If the value's sign bit is set, extend the sign. */
233 if (prec != 2 * HOST_BITS_PER_WIDE_INT
234 && (prec > HOST_BITS_PER_WIDE_INT
235 ? 0 != (TREE_INT_CST_HIGH (t)
236 & ((HOST_WIDE_INT) 1
237 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
238 : 0 != (TREE_INT_CST_LOW (t)
239 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
241 /* Value is negative:
242 set to 1 all the bits that are outside this type's precision. */
243 if (prec > HOST_BITS_PER_WIDE_INT)
244 TREE_INT_CST_HIGH (t)
245 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
246 else
248 TREE_INT_CST_HIGH (t) = -1;
249 if (prec < HOST_BITS_PER_WIDE_INT)
250 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
254 /* Return nonzero if signed overflow occurred. */
255 return
256 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
257 != 0);
260 /* Add two doubleword integers with doubleword result.
261 Each argument is given as two `HOST_WIDE_INT' pieces.
262 One argument is L1 and H1; the other, L2 and H2.
263 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
265 int
266 add_double (l1, h1, l2, h2, lv, hv)
267 unsigned HOST_WIDE_INT l1, l2;
268 HOST_WIDE_INT h1, h2;
269 unsigned HOST_WIDE_INT *lv;
270 HOST_WIDE_INT *hv;
272 unsigned HOST_WIDE_INT l;
273 HOST_WIDE_INT h;
275 l = l1 + l2;
276 h = h1 + h2 + (l < l1);
278 *lv = l;
279 *hv = h;
280 return OVERFLOW_SUM_SIGN (h1, h2, h);
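/* Illustrative sketch (not part of the original file), kept under #if 0:
   adding 1 to the most positive doubleword value wraps to the most
   negative one, and add_double's return value flags the signed overflow. */
#if 0
static void
add_double_example ()
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  HOST_WIDE_INT hmax
    = (HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) 1
			<< (HOST_BITS_PER_WIDE_INT - 1)) - 1);

  if (! add_double (~(unsigned HOST_WIDE_INT) 0, hmax,
		    (unsigned HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		    &lv, &hv))
    abort ();		/* signed overflow must be reported */
}
#endif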
283 /* Negate a doubleword integer with doubleword result.
284 Return nonzero if the operation overflows, assuming it's signed.
285 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
286 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
288 int
289 neg_double (l1, h1, lv, hv)
290 unsigned HOST_WIDE_INT l1;
291 HOST_WIDE_INT h1;
292 unsigned HOST_WIDE_INT *lv;
293 HOST_WIDE_INT *hv;
295 if (l1 == 0)
297 *lv = 0;
298 *hv = - h1;
299 return (*hv & h1) < 0;
301 else
303 *lv = -l1;
304 *hv = ~h1;
305 return 0;
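/* Illustrative sketch (not part of the original file), kept under #if 0:
   the most negative doubleword is its own negation, the one input for
   which neg_double reports an overflow. */
#if 0
static void
neg_double_example ()
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  HOST_WIDE_INT hmin
    = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1
		       << (HOST_BITS_PER_WIDE_INT - 1));

  if (! neg_double ((unsigned HOST_WIDE_INT) 0, hmin, &lv, &hv)
      || hv != hmin || lv != 0)
    abort ();
}
#endif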
309 /* Multiply two doubleword integers with doubleword result.
310 Return nonzero if the operation overflows, assuming it's signed.
311 Each argument is given as two `HOST_WIDE_INT' pieces.
312 One argument is L1 and H1; the other, L2 and H2.
313 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
315 int
316 mul_double (l1, h1, l2, h2, lv, hv)
317 unsigned HOST_WIDE_INT l1, l2;
318 HOST_WIDE_INT h1, h2;
319 unsigned HOST_WIDE_INT *lv;
320 HOST_WIDE_INT *hv;
322 HOST_WIDE_INT arg1[4];
323 HOST_WIDE_INT arg2[4];
324 HOST_WIDE_INT prod[4 * 2];
325 unsigned HOST_WIDE_INT carry;
326 int i, j, k;
327 unsigned HOST_WIDE_INT toplow, neglow;
328 HOST_WIDE_INT tophigh, neghigh;
330 encode (arg1, l1, h1);
331 encode (arg2, l2, h2);
333 memset ((char *) prod, 0, sizeof prod);
335 for (i = 0; i < 4; i++)
337 carry = 0;
338 for (j = 0; j < 4; j++)
340 k = i + j;
341 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
342 carry += arg1[i] * arg2[j];
343 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
344 carry += prod[k];
345 prod[k] = LOWPART (carry);
346 carry = HIGHPART (carry);
348 prod[i + 4] = carry;
351 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
353 /* Check for overflow by calculating the top half of the answer in full;
354 it should agree with the low half's sign bit. */
355 decode (prod + 4, &toplow, &tophigh);
356 if (h1 < 0)
358 neg_double (l2, h2, &neglow, &neghigh);
359 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
361 if (h2 < 0)
363 neg_double (l1, h1, &neglow, &neghigh);
364 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
366 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
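/* Illustrative sketch (not part of the original file), kept under #if 0:
   a small product that cannot overflow; the top half of the 8-word
   product is all zero, agreeing with the low half's sign bit. */
#if 0
static void
mul_double_example ()
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  if (mul_double ((unsigned HOST_WIDE_INT) 3, (HOST_WIDE_INT) 0,
		  (unsigned HOST_WIDE_INT) 5, (HOST_WIDE_INT) 0,
		  &lv, &hv) != 0
      || lv != 15 || hv != 0)
    abort ();
}
#endif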
369 /* Shift the doubleword integer in L1, H1 left by COUNT places
370 keeping only PREC bits of result.
371 Shift right if COUNT is negative.
372 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
373 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
375 void
376 lshift_double (l1, h1, count, prec, lv, hv, arith)
377 unsigned HOST_WIDE_INT l1;
378 HOST_WIDE_INT h1, count;
379 unsigned int prec;
380 unsigned HOST_WIDE_INT *lv;
381 HOST_WIDE_INT *hv;
382 int arith;
384 unsigned HOST_WIDE_INT signmask;
386 if (count < 0)
388 rshift_double (l1, h1, -count, prec, lv, hv, arith);
389 return;
392 #ifdef SHIFT_COUNT_TRUNCATED
393 if (SHIFT_COUNT_TRUNCATED)
394 count %= prec;
395 #endif
397 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
399 /* Shifting by the host word size is undefined according to the
400 ANSI standard, so we must handle this as a special case. */
401 *hv = 0;
402 *lv = 0;
404 else if (count >= HOST_BITS_PER_WIDE_INT)
406 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
407 *lv = 0;
409 else
411 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
412 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
413 *lv = l1 << count;
416 /* Sign extend all bits that are beyond the precision. */
418 signmask = -((prec > HOST_BITS_PER_WIDE_INT
419 ? (*hv >> (prec - HOST_BITS_PER_WIDE_INT - 1))
420 : (*lv >> (prec - 1))) & 1);
422 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
424 else if (prec >= HOST_BITS_PER_WIDE_INT)
426 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
427 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
429 else
431 *hv = signmask;
432 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
433 *lv |= signmask << prec;
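/* Illustrative sketch (not part of the original file), kept under #if 0:
   shifting 1 left by exactly HOST_BITS_PER_WIDE_INT moves the bit into
   the low end of the high word; with PREC spanning the whole doubleword
   nothing is truncated or sign-extended. */
#if 0
static void
lshift_double_example ()
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  lshift_double ((unsigned HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		 (HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT,
		 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 0);
  if (lv != 0 || hv != 1)
    abort ();
}
#endif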
437 /* Shift the doubleword integer in L1, H1 right by COUNT places
438 keeping only PREC bits of result. COUNT must be positive.
439 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
440 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
442 void
443 rshift_double (l1, h1, count, prec, lv, hv, arith)
444 unsigned HOST_WIDE_INT l1;
445 HOST_WIDE_INT h1, count;
446 unsigned int prec;
447 unsigned HOST_WIDE_INT *lv;
448 HOST_WIDE_INT *hv;
449 int arith;
451 unsigned HOST_WIDE_INT signmask;
453 signmask = (arith
454 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
455 : 0);
457 #ifdef SHIFT_COUNT_TRUNCATED
458 if (SHIFT_COUNT_TRUNCATED)
459 count %= prec;
460 #endif
462 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
464 /* Shifting by the host word size is undefined according to the
465 ANSI standard, so we must handle this as a special case. */
466 *hv = 0;
467 *lv = 0;
469 else if (count >= HOST_BITS_PER_WIDE_INT)
471 *hv = 0;
472 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
474 else
476 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
477 *lv = ((l1 >> count)
478 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
481 /* Zero / sign extend all bits that are beyond the precision. */
483 if (count >= (HOST_WIDE_INT)prec)
485 *hv = signmask;
486 *lv = signmask;
488 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
490 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
492 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
493 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
495 else
497 *hv = signmask;
498 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
499 *lv |= signmask << (prec - count);
503 /* Rotate the doubleword integer in L1, H1 left by COUNT places
504 keeping only PREC bits of result.
505 Rotate right if COUNT is negative.
506 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
508 void
509 lrotate_double (l1, h1, count, prec, lv, hv)
510 unsigned HOST_WIDE_INT l1;
511 HOST_WIDE_INT h1, count;
512 unsigned int prec;
513 unsigned HOST_WIDE_INT *lv;
514 HOST_WIDE_INT *hv;
516 unsigned HOST_WIDE_INT s1l, s2l;
517 HOST_WIDE_INT s1h, s2h;
519 count %= prec;
520 if (count < 0)
521 count += prec;
523 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
524 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
525 *lv = s1l | s2l;
526 *hv = s1h | s2h;
529 /* Rotate the doubleword integer in L1, H1 left by COUNT places
530 keeping only PREC bits of result. COUNT must be positive.
531 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
533 void
534 rrotate_double (l1, h1, count, prec, lv, hv)
535 unsigned HOST_WIDE_INT l1;
536 HOST_WIDE_INT h1, count;
537 unsigned int prec;
538 unsigned HOST_WIDE_INT *lv;
539 HOST_WIDE_INT *hv;
541 unsigned HOST_WIDE_INT s1l, s2l;
542 HOST_WIDE_INT s1h, s2h;
544 count %= prec;
545 if (count < 0)
546 count += prec;
548 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
549 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
550 *lv = s1l | s2l;
551 *hv = s1h | s2h;
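/* Illustrative sketch (not part of the original file), kept under #if 0:
   the two rotates are exact inverses, so rotating left by COUNT and then
   right by COUNT restores the original doubleword. */
#if 0
static void
rotate_example ()
{
  unsigned HOST_WIDE_INT lv, lv2;
  HOST_WIDE_INT hv, hv2;

  lrotate_double ((unsigned HOST_WIDE_INT) 0x5, (HOST_WIDE_INT) 0,
		  (HOST_WIDE_INT) 4, 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv);
  rrotate_double (lv, hv,
		  (HOST_WIDE_INT) 4, 2 * HOST_BITS_PER_WIDE_INT, &lv2, &hv2);
  if (lv2 != 0x5 || hv2 != 0)
    abort ();
}
#endif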
554 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
555 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
556 CODE is a tree code for a kind of division, one of
557 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
558 or EXACT_DIV_EXPR
559 It controls how the quotient is rounded to an integer.
560 Return nonzero if the operation overflows.
561 UNS nonzero says do unsigned division. */
563 int
564 div_and_round_double (code, uns,
565 lnum_orig, hnum_orig, lden_orig, hden_orig,
566 lquo, hquo, lrem, hrem)
567 enum tree_code code;
568 int uns;
569 unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
570 HOST_WIDE_INT hnum_orig;
571 unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
572 HOST_WIDE_INT hden_orig;
573 unsigned HOST_WIDE_INT *lquo, *lrem;
574 HOST_WIDE_INT *hquo, *hrem;
576 int quo_neg = 0;
577 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
578 HOST_WIDE_INT den[4], quo[4];
579 int i, j;
580 unsigned HOST_WIDE_INT work;
581 unsigned HOST_WIDE_INT carry = 0;
582 unsigned HOST_WIDE_INT lnum = lnum_orig;
583 HOST_WIDE_INT hnum = hnum_orig;
584 unsigned HOST_WIDE_INT lden = lden_orig;
585 HOST_WIDE_INT hden = hden_orig;
586 int overflow = 0;
588 if (hden == 0 && lden == 0)
589 overflow = 1, lden = 1;
591 /* calculate quotient sign and convert operands to unsigned. */
592 if (!uns)
594 if (hnum < 0)
596 quo_neg = ~ quo_neg;
597 /* (minimum integer) / (-1) is the only overflow case. */
598 if (neg_double (lnum, hnum, &lnum, &hnum)
599 && ((HOST_WIDE_INT) lden & hden) == -1)
600 overflow = 1;
602 if (hden < 0)
604 quo_neg = ~ quo_neg;
605 neg_double (lden, hden, &lden, &hden);
609 if (hnum == 0 && hden == 0)
610 { /* single precision */
611 *hquo = *hrem = 0;
612 /* This unsigned division rounds toward zero. */
613 *lquo = lnum / lden;
614 goto finish_up;
617 if (hnum == 0)
618 { /* trivial case: dividend < divisor */
619 /* hden != 0 already checked. */
620 *hquo = *lquo = 0;
621 *hrem = hnum;
622 *lrem = lnum;
623 goto finish_up;
626 memset ((char *) quo, 0, sizeof quo);
628 memset ((char *) num, 0, sizeof num); /* to zero 9th element */
629 memset ((char *) den, 0, sizeof den);
631 encode (num, lnum, hnum);
632 encode (den, lden, hden);
634 /* Special code for when the divisor < BASE. */
635 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
637 /* hnum != 0 already checked. */
638 for (i = 4 - 1; i >= 0; i--)
640 work = num[i] + carry * BASE;
641 quo[i] = work / lden;
642 carry = work % lden;
645 else
647 /* Full double precision division,
648 with thanks to Don Knuth's "Seminumerical Algorithms". */
649 int num_hi_sig, den_hi_sig;
650 unsigned HOST_WIDE_INT quo_est, scale;
652 /* Find the highest non-zero divisor digit. */
653 for (i = 4 - 1;; i--)
654 if (den[i] != 0)
656 den_hi_sig = i;
657 break;
660 /* Insure that the first digit of the divisor is at least BASE/2.
661 This is required by the quotient digit estimation algorithm. */
663 scale = BASE / (den[den_hi_sig] + 1);
664 if (scale > 1)
665 { /* scale divisor and dividend */
666 carry = 0;
667 for (i = 0; i <= 4 - 1; i++)
669 work = (num[i] * scale) + carry;
670 num[i] = LOWPART (work);
671 carry = HIGHPART (work);
674 num[4] = carry;
675 carry = 0;
676 for (i = 0; i <= 4 - 1; i++)
678 work = (den[i] * scale) + carry;
679 den[i] = LOWPART (work);
680 carry = HIGHPART (work);
681 if (den[i] != 0) den_hi_sig = i;
685 num_hi_sig = 4;
687 /* Main loop */
688 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
690 /* Guess the next quotient digit, quo_est, by dividing the first
691 two remaining dividend digits by the high order quotient digit.
692 quo_est is never low and is at most 2 high. */
693 unsigned HOST_WIDE_INT tmp;
695 num_hi_sig = i + den_hi_sig + 1;
696 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
697 if (num[num_hi_sig] != den[den_hi_sig])
698 quo_est = work / den[den_hi_sig];
699 else
700 quo_est = BASE - 1;
702 /* Refine quo_est so it's usually correct, and at most one high. */
703 tmp = work - quo_est * den[den_hi_sig];
704 if (tmp < BASE
705 && (den[den_hi_sig - 1] * quo_est
706 > (tmp * BASE + num[num_hi_sig - 2])))
707 quo_est--;
709 /* Try QUO_EST as the quotient digit, by multiplying the
710 divisor by QUO_EST and subtracting from the remaining dividend.
711 Keep in mind that QUO_EST is the I - 1st digit. */
713 carry = 0;
714 for (j = 0; j <= den_hi_sig; j++)
716 work = quo_est * den[j] + carry;
717 carry = HIGHPART (work);
718 work = num[i + j] - LOWPART (work);
719 num[i + j] = LOWPART (work);
720 carry += HIGHPART (work) != 0;
723 /* If quo_est was high by one, then num[i] went negative and
724 we need to correct things. */
725 if (num[num_hi_sig] < carry)
727 quo_est--;
728 carry = 0; /* add divisor back in */
729 for (j = 0; j <= den_hi_sig; j++)
731 work = num[i + j] + den[j] + carry;
732 carry = HIGHPART (work);
733 num[i + j] = LOWPART (work);
736 num [num_hi_sig] += carry;
739 /* Store the quotient digit. */
740 quo[i] = quo_est;
744 decode (quo, lquo, hquo);
746 finish_up:
747 /* if result is negative, make it so. */
748 if (quo_neg)
749 neg_double (*lquo, *hquo, lquo, hquo);
751 /* compute trial remainder: rem = num - (quo * den) */
752 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
753 neg_double (*lrem, *hrem, lrem, hrem);
754 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
756 switch (code)
758 case TRUNC_DIV_EXPR:
759 case TRUNC_MOD_EXPR: /* round toward zero */
760 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
761 return overflow;
763 case FLOOR_DIV_EXPR:
764 case FLOOR_MOD_EXPR: /* round toward negative infinity */
765 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
767 /* quo = quo - 1; */
768 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
769 lquo, hquo);
771 else
772 return overflow;
773 break;
775 case CEIL_DIV_EXPR:
776 case CEIL_MOD_EXPR: /* round toward positive infinity */
777 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
779 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
780 lquo, hquo);
782 else
783 return overflow;
784 break;
786 case ROUND_DIV_EXPR:
787 case ROUND_MOD_EXPR: /* round to closest integer */
789 unsigned HOST_WIDE_INT labs_rem = *lrem;
790 HOST_WIDE_INT habs_rem = *hrem;
791 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
792 HOST_WIDE_INT habs_den = hden, htwice;
794 /* Get absolute values */
795 if (*hrem < 0)
796 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
797 if (hden < 0)
798 neg_double (lden, hden, &labs_den, &habs_den);
800 /* If (2 * abs (lrem) >= abs (lden)) */
801 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
802 labs_rem, habs_rem, &ltwice, &htwice);
804 if (((unsigned HOST_WIDE_INT) habs_den
805 < (unsigned HOST_WIDE_INT) htwice)
806 || (((unsigned HOST_WIDE_INT) habs_den
807 == (unsigned HOST_WIDE_INT) htwice)
808 && (labs_den < ltwice)))
810 if (*hquo < 0)
811 /* quo = quo - 1; */
812 add_double (*lquo, *hquo,
813 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
814 else
815 /* quo = quo + 1; */
816 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
817 lquo, hquo);
819 else
820 return overflow;
822 break;
824 default:
825 abort ();
828 /* compute true remainder: rem = num - (quo * den) */
829 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
830 neg_double (*lrem, *hrem, lrem, hrem);
831 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
832 return overflow;
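/* Illustrative sketch (not part of the original file), kept under #if 0:
   -7 / 2 under two of the rounding modes handled above. Truncation gives
   quotient -3 with remainder -1; flooring gives -4 with remainder 1. */
#if 0
static void
div_round_example ()
{
  unsigned HOST_WIDE_INT lq, lr;
  HOST_WIDE_INT hq, hr;

  div_and_round_double (TRUNC_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
			(unsigned HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
			&lq, &hq, &lr, &hr);
  if (lq != (unsigned HOST_WIDE_INT) -3 || lr != (unsigned HOST_WIDE_INT) -1)
    abort ();

  div_and_round_double (FLOOR_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
			(unsigned HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
			&lq, &hq, &lr, &hr);
  if (lq != (unsigned HOST_WIDE_INT) -4 || lr != 1)
    abort ();
}
#endif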
835 /* Given T, an expression, return the negation of T. Allow for T to be
836 null, in which case return null. */
838 static tree
839 negate_expr (t)
840 tree t;
842 tree type;
843 tree tem;
845 if (t == 0)
846 return 0;
848 type = TREE_TYPE (t);
849 STRIP_SIGN_NOPS (t);
851 switch (TREE_CODE (t))
853 case INTEGER_CST:
854 case REAL_CST:
855 if (! TREE_UNSIGNED (type)
856 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
857 && ! TREE_OVERFLOW (tem))
858 return tem;
859 break;
861 case NEGATE_EXPR:
862 return convert (type, TREE_OPERAND (t, 0));
864 case MINUS_EXPR:
865 /* - (A - B) -> B - A */
866 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
867 return convert (type,
868 fold (build (MINUS_EXPR, TREE_TYPE (t),
869 TREE_OPERAND (t, 1),
870 TREE_OPERAND (t, 0))));
871 break;
873 default:
874 break;
877 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
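/* Illustrative sketch (not part of the original file), kept under #if 0:
   for integral operands, negate_expr rewrites -(A - B) as B - A instead
   of stacking a NEGATE_EXPR on top of the subtraction. A and B are
   hypothetical trees supplied by the caller. */
#if 0
static tree
negate_expr_example (a, b)
     tree a, b;
{
  tree diff = build (MINUS_EXPR, integer_type_node, a, b);

  /* Yields the equivalent of fold (build (MINUS_EXPR, ..., b, a)).  */
  return negate_expr (diff);
}
#endif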
880 /* Split a tree IN into a constant, literal and variable parts that could be
881 combined with CODE to make IN. "constant" means an expression with
882 TREE_CONSTANT but that isn't an actual constant. CODE must be a
883 commutative arithmetic operation. Store the constant part into *CONP,
884 the literal in &LITP and return the variable part. If a part isn't
885 present, set it to null. If the tree does not decompose in this way,
886 return the entire tree as the variable part and the other parts as null.
888 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
889 case, we negate an operand that was subtracted. If NEGATE_P is true, we
890 are negating all of IN.
892 If IN is itself a literal or constant, return it as appropriate.
894 Note that we do not guarantee that any of the three values will be the
895 same type as IN, but they will have the same signedness and mode. */
897 static tree
898 split_tree (in, code, conp, litp, negate_p)
899 tree in;
900 enum tree_code code;
901 tree *conp, *litp;
902 int negate_p;
904 tree var = 0;
906 *conp = 0;
907 *litp = 0;
909 /* Strip any conversions that don't change the machine mode or signedness. */
910 STRIP_SIGN_NOPS (in);
912 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
913 *litp = in;
914 else if (TREE_CODE (in) == code
915 || (! FLOAT_TYPE_P (TREE_TYPE (in))
916 /* We can associate addition and subtraction together (even
917 though the C standard doesn't say so) for integers because
918 the value is not affected. For reals, the value might be
919 affected, so we can't. */
920 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
921 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
923 tree op0 = TREE_OPERAND (in, 0);
924 tree op1 = TREE_OPERAND (in, 1);
925 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
926 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
928 /* First see if either of the operands is a literal, then a constant. */
929 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
930 *litp = op0, op0 = 0;
931 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
932 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
934 if (op0 != 0 && TREE_CONSTANT (op0))
935 *conp = op0, op0 = 0;
936 else if (op1 != 0 && TREE_CONSTANT (op1))
937 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
939 /* If we haven't dealt with either operand, this is not a case we can
940 decompose. Otherwise, VAR is either of the ones remaining, if any. */
941 if (op0 != 0 && op1 != 0)
942 var = in;
943 else if (op0 != 0)
944 var = op0;
945 else
946 var = op1, neg_var_p = neg1_p;
948 /* Now do any needed negations. */
949 if (neg_litp_p) *litp = negate_expr (*litp);
950 if (neg_conp_p) *conp = negate_expr (*conp);
951 if (neg_var_p) var = negate_expr (var);
953 else if (TREE_CONSTANT (in))
954 *conp = in;
955 else
956 var = in;
958 if (negate_p)
960 var = negate_expr (var);
961 *conp = negate_expr (*conp);
962 *litp = negate_expr (*litp);
965 return var;
968 /* Re-associate trees split by the above function. T1 and T2 are either
969 expressions to associate or null. Return the new expression, if any. If
970 we build an operation, do it in TYPE and with CODE, except if CODE is a
971 MINUS_EXPR, in which case we use PLUS_EXPR since split_tree will already
972 have taken care of the negations. */
974 static tree
975 associate_trees (t1, t2, code, type)
976 tree t1, t2;
977 enum tree_code code;
978 tree type;
980 if (t1 == 0)
981 return t2;
982 else if (t2 == 0)
983 return t1;
985 if (code == MINUS_EXPR)
986 code = PLUS_EXPR;
988 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
989 try to fold this since we will have infinite recursion. But do
990 deal with any NEGATE_EXPRs. */
991 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
992 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
994 if (TREE_CODE (t1) == NEGATE_EXPR)
995 return build (MINUS_EXPR, type, convert (type, t2),
996 convert (type, TREE_OPERAND (t1, 0)));
997 else if (TREE_CODE (t2) == NEGATE_EXPR)
998 return build (MINUS_EXPR, type, convert (type, t1),
999 convert (type, TREE_OPERAND (t2, 0)));
1000 else
1001 return build (code, type, convert (type, t1), convert (type, t2));
1004 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1007 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1008 to produce a new constant.
1010 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1012 static tree
1013 int_const_binop (code, arg1, arg2, notrunc)
1014 enum tree_code code;
1015 tree arg1, arg2;
1016 int notrunc;
1018 unsigned HOST_WIDE_INT int1l, int2l;
1019 HOST_WIDE_INT int1h, int2h;
1020 unsigned HOST_WIDE_INT low;
1021 HOST_WIDE_INT hi;
1022 unsigned HOST_WIDE_INT garbagel;
1023 HOST_WIDE_INT garbageh;
1024 tree t;
1025 tree type = TREE_TYPE (arg1);
1026 int uns = TREE_UNSIGNED (type);
1027 int is_sizetype
1028 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1029 int overflow = 0;
1030 int no_overflow = 0;
1032 int1l = TREE_INT_CST_LOW (arg1);
1033 int1h = TREE_INT_CST_HIGH (arg1);
1034 int2l = TREE_INT_CST_LOW (arg2);
1035 int2h = TREE_INT_CST_HIGH (arg2);
1037 switch (code)
1039 case BIT_IOR_EXPR:
1040 low = int1l | int2l, hi = int1h | int2h;
1041 break;
1043 case BIT_XOR_EXPR:
1044 low = int1l ^ int2l, hi = int1h ^ int2h;
1045 break;
1047 case BIT_AND_EXPR:
1048 low = int1l & int2l, hi = int1h & int2h;
1049 break;
1051 case BIT_ANDTC_EXPR:
1052 low = int1l & ~int2l, hi = int1h & ~int2h;
1053 break;
1055 case RSHIFT_EXPR:
1056 int2l = -int2l;
1057 case LSHIFT_EXPR:
1058 /* It's unclear from the C standard whether shifts can overflow.
1059 The following code ignores overflow; perhaps a C standard
1060 interpretation ruling is needed. */
1061 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1062 &low, &hi, !uns);
1063 no_overflow = 1;
1064 break;
1066 case RROTATE_EXPR:
1067 int2l = - int2l;
1068 case LROTATE_EXPR:
1069 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1070 &low, &hi);
1071 break;
1073 case PLUS_EXPR:
1074 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1075 break;
1077 case MINUS_EXPR:
1078 neg_double (int2l, int2h, &low, &hi);
1079 add_double (int1l, int1h, low, hi, &low, &hi);
1080 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1081 break;
1083 case MULT_EXPR:
1084 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1085 break;
1087 case TRUNC_DIV_EXPR:
1088 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1089 case EXACT_DIV_EXPR:
1090 /* This is a shortcut for a common special case. */
1091 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1092 && ! TREE_CONSTANT_OVERFLOW (arg1)
1093 && ! TREE_CONSTANT_OVERFLOW (arg2)
1094 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1096 if (code == CEIL_DIV_EXPR)
1097 int1l += int2l - 1;
1099 low = int1l / int2l, hi = 0;
1100 break;
1103 /* ... fall through ... */
1105 case ROUND_DIV_EXPR:
1106 if (int2h == 0 && int2l == 1)
1108 low = int1l, hi = int1h;
1109 break;
1111 if (int1l == int2l && int1h == int2h
1112 && ! (int1l == 0 && int1h == 0))
1114 low = 1, hi = 0;
1115 break;
1117 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1118 &low, &hi, &garbagel, &garbageh);
1119 break;
1121 case TRUNC_MOD_EXPR:
1122 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1123 /* This is a shortcut for a common special case. */
1124 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1125 && ! TREE_CONSTANT_OVERFLOW (arg1)
1126 && ! TREE_CONSTANT_OVERFLOW (arg2)
1127 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1129 if (code == CEIL_MOD_EXPR)
1130 int1l += int2l - 1;
1131 low = int1l % int2l, hi = 0;
1132 break;
1135 /* ... fall through ... */
1137 case ROUND_MOD_EXPR:
1138 overflow = div_and_round_double (code, uns,
1139 int1l, int1h, int2l, int2h,
1140 &garbagel, &garbageh, &low, &hi);
1141 break;
1143 case MIN_EXPR:
1144 case MAX_EXPR:
1145 if (uns)
1146 low = (((unsigned HOST_WIDE_INT) int1h
1147 < (unsigned HOST_WIDE_INT) int2h)
1148 || (((unsigned HOST_WIDE_INT) int1h
1149 == (unsigned HOST_WIDE_INT) int2h)
1150 && int1l < int2l));
1151 else
1152 low = (int1h < int2h
1153 || (int1h == int2h && int1l < int2l));
1155 if (low == (code == MIN_EXPR))
1156 low = int1l, hi = int1h;
1157 else
1158 low = int2l, hi = int2h;
1159 break;
1161 default:
1162 abort ();
1165 /* If this is for a sizetype, can be represented as one (signed)
1166 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1167 constants. */
1168 if (is_sizetype
1169 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1170 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1171 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1172 return size_int_type_wide (low, type);
1173 else
1175 t = build_int_2 (low, hi);
1176 TREE_TYPE (t) = TREE_TYPE (arg1);
1179 TREE_OVERFLOW (t)
1180 = ((notrunc
1181 ? (!uns || is_sizetype) && overflow
1182 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1183 && ! no_overflow))
1184 | TREE_OVERFLOW (arg1)
1185 | TREE_OVERFLOW (arg2));
1187 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1188 So check if force_fit_type truncated the value. */
1189 if (is_sizetype
1190 && ! TREE_OVERFLOW (t)
1191 && (TREE_INT_CST_HIGH (t) != hi
1192 || TREE_INT_CST_LOW (t) != low))
1193 TREE_OVERFLOW (t) = 1;
1195 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1196 | TREE_CONSTANT_OVERFLOW (arg1)
1197 | TREE_CONSTANT_OVERFLOW (arg2));
1198 return t;
1201 /* Define input and output argument for const_binop_1. */
1202 struct cb_args
1204 enum tree_code code; /* Input: tree code for operation. */
1205 tree type; /* Input: tree type for operation. */
1206 REAL_VALUE_TYPE d1, d2; /* Input: floating point operands. */
1207 tree t; /* Output: constant for result. */
1210 /* Do the real arithmetic for const_binop while protected by a
1211 float overflow handler. */
1213 static void
1214 const_binop_1 (data)
1215 PTR data;
1217 struct cb_args *args = (struct cb_args *) data;
1218 REAL_VALUE_TYPE value;
1220 REAL_ARITHMETIC (value, args->code, args->d1, args->d2);
1222 args->t
1223 = build_real (args->type,
1224 real_value_truncate (TYPE_MODE (args->type), value));
1227 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1228 constant. We assume ARG1 and ARG2 have the same data type, or at least
1229 are the same kind of constant and the same machine mode.
1231 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1233 static tree
1234 const_binop (code, arg1, arg2, notrunc)
1235 enum tree_code code;
1236 tree arg1, arg2;
1237 int notrunc;
1239 STRIP_NOPS (arg1);
1240 STRIP_NOPS (arg2);
1242 if (TREE_CODE (arg1) == INTEGER_CST)
1243 return int_const_binop (code, arg1, arg2, notrunc);
1245 if (TREE_CODE (arg1) == REAL_CST)
1247 REAL_VALUE_TYPE d1;
1248 REAL_VALUE_TYPE d2;
1249 int overflow = 0;
1250 tree t;
1251 struct cb_args args;
1253 d1 = TREE_REAL_CST (arg1);
1254 d2 = TREE_REAL_CST (arg2);
1256 /* If either operand is a NaN, just return it. Otherwise, set up
1257 for floating-point trap; we return an overflow. */
1258 if (REAL_VALUE_ISNAN (d1))
1259 return arg1;
1260 else if (REAL_VALUE_ISNAN (d2))
1261 return arg2;
1263 /* Setup input for const_binop_1() */
1264 args.type = TREE_TYPE (arg1);
1265 args.d1 = d1;
1266 args.d2 = d2;
1267 args.code = code;
1269 if (do_float_handler (const_binop_1, (PTR) &args))
1270 /* Receive output from const_binop_1. */
1271 t = args.t;
1272 else
1274 /* We got an exception from const_binop_1. */
1275 t = copy_node (arg1);
1276 overflow = 1;
1279 TREE_OVERFLOW (t)
1280 = (force_fit_type (t, overflow)
1281 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1282 TREE_CONSTANT_OVERFLOW (t)
1283 = TREE_OVERFLOW (t)
1284 | TREE_CONSTANT_OVERFLOW (arg1)
1285 | TREE_CONSTANT_OVERFLOW (arg2);
1286 return t;
1288 if (TREE_CODE (arg1) == COMPLEX_CST)
1290 tree type = TREE_TYPE (arg1);
1291 tree r1 = TREE_REALPART (arg1);
1292 tree i1 = TREE_IMAGPART (arg1);
1293 tree r2 = TREE_REALPART (arg2);
1294 tree i2 = TREE_IMAGPART (arg2);
1295 tree t;
1297 switch (code)
1299 case PLUS_EXPR:
1300 t = build_complex (type,
1301 const_binop (PLUS_EXPR, r1, r2, notrunc),
1302 const_binop (PLUS_EXPR, i1, i2, notrunc));
1303 break;
1305 case MINUS_EXPR:
1306 t = build_complex (type,
1307 const_binop (MINUS_EXPR, r1, r2, notrunc),
1308 const_binop (MINUS_EXPR, i1, i2, notrunc));
1309 break;
1311 case MULT_EXPR:
1312 t = build_complex (type,
1313 const_binop (MINUS_EXPR,
1314 const_binop (MULT_EXPR,
1315 r1, r2, notrunc),
1316 const_binop (MULT_EXPR,
1317 i1, i2, notrunc),
1318 notrunc),
1319 const_binop (PLUS_EXPR,
1320 const_binop (MULT_EXPR,
1321 r1, i2, notrunc),
1322 const_binop (MULT_EXPR,
1323 i1, r2, notrunc),
1324 notrunc));
1325 break;
1327 case RDIV_EXPR:
1329 tree magsquared
1330 = const_binop (PLUS_EXPR,
1331 const_binop (MULT_EXPR, r2, r2, notrunc),
1332 const_binop (MULT_EXPR, i2, i2, notrunc),
1333 notrunc);
1335 t = build_complex (type,
1336 const_binop
1337 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1338 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1339 const_binop (PLUS_EXPR,
1340 const_binop (MULT_EXPR, r1, r2,
1341 notrunc),
1342 const_binop (MULT_EXPR, i1, i2,
1343 notrunc),
1344 notrunc),
1345 magsquared, notrunc),
1346 const_binop
1347 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1348 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1349 const_binop (MINUS_EXPR,
1350 const_binop (MULT_EXPR, i1, r2,
1351 notrunc),
1352 const_binop (MULT_EXPR, r1, i2,
1353 notrunc),
1354 notrunc),
1355 magsquared, notrunc));
1357 break;
1359 default:
1360 abort ();
1362 return t;
1364 return 0;
1367 /* These are the hash table functions for the hash table of INTEGER_CST
1368 nodes of a sizetype. */
1370 /* Return the hash code for X, an INTEGER_CST. */
1372 static hashval_t
1373 size_htab_hash (x)
1374 const void *x;
1376 tree t = (tree) x;
1378 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1379 ^ (hashval_t) ((long) TREE_TYPE (t) >> 3)
1380 ^ (TREE_OVERFLOW (t) << 20));
1383 /* Return non-zero if the value represented by *X (an INTEGER_CST tree node)
1384 is the same as that given by *Y, also an INTEGER_CST tree node. */
1386 static int
1387 size_htab_eq (x, y)
1388 const void *x;
1389 const void *y;
1391 tree xt = (tree) x;
1392 tree yt = (tree) y;
1394 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1395 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1396 && TREE_TYPE (xt) == TREE_TYPE (yt)
1397 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1400 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1401 bits are given by NUMBER and of the sizetype represented by KIND. */
1403 tree
1404 size_int_wide (number, kind)
1405 HOST_WIDE_INT number;
1406 enum size_type_kind kind;
1408 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1411 /* Likewise, but the desired type is specified explicitly. */
1413 tree
1414 size_int_type_wide (number, type)
1415 HOST_WIDE_INT number;
1416 tree type;
1418 static htab_t size_htab = 0;
1419 static tree new_const = 0;
1420 PTR *slot;
1422 if (size_htab == 0)
1424 size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
1425 ggc_add_deletable_htab (size_htab, NULL, NULL);
1426 new_const = make_node (INTEGER_CST);
1427 ggc_add_tree_root (&new_const, 1);
1430 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1431 hash table, we return the value from the hash table. Otherwise, we
1432 place that in the hash table and make a new node for the next time. */
1433 TREE_INT_CST_LOW (new_const) = number;
1434 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1435 TREE_TYPE (new_const) = type;
1436 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1437 = force_fit_type (new_const, 0);
1439 slot = htab_find_slot (size_htab, new_const, INSERT);
1440 if (*slot == 0)
1442 tree t = new_const;
1444 *slot = (PTR) new_const;
1445 new_const = make_node (INTEGER_CST);
1446 return t;
1448 else
1449 return (tree) *slot;
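/* Illustrative sketch (not part of the original file), kept under #if 0:
   because size_int_type_wide interns its results in the static size_htab,
   two requests for the same value and type return the identical node,
   not merely equal ones. */
#if 0
static void
size_int_cache_example ()
{
  tree a = size_int_type_wide ((HOST_WIDE_INT) 4, sizetype);
  tree b = size_int_type_wide ((HOST_WIDE_INT) 4, sizetype);

  if (a != b)
    abort ();		/* pointer-identical, thanks to the hash table */
}
#endif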
1452 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1453 is a tree code. The type of the result is taken from the operands.
1454 Both must be the same integer type, and it must be a size type.
1455 If the operands are constant, so is the result. */
1457 tree
1458 size_binop (code, arg0, arg1)
1459 enum tree_code code;
1460 tree arg0, arg1;
1462 tree type = TREE_TYPE (arg0);
1464 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1465 || type != TREE_TYPE (arg1))
1466 abort ();
1468 /* Handle the special case of two integer constants faster. */
1469 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1471 /* And some specific cases even faster than that. */
1472 if (code == PLUS_EXPR && integer_zerop (arg0))
1473 return arg1;
1474 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1475 && integer_zerop (arg1))
1476 return arg0;
1477 else if (code == MULT_EXPR && integer_onep (arg0))
1478 return arg1;
1480 /* Handle general case of two integer constants. */
1481 return int_const_binop (code, arg0, arg1, 0);
1484 if (arg0 == error_mark_node || arg1 == error_mark_node)
1485 return error_mark_node;
1487 return fold (build (code, type, arg0, arg1));
1490 /* Given two values, either both of sizetype or both of bitsizetype,
1491 compute the difference between the two values. Return the value
1492 in signed type corresponding to the type of the operands. */
1494 tree
1495 size_diffop (arg0, arg1)
1496 tree arg0, arg1;
1498 tree type = TREE_TYPE (arg0);
1499 tree ctype;
1501 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1502 || type != TREE_TYPE (arg1))
1503 abort ();
1505 /* If the type is already signed, just do the simple thing. */
1506 if (! TREE_UNSIGNED (type))
1507 return size_binop (MINUS_EXPR, arg0, arg1);
1509 ctype = (type == bitsizetype || type == ubitsizetype
1510 ? sbitsizetype : ssizetype);
1512 /* If either operand is not a constant, do the conversions to the signed
1513 type and subtract. The hardware will do the right thing with any
1514 overflow in the subtraction. */
1515 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1516 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1517 convert (ctype, arg1));
1519 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1520 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1521 overflow) and negate (which can't either). Special-case a result
1522 of zero while we're here. */
1523 if (tree_int_cst_equal (arg0, arg1))
1524 return convert (ctype, integer_zero_node);
1525 else if (tree_int_cst_lt (arg1, arg0))
1526 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1527 else
1528 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1529 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1532 /* This structure is used to communicate arguments to fold_convert_1. */
1533 struct fc_args
1535 tree arg1; /* Input: value to convert. */
1536 tree type; /* Input: type to convert value to. */
1537 tree t; /* Output: result of conversion. */
1540 /* Function to convert floating-point constants, protected by floating
1541 point exception handler. */
1543 static void
1544 fold_convert_1 (data)
1545 PTR data;
1547 struct fc_args *args = (struct fc_args *) data;
1549 args->t = build_real (args->type,
1550 real_value_truncate (TYPE_MODE (args->type),
1551 TREE_REAL_CST (args->arg1)));
1554 /* Given T, a tree representing type conversion of ARG1, a constant,
1555 return a constant tree representing the result of conversion. */
1557 static tree
1558 fold_convert (t, arg1)
1559 tree t;
1560 tree arg1;
1562 tree type = TREE_TYPE (t);
1563 int overflow = 0;
1565 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1567 if (TREE_CODE (arg1) == INTEGER_CST)
1569 /* If we would build a constant wider than GCC supports,
1570 leave the conversion unfolded. */
1571 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1572 return t;
1574 /* If we are trying to make a sizetype for a small integer, use
1575 size_int to pick up cached types to reduce duplicate nodes. */
1576 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1577 && !TREE_CONSTANT_OVERFLOW (arg1)
1578 && compare_tree_int (arg1, 10000) < 0)
1579 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1581 /* Given an integer constant, make new constant with new type,
1582 appropriately sign-extended or truncated. */
1583 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1584 TREE_INT_CST_HIGH (arg1));
1585 TREE_TYPE (t) = type;
1586 /* Indicate an overflow if (1) ARG1 already overflowed,
1587 or (2) force_fit_type indicates an overflow.
1588 Tell force_fit_type that an overflow has already occurred
1589 if ARG1 is a too-large unsigned value and T is signed.
1590 But don't indicate an overflow if converting a pointer. */
1591 TREE_OVERFLOW (t)
1592 = ((force_fit_type (t,
1593 (TREE_INT_CST_HIGH (arg1) < 0
1594 && (TREE_UNSIGNED (type)
1595 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1596 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1597 || TREE_OVERFLOW (arg1));
1598 TREE_CONSTANT_OVERFLOW (t)
1599 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1601 else if (TREE_CODE (arg1) == REAL_CST)
1603 /* Don't initialize these, use assignments.
1604 Initialized local aggregates don't work on old compilers. */
1605 REAL_VALUE_TYPE x;
1606 REAL_VALUE_TYPE l;
1607 REAL_VALUE_TYPE u;
1608 tree type1 = TREE_TYPE (arg1);
1609 int no_upper_bound;
1611 x = TREE_REAL_CST (arg1);
1612 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1614 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1615 if (!no_upper_bound)
1616 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1618 /* See if X will be in range after truncation towards 0.
1619 To compensate for truncation, move the bounds away from 0,
1620 but reject if X exactly equals the adjusted bounds. */
1621 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1622 if (!no_upper_bound)
1623 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1624 /* If X is a NaN, use zero instead and show we have an overflow.
1625 Otherwise, range check. */
1626 if (REAL_VALUE_ISNAN (x))
1627 overflow = 1, x = dconst0;
1628 else if (! (REAL_VALUES_LESS (l, x)
1629 && !no_upper_bound
1630 && REAL_VALUES_LESS (x, u)))
1631 overflow = 1;
1634 HOST_WIDE_INT low, high;
1635 REAL_VALUE_TO_INT (&low, &high, x);
1636 t = build_int_2 (low, high);
1638 TREE_TYPE (t) = type;
1639 TREE_OVERFLOW (t)
1640 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1641 TREE_CONSTANT_OVERFLOW (t)
1642 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1644 TREE_TYPE (t) = type;
1646 else if (TREE_CODE (type) == REAL_TYPE)
1648 if (TREE_CODE (arg1) == INTEGER_CST)
1649 return build_real_from_int_cst (type, arg1);
1650 if (TREE_CODE (arg1) == REAL_CST)
1652 struct fc_args args;
1654 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1656 t = arg1;
1657 TREE_TYPE (arg1) = type;
1658 return t;
1661 /* Setup input for fold_convert_1() */
1662 args.arg1 = arg1;
1663 args.type = type;
1665 if (do_float_handler (fold_convert_1, (PTR) &args))
1667 /* Receive output from fold_convert_1() */
1668 t = args.t;
1670 else
1672 /* We got an exception from fold_convert_1() */
1673 overflow = 1;
1674 t = copy_node (arg1);
1677 TREE_OVERFLOW (t)
1678 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1679 TREE_CONSTANT_OVERFLOW (t)
1680 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1681 return t;
1684 TREE_CONSTANT (t) = 1;
1685 return t;
1688 /* Return an expr equal to X but certainly not valid as an lvalue. */
1690 tree
1691 non_lvalue (x)
1692 tree x;
1694 tree result;
1696 /* These things are certainly not lvalues. */
1697 if (TREE_CODE (x) == NON_LVALUE_EXPR
1698 || TREE_CODE (x) == INTEGER_CST
1699 || TREE_CODE (x) == REAL_CST
1700 || TREE_CODE (x) == STRING_CST
1701 || TREE_CODE (x) == ADDR_EXPR)
1702 return x;
1704 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1705 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1706 return result;
1709 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1710 Zero means allow extended lvalues. */
1712 int pedantic_lvalues;
1714 /* When pedantic, return an expr equal to X but certainly not valid as a
1715 pedantic lvalue. Otherwise, return X. */
1717 tree
1718 pedantic_non_lvalue (x)
1719 tree x;
1721 if (pedantic_lvalues)
1722 return non_lvalue (x);
1723 else
1724 return x;
1727 /* Given a tree comparison code, return the code that is the logical inverse
1728 of the given code. It is not safe to do this for floating-point
1729 comparisons, except for NE_EXPR and EQ_EXPR. */
1731 static enum tree_code
1732 invert_tree_comparison (code)
1733 enum tree_code code;
1735 switch (code)
1737 case EQ_EXPR:
1738 return NE_EXPR;
1739 case NE_EXPR:
1740 return EQ_EXPR;
1741 case GT_EXPR:
1742 return LE_EXPR;
1743 case GE_EXPR:
1744 return LT_EXPR;
1745 case LT_EXPR:
1746 return GE_EXPR;
1747 case LE_EXPR:
1748 return GT_EXPR;
1749 default:
1750 abort ();
1754 /* Similar, but return the comparison that results if the operands are
1755 swapped. This is safe for floating-point. */
1757 static enum tree_code
1758 swap_tree_comparison (code)
1759 enum tree_code code;
1761 switch (code)
1763 case EQ_EXPR:
1764 case NE_EXPR:
1765 return code;
1766 case GT_EXPR:
1767 return LT_EXPR;
1768 case GE_EXPR:
1769 return LE_EXPR;
1770 case LT_EXPR:
1771 return GT_EXPR;
1772 case LE_EXPR:
1773 return GE_EXPR;
1774 default:
1775 abort ();
1779 /* Return nonzero if CODE is a tree code that represents a truth value. */
1781 static int
1782 truth_value_p (code)
1783 enum tree_code code;
1785 return (TREE_CODE_CLASS (code) == '<'
1786 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1787 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1788 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1791 /* Return nonzero if two operands are necessarily equal.
1792 If ONLY_CONST is non-zero, only return non-zero for constants.
1793 This function tests whether the operands are indistinguishable;
1794 it does not test whether they are equal using C's == operation.
1795 The distinction is important for IEEE floating point, because
1796 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1797 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1799 int
1800 operand_equal_p (arg0, arg1, only_const)
1801 tree arg0, arg1;
1802 int only_const;
1804 /* If the two types don't have the same signedness, then we can't consider
1805 them equal. We must check this before the STRIP_NOPS calls
1806 because they may change the signedness of the arguments. */
1807 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1808 return 0;
1810 STRIP_NOPS (arg0);
1811 STRIP_NOPS (arg1);
1813 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1814 /* This is needed for conversions and for COMPONENT_REF.
1815 Might as well play it safe and always test this. */
1816 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1817 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1818 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1819 return 0;
1821 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1822 We don't care about side effects in that case because the SAVE_EXPR
1823 takes care of that for us. In all other cases, two expressions are
1824 equal if they have no side effects. If we have two identical
1825 expressions with side effects that should be treated the same due
1826 to the only side effects being identical SAVE_EXPR's, that will
1827 be detected in the recursive calls below. */
1828 if (arg0 == arg1 && ! only_const
1829 && (TREE_CODE (arg0) == SAVE_EXPR
1830 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1831 return 1;
1833 /* Next handle constant cases, those for which we can return 1 even
1834 if ONLY_CONST is set. */
1835 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1836 switch (TREE_CODE (arg0))
1838 case INTEGER_CST:
1839 return (! TREE_CONSTANT_OVERFLOW (arg0)
1840 && ! TREE_CONSTANT_OVERFLOW (arg1)
1841 && tree_int_cst_equal (arg0, arg1));
1843 case REAL_CST:
1844 return (! TREE_CONSTANT_OVERFLOW (arg0)
1845 && ! TREE_CONSTANT_OVERFLOW (arg1)
1846 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1847 TREE_REAL_CST (arg1)));
1849 case VECTOR_CST:
1851 tree v1, v2;
1853 if (TREE_CONSTANT_OVERFLOW (arg0)
1854 || TREE_CONSTANT_OVERFLOW (arg1))
1855 return 0;
1857 v1 = TREE_VECTOR_CST_ELTS (arg0);
1858 v2 = TREE_VECTOR_CST_ELTS (arg1);
1859 while (v1 && v2)
1861 if (!operand_equal_p (v1, v2, only_const))
1862 return 0;
1863 v1 = TREE_CHAIN (v1);
1864 v2 = TREE_CHAIN (v2);
1867 return 1;
1870 case COMPLEX_CST:
1871 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1872 only_const)
1873 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1874 only_const));
1876 case STRING_CST:
1877 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1878 && ! memcmp (TREE_STRING_POINTER (arg0),
1879 TREE_STRING_POINTER (arg1),
1880 TREE_STRING_LENGTH (arg0)));
1882 case ADDR_EXPR:
1883 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1884 0);
1885 default:
1886 break;
1889 if (only_const)
1890 return 0;
1892 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1894 case '1':
1895 /* Two conversions are equal only if signedness and modes match. */
1896 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1897 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1898 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1899 return 0;
1901 return operand_equal_p (TREE_OPERAND (arg0, 0),
1902 TREE_OPERAND (arg1, 0), 0);
1904 case '<':
1905 case '2':
1906 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1907 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1908 0))
1909 return 1;
1911 /* For commutative ops, allow the other order. */
1912 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1913 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1914 || TREE_CODE (arg0) == BIT_IOR_EXPR
1915 || TREE_CODE (arg0) == BIT_XOR_EXPR
1916 || TREE_CODE (arg0) == BIT_AND_EXPR
1917 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1918 && operand_equal_p (TREE_OPERAND (arg0, 0),
1919 TREE_OPERAND (arg1, 1), 0)
1920 && operand_equal_p (TREE_OPERAND (arg0, 1),
1921 TREE_OPERAND (arg1, 0), 0));
1923 case 'r':
1924 /* If either of the pointer (or reference) expressions we are dereferencing
1925 contains a side effect, these cannot be equal. */
1926 if (TREE_SIDE_EFFECTS (arg0)
1927 || TREE_SIDE_EFFECTS (arg1))
1928 return 0;
1930 switch (TREE_CODE (arg0))
1932 case INDIRECT_REF:
1933 return operand_equal_p (TREE_OPERAND (arg0, 0),
1934 TREE_OPERAND (arg1, 0), 0);
1936 case COMPONENT_REF:
1937 case ARRAY_REF:
1938 case ARRAY_RANGE_REF:
1939 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1940 TREE_OPERAND (arg1, 0), 0)
1941 && operand_equal_p (TREE_OPERAND (arg0, 1),
1942 TREE_OPERAND (arg1, 1), 0));
1944 case BIT_FIELD_REF:
1945 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1946 TREE_OPERAND (arg1, 0), 0)
1947 && operand_equal_p (TREE_OPERAND (arg0, 1),
1948 TREE_OPERAND (arg1, 1), 0)
1949 && operand_equal_p (TREE_OPERAND (arg0, 2),
1950 TREE_OPERAND (arg1, 2), 0));
1951 default:
1952 return 0;
1955 case 'e':
1956 if (TREE_CODE (arg0) == RTL_EXPR)
1957 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1958 return 0;
1960 default:
1961 return 0;
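/* Illustrative sketch (not part of the original sources): given two
   distinct trees T1 and T2 both built for "a[i] + 1", sharing the same
   VAR_DECLs for A and I and free of side effects,

	operand_equal_p (t1, t2, 0) == 1
	operand_equal_p (t1, t2, 1) == 0

   since ONLY_CONST rejects anything past the constant cases above, while
   two INTEGER_CSTs holding the same value compare equal either way.  */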
1965 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1966 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1968 When in doubt, return 0. */
1970 static int
1971 operand_equal_for_comparison_p (arg0, arg1, other)
1972 tree arg0, arg1;
1973 tree other;
1975 int unsignedp1, unsignedpo;
1976 tree primarg0, primarg1, primother;
1977 unsigned int correct_width;
1979 if (operand_equal_p (arg0, arg1, 0))
1980 return 1;
1982 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1983 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1984 return 0;
1986 /* Discard any conversions that don't change the modes of ARG0 and ARG1
1987 and see if the inner values are the same. This removes any
1988 signedness comparison, which doesn't matter here. */
1989 primarg0 = arg0, primarg1 = arg1;
1990 STRIP_NOPS (primarg0);
1991 STRIP_NOPS (primarg1);
1992 if (operand_equal_p (primarg0, primarg1, 0))
1993 return 1;
1995 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1996 actual comparison operand, ARG0.
1998 First throw away any conversions to wider types
1999 already present in the operands. */
2001 primarg1 = get_narrower (arg1, &unsignedp1);
2002 primother = get_narrower (other, &unsignedpo);
2004 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2005 if (unsignedp1 == unsignedpo
2006 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2007 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2009 tree type = TREE_TYPE (arg0);
2011 /* Make sure the shorter operand is extended the right way
2012 to match the longer operand. */
2013 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
2014 TREE_TYPE (primarg1)),
2015 primarg1);
2017 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2018 return 1;
2021 return 0;
2024 /* See if ARG is an expression that is either a comparison or is performing
2025 arithmetic on comparisons. The comparisons must only be comparing
2026 two different values, which will be stored in *CVAL1 and *CVAL2; if
2027 they are non-zero it means that some operands have already been found.
2028 No variables may be used anywhere else in the expression except in the
2029 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2030 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2032 If this is true, return 1. Otherwise, return zero. */
2034 static int
2035 twoval_comparison_p (arg, cval1, cval2, save_p)
2036 tree arg;
2037 tree *cval1, *cval2;
2038 int *save_p;
2040 enum tree_code code = TREE_CODE (arg);
2041 char class = TREE_CODE_CLASS (code);
2043 /* We can handle some of the 'e' cases here. */
2044 if (class == 'e' && code == TRUTH_NOT_EXPR)
2045 class = '1';
2046 else if (class == 'e'
2047 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2048 || code == COMPOUND_EXPR))
2049 class = '2';
2051 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2052 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2054 /* If we've already found a CVAL1 or CVAL2, this expression is
2055 too complex to handle. */
2056 if (*cval1 || *cval2)
2057 return 0;
2059 class = '1';
2060 *save_p = 1;
2063 switch (class)
2065 case '1':
2066 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2068 case '2':
2069 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2070 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2071 cval1, cval2, save_p));
2073 case 'c':
2074 return 1;
2076 case 'e':
2077 if (code == COND_EXPR)
2078 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2079 cval1, cval2, save_p)
2080 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2081 cval1, cval2, save_p)
2082 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2083 cval1, cval2, save_p));
2084 return 0;
2086 case '<':
2087 /* First see if we can handle the first operand, then the second. For
2088 the second operand, we know *CVAL1 can't be zero. It must be that
2089 one side of the comparison is each of the values; test for the
2090 case where this isn't true by failing if the two operands
2091 are the same. */
2093 if (operand_equal_p (TREE_OPERAND (arg, 0),
2094 TREE_OPERAND (arg, 1), 0))
2095 return 0;
2097 if (*cval1 == 0)
2098 *cval1 = TREE_OPERAND (arg, 0);
2099 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2100 ;
2101 else if (*cval2 == 0)
2102 *cval2 = TREE_OPERAND (arg, 0);
2103 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2104 ;
2105 else
2106 return 0;
2108 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2109 ;
2110 else if (*cval2 == 0)
2111 *cval2 = TREE_OPERAND (arg, 1);
2112 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2113 ;
2114 else
2115 return 0;
2117 return 1;
2119 default:
2120 return 0;
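/* Illustrative sketch (not part of the original sources): for ARG built
   from "a < b && b >= a", this function stores A in *CVAL1, B in *CVAL2
   and returns 1, since every comparison mentions only those two values;
   for "a < b && c == 4" it returns 0 once the third value C is seen.  */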
2124 /* ARG is a tree that is known to contain just arithmetic operations and
2125 comparisons. Evaluate the operations in the tree substituting NEW0 for
2126 any occurrence of OLD0 as an operand of a comparison and likewise for
2127 NEW1 and OLD1. */
2129 static tree
2130 eval_subst (arg, old0, new0, old1, new1)
2131 tree arg;
2132 tree old0, new0, old1, new1;
2134 tree type = TREE_TYPE (arg);
2135 enum tree_code code = TREE_CODE (arg);
2136 char class = TREE_CODE_CLASS (code);
2138 /* We can handle some of the 'e' cases here. */
2139 if (class == 'e' && code == TRUTH_NOT_EXPR)
2140 class = '1';
2141 else if (class == 'e'
2142 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2143 class = '2';
2145 switch (class)
2147 case '1':
2148 return fold (build1 (code, type,
2149 eval_subst (TREE_OPERAND (arg, 0),
2150 old0, new0, old1, new1)));
2152 case '2':
2153 return fold (build (code, type,
2154 eval_subst (TREE_OPERAND (arg, 0),
2155 old0, new0, old1, new1),
2156 eval_subst (TREE_OPERAND (arg, 1),
2157 old0, new0, old1, new1)));
2159 case 'e':
2160 switch (code)
2162 case SAVE_EXPR:
2163 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2165 case COMPOUND_EXPR:
2166 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2168 case COND_EXPR:
2169 return fold (build (code, type,
2170 eval_subst (TREE_OPERAND (arg, 0),
2171 old0, new0, old1, new1),
2172 eval_subst (TREE_OPERAND (arg, 1),
2173 old0, new0, old1, new1),
2174 eval_subst (TREE_OPERAND (arg, 2),
2175 old0, new0, old1, new1)));
2176 default:
2177 break;
2179 /* fall through - ??? */
2181 case '<':
2183 tree arg0 = TREE_OPERAND (arg, 0);
2184 tree arg1 = TREE_OPERAND (arg, 1);
2186 /* We need to check both for exact equality and tree equality. The
2187 former will be true if the operand has a side-effect. In that
2188 case, we know the operand occurred exactly once. */
2190 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2191 arg0 = new0;
2192 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2193 arg0 = new1;
2195 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2196 arg1 = new0;
2197 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2198 arg1 = new1;
2200 return fold (build (code, type, arg0, arg1));
2203 default:
2204 return arg;
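/* Illustrative sketch (not part of the original sources): with OLD0 = A,
   NEW0 = 1, OLD1 = B and NEW1 = 2, eval_subst applied to the tree for
   "a < b || a == b" builds and folds "1 < 2 || 1 == 2", yielding the
   constant 1.  fold uses this to probe how a combined comparison behaves
   for each possible ordering of its two operands.  */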
2208 /* Return a tree for the case when the result of an expression is RESULT
2209 converted to TYPE and OMITTED was previously an operand of the expression
2210 but is now not needed (e.g., we folded OMITTED * 0).
2212 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2213 the conversion of RESULT to TYPE. */
2215 static tree
2216 omit_one_operand (type, result, omitted)
2217 tree type, result, omitted;
2219 tree t = convert (type, result);
2221 if (TREE_SIDE_EFFECTS (omitted))
2222 return build (COMPOUND_EXPR, type, omitted, t);
2224 return non_lvalue (t);
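/* Illustrative sketch (not part of the original sources): when folding
   "f () * 0" the call cannot simply be dropped, so

	omit_one_operand (type, integer_zero_node, call_to_f)

   yields the COMPOUND_EXPR "(f (), 0)", which still evaluates the call
   but has the value 0.  For an omitted operand without side effects the
   result is just the non-lvalue 0.  */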
2227 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2229 static tree
2230 pedantic_omit_one_operand (type, result, omitted)
2231 tree type, result, omitted;
2233 tree t = convert (type, result);
2235 if (TREE_SIDE_EFFECTS (omitted))
2236 return build (COMPOUND_EXPR, type, omitted, t);
2238 return pedantic_non_lvalue (t);
2241 /* Return a simplified tree node for the truth-negation of ARG. This
2242 never alters ARG itself. We assume that ARG is an operation that
2243 returns a truth value (0 or 1). */
2245 tree
2246 invert_truthvalue (arg)
2247 tree arg;
2249 tree type = TREE_TYPE (arg);
2250 enum tree_code code = TREE_CODE (arg);
2252 if (code == ERROR_MARK)
2253 return arg;
2255 /* If this is a comparison, we can simply invert it, except for
2256 floating-point non-equality comparisons, in which case we just
2257 enclose a TRUTH_NOT_EXPR around what we have. */
2259 if (TREE_CODE_CLASS (code) == '<')
2261 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2262 && !flag_unsafe_math_optimizations
2263 && code != NE_EXPR
2264 && code != EQ_EXPR)
2265 return build1 (TRUTH_NOT_EXPR, type, arg);
2266 else
2267 return build (invert_tree_comparison (code), type,
2268 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2271 switch (code)
2273 case INTEGER_CST:
2274 return convert (type, build_int_2 (integer_zerop (arg), 0));
2276 case TRUTH_AND_EXPR:
2277 return build (TRUTH_OR_EXPR, type,
2278 invert_truthvalue (TREE_OPERAND (arg, 0)),
2279 invert_truthvalue (TREE_OPERAND (arg, 1)));
2281 case TRUTH_OR_EXPR:
2282 return build (TRUTH_AND_EXPR, type,
2283 invert_truthvalue (TREE_OPERAND (arg, 0)),
2284 invert_truthvalue (TREE_OPERAND (arg, 1)));
2286 case TRUTH_XOR_EXPR:
2287 /* Here we can invert either operand. We invert the first operand
2288 unless the second operand is a TRUTH_NOT_EXPR in which case our
2289 result is the XOR of the first operand with the inside of the
2290 negation of the second operand. */
2292 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2293 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2294 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2295 else
2296 return build (TRUTH_XOR_EXPR, type,
2297 invert_truthvalue (TREE_OPERAND (arg, 0)),
2298 TREE_OPERAND (arg, 1));
2300 case TRUTH_ANDIF_EXPR:
2301 return build (TRUTH_ORIF_EXPR, type,
2302 invert_truthvalue (TREE_OPERAND (arg, 0)),
2303 invert_truthvalue (TREE_OPERAND (arg, 1)));
2305 case TRUTH_ORIF_EXPR:
2306 return build (TRUTH_ANDIF_EXPR, type,
2307 invert_truthvalue (TREE_OPERAND (arg, 0)),
2308 invert_truthvalue (TREE_OPERAND (arg, 1)));
2310 case TRUTH_NOT_EXPR:
2311 return TREE_OPERAND (arg, 0);
2313 case COND_EXPR:
2314 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2315 invert_truthvalue (TREE_OPERAND (arg, 1)),
2316 invert_truthvalue (TREE_OPERAND (arg, 2)));
2318 case COMPOUND_EXPR:
2319 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2320 invert_truthvalue (TREE_OPERAND (arg, 1)));
2322 case WITH_RECORD_EXPR:
2323 return build (WITH_RECORD_EXPR, type,
2324 invert_truthvalue (TREE_OPERAND (arg, 0)),
2325 TREE_OPERAND (arg, 1));
2327 case NON_LVALUE_EXPR:
2328 return invert_truthvalue (TREE_OPERAND (arg, 0));
2330 case NOP_EXPR:
2331 case CONVERT_EXPR:
2332 case FLOAT_EXPR:
2333 return build1 (TREE_CODE (arg), type,
2334 invert_truthvalue (TREE_OPERAND (arg, 0)));
2336 case BIT_AND_EXPR:
2337 if (!integer_onep (TREE_OPERAND (arg, 1)))
2338 break;
2339 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2341 case SAVE_EXPR:
2342 return build1 (TRUTH_NOT_EXPR, type, arg);
2344 case CLEANUP_POINT_EXPR:
2345 return build1 (CLEANUP_POINT_EXPR, type,
2346 invert_truthvalue (TREE_OPERAND (arg, 0)));
2348 default:
2349 break;
2351 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2352 abort ();
2353 return build1 (TRUTH_NOT_EXPR, type, arg);
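/* Illustrative sketch (not part of the original sources): this rewrites

	a < b		into	a >= b
	a && b		into	!a || !b	(De Morgan)
	x ? y : z	into	x ? !y : !z

   but for a floating-point "a < b" without -funsafe-math-optimizations it
   only wraps a TRUTH_NOT_EXPR, since "a >= b" is not the negation of
   "a < b" when NaNs are possible.  */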
2356 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2357 operands are another bit-wise operation with a common input. If so,
2358 distribute the bit operations to save an operation and possibly two if
2359 constants are involved. For example, convert
2360 (A | B) & (A | C) into A | (B & C)
2361 Further simplification will occur if B and C are constants.
2363 If this optimization cannot be done, 0 will be returned. */
2365 static tree
2366 distribute_bit_expr (code, type, arg0, arg1)
2367 enum tree_code code;
2368 tree type;
2369 tree arg0, arg1;
2371 tree common;
2372 tree left, right;
2374 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2375 || TREE_CODE (arg0) == code
2376 || (TREE_CODE (arg0) != BIT_AND_EXPR
2377 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2378 return 0;
2380 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2382 common = TREE_OPERAND (arg0, 0);
2383 left = TREE_OPERAND (arg0, 1);
2384 right = TREE_OPERAND (arg1, 1);
2386 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2388 common = TREE_OPERAND (arg0, 0);
2389 left = TREE_OPERAND (arg0, 1);
2390 right = TREE_OPERAND (arg1, 0);
2392 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2394 common = TREE_OPERAND (arg0, 1);
2395 left = TREE_OPERAND (arg0, 0);
2396 right = TREE_OPERAND (arg1, 1);
2398 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2400 common = TREE_OPERAND (arg0, 1);
2401 left = TREE_OPERAND (arg0, 0);
2402 right = TREE_OPERAND (arg1, 0);
2404 else
2405 return 0;
2407 return fold (build (TREE_CODE (arg0), type, common,
2408 fold (build (code, type, left, right))));
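/* Illustrative sketch (not part of the original sources): with CODE set
   to BIT_AND_EXPR,

	(x | 3) & (x | 12)   becomes   x | (3 & 12),

   and the folded inner constant (here 0) lets later folding reduce the
   whole expression to X.  All four placements of the common operand are
   matched above.  */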
2411 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2412 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2414 static tree
2415 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2416 tree inner;
2417 tree type;
2418 int bitsize, bitpos;
2419 int unsignedp;
2421 tree result = build (BIT_FIELD_REF, type, inner,
2422 size_int (bitsize), bitsize_int (bitpos));
2424 TREE_UNSIGNED (result) = unsignedp;
2426 return result;
2429 /* Optimize a bit-field compare.
2431 There are two cases: First is a compare against a constant and the
2432 second is a comparison of two items where the fields are at the same
2433 bit position relative to the start of a chunk (byte, halfword, word)
2434 large enough to contain it. In these cases we can avoid the shift
2435 implicit in bitfield extractions.
2437 For constants, we emit a compare of the shifted constant with the
2438 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2439 compared. For two fields at the same position, we do the ANDs with the
2440 similar mask and compare the result of the ANDs.
2442 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2443 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2444 are the left and right operands of the comparison, respectively.
2446 If the optimization described above can be done, we return the resulting
2447 tree. Otherwise we return zero. */
2449 static tree
2450 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2451 enum tree_code code;
2452 tree compare_type;
2453 tree lhs, rhs;
2455 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2456 tree type = TREE_TYPE (lhs);
2457 tree signed_type, unsigned_type;
2458 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2459 enum machine_mode lmode, rmode, nmode;
2460 int lunsignedp, runsignedp;
2461 int lvolatilep = 0, rvolatilep = 0;
2462 tree linner, rinner = NULL_TREE;
2463 tree mask;
2464 tree offset;
2466 /* Get all the information about the extractions being done. If the bit size
2467 is the same as the size of the underlying object, we aren't doing an
2468 extraction at all and so can do nothing. We also don't want to
2469 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2470 then will no longer be able to replace it. */
2471 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2472 &lunsignedp, &lvolatilep);
2473 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2474 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2475 return 0;
2477 if (!const_p)
2479 /* If this is not a constant, we can only do something if bit positions,
2480 sizes, and signedness are the same. */
2481 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2482 &runsignedp, &rvolatilep);
2484 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2485 || lunsignedp != runsignedp || offset != 0
2486 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2487 return 0;
2490 /* See if we can find a mode to refer to this field. We should be able to,
2491 but fail if we can't. */
2492 nmode = get_best_mode (lbitsize, lbitpos,
2493 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2494 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2495 TYPE_ALIGN (TREE_TYPE (rinner))),
2496 word_mode, lvolatilep || rvolatilep);
2497 if (nmode == VOIDmode)
2498 return 0;
2500 /* Set signed and unsigned types of the precision of this mode for the
2501 shifts below. */
2502 signed_type = type_for_mode (nmode, 0);
2503 unsigned_type = type_for_mode (nmode, 1);
2505 /* Compute the bit position and size for the new reference and our offset
2506 within it. If the new reference is the same size as the original, we
2507 won't optimize anything, so return zero. */
2508 nbitsize = GET_MODE_BITSIZE (nmode);
2509 nbitpos = lbitpos & ~ (nbitsize - 1);
2510 lbitpos -= nbitpos;
2511 if (nbitsize == lbitsize)
2512 return 0;
2514 if (BYTES_BIG_ENDIAN)
2515 lbitpos = nbitsize - lbitsize - lbitpos;
2517 /* Make the mask to be used against the extracted field. */
2518 mask = build_int_2 (~0, ~0);
2519 TREE_TYPE (mask) = unsigned_type;
2520 force_fit_type (mask, 0);
2521 mask = convert (unsigned_type, mask);
2522 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2523 mask = const_binop (RSHIFT_EXPR, mask,
2524 size_int (nbitsize - lbitsize - lbitpos), 0);
2526 if (! const_p)
2527 /* If not comparing with constant, just rework the comparison
2528 and return. */
2529 return build (code, compare_type,
2530 build (BIT_AND_EXPR, unsigned_type,
2531 make_bit_field_ref (linner, unsigned_type,
2532 nbitsize, nbitpos, 1),
2533 mask),
2534 build (BIT_AND_EXPR, unsigned_type,
2535 make_bit_field_ref (rinner, unsigned_type,
2536 nbitsize, nbitpos, 1),
2537 mask));
2539 /* Otherwise, we are handling the constant case. See if the constant is too
2540 big for the field. Warn and return a tree for 0 (false) if so. We do
2541 this not only for its own sake, but to avoid having to test for this
2542 error case below. If we didn't, we might generate wrong code.
2544 For unsigned fields, the constant shifted right by the field length should
2545 be all zero. For signed fields, the high-order bits should agree with
2546 the sign bit. */
2548 if (lunsignedp)
2550 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2551 convert (unsigned_type, rhs),
2552 size_int (lbitsize), 0)))
2554 warning ("comparison is always %d due to width of bit-field",
2555 code == NE_EXPR);
2556 return convert (compare_type,
2557 (code == NE_EXPR
2558 ? integer_one_node : integer_zero_node));
2561 else
2563 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2564 size_int (lbitsize - 1), 0);
2565 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2567 warning ("comparison is always %d due to width of bit-field",
2568 code == NE_EXPR);
2569 return convert (compare_type,
2570 (code == NE_EXPR
2571 ? integer_one_node : integer_zero_node));
2575 /* Single-bit compares should always be against zero. */
2576 if (lbitsize == 1 && ! integer_zerop (rhs))
2578 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2579 rhs = convert (type, integer_zero_node);
2582 /* Make a new bitfield reference, shift the constant over the
2583 appropriate number of bits and mask it with the computed mask
2584 (in case this was a signed field). If we changed it, make a new one. */
2585 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2586 if (lvolatilep)
2588 TREE_SIDE_EFFECTS (lhs) = 1;
2589 TREE_THIS_VOLATILE (lhs) = 1;
2592 rhs = fold (const_binop (BIT_AND_EXPR,
2593 const_binop (LSHIFT_EXPR,
2594 convert (unsigned_type, rhs),
2595 size_int (lbitpos), 0),
2596 mask, 0));
2598 return build (code, compare_type,
2599 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2600 rhs);
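/* Illustrative sketch (not part of the original sources): for

	struct s { unsigned f : 3; } *p;  ...  p->f == 5

   the generic bit-field load would shift and mask; the code above instead
   compares the containing unit directly, roughly

	(*(unsigned char *) p & 7) == 5

   on a little-endian target with F at bit 0.  The unit, mask and shifted
   constant depend on the target's modes and endianness.  */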
2603 /* Subroutine for fold_truthop: decode a field reference.
2605 If EXP is a comparison reference, we return the innermost reference.
2607 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2608 set to the starting bit number.
2610 If the innermost field can be completely contained in a mode-sized
2611 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2613 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2614 otherwise it is not changed.
2616 *PUNSIGNEDP is set to the signedness of the field.
2618 *PMASK is set to the mask used. This is either contained in a
2619 BIT_AND_EXPR or derived from the width of the field.
2621 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2623 Return 0 if this is not a component reference or is one that we can't
2624 do anything with. */
2626 static tree
2627 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2628 pvolatilep, pmask, pand_mask)
2629 tree exp;
2630 HOST_WIDE_INT *pbitsize, *pbitpos;
2631 enum machine_mode *pmode;
2632 int *punsignedp, *pvolatilep;
2633 tree *pmask;
2634 tree *pand_mask;
2636 tree and_mask = 0;
2637 tree mask, inner, offset;
2638 tree unsigned_type;
2639 unsigned int precision;
2641 /* All the optimizations using this function assume integer fields.
2642 There are problems with FP fields since the type_for_size call
2643 below can fail for, e.g., XFmode. */
2644 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2645 return 0;
2647 STRIP_NOPS (exp);
2649 if (TREE_CODE (exp) == BIT_AND_EXPR)
2651 and_mask = TREE_OPERAND (exp, 1);
2652 exp = TREE_OPERAND (exp, 0);
2653 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2654 if (TREE_CODE (and_mask) != INTEGER_CST)
2655 return 0;
2658 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2659 punsignedp, pvolatilep);
2660 if ((inner == exp && and_mask == 0)
2661 || *pbitsize < 0 || offset != 0
2662 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2663 return 0;
2665 /* Compute the mask to access the bitfield. */
2666 unsigned_type = type_for_size (*pbitsize, 1);
2667 precision = TYPE_PRECISION (unsigned_type);
2669 mask = build_int_2 (~0, ~0);
2670 TREE_TYPE (mask) = unsigned_type;
2671 force_fit_type (mask, 0);
2672 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2673 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2675 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2676 if (and_mask != 0)
2677 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2678 convert (unsigned_type, and_mask), mask));
2680 *pmask = mask;
2681 *pand_mask = and_mask;
2682 return inner;
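/* Illustrative sketch (not part of the original sources): for EXP the
   tree "(p->f & 6)" where F is an 8-bit unsigned field, this returns the
   innermost reference, sets *PBITSIZE to 8, *PAND_MASK to 6, and *PMASK
   to the folded "6 & 0xff", i.e. 6; without the BIT_AND_EXPR, *PMASK
   would be the full field mask 0xff.  */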
2685 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2686 bit positions. */
2688 static int
2689 all_ones_mask_p (mask, size)
2690 tree mask;
2691 int size;
2693 tree type = TREE_TYPE (mask);
2694 unsigned int precision = TYPE_PRECISION (type);
2695 tree tmask;
2697 tmask = build_int_2 (~0, ~0);
2698 TREE_TYPE (tmask) = signed_type (type);
2699 force_fit_type (tmask, 0);
2700 return
2701 tree_int_cst_equal (mask,
2702 const_binop (RSHIFT_EXPR,
2703 const_binop (LSHIFT_EXPR, tmask,
2704 size_int (precision - size),
2705 0),
2706 size_int (precision - size), 0));
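/* Illustrative sketch (not part of the original sources): for a 32-bit
   MASK, all_ones_mask_p (mask, 8) is nonzero exactly when MASK is 0xff:
   ones in the low 8 bits, zeros above.  */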
2709 /* Subroutine for fold_truthop: determine if an operand is simple enough
2710 to be evaluated unconditionally. */
2712 static int
2713 simple_operand_p (exp)
2714 tree exp;
2716 /* Strip any conversions that don't change the machine mode. */
2717 while ((TREE_CODE (exp) == NOP_EXPR
2718 || TREE_CODE (exp) == CONVERT_EXPR)
2719 && (TYPE_MODE (TREE_TYPE (exp))
2720 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2721 exp = TREE_OPERAND (exp, 0);
2723 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2724 || (DECL_P (exp)
2725 && ! TREE_ADDRESSABLE (exp)
2726 && ! TREE_THIS_VOLATILE (exp)
2727 && ! DECL_NONLOCAL (exp)
2728 /* Don't regard global variables as simple. They may be
2729 allocated in ways unknown to the compiler (shared memory,
2730 #pragma weak, etc). */
2731 && ! TREE_PUBLIC (exp)
2732 && ! DECL_EXTERNAL (exp)
2733 /* Loading a static variable is unduly expensive, but global
2734 registers aren't expensive. */
2735 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
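/* Illustrative sketch (not part of the original sources): a local
   "int i" or the constant 42 is simple; a volatile object, a global, or
   an indexed reference like "a[i]" is not, so fold_truthop never
   evaluates the latter unconditionally.  */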
2738 /* The following functions are subroutines to fold_range_test and allow it to
2739 try to change a logical combination of comparisons into a range test.
2741 For example, both
2742 X == 2 || X == 3 || X == 4 || X == 5
2743 and
2744 X >= 2 && X <= 5
2745 are converted to
2746 (unsigned) (X - 2) <= 3
2748 We describe each set of comparisons as being either inside or outside
2749 a range, using a variable named like IN_P, and then describe the
2750 range with a lower and upper bound. If one of the bounds is omitted,
2751 it represents either the highest or lowest value of the type.
2753 In the comments below, we represent a range by two numbers in brackets
2754 preceded by a "+" to designate being inside that range, or a "-" to
2755 designate being outside that range, so the condition can be inverted by
2756 flipping the prefix. An omitted bound is represented by a "-". For
2757 example, "- [-, 10]" means being outside the range starting at the lowest
2758 possible value and ending at 10, in other words, being greater than 10.
2759 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2760 always false.
2762 We set up things so that the missing bounds are handled in a consistent
2763 manner so neither a missing bound nor "true" and "false" need to be
2764 handled using a special case. */
2766 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2767 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2768 and UPPER1_P are nonzero if the respective argument is an upper bound
2769 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2770 must be specified for a comparison. ARG1 will be converted to ARG0's
2771 type if both are specified. */
2773 static tree
2774 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2775 enum tree_code code;
2776 tree type;
2777 tree arg0, arg1;
2778 int upper0_p, upper1_p;
2780 tree tem;
2781 int result;
2782 int sgn0, sgn1;
2784 /* If neither arg represents infinity, do the normal operation.
2785 Else, if not a comparison, return infinity. Else handle the special
2786 comparison rules. Note that most of the cases below won't occur, but
2787 are handled for consistency. */
2789 if (arg0 != 0 && arg1 != 0)
2791 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2792 arg0, convert (TREE_TYPE (arg0), arg1)));
2793 STRIP_NOPS (tem);
2794 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2797 if (TREE_CODE_CLASS (code) != '<')
2798 return 0;
2800 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2801 for neither. In real maths, we cannot assume open ended ranges are
2802 the same. But, this is computer arithmetic, where numbers are finite.
2803 We can therefore replace an unbounded end of a range with the value Z,
2804 Z being greater than any representable number. This permits
2805 us to treat unbounded ranges as equal. */
2806 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2807 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2808 switch (code)
2810 case EQ_EXPR:
2811 result = sgn0 == sgn1;
2812 break;
2813 case NE_EXPR:
2814 result = sgn0 != sgn1;
2815 break;
2816 case LT_EXPR:
2817 result = sgn0 < sgn1;
2818 break;
2819 case LE_EXPR:
2820 result = sgn0 <= sgn1;
2821 break;
2822 case GT_EXPR:
2823 result = sgn0 > sgn1;
2824 break;
2825 case GE_EXPR:
2826 result = sgn0 >= sgn1;
2827 break;
2828 default:
2829 abort ();
2832 return convert (type, result ? integer_one_node : integer_zero_node);
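/* Illustrative sketch (not part of the original sources): with HIGH0 the
   constant 4 and LOW1 the constant 6,

	range_binop (LT_EXPR, integer_type_node, high0, 1, low1, 0)

   folds "4 < 6" and returns the constant 1; if HIGH0 were a missing
   (zero) upper bound, it would compare like +infinity and the result
   would be 0.  */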
2835 /* Given EXP, a logical expression, set the range it is testing into
2836 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2837 actually being tested. *PLOW and *PHIGH will be made of the same type
2838 as the returned expression. If EXP is not a comparison, we will most
2839 likely not be returning a useful value and range. */
2841 static tree
2842 make_range (exp, pin_p, plow, phigh)
2843 tree exp;
2844 int *pin_p;
2845 tree *plow, *phigh;
2847 enum tree_code code;
2848 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2849 tree orig_type = NULL_TREE;
2850 int in_p, n_in_p;
2851 tree low, high, n_low, n_high;
2853 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2854 and see if we can refine the range. Some of the cases below may not
2855 happen, but it doesn't seem worth worrying about this. We "continue"
2856 the outer loop when we've changed something; otherwise we "break"
2857 the switch, which will "break" the while. */
2859 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2861 while (1)
2863 code = TREE_CODE (exp);
2865 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2867 arg0 = TREE_OPERAND (exp, 0);
2868 if (TREE_CODE_CLASS (code) == '<'
2869 || TREE_CODE_CLASS (code) == '1'
2870 || TREE_CODE_CLASS (code) == '2')
2871 type = TREE_TYPE (arg0);
2872 if (TREE_CODE_CLASS (code) == '2'
2873 || TREE_CODE_CLASS (code) == '<'
2874 || (TREE_CODE_CLASS (code) == 'e'
2875 && TREE_CODE_LENGTH (code) > 1))
2876 arg1 = TREE_OPERAND (exp, 1);
2879 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2880 lose a cast by accident. */
2881 if (type != NULL_TREE && orig_type == NULL_TREE)
2882 orig_type = type;
2884 switch (code)
2886 case TRUTH_NOT_EXPR:
2887 in_p = ! in_p, exp = arg0;
2888 continue;
2890 case EQ_EXPR: case NE_EXPR:
2891 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2892 /* We can only do something if the range is testing for zero
2893 and if the second operand is an integer constant. Note that
2894 saying something is "in" the range we make is done by
2895 complementing IN_P since it will set in the initial case of
2896 being not equal to zero; "out" is leaving it alone. */
2897 if (low == 0 || high == 0
2898 || ! integer_zerop (low) || ! integer_zerop (high)
2899 || TREE_CODE (arg1) != INTEGER_CST)
2900 break;
2902 switch (code)
2904 case NE_EXPR: /* - [c, c] */
2905 low = high = arg1;
2906 break;
2907 case EQ_EXPR: /* + [c, c] */
2908 in_p = ! in_p, low = high = arg1;
2909 break;
2910 case GT_EXPR: /* - [-, c] */
2911 low = 0, high = arg1;
2912 break;
2913 case GE_EXPR: /* + [c, -] */
2914 in_p = ! in_p, low = arg1, high = 0;
2915 break;
2916 case LT_EXPR: /* - [c, -] */
2917 low = arg1, high = 0;
2918 break;
2919 case LE_EXPR: /* + [-, c] */
2920 in_p = ! in_p, low = 0, high = arg1;
2921 break;
2922 default:
2923 abort ();
2926 exp = arg0;
2928 /* If this is an unsigned comparison, we also know that EXP is
2929 greater than or equal to zero. We base the range tests we make
2930 on that fact, so we record it here so we can parse existing
2931 range tests. */
2932 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2934 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2935 1, convert (type, integer_zero_node),
2936 NULL_TREE))
2937 break;
2939 in_p = n_in_p, low = n_low, high = n_high;
2941 /* If the high bound is missing, but we
2942 have a low bound, reverse the range so
2943 it goes from zero to the low bound minus 1. */
2944 if (high == 0 && low)
2946 in_p = ! in_p;
2947 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2948 integer_one_node, 0);
2949 low = convert (type, integer_zero_node);
2952 continue;
2954 case NEGATE_EXPR:
2955 /* (-x) IN [a,b] -> x in [-b, -a] */
2956 n_low = range_binop (MINUS_EXPR, type,
2957 convert (type, integer_zero_node), 0, high, 1);
2958 n_high = range_binop (MINUS_EXPR, type,
2959 convert (type, integer_zero_node), 0, low, 0);
2960 low = n_low, high = n_high;
2961 exp = arg0;
2962 continue;
2964 case BIT_NOT_EXPR:
2965 /* ~ X -> -X - 1 */
2966 exp = build (MINUS_EXPR, type, negate_expr (arg0),
2967 convert (type, integer_one_node));
2968 continue;
2970 case PLUS_EXPR: case MINUS_EXPR:
2971 if (TREE_CODE (arg1) != INTEGER_CST)
2972 break;
2974 /* If EXP is signed, any overflow in the computation is undefined,
2975 so we don't worry about it so long as our computations on
2976 the bounds don't overflow. For unsigned, overflow is defined
2977 and this is exactly the right thing. */
2978 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2979 type, low, 0, arg1, 0);
2980 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2981 type, high, 1, arg1, 0);
2982 if ((n_low != 0 && TREE_OVERFLOW (n_low))
2983 || (n_high != 0 && TREE_OVERFLOW (n_high)))
2984 break;
2986 /* Check for an unsigned range which has wrapped around the maximum
2987 value thus making n_high < n_low, and normalize it. */
2988 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
2990 low = range_binop (PLUS_EXPR, type, n_high, 0,
2991 integer_one_node, 0);
2992 high = range_binop (MINUS_EXPR, type, n_low, 0,
2993 integer_one_node, 0);
2995 /* If the range is of the form +/- [ x+1, x ], we won't
2996 be able to normalize it. But then, it represents the
2997 whole range or the empty set, so make it
2998 +/- [ -, - ]. */
2999 if (tree_int_cst_equal (n_low, low)
3000 && tree_int_cst_equal (n_high, high))
3001 low = high = 0;
3002 else
3003 in_p = ! in_p;
3005 else
3006 low = n_low, high = n_high;
3008 exp = arg0;
3009 continue;
3011 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3012 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3013 break;
3015 if (! INTEGRAL_TYPE_P (type)
3016 || (low != 0 && ! int_fits_type_p (low, type))
3017 || (high != 0 && ! int_fits_type_p (high, type)))
3018 break;
3020 n_low = low, n_high = high;
3022 if (n_low != 0)
3023 n_low = convert (type, n_low);
3025 if (n_high != 0)
3026 n_high = convert (type, n_high);
3028 /* If we're converting from an unsigned to a signed type,
3029 we will be doing the comparison as unsigned. The tests above
3030 have already verified that LOW and HIGH are both positive.
3032 So we have to make sure that the original unsigned value will
3033 be interpreted as positive. */
3034 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3036 tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
3037 tree high_positive;
3039 /* A range without an upper bound is, naturally, unbounded.
3040 Since convert would have cropped a very large value, use
3041 the max value for the destination type. */
3042 high_positive
3043 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3044 : TYPE_MAX_VALUE (type);
3046 high_positive = fold (build (RSHIFT_EXPR, type,
3047 convert (type, high_positive),
3048 convert (type, integer_one_node)));
3050 /* If the low bound is specified, "and" the range with the
3051 range for which the original unsigned value will be
3052 positive. */
3053 if (low != 0)
3055 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3056 1, n_low, n_high,
3057 1, convert (type, integer_zero_node),
3058 high_positive))
3059 break;
3061 in_p = (n_in_p == in_p);
3063 else
3065 /* Otherwise, "or" the range with the range of the input
3066 that will be interpreted as negative. */
3067 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3068 0, n_low, n_high,
3069 1, convert (type, integer_zero_node),
3070 high_positive))
3071 break;
3073 in_p = (in_p != n_in_p);
3077 exp = arg0;
3078 low = n_low, high = n_high;
3079 continue;
3081 default:
3082 break;
3085 break;
3088 /* If EXP is a constant, we can evaluate whether this is true or false. */
3089 if (TREE_CODE (exp) == INTEGER_CST)
3091 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3092 exp, 0, low, 0))
3093 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3094 exp, 1, high, 1)));
3095 low = high = 0;
3096 exp = 0;
3099 *pin_p = in_p, *plow = low, *phigh = high;
3100 return exp;
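/* Illustrative sketch (not part of the original sources): for EXP the
   tree "x + 10 < 15" with signed X, the LT_EXPR case records the outside
   range "- [15, -]" and the PLUS_EXPR case then shifts the bound, so the
   routine returns X with *PIN_P == 0 and the range [5, -]; that is, the
   original test is equivalent to "x < 5".  */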
3103 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3104 type, TYPE, return an expression to test if EXP is in (or out of, depending
3105 on IN_P) the range. */
3107 static tree
3108 build_range_check (type, exp, in_p, low, high)
3109 tree type;
3110 tree exp;
3111 int in_p;
3112 tree low, high;
3114 tree etype = TREE_TYPE (exp);
3115 tree utype, value;
3117 if (! in_p
3118 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3119 return invert_truthvalue (value);
3121 else if (low == 0 && high == 0)
3122 return convert (type, integer_one_node);
3124 else if (low == 0)
3125 return fold (build (LE_EXPR, type, exp, high));
3127 else if (high == 0)
3128 return fold (build (GE_EXPR, type, exp, low));
3130 else if (operand_equal_p (low, high, 0))
3131 return fold (build (EQ_EXPR, type, exp, low));
3133 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3134 return build_range_check (type, exp, 1, 0, high);
3136 else if (integer_zerop (low))
3138 utype = unsigned_type (etype);
3139 return build_range_check (type, convert (utype, exp), 1, 0,
3140 convert (utype, high));
3143 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3144 && ! TREE_OVERFLOW (value))
3145 return build_range_check (type,
3146 fold (build (MINUS_EXPR, etype, exp, low)),
3147 1, convert (etype, integer_zero_node), value);
3148 else
3149 return 0;
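/* Illustrative sketch (not part of the original sources): for integral X
   with LOW == 2 and HIGH == 5, build_range_check (type, x, 1, low, high)
   reaches the last case above and reduces to the single unsigned test
   "(unsigned) (x - 2) <= 3"; with IN_P clear, the same check is built
   and then inverted.  */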
3152 /* Given two ranges, see if we can merge them into one. Return 1 if we
3153 can, 0 if we can't. Set the output range into the specified parameters. */
3155 static int
3156 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3157 int *pin_p;
3158 tree *plow, *phigh;
3159 int in0_p, in1_p;
3160 tree low0, high0, low1, high1;
3162 int no_overlap;
3163 int subset;
3164 int temp;
3165 tree tem;
3166 int in_p;
3167 tree low, high;
3168 int lowequal = ((low0 == 0 && low1 == 0)
3169 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3170 low0, 0, low1, 0)));
3171 int highequal = ((high0 == 0 && high1 == 0)
3172 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3173 high0, 1, high1, 1)));
3175 /* Make range 0 be the range that starts first, or ends last if they
3176 start at the same value. Swap them if it isn't. */
3177 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3178 low0, 0, low1, 0))
3179 || (lowequal
3180 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3181 high1, 1, high0, 1))))
3183 temp = in0_p, in0_p = in1_p, in1_p = temp;
3184 tem = low0, low0 = low1, low1 = tem;
3185 tem = high0, high0 = high1, high1 = tem;
3188 /* Now flag two cases, whether the ranges are disjoint or whether the
3189 second range is totally subsumed in the first. Note that the tests
3190 below are simplified by the ones above. */
3191 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3192 high0, 1, low1, 0));
3193 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3194 high1, 1, high0, 1));
3196 /* We now have four cases, depending on whether we are including or
3197 excluding the two ranges. */
3198 if (in0_p && in1_p)
3200 /* If they don't overlap, the result is false. If the second range
3201 is a subset it is the result. Otherwise, the range is from the start
3202 of the second to the end of the first. */
3203 if (no_overlap)
3204 in_p = 0, low = high = 0;
3205 else if (subset)
3206 in_p = 1, low = low1, high = high1;
3207 else
3208 in_p = 1, low = low1, high = high0;
3211 else if (in0_p && ! in1_p)
3213 /* If they don't overlap, the result is the first range. If they are
3214 equal, the result is false. If the second range is a subset of the
3215 first, and the ranges begin at the same place, we go from just after
3216 the end of the first range to the end of the second. If the second
3217 range is not a subset of the first, or if it is a subset and both
3218 ranges end at the same place, the range starts at the start of the
3219 first range and ends just before the second range.
3220 Otherwise, we can't describe this as a single range. */
3221 if (no_overlap)
3222 in_p = 1, low = low0, high = high0;
3223 else if (lowequal && highequal)
3224 in_p = 0, low = high = 0;
3225 else if (subset && lowequal)
3227 in_p = 1, high = high0;
3228 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3229 integer_one_node, 0);
3231 else if (! subset || highequal)
3233 in_p = 1, low = low0;
3234 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3235 integer_one_node, 0);
3237 else
3238 return 0;
3241 else if (! in0_p && in1_p)
3243 /* If they don't overlap, the result is the second range. If the second
3244 is a subset of the first, the result is false. Otherwise,
3245 the range starts just after the first range and ends at the
3246 end of the second. */
3247 if (no_overlap)
3248 in_p = 1, low = low1, high = high1;
3249 else if (subset || highequal)
3250 in_p = 0, low = high = 0;
3251 else
3253 in_p = 1, high = high1;
3254 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3255 integer_one_node, 0);
3259 else
3261 /* The case where we are excluding both ranges. Here the complex case
3262 is if they don't overlap. In that case, the only time we have a
3263 range is if they are adjacent. If the second is a subset of the
3264 first, the result is the first. Otherwise, the range to exclude
3265 starts at the beginning of the first range and ends at the end of the
3266 second. */
3267 if (no_overlap)
3269 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3270 range_binop (PLUS_EXPR, NULL_TREE,
3271 high0, 1,
3272 integer_one_node, 1),
3273 1, low1, 0)))
3274 in_p = 0, low = low0, high = high1;
3275 else
3276 return 0;
3278 else if (subset)
3279 in_p = 0, low = low0, high = high0;
3280 else
3281 in_p = 0, low = low0, high = high1;
3284 *pin_p = in_p, *plow = low, *phigh = high;
3285 return 1;
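/* Illustrative sketch (not part of the original sources): merging
   "+ [2, 10]" with "+ [5, 20]" yields the intersection "+ [5, 10]";
   merging "+ [2, 10]" with "- [5, 20]" yields "+ [2, 4]", ending just
   before the excluded range.  Combinations whose result is not a single
   contiguous range make the function return 0.  */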
3288 /* EXP is some logical combination of boolean tests. See if we can
3289 merge it into some range test. Return the new tree if so. */
3291 static tree
3292 fold_range_test (exp)
3293 tree exp;
3295 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3296 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3297 int in0_p, in1_p, in_p;
3298 tree low0, low1, low, high0, high1, high;
3299 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3300 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3301 tree tem;
3303 /* If this is an OR operation, invert both sides; we will invert
3304 again at the end. */
3305 if (or_op)
3306 in0_p = ! in0_p, in1_p = ! in1_p;
3308 /* If both expressions are the same, if we can merge the ranges, and we
3309 can build the range test, return it or it inverted. If one of the
3310 ranges is always true or always false, consider it to be the same
3311 expression as the other. */
3312 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3313 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3314 in1_p, low1, high1)
3315 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3316 lhs != 0 ? lhs
3317 : rhs != 0 ? rhs : integer_zero_node,
3318 in_p, low, high))))
3319 return or_op ? invert_truthvalue (tem) : tem;
3321 /* On machines where branches are expensive, if this is a
3322 short-circuited branch and the underlying object on both sides
3323 is the same, make a non-short-circuit operation. */
3324 else if (BRANCH_COST >= 2
3325 && lhs != 0 && rhs != 0
3326 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3327 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3328 && operand_equal_p (lhs, rhs, 0))
3330 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3331 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3332 which cases we can't do this. */
3333 if (simple_operand_p (lhs))
3334 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3335 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3336 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3337 TREE_OPERAND (exp, 1));
3339 else if (global_bindings_p () == 0
3340 && ! contains_placeholder_p (lhs))
3342 tree common = save_expr (lhs);
3344 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3345 or_op ? ! in0_p : in0_p,
3346 low0, high0))
3347 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3348 or_op ? ! in1_p : in1_p,
3349 low1, high1))))
3350 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3351 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3352 TREE_TYPE (exp), lhs, rhs);
3356 return 0;
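/* Illustrative sketch (not part of the original sources): as fold works
   outward over "x == 2 || x == 3 || x == 4 || x == 5", each call here
   inverts both sides, merges the two ranges, and re-inverts, so the
   nested TRUTH_ORIF_EXPRs collapse step by step into the single test
   "(unsigned) (x - 2) <= 3".  */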
3359 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3360 bit value. Arrange things so the extra bits will be set to zero if and
3361 only if C is sign-extended to its full width. If MASK is nonzero,
3362 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3364 static tree
3365 unextend (c, p, unsignedp, mask)
3366 tree c;
3367 int p;
3368 int unsignedp;
3369 tree mask;
3371 tree type = TREE_TYPE (c);
3372 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3373 tree temp;
3375 if (p == modesize || unsignedp)
3376 return c;
3378 /* We work by getting just the sign bit into the low-order bit, then
3379 into the high-order bit, then sign-extend. We then XOR that value
3380 with C. */
3381 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3382 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3384 /* We must use a signed type in order to get an arithmetic right shift.
3385 However, we must also avoid introducing accidental overflows, so that
3386 a subsequent call to integer_zerop will work. Hence we must
3387 do the type conversion here. At this point, the constant is either
3388 zero or one, and the conversion to a signed type can never overflow.
3389 We could get an overflow if this conversion is done anywhere else. */
3390 if (TREE_UNSIGNED (type))
3391 temp = convert (signed_type (type), temp);
3393 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3394 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3395 if (mask != 0)
3396 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3397 /* If necessary, convert the type back to match the type of C. */
3398 if (TREE_UNSIGNED (type))
3399 temp = convert (type, temp);
3401 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
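/* Illustrative sketch (not part of the original sources): a signed 3-bit
   field holding C == -3 contains the bit pattern 101.  unextend (c, 3,
   0, 0) returns 5, the constant whose low three bits match C and whose
   upper bits are clear, which is the form the widened unsigned
   comparison in fold_truthop needs.  */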
3404 /* Find ways of folding logical expressions of LHS and RHS:
3405 Try to merge two comparisons to the same innermost item.
3406 Look for range tests like "ch >= '0' && ch <= '9'".
3407 Look for combinations of simple terms on machines with expensive branches
3408 and evaluate the RHS unconditionally.
3410 For example, if we have p->a == 2 && p->b == 4 and we can make an
3411 object large enough to span both A and B, we can do this with a comparison
3412 against the object ANDed with a mask.
3414 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3415 operations to do this with one comparison.
3417 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3418 function and the one above.
3420 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3421 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3423 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3424 two operands.
3426 We return the simplified tree or 0 if no optimization is possible. */
3428 static tree
3429 fold_truthop (code, truth_type, lhs, rhs)
3430 enum tree_code code;
3431 tree truth_type, lhs, rhs;
3433 /* If this is the "or" of two comparisons, we can do something if
3434 the comparisons are NE_EXPR. If this is the "and", we can do something
3435 if the comparisons are EQ_EXPR. I.e.,
3436 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3438 WANTED_CODE is this operation code. For single bit fields, we can
3439 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3440 comparison for one-bit fields. */
3442 enum tree_code wanted_code;
3443 enum tree_code lcode, rcode;
3444 tree ll_arg, lr_arg, rl_arg, rr_arg;
3445 tree ll_inner, lr_inner, rl_inner, rr_inner;
3446 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3447 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3448 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3449 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3450 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3451 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3452 enum machine_mode lnmode, rnmode;
3453 tree ll_mask, lr_mask, rl_mask, rr_mask;
3454 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3455 tree l_const, r_const;
3456 tree lntype, rntype, result;
3457 int first_bit, end_bit;
3458 int volatilep;
3460 /* Start by getting the comparison codes. Fail if anything is volatile.
3461 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3462 it were surrounded with a NE_EXPR. */
3464 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3465 return 0;
3467 lcode = TREE_CODE (lhs);
3468 rcode = TREE_CODE (rhs);
3470 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3471 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3473 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3474 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3476 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3477 return 0;
3479 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3480 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3482 ll_arg = TREE_OPERAND (lhs, 0);
3483 lr_arg = TREE_OPERAND (lhs, 1);
3484 rl_arg = TREE_OPERAND (rhs, 0);
3485 rr_arg = TREE_OPERAND (rhs, 1);
3487 /* If the RHS can be evaluated unconditionally and its operands are
3488 simple, it wins to evaluate the RHS unconditionally on machines
3489 with expensive branches. In this case, this isn't a comparison
3490 that can be merged. Avoid doing this if the RHS is a floating-point
3491 comparison since those can trap. */
3493 if (BRANCH_COST >= 2
3494 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3495 && simple_operand_p (rl_arg)
3496 && simple_operand_p (rr_arg))
3497 return build (code, truth_type, lhs, rhs);
3499 /* See if the comparisons can be merged. Then get all the parameters for
3500 each side. */
3502 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3503 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3504 return 0;
3506 volatilep = 0;
3507 ll_inner = decode_field_reference (ll_arg,
3508 &ll_bitsize, &ll_bitpos, &ll_mode,
3509 &ll_unsignedp, &volatilep, &ll_mask,
3510 &ll_and_mask);
3511 lr_inner = decode_field_reference (lr_arg,
3512 &lr_bitsize, &lr_bitpos, &lr_mode,
3513 &lr_unsignedp, &volatilep, &lr_mask,
3514 &lr_and_mask);
3515 rl_inner = decode_field_reference (rl_arg,
3516 &rl_bitsize, &rl_bitpos, &rl_mode,
3517 &rl_unsignedp, &volatilep, &rl_mask,
3518 &rl_and_mask);
3519 rr_inner = decode_field_reference (rr_arg,
3520 &rr_bitsize, &rr_bitpos, &rr_mode,
3521 &rr_unsignedp, &volatilep, &rr_mask,
3522 &rr_and_mask);
3524 /* It must be true that the inner operation on the lhs of each
3525 comparison must be the same if we are to be able to do anything.
3526 Then see if we have constants. If not, the same must be true for
3527 the rhs's. */
3528 if (volatilep || ll_inner == 0 || rl_inner == 0
3529 || ! operand_equal_p (ll_inner, rl_inner, 0))
3530 return 0;
3532 if (TREE_CODE (lr_arg) == INTEGER_CST
3533 && TREE_CODE (rr_arg) == INTEGER_CST)
3534 l_const = lr_arg, r_const = rr_arg;
3535 else if (lr_inner == 0 || rr_inner == 0
3536 || ! operand_equal_p (lr_inner, rr_inner, 0))
3537 return 0;
3538 else
3539 l_const = r_const = 0;
3541 /* If either comparison code is not correct for our logical operation,
3542 fail. However, we can convert a one-bit comparison against zero into
3543 the opposite comparison against that bit being set in the field. */
3545 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3546 if (lcode != wanted_code)
3548 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3550 /* Make the left operand unsigned, since we are only interested
3551 in the value of one bit. Otherwise we are doing the wrong
3552 thing below. */
3553 ll_unsignedp = 1;
3554 l_const = ll_mask;
3556 else
3557 return 0;
3560 /* This is analogous to the code for l_const above. */
3561 if (rcode != wanted_code)
3563 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3565 rl_unsignedp = 1;
3566 r_const = rl_mask;
3568 else
3569 return 0;
3572 /* See if we can find a mode that contains both fields being compared on
3573 the left. If we can't, fail. Otherwise, update all constants and masks
3574 to be relative to a field of that size. */
3575 first_bit = MIN (ll_bitpos, rl_bitpos);
3576 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3577 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3578 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3579 volatilep);
3580 if (lnmode == VOIDmode)
3581 return 0;
3583 lnbitsize = GET_MODE_BITSIZE (lnmode);
3584 lnbitpos = first_bit & ~ (lnbitsize - 1);
3585 lntype = type_for_size (lnbitsize, 1);
3586 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3588 if (BYTES_BIG_ENDIAN)
3590 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3591 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3594 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3595 size_int (xll_bitpos), 0);
3596 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3597 size_int (xrl_bitpos), 0);
3599 if (l_const)
3601 l_const = convert (lntype, l_const);
3602 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3603 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3604 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3605 fold (build1 (BIT_NOT_EXPR,
3606 lntype, ll_mask)),
3607 0)))
3609 warning ("comparison is always %d", wanted_code == NE_EXPR);
3611 return convert (truth_type,
3612 wanted_code == NE_EXPR
3613 ? integer_one_node : integer_zero_node);
3616 if (r_const)
3618 r_const = convert (lntype, r_const);
3619 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3620 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3621 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3622 fold (build1 (BIT_NOT_EXPR,
3623 lntype, rl_mask)),
3624 0)))
3626 warning ("comparison is always %d", wanted_code == NE_EXPR);
3628 return convert (truth_type,
3629 wanted_code == NE_EXPR
3630 ? integer_one_node : integer_zero_node);
3634 /* If the right sides are not constant, do the same for it. Also,
3635 disallow this optimization if a size or signedness mismatch occurs
3636 between the left and right sides. */
3637 if (l_const == 0)
3639 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3640 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3641 /* Make sure the two fields on the right
3642 correspond to the left without being swapped. */
3643 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3644 return 0;
3646 first_bit = MIN (lr_bitpos, rr_bitpos);
3647 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3648 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3649 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3650 volatilep);
3651 if (rnmode == VOIDmode)
3652 return 0;
3654 rnbitsize = GET_MODE_BITSIZE (rnmode);
3655 rnbitpos = first_bit & ~ (rnbitsize - 1);
3656 rntype = type_for_size (rnbitsize, 1);
3657 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3659 if (BYTES_BIG_ENDIAN)
3661 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3662 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3665 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3666 size_int (xlr_bitpos), 0);
3667 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3668 size_int (xrr_bitpos), 0);
3670 /* Make a mask that corresponds to both fields being compared.
3671 Do this for both items being compared. If the operands are the
3672 same size and the bits being compared are in the same position
3673 then we can do this by masking both and comparing the masked
3674 results. */
3675 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3676 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3677 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3679 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3680 ll_unsignedp || rl_unsignedp);
3681 if (! all_ones_mask_p (ll_mask, lnbitsize))
3682 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3684 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3685 lr_unsignedp || rr_unsignedp);
3686 if (! all_ones_mask_p (lr_mask, rnbitsize))
3687 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3689 return build (wanted_code, truth_type, lhs, rhs);
3692 /* There is still another way we can do something: If both pairs of
3693 fields being compared are adjacent, we may be able to make a wider
3694 field containing them both.
3696 Note that we still must mask the lhs/rhs expressions. Furthermore,
3697 the mask must be shifted to account for the shift done by
3698 make_bit_field_ref. */
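   /* Illustrative example (added commentary, not in the original source):
      a test such as `x.a == y.a && x.b == y.b', where field `b'
      immediately follows field `a' in both structures, satisfies the
      adjacency test below and is done as one wider bit-field load and
      compare on each side.  */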
3699 if ((ll_bitsize + ll_bitpos == rl_bitpos
3700 && lr_bitsize + lr_bitpos == rr_bitpos)
3701 || (ll_bitpos == rl_bitpos + rl_bitsize
3702 && lr_bitpos == rr_bitpos + rr_bitsize))
3704 tree type;
3706 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3707 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3708 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3709 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3711 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3712 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3713 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3714 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3716 /* Convert to the smaller type before masking out unwanted bits. */
3717 type = lntype;
3718 if (lntype != rntype)
3720 if (lnbitsize > rnbitsize)
3722 lhs = convert (rntype, lhs);
3723 ll_mask = convert (rntype, ll_mask);
3724 type = rntype;
3726 else if (lnbitsize < rnbitsize)
3728 rhs = convert (lntype, rhs);
3729 lr_mask = convert (lntype, lr_mask);
3730 type = lntype;
3734 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3735 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3737 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3738 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3740 return build (wanted_code, truth_type, lhs, rhs);
3743 return 0;
3746 /* Handle the case of comparisons with constants. If there is something in
3747 common between the masks, those bits of the constants must be the same.
3748 If not, the condition is always false. Test for this to avoid generating
3749 incorrect code below. */
3750 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3751 if (! integer_zerop (result)
3752 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3753 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3755 if (wanted_code == NE_EXPR)
3757 warning ("`or' of unmatched not-equal tests is always 1");
3758 return convert (truth_type, integer_one_node);
3760 else
3762 warning ("`and' of mutually exclusive equal-tests is always 0");
3763 return convert (truth_type, integer_zero_node);
3767 /* Construct the expression we will return. First get the component
3768 reference we will make. Unless the mask is all ones the width of
3769 that field, perform the mask operation. Then compare with the
3770 merged constant. */
3771 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3772 ll_unsignedp || rl_unsignedp);
3774 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3775 if (! all_ones_mask_p (ll_mask, lnbitsize))
3776 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3778 return build (wanted_code, truth_type, result,
3779 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3782 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3783 constant. */
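/* Illustrative examples (added commentary, not in the original source):
   for T = `MAX (X, 4) > 4', op_code is MAX_EXPR and consts_equal is
   nonzero, so the GT_EXPR case below folds T to `X > 4'.  A GE_EXPR
   such as `MAX (X, 4) >= 4' is first rewritten as the TRUTH_ORIF_EXPR
   of an EQ_EXPR and a GT_EXPR, each folded recursively.  */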
3785 static tree
3786 optimize_minmax_comparison (t)
3787 tree t;
3789 tree type = TREE_TYPE (t);
3790 tree arg0 = TREE_OPERAND (t, 0);
3791 enum tree_code op_code;
3792 tree comp_const = TREE_OPERAND (t, 1);
3793 tree minmax_const;
3794 int consts_equal, consts_lt;
3795 tree inner;
3797 STRIP_SIGN_NOPS (arg0);
3799 op_code = TREE_CODE (arg0);
3800 minmax_const = TREE_OPERAND (arg0, 1);
3801 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3802 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3803 inner = TREE_OPERAND (arg0, 0);
3805 /* If something does not permit us to optimize, return the original tree. */
3806 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3807 || TREE_CODE (comp_const) != INTEGER_CST
3808 || TREE_CONSTANT_OVERFLOW (comp_const)
3809 || TREE_CODE (minmax_const) != INTEGER_CST
3810 || TREE_CONSTANT_OVERFLOW (minmax_const))
3811 return t;
3813 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3814 and GT_EXPR, doing the rest with recursive calls using logical
3815 simplifications. */
3816 switch (TREE_CODE (t))
3818 case NE_EXPR: case LT_EXPR: case LE_EXPR:
3819 return
3820 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3822 case GE_EXPR:
3823 return
3824 fold (build (TRUTH_ORIF_EXPR, type,
3825 optimize_minmax_comparison
3826 (build (EQ_EXPR, type, arg0, comp_const)),
3827 optimize_minmax_comparison
3828 (build (GT_EXPR, type, arg0, comp_const))));
3830 case EQ_EXPR:
3831 if (op_code == MAX_EXPR && consts_equal)
3832 /* MAX (X, 0) == 0 -> X <= 0 */
3833 return fold (build (LE_EXPR, type, inner, comp_const));
3835 else if (op_code == MAX_EXPR && consts_lt)
3836 /* MAX (X, 0) == 5 -> X == 5 */
3837 return fold (build (EQ_EXPR, type, inner, comp_const));
3839 else if (op_code == MAX_EXPR)
3840 /* MAX (X, 0) == -1 -> false */
3841 return omit_one_operand (type, integer_zero_node, inner);
3843 else if (consts_equal)
3844 /* MIN (X, 0) == 0 -> X >= 0 */
3845 return fold (build (GE_EXPR, type, inner, comp_const));
3847 else if (consts_lt)
3848 /* MIN (X, 0) == 5 -> false */
3849 return omit_one_operand (type, integer_zero_node, inner);
3851 else
3852 /* MIN (X, 0) == -1 -> X == -1 */
3853 return fold (build (EQ_EXPR, type, inner, comp_const));
3855 case GT_EXPR:
3856 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
3857 /* MAX (X, 0) > 0 -> X > 0
3858 MAX (X, 0) > 5 -> X > 5 */
3859 return fold (build (GT_EXPR, type, inner, comp_const));
3861 else if (op_code == MAX_EXPR)
3862 /* MAX (X, 0) > -1 -> true */
3863 return omit_one_operand (type, integer_one_node, inner);
3865 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
3866 /* MIN (X, 0) > 0 -> false
3867 MIN (X, 0) > 5 -> false */
3868 return omit_one_operand (type, integer_zero_node, inner);
3870 else
3871 /* MIN (X, 0) > -1 -> X > -1 */
3872 return fold (build (GT_EXPR, type, inner, comp_const));
3874 default:
3875 return t;
3879 /* T is an integer expression that is being multiplied by, divided by, or
3880 taken modulo a constant C (CODE says which operation and what kind of
3881 divide or modulus). See if we can eliminate that operation by folding it
3882 with other operations already in T. WIDE_TYPE, if non-null, is a type
3883 that should be used for the computation if wider than our type.
3885 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
3886 (X * 2) + (Y * 4). We must, however, be assured that either the original
3887 expression would not overflow or that overflow is undefined for the type
3888 in the language in question.
3890 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
3891 the machine has a multiply-accumulate insn or that this is part of an
3892 addressing calculation.
3894 If we return a non-null expression, it is an equivalent form of the
3895 original computation, but need not be in the original type. */
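/* Worked example (added commentary, not in the original source):
   extract_muldiv on T = (X * 8) + (Y * 16) with C = 4 and CODE =
   TRUNC_DIV_EXPR recurses through the PLUS_EXPR case below, strips a
   factor of 4 from each MULT_EXPR, and rebuilds (X * 2) + (Y * 4);
   had one operand not been divisible by 4, the multiple_of_p tests
   would have rejected the transformation.  */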
3897 static tree
3898 extract_muldiv (t, c, code, wide_type)
3899 tree t;
3900 tree c;
3901 enum tree_code code;
3902 tree wide_type;
3904 tree type = TREE_TYPE (t);
3905 enum tree_code tcode = TREE_CODE (t);
3906 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
3907 > GET_MODE_SIZE (TYPE_MODE (type)))
3908 ? wide_type : type);
3909 tree t1, t2;
3910 int same_p = tcode == code;
3911 tree op0 = NULL_TREE, op1 = NULL_TREE;
3913 /* Don't deal with constants of zero here; they confuse the code below. */
3914 if (integer_zerop (c))
3915 return NULL_TREE;
3917 if (TREE_CODE_CLASS (tcode) == '1')
3918 op0 = TREE_OPERAND (t, 0);
3920 if (TREE_CODE_CLASS (tcode) == '2')
3921 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
3923 /* Note that we need not handle conditional operations here since fold
3924 already handles those cases. So just do arithmetic here. */
3925 switch (tcode)
3927 case INTEGER_CST:
3928 /* For a constant, we can always simplify if we are a multiply
3929 or (for divide and modulus) if it is a multiple of our constant. */
3930 if (code == MULT_EXPR
3931 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
3932 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
3933 break;
3935 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
3936 /* If op0 is an expression, and is unsigned, and the type is
3937 smaller than ctype, then we cannot widen the expression. */
3938 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
3939 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
3940 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
3941 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
3942 && TREE_UNSIGNED (TREE_TYPE (op0))
3943 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3944 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
3945 && (GET_MODE_SIZE (TYPE_MODE (ctype))
3946 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
3947 break;
3949 /* Pass the constant down and see if we can make a simplification. If
3950 we can, replace this expression with the inner simplification for
3951 possible later conversion to our or some other type. */
3952 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
3953 code == MULT_EXPR ? ctype : NULL_TREE)))
3954 return t1;
3955 break;
3957 case NEGATE_EXPR: case ABS_EXPR:
3958 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
3959 return fold (build1 (tcode, ctype, convert (ctype, t1)));
3960 break;
3962 case MIN_EXPR: case MAX_EXPR:
3963 /* If widening the type changes the signedness, then we can't perform
3964 this optimization as that changes the result. */
3965 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
3966 break;
3968 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
3969 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
3970 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
3972 if (tree_int_cst_sgn (c) < 0)
3973 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
3975 return fold (build (tcode, ctype, convert (ctype, t1),
3976 convert (ctype, t2)));
3978 break;
3980 case WITH_RECORD_EXPR:
3981 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
3982 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
3983 TREE_OPERAND (t, 1));
3984 break;
3986 case SAVE_EXPR:
3987 /* If this has not been evaluated and the operand has no side effects,
3988 we can see if we can do something inside it and make a new one.
3989 Note that this test is overly conservative since we can do this
3990 if the only reason it had side effects is that it was another
3991 similar SAVE_EXPR, but that isn't worth bothering with. */
3992 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
3993 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
3994 wide_type)))
3996 t1 = save_expr (t1);
3997 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
3998 SAVE_EXPR_PERSISTENT_P (t1) = 1;
3999 if (is_pending_size (t))
4000 put_pending_size (t1);
4001 return t1;
4003 break;
4005 case LSHIFT_EXPR: case RSHIFT_EXPR:
4006 /* If the second operand is constant, this is a multiplication
4007 or floor division by a power of two, so we can treat it that
4008 way unless the multiplier or divisor overflows. */
4009 if (TREE_CODE (op1) == INTEGER_CST
4010 /* const_binop may not detect overflow correctly,
4011 so check for it explicitly here. */
4012 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4013 && TREE_INT_CST_HIGH (op1) == 0
4014 && 0 != (t1 = convert (ctype,
4015 const_binop (LSHIFT_EXPR, size_one_node,
4016 op1, 0)))
4017 && ! TREE_OVERFLOW (t1))
4018 return extract_muldiv (build (tcode == LSHIFT_EXPR
4019 ? MULT_EXPR : FLOOR_DIV_EXPR,
4020 ctype, convert (ctype, op0), t1),
4021 c, code, wide_type);
4022 break;
4024 case PLUS_EXPR: case MINUS_EXPR:
4025 /* See if we can eliminate the operation on both sides. If we can, we
4026 can return a new PLUS or MINUS. If we can't, the only remaining
4027 cases where we can do anything are if the second operand is a
4028 constant. */
4029 t1 = extract_muldiv (op0, c, code, wide_type);
4030 t2 = extract_muldiv (op1, c, code, wide_type);
4031 if (t1 != 0 && t2 != 0
4032 && (code == MULT_EXPR
4033 /* If not multiplication, we can only do this if either operand
4034 is divisible by c. */
4035 || multiple_of_p (ctype, op0, c)
4036 || multiple_of_p (ctype, op1, c)))
4037 return fold (build (tcode, ctype, convert (ctype, t1),
4038 convert (ctype, t2)));
4040 /* If this was a subtraction, negate OP1 and set it to be an addition.
4041 This simplifies the logic below. */
4042 if (tcode == MINUS_EXPR)
4043 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4045 if (TREE_CODE (op1) != INTEGER_CST)
4046 break;
4048 /* If either OP1 or C are negative, this optimization is not safe for
4049 some of the division and remainder types while for others we need
4050 to change the code. */
4051 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4053 if (code == CEIL_DIV_EXPR)
4054 code = FLOOR_DIV_EXPR;
4055 else if (code == FLOOR_DIV_EXPR)
4056 code = CEIL_DIV_EXPR;
4057 else if (code != MULT_EXPR
4058 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4059 break;
4062 /* If it's a multiply or a division/modulus operation of a multiple
4063 of our constant, do the operation and verify it doesn't overflow. */
4064 if (code == MULT_EXPR
4065 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4067 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4068 if (op1 == 0 || TREE_OVERFLOW (op1))
4069 break;
4071 else
4072 break;
4074 /* If we have an unsigned type that is not a sizetype, we cannot widen
4075 the operation since it will change the result if the original
4076 computation overflowed. */
4077 if (TREE_UNSIGNED (ctype)
4078 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4079 && ctype != type)
4080 break;
4082 /* If we were able to eliminate our operation from the first side,
4083 apply our operation to the second side and reform the PLUS. */
4084 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4085 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4087 /* The last case is if we are a multiply. In that case, we can
4088 apply the distributive law to commute the multiply and addition
4089 if the multiplication of the constants doesn't overflow. */
4090 if (code == MULT_EXPR)
4091 return fold (build (tcode, ctype, fold (build (code, ctype,
4092 convert (ctype, op0),
4093 convert (ctype, c))),
4094 op1));
4096 break;
4098 case MULT_EXPR:
4099 /* We have a special case here if we are doing something like
4100 (C * 8) % 4 since we know that's zero. */
4101 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4102 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4103 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4104 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4105 return omit_one_operand (type, integer_zero_node, op0);
4107 /* ... fall through ... */
4109 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4110 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4111 /* If we can extract our operation from the LHS, do so and return a
4112 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4113 do something only if the second operand is a constant. */
4114 if (same_p
4115 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4116 return fold (build (tcode, ctype, convert (ctype, t1),
4117 convert (ctype, op1)));
4118 else if (tcode == MULT_EXPR && code == MULT_EXPR
4119 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4120 return fold (build (tcode, ctype, convert (ctype, op0),
4121 convert (ctype, t1)));
4122 else if (TREE_CODE (op1) != INTEGER_CST)
4123 return 0;
4125 /* If these are the same operation types, we can associate them
4126 assuming no overflow. */
4127 if (tcode == code
4128 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4129 convert (ctype, c), 0))
4130 && ! TREE_OVERFLOW (t1))
4131 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4133 /* If these operations "cancel" each other, we have the main
4134 optimizations of this pass, which occur when either constant is a
4135 multiple of the other, in which case we replace this with either an
4136 operation of CODE or TCODE.
4138 If we have an unsigned type that is not a sizetype, we cannot do
4139 this since it will change the result if the original computation
4140 overflowed. */
4141 if ((! TREE_UNSIGNED (ctype)
4142 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4143 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4144 || (tcode == MULT_EXPR
4145 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4146 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4148 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4149 return fold (build (tcode, ctype, convert (ctype, op0),
4150 convert (ctype,
4151 const_binop (TRUNC_DIV_EXPR,
4152 op1, c, 0))));
4153 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4154 return fold (build (code, ctype, convert (ctype, op0),
4155 convert (ctype,
4156 const_binop (TRUNC_DIV_EXPR,
4157 c, op1, 0))));
4159 break;
4161 default:
4162 break;
4165 return 0;
4168 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4169 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4170 that we may sometimes modify the tree. */
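/* Illustrative sketch (added commentary, not in the original source):
   if an arm of the COND_EXPR built by fold_binary_op_with_conditional_arg
   folded to `(void) S, 1' -- the shape omit_one_operand produces -- the
   `(void) S' is redundant because the caller evaluates S up front, so
   this returns just `1' for that arm.  */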
4172 static tree
4173 strip_compound_expr (t, s)
4174 tree t;
4175 tree s;
4177 enum tree_code code = TREE_CODE (t);
4179 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4180 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4181 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4182 return TREE_OPERAND (t, 1);
4184 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4185 don't bother handling any other types. */
4186 else if (code == COND_EXPR)
4188 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4189 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4190 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4192 else if (TREE_CODE_CLASS (code) == '1')
4193 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4194 else if (TREE_CODE_CLASS (code) == '<'
4195 || TREE_CODE_CLASS (code) == '2')
4197 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4198 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4201 return t;
4204 /* Return a node which has the indicated constant VALUE (either 0 or
4205 1), and is of the indicated TYPE. */
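/* Examples (added commentary, not in the original source):
   constant_boolean_node (1, integer_type_node) is simply
   integer_one_node, while for, say, an enumeral TYPE a fresh
   INTEGER_CST carrying that exact type is built with build_int_2.  */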
4207 static tree
4208 constant_boolean_node (value, type)
4209 int value;
4210 tree type;
4212 if (type == integer_type_node)
4213 return value ? integer_one_node : integer_zero_node;
4214 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4215 return truthvalue_conversion (value ? integer_one_node :
4216 integer_zero_node);
4217 else
4219 tree t = build_int_2 (value, 0);
4221 TREE_TYPE (t) = type;
4222 return t;
4226 /* Utility function for the following routine, to see how complex a nesting of
4227 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4228 we don't care (to avoid spending too much time on complex expressions). */
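/* Example (added commentary, not in the original source): for
   `a ? (b ? x : y) : z' this returns 2 -- one for the outer COND_EXPR
   plus one for the nested arm -- comfortably under the limit of 25
   used by fold below.  */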
4230 static int
4231 count_cond (expr, lim)
4232 tree expr;
4233 int lim;
4235 int ctrue, cfalse;
4237 if (TREE_CODE (expr) != COND_EXPR)
4238 return 0;
4239 else if (lim <= 0)
4240 return 0;
4242 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4243 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4244 return MIN (lim, 1 + ctrue + cfalse);
4247 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4248 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4249 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4250 expression, and ARG to `a'. If COND_FIRST_P is non-zero, then the
4251 COND is the first argument to CODE; otherwise (as in the example
4252 given here), it is the second argument. TYPE is the type of the
4253 original expression. */
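/* Sketch (added commentary, not in the original source): with CODE =
   PLUS_EXPR, COND = `b ? x : y', ARG = `a' and COND_FIRST_P = 0, the
   code below produces `b ? (a + x) : (a + y)', wrapping `a' in a
   SAVE_EXPR first when it is complex enough that folding neither arm
   to a constant succeeds.  */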
4255 static tree
4256 fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4257 enum tree_code code;
4258 tree type;
4259 tree cond;
4260 tree arg;
4261 int cond_first_p;
4263 tree test, true_value, false_value;
4264 tree lhs = NULL_TREE;
4265 tree rhs = NULL_TREE;
4266 /* In the end, we'll produce a COND_EXPR. Both arms of the
4267 conditional expression will be binary operations. The left-hand
4268 side of the expression to be executed if the condition is true
4269 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4270 of the expression to be executed if the condition is true will be
4271 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4272 but apply to the expression to be executed if the conditional is
4273 false. */
4274 tree *true_lhs;
4275 tree *true_rhs;
4276 tree *false_lhs;
4277 tree *false_rhs;
4278 /* These are the codes to use for the left-hand side and right-hand
4279 side of the COND_EXPR. Normally, they are the same as CODE. */
4280 enum tree_code lhs_code = code;
4281 enum tree_code rhs_code = code;
4282 /* And these are the types of the expressions. */
4283 tree lhs_type = type;
4284 tree rhs_type = type;
4286 if (cond_first_p)
4288 true_rhs = false_rhs = &arg;
4289 true_lhs = &true_value;
4290 false_lhs = &false_value;
4292 else
4294 true_lhs = false_lhs = &arg;
4295 true_rhs = &true_value;
4296 false_rhs = &false_value;
4299 if (TREE_CODE (cond) == COND_EXPR)
4301 test = TREE_OPERAND (cond, 0);
4302 true_value = TREE_OPERAND (cond, 1);
4303 false_value = TREE_OPERAND (cond, 2);
4304 /* If this operand is an expression that throws, then it does not make
4305 sense to try to perform a logical or arithmetic operation
4306 involving it. Instead of building `a + throw 3' for example,
4307 we simply build `a, throw 3'. */
4308 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4310 lhs_code = COMPOUND_EXPR;
4311 if (!cond_first_p)
4312 lhs_type = void_type_node;
4314 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4316 rhs_code = COMPOUND_EXPR;
4317 if (!cond_first_p)
4318 rhs_type = void_type_node;
4321 else
4323 tree testtype = TREE_TYPE (cond);
4324 test = cond;
4325 true_value = convert (testtype, integer_one_node);
4326 false_value = convert (testtype, integer_zero_node);
4329 /* If ARG is complex we want to make sure we only evaluate
4330 it once. Though this is only required if it is volatile, it
4331 might be more efficient even if it is not. However, if we
4332 succeed in folding one part to a constant, we do not need
4333 to make this SAVE_EXPR. Since we do this optimization
4334 primarily to see if we do end up with a constant and this
4335 SAVE_EXPR interferes with later optimizations, suppressing
4336 it when we can is important.
4338 If we are not in a function, we can't make a SAVE_EXPR, so don't
4339 try to do so. Don't try to see if the result is a constant
4340 if an arm is a COND_EXPR since we get exponential behavior
4341 in that case. */
4343 if (TREE_CODE (arg) != SAVE_EXPR && ! TREE_CONSTANT (arg)
4344 && global_bindings_p () == 0
4345 && ((TREE_CODE (arg) != VAR_DECL
4346 && TREE_CODE (arg) != PARM_DECL)
4347 || TREE_SIDE_EFFECTS (arg)))
4349 if (TREE_CODE (true_value) != COND_EXPR)
4350 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4352 if (TREE_CODE (false_value) != COND_EXPR)
4353 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4355 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4356 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4357 arg = save_expr (arg), lhs = rhs = 0;
4360 if (lhs == 0)
4361 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4362 if (rhs == 0)
4363 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4365 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4367 if (TREE_CODE (arg) == SAVE_EXPR)
4368 return build (COMPOUND_EXPR, type,
4369 convert (void_type_node, arg),
4370 strip_compound_expr (test, arg));
4371 else
4372 return convert (type, test);
4376 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4378 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4379 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4380 ADDEND is the same as X.
4382 X + 0 and X - 0 both give X when X is NaN, infinite, or non-zero
4383 and finite. The problematic cases are when X is zero, and its mode
4384 has signed zeros. In the case of rounding towards -infinity,
4385 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4386 modes, X + 0 is not the same as X because -0 + 0 is 0. */
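/* Worked cases (added commentary, not in the original source): when
   signed zeros are honored, `x + 0.0' is never folded (for x == -0.0
   the sum is +0.0), while `x - 0.0' folds unless sign-dependent
   rounding is in effect; `x + (-0.0)' is handled like `x - 0.0' via
   the negate flip below.  */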
4388 static bool
4389 fold_real_zero_addition_p (type, addend, negate)
4390 tree type, addend;
4391 int negate;
4393 if (!real_zerop (addend))
4394 return false;
4396 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4397 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4398 return true;
4400 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4401 if (TREE_CODE (addend) == REAL_CST
4402 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4403 negate = !negate;
4405 /* The mode has signed zeros, and we have to honor their sign.
4406 In this situation, there is only one case we can return true for.
4407 X - 0 is the same as X unless rounding towards -infinity is
4408 supported. */
4409 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4413 /* Perform constant folding and related simplification of EXPR.
4414 The related simplifications include x*1 => x, x*0 => 0, etc.,
4415 and application of the associative law.
4416 NOP_EXPR conversions may be removed freely (as long as we
4417 are careful not to change the C type of the overall expression).
4418 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4419 but we can constant-fold them if they have constant operands. */
4421 tree
4422 fold (expr)
4423 tree expr;
4425 tree t = expr;
4426 tree t1 = NULL_TREE;
4427 tree tem;
4428 tree type = TREE_TYPE (expr);
4429 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4430 enum tree_code code = TREE_CODE (t);
4431 int kind = TREE_CODE_CLASS (code);
4432 int invert;
4433 /* WINS will be nonzero when the switch is done
4434 if all operands are constant. */
4435 int wins = 1;
4437 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4438 Likewise for a SAVE_EXPR that's already been evaluated. */
4439 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4440 return t;
4442 /* Return right away if a constant. */
4443 if (kind == 'c')
4444 return t;
4446 #ifdef MAX_INTEGER_COMPUTATION_MODE
4447 check_max_integer_computation_mode (expr);
4448 #endif
4450 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4452 tree subop;
4454 /* Special case for conversion ops that can have fixed point args. */
4455 arg0 = TREE_OPERAND (t, 0);
4457 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4458 if (arg0 != 0)
4459 STRIP_SIGN_NOPS (arg0);
4461 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4462 subop = TREE_REALPART (arg0);
4463 else
4464 subop = arg0;
4466 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4467 && TREE_CODE (subop) != REAL_CST
4469 /* Note that TREE_CONSTANT isn't enough:
4470 static var addresses are constant but we can't
4471 do arithmetic on them. */
4472 wins = 0;
4474 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4476 int len = first_rtl_op (code);
4477 int i;
4478 for (i = 0; i < len; i++)
4480 tree op = TREE_OPERAND (t, i);
4481 tree subop;
4483 if (op == 0)
4484 continue; /* Valid for CALL_EXPR, at least. */
4486 if (kind == '<' || code == RSHIFT_EXPR)
4488 /* Signedness matters here. Perhaps we can refine this
4489 later. */
4490 STRIP_SIGN_NOPS (op);
4492 else
4493 /* Strip any conversions that don't change the mode. */
4494 STRIP_NOPS (op);
4496 if (TREE_CODE (op) == COMPLEX_CST)
4497 subop = TREE_REALPART (op);
4498 else
4499 subop = op;
4501 if (TREE_CODE (subop) != INTEGER_CST
4502 && TREE_CODE (subop) != REAL_CST)
4503 /* Note that TREE_CONSTANT isn't enough:
4504 static var addresses are constant but we can't
4505 do arithmetic on them. */
4506 wins = 0;
4508 if (i == 0)
4509 arg0 = op;
4510 else if (i == 1)
4511 arg1 = op;
4515 /* If this is a commutative operation, and ARG0 is a constant, move it
4516 to ARG1 to reduce the number of tests below. */
4517 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4518 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4519 || code == BIT_AND_EXPR)
4520 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4522 tem = arg0; arg0 = arg1; arg1 = tem;
4524 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4525 TREE_OPERAND (t, 1) = tem;
4528 /* Now WINS is set as described above,
4529 ARG0 is the first operand of EXPR,
4530 and ARG1 is the second operand (if it has more than one operand).
4532 First check for cases where an arithmetic operation is applied to a
4533 compound, conditional, or comparison operation. Push the arithmetic
4534 operation inside the compound or conditional to see if any folding
4535 can then be done. Convert comparison to conditional for this purpose.
4536 This also optimizes non-constant cases that used to be done in
4537 expand_expr.
4539 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
4540 one of the operands is a comparison and the other is a comparison, a
4541 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4542 code below would make the expression more complex. Change it to a
4543 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4544 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
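/* E.g. (added commentary, not in the original source):
   `(a < b) & (c != d)' becomes a TRUTH_AND_EXPR, `(a < b) != (c != d)'
   becomes a TRUTH_XOR_EXPR, and `(a < b) == (c != d)' becomes the
   inversion of that TRUTH_XOR_EXPR.  */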
4546 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4547 || code == EQ_EXPR || code == NE_EXPR)
4548 && ((truth_value_p (TREE_CODE (arg0))
4549 && (truth_value_p (TREE_CODE (arg1))
4550 || (TREE_CODE (arg1) == BIT_AND_EXPR
4551 && integer_onep (TREE_OPERAND (arg1, 1)))))
4552 || (truth_value_p (TREE_CODE (arg1))
4553 && (truth_value_p (TREE_CODE (arg0))
4554 || (TREE_CODE (arg0) == BIT_AND_EXPR
4555 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4557 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4558 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4559 : TRUTH_XOR_EXPR,
4560 type, arg0, arg1));
4562 if (code == EQ_EXPR)
4563 t = invert_truthvalue (t);
4565 return t;
4568 if (TREE_CODE_CLASS (code) == '1')
4570 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4571 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4572 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4573 else if (TREE_CODE (arg0) == COND_EXPR)
4575 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4576 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4577 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4579 /* If this was a conversion, and all we did was to move it
4580 inside the COND_EXPR, bring it back out. But leave it if
4581 it is a conversion from integer to integer and the
4582 result precision is no wider than a word since such a
4583 conversion is cheap and may be optimized away by combine,
4584 while it couldn't if it were outside the COND_EXPR. Then return
4585 so we don't get into an infinite recursion loop taking the
4586 conversion out and then back in. */
4588 if ((code == NOP_EXPR || code == CONVERT_EXPR
4589 || code == NON_LVALUE_EXPR)
4590 && TREE_CODE (t) == COND_EXPR
4591 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4592 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4593 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4594 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4595 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4596 && (INTEGRAL_TYPE_P
4597 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4598 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4599 t = build1 (code, type,
4600 build (COND_EXPR,
4601 TREE_TYPE (TREE_OPERAND
4602 (TREE_OPERAND (t, 1), 0)),
4603 TREE_OPERAND (t, 0),
4604 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4605 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4606 return t;
4608 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4609 return fold (build (COND_EXPR, type, arg0,
4610 fold (build1 (code, type, integer_one_node)),
4611 fold (build1 (code, type, integer_zero_node))));
4613 else if (TREE_CODE_CLASS (code) == '2'
4614 || TREE_CODE_CLASS (code) == '<')
4616 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4617 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4618 fold (build (code, type,
4619 arg0, TREE_OPERAND (arg1, 1))));
4620 else if ((TREE_CODE (arg1) == COND_EXPR
4621 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4622 && TREE_CODE_CLASS (code) != '<'))
4623 && (TREE_CODE (arg0) != COND_EXPR
4624 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4625 && (! TREE_SIDE_EFFECTS (arg0)
4626 || (global_bindings_p () == 0
4627 && ! contains_placeholder_p (arg0))))
4628 return
4629 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4630 /*cond_first_p=*/0);
4631 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4632 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4633 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4634 else if ((TREE_CODE (arg0) == COND_EXPR
4635 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4636 && TREE_CODE_CLASS (code) != '<'))
4637 && (TREE_CODE (arg1) != COND_EXPR
4638 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4639 && (! TREE_SIDE_EFFECTS (arg1)
4640 || (global_bindings_p () == 0
4641 && ! contains_placeholder_p (arg1))))
4642 return
4643 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4644 /*cond_first_p=*/1);
4646 else if (TREE_CODE_CLASS (code) == '<'
4647 && TREE_CODE (arg0) == COMPOUND_EXPR)
4648 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4649 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4650 else if (TREE_CODE_CLASS (code) == '<'
4651 && TREE_CODE (arg1) == COMPOUND_EXPR)
4652 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4653 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4655 switch (code)
4657 case INTEGER_CST:
4658 case REAL_CST:
4659 case VECTOR_CST:
4660 case STRING_CST:
4661 case COMPLEX_CST:
4662 case CONSTRUCTOR:
4663 return t;
4665 case CONST_DECL:
4666 return fold (DECL_INITIAL (t));
4668 case NOP_EXPR:
4669 case FLOAT_EXPR:
4670 case CONVERT_EXPR:
4671 case FIX_TRUNC_EXPR:
4672 /* Other kinds of FIX are not handled properly by fold_convert. */
4674 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4675 return TREE_OPERAND (t, 0);
4677 /* Handle cases of two conversions in a row. */
4678 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4679 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4681 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4682 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4683 tree final_type = TREE_TYPE (t);
4684 int inside_int = INTEGRAL_TYPE_P (inside_type);
4685 int inside_ptr = POINTER_TYPE_P (inside_type);
4686 int inside_float = FLOAT_TYPE_P (inside_type);
4687 unsigned int inside_prec = TYPE_PRECISION (inside_type);
4688 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4689 int inter_int = INTEGRAL_TYPE_P (inter_type);
4690 int inter_ptr = POINTER_TYPE_P (inter_type);
4691 int inter_float = FLOAT_TYPE_P (inter_type);
4692 unsigned int inter_prec = TYPE_PRECISION (inter_type);
4693 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4694 int final_int = INTEGRAL_TYPE_P (final_type);
4695 int final_ptr = POINTER_TYPE_P (final_type);
4696 int final_float = FLOAT_TYPE_P (final_type);
4697 unsigned int final_prec = TYPE_PRECISION (final_type);
4698 int final_unsignedp = TREE_UNSIGNED (final_type);
4700 /* In addition to the cases of two conversions in a row
4701 handled below, if we are converting something to its own
4702 type via an object of identical or wider precision, neither
4703 conversion is needed. */
4704 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4705 && ((inter_int && final_int) || (inter_float && final_float))
4706 && inter_prec >= final_prec)
4707 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4709 /* Likewise, if the intermediate and final types are either both
4710 float or both integer, we don't need the middle conversion if
4711 it is wider than the final type and doesn't change the signedness
4712 (for integers). Avoid this if the final type is a pointer
4713 since then we sometimes need the inner conversion. Likewise if
4714 the outer has a precision not equal to the size of its mode. */
4715 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4716 || (inter_float && inside_float))
4717 && inter_prec >= inside_prec
4718 && (inter_float || inter_unsignedp == inside_unsignedp)
4719 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4720 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4721 && ! final_ptr)
4722 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4724 /* If we have a sign-extension of a zero-extended value, we can
4725 replace that by a single zero-extension. */
4726 if (inside_int && inter_int && final_int
4727 && inside_prec < inter_prec && inter_prec < final_prec
4728 && inside_unsignedp && !inter_unsignedp)
4729 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4731 /* Two conversions in a row are not needed unless:
4732 - some conversion is floating-point (overstrict for now), or
4733 - the intermediate type is narrower than both initial and
4734 final, or
4735 - the intermediate type and innermost type differ in signedness,
4736 and the outermost type is wider than the intermediate, or
4737 - the initial type is a pointer type and the precisions of the
4738 intermediate and final types differ, or
4739 - the final type is a pointer type and the precisions of the
4740 initial and intermediate types differ. */
4741 if (! inside_float && ! inter_float && ! final_float
4742 && (inter_prec > inside_prec || inter_prec > final_prec)
4743 && ! (inside_int && inter_int
4744 && inter_unsignedp != inside_unsignedp
4745 && inter_prec < final_prec)
4746 && ((inter_unsignedp && inter_prec > inside_prec)
4747 == (final_unsignedp && final_prec > inter_prec))
4748 && ! (inside_ptr && inter_prec != final_prec)
4749 && ! (final_ptr && inside_prec != inter_prec)
4750 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4751 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4752 && ! final_ptr)
4753 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
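/* Illustrative cases (added commentary, not in the original source):
   (long) (int) s for a short `s' drops the inner conversion by the
   rule above, since the intermediate type is wider and agrees in
   signedness, whereas (int) (unsigned short) sc for a signed char `sc'
   keeps both conversions, because the intermediate zero-extension
   changes the value.  */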
4756 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4757 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4758 /* Detect assigning a bitfield. */
4759 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4760 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4762 /* Don't leave an assignment inside a conversion
4763 unless assigning a bitfield. */
4764 tree prev = TREE_OPERAND (t, 0);
4765 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4766 /* First do the assignment, then return converted constant. */
4767 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4768 TREE_USED (t) = 1;
4769 return t;
4771 if (!wins)
4773 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4774 return t;
4776 return fold_convert (t, arg0);
4778 case VIEW_CONVERT_EXPR:
4779 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
4780 return build1 (VIEW_CONVERT_EXPR, type,
4781 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4782 return t;
4784 #if 0 /* This loses on &"foo"[0]. */
4785 case ARRAY_REF:
4787 int i;
4789 /* Fold an expression like: "foo"[2] */
4790 if (TREE_CODE (arg0) == STRING_CST
4791 && TREE_CODE (arg1) == INTEGER_CST
4792 && compare_tree_int (arg1, TREE_STRING_LENGTH (arg0)) < 0)
4794 t = build_int_2 (TREE_STRING_POINTER (arg0)[TREE_INT_CST_LOW (arg1)], 0);
4795 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
4796 force_fit_type (t, 0);
4799 return t;
4800 #endif /* 0 */
4802 case COMPONENT_REF:
4803 if (TREE_CODE (arg0) == CONSTRUCTOR)
4805 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4806 if (m)
4807 t = TREE_VALUE (m);
4809 return t;
4811 case RANGE_EXPR:
4812 TREE_CONSTANT (t) = wins;
4813 return t;
4815 case NEGATE_EXPR:
4816 if (wins)
4818 if (TREE_CODE (arg0) == INTEGER_CST)
4820 unsigned HOST_WIDE_INT low;
4821 HOST_WIDE_INT high;
4822 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4823 TREE_INT_CST_HIGH (arg0),
4824 &low, &high);
4825 t = build_int_2 (low, high);
4826 TREE_TYPE (t) = type;
4827 TREE_OVERFLOW (t)
4828 = (TREE_OVERFLOW (arg0)
4829 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4830 TREE_CONSTANT_OVERFLOW (t)
4831 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4833 else if (TREE_CODE (arg0) == REAL_CST)
4834 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4836 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4837 return TREE_OPERAND (arg0, 0);
4839 /* Convert - (a - b) to (b - a) for non-floating-point. */
4840 else if (TREE_CODE (arg0) == MINUS_EXPR
4841 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
4842 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4843 TREE_OPERAND (arg0, 0));
4845 return t;
4847 case ABS_EXPR:
4848 if (wins)
4850 if (TREE_CODE (arg0) == INTEGER_CST)
4852 /* If the value is unsigned, then the absolute value is
4853 the same as the ordinary value. */
4854 if (TREE_UNSIGNED (type))
4855 return arg0;
4856 /* Similarly, if the value is non-negative. */
4857 else if (INT_CST_LT (integer_minus_one_node, arg0))
4858 return arg0;
4859 /* If the value is negative, then the absolute value is
4860 its negation. */
4861 else
4863 unsigned HOST_WIDE_INT low;
4864 HOST_WIDE_INT high;
4865 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4866 TREE_INT_CST_HIGH (arg0),
4867 &low, &high);
4868 t = build_int_2 (low, high);
4869 TREE_TYPE (t) = type;
4870 TREE_OVERFLOW (t)
4871 = (TREE_OVERFLOW (arg0)
4872 | force_fit_type (t, overflow));
4873 TREE_CONSTANT_OVERFLOW (t)
4874 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4877 else if (TREE_CODE (arg0) == REAL_CST)
4879 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4880 t = build_real (type,
4881 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4884 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4885 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4886 return t;
4888 case CONJ_EXPR:
4889 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4890 return convert (type, arg0);
4891 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4892 return build (COMPLEX_EXPR, type,
4893 TREE_OPERAND (arg0, 0),
4894 negate_expr (TREE_OPERAND (arg0, 1)));
4895 else if (TREE_CODE (arg0) == COMPLEX_CST)
4896 return build_complex (type, TREE_REALPART (arg0),
4897 negate_expr (TREE_IMAGPART (arg0)));
4898 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4899 return fold (build (TREE_CODE (arg0), type,
4900 fold (build1 (CONJ_EXPR, type,
4901 TREE_OPERAND (arg0, 0))),
4902 fold (build1 (CONJ_EXPR,
4903 type, TREE_OPERAND (arg0, 1)))));
4904 else if (TREE_CODE (arg0) == CONJ_EXPR)
4905 return TREE_OPERAND (arg0, 0);
4906 return t;
4908 case BIT_NOT_EXPR:
4909 if (wins)
4911 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4912 ~ TREE_INT_CST_HIGH (arg0));
4913 TREE_TYPE (t) = type;
4914 force_fit_type (t, 0);
4915 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4916 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4918 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4919 return TREE_OPERAND (arg0, 0);
4920 return t;
4922 case PLUS_EXPR:
4923 /* A + (-B) -> A - B */
4924 if (TREE_CODE (arg1) == NEGATE_EXPR)
4925 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4926 /* (-A) + B -> B - A */
4927 if (TREE_CODE (arg0) == NEGATE_EXPR)
4928 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
4929 else if (! FLOAT_TYPE_P (type))
4931 if (integer_zerop (arg1))
4932 return non_lvalue (convert (type, arg0));
4934 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4935 with a constant, and the two constants have no bits in common,
4936 we should treat this as a BIT_IOR_EXPR since this may produce more
4937 simplifications. */
4938 if (TREE_CODE (arg0) == BIT_AND_EXPR
4939 && TREE_CODE (arg1) == BIT_AND_EXPR
4940 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4941 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
4942 && integer_zerop (const_binop (BIT_AND_EXPR,
4943 TREE_OPERAND (arg0, 1),
4944 TREE_OPERAND (arg1, 1), 0)))
4946 code = BIT_IOR_EXPR;
4947 goto bit_ior;
4950 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
4951 (plus (plus (mult) (mult)) (foo)) so that we can
4952 take advantage of the factoring cases below. */
4953 if ((TREE_CODE (arg0) == PLUS_EXPR
4954 && TREE_CODE (arg1) == MULT_EXPR)
4955 || (TREE_CODE (arg1) == PLUS_EXPR
4956 && TREE_CODE (arg0) == MULT_EXPR))
4958 tree parg0, parg1, parg, marg;
4960 if (TREE_CODE (arg0) == PLUS_EXPR)
4961 parg = arg0, marg = arg1;
4962 else
4963 parg = arg1, marg = arg0;
4964 parg0 = TREE_OPERAND (parg, 0);
4965 parg1 = TREE_OPERAND (parg, 1);
4966 STRIP_NOPS (parg0);
4967 STRIP_NOPS (parg1);
4969 if (TREE_CODE (parg0) == MULT_EXPR
4970 && TREE_CODE (parg1) != MULT_EXPR)
4971 return fold (build (PLUS_EXPR, type,
4972 fold (build (PLUS_EXPR, type, parg0, marg)),
4973 parg1));
4974 if (TREE_CODE (parg0) != MULT_EXPR
4975 && TREE_CODE (parg1) == MULT_EXPR)
4976 return fold (build (PLUS_EXPR, type,
4977 fold (build (PLUS_EXPR, type, parg1, marg)),
4978 parg0));
4981 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
4983 tree arg00, arg01, arg10, arg11;
4984 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
4986 /* (A * C) + (B * C) -> (A+B) * C.
4987 We are most concerned about the case where C is a constant,
4988 but other combinations show up during loop reduction. Since
4989 it is not difficult, try all four possibilities. */
4991 arg00 = TREE_OPERAND (arg0, 0);
4992 arg01 = TREE_OPERAND (arg0, 1);
4993 arg10 = TREE_OPERAND (arg1, 0);
4994 arg11 = TREE_OPERAND (arg1, 1);
4995 same = NULL_TREE;
4997 if (operand_equal_p (arg01, arg11, 0))
4998 same = arg01, alt0 = arg00, alt1 = arg10;
4999 else if (operand_equal_p (arg00, arg10, 0))
5000 same = arg00, alt0 = arg01, alt1 = arg11;
5001 else if (operand_equal_p (arg00, arg11, 0))
5002 same = arg00, alt0 = arg01, alt1 = arg10;
5003 else if (operand_equal_p (arg01, arg10, 0))
5004 same = arg01, alt0 = arg00, alt1 = arg11;
5006 /* No identical multiplicands; see if we can find a common
5007 power-of-two factor in non-power-of-two multiplies. This
5008 can help in multi-dimensional array access. */
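/* For instance (added commentary, not in the original source):
   i * 12 + j * 4 has no common multiplicand, but 4 divides 12, so it
   is refactored below into (i * 3 + j) * 4.  */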
5009 else if (TREE_CODE (arg01) == INTEGER_CST
5010 && TREE_CODE (arg11) == INTEGER_CST
5011 && TREE_INT_CST_HIGH (arg01) == 0
5012 && TREE_INT_CST_HIGH (arg11) == 0)
5014 HOST_WIDE_INT int01, int11, tmp;
5015 int01 = TREE_INT_CST_LOW (arg01);
5016 int11 = TREE_INT_CST_LOW (arg11);
5018 /* Move min of absolute values to int11. */
5019 if ((int01 >= 0 ? int01 : -int01)
5020 < (int11 >= 0 ? int11 : -int11))
5022 tmp = int01, int01 = int11, int11 = tmp;
5023 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5024 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5027 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5029 alt0 = fold (build (MULT_EXPR, type, arg00,
5030 build_int_2 (int01 / int11, 0)));
5031 alt1 = arg10;
5032 same = arg11;
5036 if (same)
5037 return fold (build (MULT_EXPR, type,
5038 fold (build (PLUS_EXPR, type, alt0, alt1)),
5039 same));
5043 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5044 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5045 return non_lvalue (convert (type, arg0));
5047 /* Likewise if the operands are reversed. */
5048 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5049 return non_lvalue (convert (type, arg1));
5051 bit_rotate:
5052 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
5053 is a rotate of A by C1 bits. */
5054 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
5055 is a rotate of A by B bits. */
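/* Example (added commentary, not in the original source): for a 32-bit
   unsigned A, both (A << 3) + (A >> 29) and (A << b) + (A >> (32 - b))
   are rewritten as LROTATE_EXPRs below; the MINUS_EXPR checks recognize
   the variable-count form.  */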
5057 enum tree_code code0, code1;
5058 code0 = TREE_CODE (arg0);
5059 code1 = TREE_CODE (arg1);
5060 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5061 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5062 && operand_equal_p (TREE_OPERAND (arg0, 0),
5063 TREE_OPERAND (arg1, 0), 0)
5064 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5066 tree tree01, tree11;
5067 enum tree_code code01, code11;
5069 tree01 = TREE_OPERAND (arg0, 1);
5070 tree11 = TREE_OPERAND (arg1, 1);
5071 STRIP_NOPS (tree01);
5072 STRIP_NOPS (tree11);
5073 code01 = TREE_CODE (tree01);
5074 code11 = TREE_CODE (tree11);
5075 if (code01 == INTEGER_CST
5076 && code11 == INTEGER_CST
5077 && TREE_INT_CST_HIGH (tree01) == 0
5078 && TREE_INT_CST_HIGH (tree11) == 0
5079 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5080 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5081 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5082 code0 == LSHIFT_EXPR ? tree01 : tree11);
5083 else if (code11 == MINUS_EXPR)
5085 tree tree110, tree111;
5086 tree110 = TREE_OPERAND (tree11, 0);
5087 tree111 = TREE_OPERAND (tree11, 1);
5088 STRIP_NOPS (tree110);
5089 STRIP_NOPS (tree111);
5090 if (TREE_CODE (tree110) == INTEGER_CST
5091 && 0 == compare_tree_int (tree110,
5092 TYPE_PRECISION
5093 (TREE_TYPE (TREE_OPERAND
5094 (arg0, 0))))
5095 && operand_equal_p (tree01, tree111, 0))
5096 return build ((code0 == LSHIFT_EXPR
5097 ? LROTATE_EXPR
5098 : RROTATE_EXPR),
5099 type, TREE_OPERAND (arg0, 0), tree01);
5101 else if (code01 == MINUS_EXPR)
5103 tree tree010, tree011;
5104 tree010 = TREE_OPERAND (tree01, 0);
5105 tree011 = TREE_OPERAND (tree01, 1);
5106 STRIP_NOPS (tree010);
5107 STRIP_NOPS (tree011);
5108 if (TREE_CODE (tree010) == INTEGER_CST
5109 && 0 == compare_tree_int (tree010,
5110 TYPE_PRECISION
5111 (TREE_TYPE (TREE_OPERAND
5112 (arg0, 0))))
5113 && operand_equal_p (tree11, tree011, 0))
5114 return build ((code0 != LSHIFT_EXPR
5115 ? LROTATE_EXPR
5116 : RROTATE_EXPR),
5117 type, TREE_OPERAND (arg0, 0), tree11);
5122 associate:
5123 /* In most languages, we can't associate operations on floats through
5124 parentheses. Rather than remember where the parentheses were, we
5125 don't associate floats at all. It shouldn't matter much. However,
5126 associating multiplications is only very slightly inaccurate, so do
5127 that if -funsafe-math-optimizations is specified. */
5129 if (! wins
5130 && (! FLOAT_TYPE_P (type)
5131 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5133 tree var0, con0, lit0, var1, con1, lit1;
5135 /* Split both trees into variables, constants, and literals. Then
5136 associate each group together, the constants with literals,
5137 then the result with variables. This increases the chances of
5138 literals being recombined later and of generating relocatable
5139 expressions for the sum of a constant and literal. */
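/* Example (added commentary, not in the original source): for
   (x + 3) + (y + 5), split_tree yields var0 = x, lit0 = 3, var1 = y,
   lit1 = 5 -- four objects -- so the sum is reassociated below as
   (x + y) + 8.  */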
5140 var0 = split_tree (arg0, code, &con0, &lit0, 0);
5141 var1 = split_tree (arg1, code, &con1, &lit1, code == MINUS_EXPR);
5143 /* Only do something if we found more than two objects. Otherwise,
5144 nothing has changed and we risk infinite recursion. */
5145 if (2 < ((var0 != 0) + (var1 != 0) + (con0 != 0) + (con1 != 0)
5146 + (lit0 != 0) + (lit1 != 0)))
5148 var0 = associate_trees (var0, var1, code, type);
5149 con0 = associate_trees (con0, con1, code, type);
5150 lit0 = associate_trees (lit0, lit1, code, type);
5151 con0 = associate_trees (con0, lit0, code, type);
5152 return convert (type, associate_trees (var0, con0, code, type));
5156 binary:
5157 if (wins)
5158 t1 = const_binop (code, arg0, arg1, 0);
5159 if (t1 != NULL_TREE)
5161 /* The return value should always have
5162 the same type as the original expression. */
5163 if (TREE_TYPE (t1) != TREE_TYPE (t))
5164 t1 = convert (TREE_TYPE (t), t1);
5166 return t1;
5168 return t;
5170 case MINUS_EXPR:
5171 /* A - (-B) -> A + B */
5172 if (TREE_CODE (arg1) == NEGATE_EXPR)
5173 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5174 /* (-A) - CST -> (-CST) - A for floating point (what about ints ?) */
5175 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5176 return
5177 fold (build (MINUS_EXPR, type,
5178 build_real (TREE_TYPE (arg1),
5179 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5180 TREE_OPERAND (arg0, 0)));
5182 if (! FLOAT_TYPE_P (type))
5184 if (! wins && integer_zerop (arg0))
5185 return negate_expr (convert (type, arg1));
5186 if (integer_zerop (arg1))
5187 return non_lvalue (convert (type, arg0));
5189 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5190 about the case where C is a constant, just try one of the
5191 four possibilities. */
5193 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5194 && operand_equal_p (TREE_OPERAND (arg0, 1),
5195 TREE_OPERAND (arg1, 1), 0))
5196 return fold (build (MULT_EXPR, type,
5197 fold (build (MINUS_EXPR, type,
5198 TREE_OPERAND (arg0, 0),
5199 TREE_OPERAND (arg1, 0))),
5200 TREE_OPERAND (arg0, 1)));
5203 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5204 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5205 return non_lvalue (convert (type, arg0));
5207 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5208 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5209 (-ARG1 + ARG0) reduces to -ARG1. */
5210 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5211 return negate_expr (convert (type, arg1));
5213 /* Fold &x - &x. This can happen from &x.foo - &x.
5214 This is unsafe for certain floats even in non-IEEE formats.
5215 In IEEE, it is unsafe because it does wrong for NaNs.
5216 Also note that operand_equal_p is always false if an operand
5217 is volatile. */
5219 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5220 && operand_equal_p (arg0, arg1, 0))
5221 return convert (type, integer_zero_node);
5223 goto associate;
5225 case MULT_EXPR:
5226 /* (-A) * (-B) -> A * B */
5227 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5228 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5229 TREE_OPERAND (arg1, 0)));
5231 if (! FLOAT_TYPE_P (type))
5233 if (integer_zerop (arg1))
5234 return omit_one_operand (type, arg1, arg0);
5235 if (integer_onep (arg1))
5236 return non_lvalue (convert (type, arg0));
5238 /* (a * (1 << b)) is (a << b) */
5239 if (TREE_CODE (arg1) == LSHIFT_EXPR
5240 && integer_onep (TREE_OPERAND (arg1, 0)))
5241 return fold (build (LSHIFT_EXPR, type, arg0,
5242 TREE_OPERAND (arg1, 1)));
5243 if (TREE_CODE (arg0) == LSHIFT_EXPR
5244 && integer_onep (TREE_OPERAND (arg0, 0)))
5245 return fold (build (LSHIFT_EXPR, type, arg1,
5246 TREE_OPERAND (arg0, 1)));
5248 if (TREE_CODE (arg1) == INTEGER_CST
5249 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5250 code, NULL_TREE)))
5251 return convert (type, tem);
5254 else
5256 /* Maybe fold x * 0 to 0. The expressions aren't the same
5257 when x is NaN, since x * 0 is also NaN. Nor are they the
5258 same in modes with signed zeros, since multiplying a
5259 negative value by 0 gives -0, not +0. */
5260 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5261 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5262 && real_zerop (arg1))
5263 return omit_one_operand (type, arg1, arg0);
5264 /* In IEEE floating point, x*1 is not equivalent to x for snans.
5265 However, ANSI says we can drop signals,
5266 so we can do this anyway. */
5267 if (real_onep (arg1))
5268 return non_lvalue (convert (type, arg0));
5269 /* x*2 is x+x */
5270 if (! wins && real_twop (arg1) && global_bindings_p () == 0
5271 && ! contains_placeholder_p (arg0))
5273 tree arg = save_expr (arg0);
5274 return build (PLUS_EXPR, type, arg, arg);
5277 goto associate;
5279 case BIT_IOR_EXPR:
5280 bit_ior:
5281 if (integer_all_onesp (arg1))
5282 return omit_one_operand (type, arg1, arg0);
5283 if (integer_zerop (arg1))
5284 return non_lvalue (convert (type, arg0));
5285 t1 = distribute_bit_expr (code, type, arg0, arg1);
5286 if (t1 != NULL_TREE)
5287 return t1;
5289 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5291 This results in more efficient code for machines without a NAND
5292 instruction. Combine will canonicalize to the first form
5293 which will allow use of NAND instructions provided by the
5294 backend if they exist. */
5295 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5296 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5298 return fold (build1 (BIT_NOT_EXPR, type,
5299 build (BIT_AND_EXPR, type,
5300 TREE_OPERAND (arg0, 0),
5301 TREE_OPERAND (arg1, 0))));
5304 /* See if this can be simplified into a rotate first. If that
5305 is unsuccessful continue in the association code. */
5306 goto bit_rotate;
5308 case BIT_XOR_EXPR:
5309 if (integer_zerop (arg1))
5310 return non_lvalue (convert (type, arg0));
5311 if (integer_all_onesp (arg1))
5312 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5314 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5315 with a constant, and the two constants have no bits in common,
5316 we should treat this as a BIT_IOR_EXPR since this may produce more
5317 simplifications. */
5318 if (TREE_CODE (arg0) == BIT_AND_EXPR
5319 && TREE_CODE (arg1) == BIT_AND_EXPR
5320 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5321 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5322 && integer_zerop (const_binop (BIT_AND_EXPR,
5323 TREE_OPERAND (arg0, 1),
5324 TREE_OPERAND (arg1, 1), 0)))
5326 code = BIT_IOR_EXPR;
5327 goto bit_ior;
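/* Example (illustrative): `(a & 1) ^ (b & 2)' has disjoint constant
   masks, so it is treated as `(a & 1) | (b & 2)' and refolded as an
   IOR, which may enable further simplifications.  */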
5330 /* See if this can be simplified into a rotate first. If that
5331 is unsuccessful continue in the association code. */
5332 goto bit_rotate;
5334 case BIT_AND_EXPR:
5335 bit_and:
5336 if (integer_all_onesp (arg1))
5337 return non_lvalue (convert (type, arg0));
5338 if (integer_zerop (arg1))
5339 return omit_one_operand (type, arg1, arg0);
5340 t1 = distribute_bit_expr (code, type, arg0, arg1);
5341 if (t1 != NULL_TREE)
5342 return t1;
 5343 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
5344 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
5345 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5347 unsigned int prec
5348 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
5350 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5351 && (~TREE_INT_CST_LOW (arg0)
5352 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5353 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
5355 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5356 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5358 unsigned int prec
5359 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5361 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5362 && (~TREE_INT_CST_LOW (arg1)
5363 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5364 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5367 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5369 This results in more efficient code for machines without a NOR
5370 instruction. Combine will canonicalize to the first form
5371 which will allow use of NOR instructions provided by the
5372 backend if they exist. */
5373 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5374 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5376 return fold (build1 (BIT_NOT_EXPR, type,
5377 build (BIT_IOR_EXPR, type,
5378 TREE_OPERAND (arg0, 0),
5379 TREE_OPERAND (arg1, 0))));
5382 goto associate;
5384 case BIT_ANDTC_EXPR:
5385 if (integer_all_onesp (arg0))
5386 return non_lvalue (convert (type, arg1));
5387 if (integer_zerop (arg0))
5388 return omit_one_operand (type, arg0, arg1);
5389 if (TREE_CODE (arg1) == INTEGER_CST)
5391 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5392 code = BIT_AND_EXPR;
5393 goto bit_and;
5395 goto binary;
5397 case RDIV_EXPR:
5398 /* In most cases, do nothing with a divide by zero. */
5399 #ifndef REAL_INFINITY
5400 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
5401 return t;
5402 #endif
5404 /* (-A) / (-B) -> A / B */
5405 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5406 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5407 TREE_OPERAND (arg1, 0)));
5409 /* In IEEE floating point, x/1 is not equivalent to x for snans.
5410 However, ANSI says we can drop signals, so we can do this anyway. */
5411 if (real_onep (arg1))
5412 return non_lvalue (convert (type, arg0));
5414 /* If ARG1 is a constant, we can convert this to a multiply by the
5415 reciprocal. This does not have the same rounding properties,
5416 so only do this if -funsafe-math-optimizations. We can actually
5417 always safely do it if ARG1 is a power of two, but it's hard to
5418 tell if it is or not in a portable manner. */
5419 if (TREE_CODE (arg1) == REAL_CST)
5421 if (flag_unsafe_math_optimizations
5422 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5423 arg1, 0)))
5424 return fold (build (MULT_EXPR, type, arg0, tem));
5425 /* Find the reciprocal if optimizing and the result is exact. */
5426 else if (optimize)
5428 REAL_VALUE_TYPE r;
5429 r = TREE_REAL_CST (arg1);
 5430 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
5432 tem = build_real (type, r);
5433 return fold (build (MULT_EXPR, type, arg0, tem));
5437 /* Convert A/B/C to A/(B*C). */
5438 if (flag_unsafe_math_optimizations
5439 && TREE_CODE (arg0) == RDIV_EXPR)
5441 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5442 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
5443 arg1)));
5445 /* Convert A/(B/C) to (A/B)*C. */
5446 if (flag_unsafe_math_optimizations
5447 && TREE_CODE (arg1) == RDIV_EXPR)
5449 return fold (build (MULT_EXPR, type,
5450 build (RDIV_EXPR, type, arg0,
5451 TREE_OPERAND (arg1, 0)),
5452 TREE_OPERAND (arg1, 1)));
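/* Examples (illustrative; both rewrites require
   -funsafe-math-optimizations): `a/b/c' becomes `a/(b*c)', and
   `a/(b/c)' becomes `(a/b)*c'.  */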
5454 goto binary;
5456 case TRUNC_DIV_EXPR:
5457 case ROUND_DIV_EXPR:
5458 case FLOOR_DIV_EXPR:
5459 case CEIL_DIV_EXPR:
5460 case EXACT_DIV_EXPR:
5461 if (integer_onep (arg1))
5462 return non_lvalue (convert (type, arg0));
5463 if (integer_zerop (arg1))
5464 return t;
5466 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5467 operation, EXACT_DIV_EXPR.
5469 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
 5470 At one time others generated faster code, but it's not clear whether they do
 5471 after the last round of changes to the DIV code in expmed.c. */
5472 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5473 && multiple_of_p (type, arg0, arg1))
5474 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5476 if (TREE_CODE (arg1) == INTEGER_CST
5477 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5478 code, NULL_TREE)))
5479 return convert (type, tem);
5481 goto binary;
5483 case CEIL_MOD_EXPR:
5484 case FLOOR_MOD_EXPR:
5485 case ROUND_MOD_EXPR:
5486 case TRUNC_MOD_EXPR:
5487 if (integer_onep (arg1))
5488 return omit_one_operand (type, integer_zero_node, arg0);
5489 if (integer_zerop (arg1))
5490 return t;
5492 if (TREE_CODE (arg1) == INTEGER_CST
5493 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5494 code, NULL_TREE)))
5495 return convert (type, tem);
5497 goto binary;
5499 case LSHIFT_EXPR:
5500 case RSHIFT_EXPR:
5501 case LROTATE_EXPR:
5502 case RROTATE_EXPR:
5503 if (integer_zerop (arg1))
5504 return non_lvalue (convert (type, arg0));
5505 /* Since negative shift count is not well-defined,
5506 don't try to compute it in the compiler. */
5507 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5508 return t;
5509 /* Rewrite an LROTATE_EXPR by a constant into an
5510 RROTATE_EXPR by a new constant. */
5511 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5513 TREE_SET_CODE (t, RROTATE_EXPR);
5514 code = RROTATE_EXPR;
5515 TREE_OPERAND (t, 1) = arg1
5516 = const_binop
5517 (MINUS_EXPR,
5518 convert (TREE_TYPE (arg1),
5519 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5520 arg1, 0);
5521 if (tree_int_cst_sgn (arg1) < 0)
5522 return t;
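/* Example (illustrative): in a 32-bit mode, a left-rotate by 8 is
   rewritten as a right-rotate by 32 - 8 == 24; the sign check above
   guards against a bogus computed count.  */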
5525 /* If we have a rotate of a bit operation with the rotate count and
5526 the second operand of the bit operation both constant,
5527 permute the two operations. */
5528 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5529 && (TREE_CODE (arg0) == BIT_AND_EXPR
5530 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5531 || TREE_CODE (arg0) == BIT_IOR_EXPR
5532 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5533 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5534 return fold (build (TREE_CODE (arg0), type,
5535 fold (build (code, type,
5536 TREE_OPERAND (arg0, 0), arg1)),
5537 fold (build (code, type,
5538 TREE_OPERAND (arg0, 1), arg1))));
5540 /* Two consecutive rotates adding up to the width of the mode can
5541 be ignored. */
5542 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5543 && TREE_CODE (arg0) == RROTATE_EXPR
5544 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5545 && TREE_INT_CST_HIGH (arg1) == 0
5546 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5547 && ((TREE_INT_CST_LOW (arg1)
5548 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5549 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5550 return TREE_OPERAND (arg0, 0);
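/* Example (illustrative): in a 32-bit mode, rotating X right by 8
   and then right by 24 (8 + 24 == 32) yields X again, so the pair of
   rotates folds away entirely.  */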
5552 goto binary;
5554 case MIN_EXPR:
5555 if (operand_equal_p (arg0, arg1, 0))
5556 return omit_one_operand (type, arg0, arg1);
5557 if (INTEGRAL_TYPE_P (type)
5558 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5559 return omit_one_operand (type, arg1, arg0);
5560 goto associate;
5562 case MAX_EXPR:
5563 if (operand_equal_p (arg0, arg1, 0))
5564 return omit_one_operand (type, arg0, arg1);
5565 if (INTEGRAL_TYPE_P (type)
5566 && TYPE_MAX_VALUE (type)
5567 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5568 return omit_one_operand (type, arg1, arg0);
5569 goto associate;
5571 case TRUTH_NOT_EXPR:
5572 /* Note that the operand of this must be an int
5573 and its values must be 0 or 1.
5574 ("true" is a fixed value perhaps depending on the language,
5575 but we don't handle values other than 1 correctly yet.) */
5576 tem = invert_truthvalue (arg0);
5577 /* Avoid infinite recursion. */
5578 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5579 return t;
5580 return convert (type, tem);
5582 case TRUTH_ANDIF_EXPR:
5583 /* Note that the operands of this must be ints
5584 and their values must be 0 or 1.
5585 ("true" is a fixed value perhaps depending on the language.) */
5586 /* If first arg is constant zero, return it. */
5587 if (integer_zerop (arg0))
5588 return convert (type, arg0);
5589 case TRUTH_AND_EXPR:
5590 /* If either arg is constant true, drop it. */
5591 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5592 return non_lvalue (convert (type, arg1));
5593 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
5594 /* Preserve sequence points. */
5595 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5596 return non_lvalue (convert (type, arg0));
5597 /* If second arg is constant zero, result is zero, but first arg
5598 must be evaluated. */
5599 if (integer_zerop (arg1))
5600 return omit_one_operand (type, arg1, arg0);
5601 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5602 case will be handled here. */
5603 if (integer_zerop (arg0))
5604 return omit_one_operand (type, arg0, arg1);
5606 truth_andor:
5607 /* We only do these simplifications if we are optimizing. */
5608 if (!optimize)
5609 return t;
5611 /* Check for things like (A || B) && (A || C). We can convert this
5612 to A || (B && C). Note that either operator can be any of the four
5613 truth and/or operations and the transformation will still be
5614 valid. Also note that we only care about order for the
5615 ANDIF and ORIF operators. If B contains side effects, this
5616 might change the truth-value of A. */
5617 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5618 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5619 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5620 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5621 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5622 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5624 tree a00 = TREE_OPERAND (arg0, 0);
5625 tree a01 = TREE_OPERAND (arg0, 1);
5626 tree a10 = TREE_OPERAND (arg1, 0);
5627 tree a11 = TREE_OPERAND (arg1, 1);
5628 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5629 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5630 && (code == TRUTH_AND_EXPR
5631 || code == TRUTH_OR_EXPR));
5633 if (operand_equal_p (a00, a10, 0))
5634 return fold (build (TREE_CODE (arg0), type, a00,
5635 fold (build (code, type, a01, a11))));
5636 else if (commutative && operand_equal_p (a00, a11, 0))
5637 return fold (build (TREE_CODE (arg0), type, a00,
5638 fold (build (code, type, a01, a10))));
5639 else if (commutative && operand_equal_p (a01, a10, 0))
5640 return fold (build (TREE_CODE (arg0), type, a01,
5641 fold (build (code, type, a00, a11))));
 5643 /* This case is tricky because we must either have commutative
5644 operators or else A10 must not have side-effects. */
5646 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5647 && operand_equal_p (a01, a11, 0))
5648 return fold (build (TREE_CODE (arg0), type,
5649 fold (build (code, type, a00, a10)),
5650 a01));
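/* Example (illustrative): `(a || b) && (a || c)' folds to
   `a || (b && c)' via the first operand_equal_p case above
   (A00 == A10).  */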
5653 /* See if we can build a range comparison. */
5654 if (0 != (tem = fold_range_test (t)))
5655 return tem;
5657 /* Check for the possibility of merging component references. If our
5658 lhs is another similar operation, try to merge its rhs with our
5659 rhs. Then try to merge our lhs and rhs. */
5660 if (TREE_CODE (arg0) == code
5661 && 0 != (tem = fold_truthop (code, type,
5662 TREE_OPERAND (arg0, 1), arg1)))
5663 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5665 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5666 return tem;
5668 return t;
5670 case TRUTH_ORIF_EXPR:
5671 /* Note that the operands of this must be ints
5672 and their values must be 0 or true.
5673 ("true" is a fixed value perhaps depending on the language.) */
5674 /* If first arg is constant true, return it. */
5675 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5676 return convert (type, arg0);
5677 case TRUTH_OR_EXPR:
5678 /* If either arg is constant zero, drop it. */
5679 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5680 return non_lvalue (convert (type, arg1));
5681 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
5682 /* Preserve sequence points. */
5683 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5684 return non_lvalue (convert (type, arg0));
5685 /* If second arg is constant true, result is true, but we must
5686 evaluate first arg. */
5687 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5688 return omit_one_operand (type, arg1, arg0);
5689 /* Likewise for first arg, but note this only occurs here for
5690 TRUTH_OR_EXPR. */
5691 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5692 return omit_one_operand (type, arg0, arg1);
5693 goto truth_andor;
5695 case TRUTH_XOR_EXPR:
5696 /* If either arg is constant zero, drop it. */
5697 if (integer_zerop (arg0))
5698 return non_lvalue (convert (type, arg1));
5699 if (integer_zerop (arg1))
5700 return non_lvalue (convert (type, arg0));
5701 /* If either arg is constant true, this is a logical inversion. */
5702 if (integer_onep (arg0))
5703 return non_lvalue (convert (type, invert_truthvalue (arg1)));
5704 if (integer_onep (arg1))
5705 return non_lvalue (convert (type, invert_truthvalue (arg0)));
5706 return t;
5708 case EQ_EXPR:
5709 case NE_EXPR:
5710 case LT_EXPR:
5711 case GT_EXPR:
5712 case LE_EXPR:
5713 case GE_EXPR:
5714 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
5716 /* (-a) CMP (-b) -> b CMP a */
5717 if (TREE_CODE (arg0) == NEGATE_EXPR
5718 && TREE_CODE (arg1) == NEGATE_EXPR)
5719 return fold (build (code, type, TREE_OPERAND (arg1, 0),
5720 TREE_OPERAND (arg0, 0)));
5721 /* (-a) CMP CST -> a swap(CMP) (-CST) */
5722 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5723 return
5724 fold (build
5725 (swap_tree_comparison (code), type,
5726 TREE_OPERAND (arg0, 0),
5727 build_real (TREE_TYPE (arg1),
5728 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
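/* Example (illustrative): `-x < 3.0' becomes `x > -3.0'; the
   comparison code is swapped and the real constant negated.  */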
5729 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
5730 /* a CMP (-0) -> a CMP 0 */
5731 if (TREE_CODE (arg1) == REAL_CST
5732 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5733 return fold (build (code, type, arg0,
5734 build_real (TREE_TYPE (arg1), dconst0)));
5737 /* If one arg is a constant integer, put it last. */
5738 if (TREE_CODE (arg0) == INTEGER_CST
5739 && TREE_CODE (arg1) != INTEGER_CST)
5741 TREE_OPERAND (t, 0) = arg1;
5742 TREE_OPERAND (t, 1) = arg0;
5743 arg0 = TREE_OPERAND (t, 0);
5744 arg1 = TREE_OPERAND (t, 1);
5745 code = swap_tree_comparison (code);
5746 TREE_SET_CODE (t, code);
5749 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5750 First, see if one arg is constant; find the constant arg
5751 and the other one. */
5753 tree constop = 0, varop = NULL_TREE;
5754 int constopnum = -1;
5756 if (TREE_CONSTANT (arg1))
5757 constopnum = 1, constop = arg1, varop = arg0;
5758 if (TREE_CONSTANT (arg0))
5759 constopnum = 0, constop = arg0, varop = arg1;
5761 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5763 /* This optimization is invalid for ordered comparisons
5764 if CONST+INCR overflows or if foo+incr might overflow.
5765 This optimization is invalid for floating point due to rounding.
5766 For pointer types we assume overflow doesn't happen. */
5767 if (POINTER_TYPE_P (TREE_TYPE (varop))
5768 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5769 && (code == EQ_EXPR || code == NE_EXPR)))
5771 tree newconst
5772 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5773 constop, TREE_OPERAND (varop, 1)));
 5775 /* Do not overwrite the current varop to be a preincrement;
5776 create a new node so that we won't confuse our caller who
5777 might create trees and throw them away, reusing the
5778 arguments that they passed to build. This shows up in
5779 the THEN or ELSE parts of ?: being postincrements. */
5780 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
5781 TREE_OPERAND (varop, 0),
5782 TREE_OPERAND (varop, 1));
5784 /* If VAROP is a reference to a bitfield, we must mask
5785 the constant by the width of the field. */
5786 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
 5787 && DECL_BIT_FIELD (TREE_OPERAND
5788 (TREE_OPERAND (varop, 0), 1)))
5790 int size
5791 = TREE_INT_CST_LOW (DECL_SIZE
5792 (TREE_OPERAND
5793 (TREE_OPERAND (varop, 0), 1)));
5794 tree mask, unsigned_type;
5795 unsigned int precision;
5796 tree folded_compare;
5798 /* First check whether the comparison would come out
5799 always the same. If we don't do that we would
5800 change the meaning with the masking. */
5801 if (constopnum == 0)
5802 folded_compare = fold (build (code, type, constop,
5803 TREE_OPERAND (varop, 0)));
5804 else
5805 folded_compare = fold (build (code, type,
5806 TREE_OPERAND (varop, 0),
5807 constop));
5808 if (integer_zerop (folded_compare)
5809 || integer_onep (folded_compare))
5810 return omit_one_operand (type, folded_compare, varop);
5812 unsigned_type = type_for_size (size, 1);
5813 precision = TYPE_PRECISION (unsigned_type);
5814 mask = build_int_2 (~0, ~0);
5815 TREE_TYPE (mask) = unsigned_type;
5816 force_fit_type (mask, 0);
5817 mask = const_binop (RSHIFT_EXPR, mask,
5818 size_int (precision - size), 0);
5819 newconst = fold (build (BIT_AND_EXPR,
5820 TREE_TYPE (varop), newconst,
5821 convert (TREE_TYPE (varop),
5822 mask)));
5825 t = build (code, type,
5826 (constopnum == 0) ? newconst : varop,
5827 (constopnum == 1) ? newconst : varop);
5828 return t;
5831 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5833 if (POINTER_TYPE_P (TREE_TYPE (varop))
5834 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5835 && (code == EQ_EXPR || code == NE_EXPR)))
5837 tree newconst
5838 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5839 constop, TREE_OPERAND (varop, 1)));
 5841 /* Do not overwrite the current varop to be a predecrement;
5842 create a new node so that we won't confuse our caller who
5843 might create trees and throw them away, reusing the
5844 arguments that they passed to build. This shows up in
5845 the THEN or ELSE parts of ?: being postdecrements. */
5846 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
5847 TREE_OPERAND (varop, 0),
5848 TREE_OPERAND (varop, 1));
5850 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
 5851 && DECL_BIT_FIELD (TREE_OPERAND
5852 (TREE_OPERAND (varop, 0), 1)))
5854 int size
5855 = TREE_INT_CST_LOW (DECL_SIZE
5856 (TREE_OPERAND
5857 (TREE_OPERAND (varop, 0), 1)));
5858 tree mask, unsigned_type;
5859 unsigned int precision;
5860 tree folded_compare;
5862 if (constopnum == 0)
5863 folded_compare = fold (build (code, type, constop,
5864 TREE_OPERAND (varop, 0)));
5865 else
5866 folded_compare = fold (build (code, type,
5867 TREE_OPERAND (varop, 0),
5868 constop));
5869 if (integer_zerop (folded_compare)
5870 || integer_onep (folded_compare))
5871 return omit_one_operand (type, folded_compare, varop);
5873 unsigned_type = type_for_size (size, 1);
5874 precision = TYPE_PRECISION (unsigned_type);
5875 mask = build_int_2 (~0, ~0);
 5876 TREE_TYPE (mask) = unsigned_type;
5877 force_fit_type (mask, 0);
5878 mask = const_binop (RSHIFT_EXPR, mask,
5879 size_int (precision - size), 0);
5880 newconst = fold (build (BIT_AND_EXPR,
5881 TREE_TYPE (varop), newconst,
5882 convert (TREE_TYPE (varop),
5883 mask)));
5886 t = build (code, type,
5887 (constopnum == 0) ? newconst : varop,
5888 (constopnum == 1) ? newconst : varop);
5889 return t;
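/* Example (illustrative): `i++ == 5' becomes `++i == 6'; the rewrite
   is done only for EQ/NE on non-floats, or for pointers, as checked
   above, and the bit-field path first masks the new constant down to
   the field's width.  */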
5894 /* Change X >= CST to X > (CST - 1) if CST is positive. */
5895 if (TREE_CODE (arg1) == INTEGER_CST
5896 && TREE_CODE (arg0) != INTEGER_CST
5897 && tree_int_cst_sgn (arg1) > 0)
5899 switch (TREE_CODE (t))
5901 case GE_EXPR:
5902 code = GT_EXPR;
5903 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5904 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5905 break;
5907 case LT_EXPR:
5908 code = LE_EXPR;
5909 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5910 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5911 break;
5913 default:
5914 break;
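/* Examples (illustrative): `x >= 5' becomes `x > 4' and `x < 5'
   becomes `x <= 4', canonicalizing toward GT/LE for positive
   constants.  */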
5918 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
5919 a MINUS_EXPR of a constant, we can convert it into a comparison with
5920 a revised constant as long as no overflow occurs. */
5921 if ((code == EQ_EXPR || code == NE_EXPR)
5922 && TREE_CODE (arg1) == INTEGER_CST
5923 && (TREE_CODE (arg0) == PLUS_EXPR
5924 || TREE_CODE (arg0) == MINUS_EXPR)
5925 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5926 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
5927 ? MINUS_EXPR : PLUS_EXPR,
5928 arg1, TREE_OPERAND (arg0, 1), 0))
5929 && ! TREE_CONSTANT_OVERFLOW (tem))
5930 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5932 /* Similarly for a NEGATE_EXPR. */
5933 else if ((code == EQ_EXPR || code == NE_EXPR)
5934 && TREE_CODE (arg0) == NEGATE_EXPR
5935 && TREE_CODE (arg1) == INTEGER_CST
5936 && 0 != (tem = negate_expr (arg1))
5937 && TREE_CODE (tem) == INTEGER_CST
5938 && ! TREE_CONSTANT_OVERFLOW (tem))
5939 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5941 /* If we have X - Y == 0, we can convert that to X == Y and similarly
5942 for !=. Don't do this for ordered comparisons due to overflow. */
5943 else if ((code == NE_EXPR || code == EQ_EXPR)
5944 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
5945 return fold (build (code, type,
5946 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
5948 /* If we are widening one operand of an integer comparison,
5949 see if the other operand is similarly being widened. Perhaps we
5950 can do the comparison in the narrower type. */
5951 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
5952 && TREE_CODE (arg0) == NOP_EXPR
5953 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
5954 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
5955 && (TREE_TYPE (t1) == TREE_TYPE (tem)
5956 || (TREE_CODE (t1) == INTEGER_CST
5957 && int_fits_type_p (t1, TREE_TYPE (tem)))))
5958 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
5960 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
5961 constant, we can simplify it. */
5962 else if (TREE_CODE (arg1) == INTEGER_CST
5963 && (TREE_CODE (arg0) == MIN_EXPR
5964 || TREE_CODE (arg0) == MAX_EXPR)
5965 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5966 return optimize_minmax_comparison (t);
5968 /* If we are comparing an ABS_EXPR with a constant, we can
5969 convert all the cases into explicit comparisons, but they may
5970 well not be faster than doing the ABS and one comparison.
5971 But ABS (X) <= C is a range comparison, which becomes a subtraction
5972 and a comparison, and is probably faster. */
5973 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5974 && TREE_CODE (arg0) == ABS_EXPR
5975 && ! TREE_SIDE_EFFECTS (arg0)
5976 && (0 != (tem = negate_expr (arg1)))
5977 && TREE_CODE (tem) == INTEGER_CST
5978 && ! TREE_CONSTANT_OVERFLOW (tem))
5979 return fold (build (TRUTH_ANDIF_EXPR, type,
5980 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
5981 build (LE_EXPR, type,
5982 TREE_OPERAND (arg0, 0), arg1)));
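/* Example (illustrative): `abs (x) <= 5' becomes the range test
   `x >= -5 && x <= 5', built here as a TRUTH_ANDIF_EXPR.  */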
5984 /* If this is an EQ or NE comparison with zero and ARG0 is
5985 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
5986 two operations, but the latter can be done in one less insn
5987 on machines that have only two-operand insns or on which a
5988 constant cannot be the first operand. */
5989 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
5990 && TREE_CODE (arg0) == BIT_AND_EXPR)
5992 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
5993 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
5994 return
5995 fold (build (code, type,
5996 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5997 build (RSHIFT_EXPR,
5998 TREE_TYPE (TREE_OPERAND (arg0, 0)),
5999 TREE_OPERAND (arg0, 1),
6000 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6001 convert (TREE_TYPE (arg0),
6002 integer_one_node)),
6003 arg1));
6004 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6005 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6006 return
6007 fold (build (code, type,
6008 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6009 build (RSHIFT_EXPR,
6010 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6011 TREE_OPERAND (arg0, 0),
6012 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6013 convert (TREE_TYPE (arg0),
6014 integer_one_node)),
6015 arg1));
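/* Example (illustrative): `((1 << n) & bar) == 0' becomes
   `((bar >> n) & 1) == 0', which avoids a constant as the shifted
   operand and may save an insn on some machines.  */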
6018 /* If this is an NE or EQ comparison of zero against the result of a
6019 signed MOD operation whose second operand is a power of 2, make
6020 the MOD operation unsigned since it is simpler and equivalent. */
6021 if ((code == NE_EXPR || code == EQ_EXPR)
6022 && integer_zerop (arg1)
6023 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6024 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6025 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6026 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6027 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6028 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6030 tree newtype = unsigned_type (TREE_TYPE (arg0));
6031 tree newmod = build (TREE_CODE (arg0), newtype,
6032 convert (newtype, TREE_OPERAND (arg0, 0)),
6033 convert (newtype, TREE_OPERAND (arg0, 1)));
6035 return build (code, type, newmod, convert (newtype, arg1));
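/* Example (illustrative): for signed X, `x % 4 == 0' tests only the
   low two bits, so it is safely rewritten as the unsigned
   `(unsigned) x % 4U == 0'.  */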
6038 /* If this is an NE comparison of zero with an AND of one, remove the
6039 comparison since the AND will give the correct value. */
6040 if (code == NE_EXPR && integer_zerop (arg1)
6041 && TREE_CODE (arg0) == BIT_AND_EXPR
6042 && integer_onep (TREE_OPERAND (arg0, 1)))
6043 return convert (type, arg0);
6045 /* If we have (A & C) == C where C is a power of 2, convert this into
6046 (A & C) != 0. Similarly for NE_EXPR. */
6047 if ((code == EQ_EXPR || code == NE_EXPR)
6048 && TREE_CODE (arg0) == BIT_AND_EXPR
6049 && integer_pow2p (TREE_OPERAND (arg0, 1))
6050 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6051 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6052 arg0, integer_zero_node);
6054 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6055 and similarly for >= into !=. */
6056 if ((code == LT_EXPR || code == GE_EXPR)
6057 && TREE_UNSIGNED (TREE_TYPE (arg0))
6058 && TREE_CODE (arg1) == LSHIFT_EXPR
6059 && integer_onep (TREE_OPERAND (arg1, 0)))
6060 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6061 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6062 TREE_OPERAND (arg1, 1)),
6063 convert (TREE_TYPE (arg0), integer_zero_node));
6065 else if ((code == LT_EXPR || code == GE_EXPR)
6066 && TREE_UNSIGNED (TREE_TYPE (arg0))
6067 && (TREE_CODE (arg1) == NOP_EXPR
6068 || TREE_CODE (arg1) == CONVERT_EXPR)
6069 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6070 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6071 return
6072 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6073 convert (TREE_TYPE (arg0),
6074 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6075 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6076 convert (TREE_TYPE (arg0), integer_zero_node));
6078 /* Simplify comparison of something with itself. (For IEEE
6079 floating-point, we can only do some of these simplifications.) */
6080 if (operand_equal_p (arg0, arg1, 0))
6082 switch (code)
6084 case EQ_EXPR:
6085 case GE_EXPR:
6086 case LE_EXPR:
6087 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
6088 return constant_boolean_node (1, type);
6089 code = EQ_EXPR;
6090 TREE_SET_CODE (t, code);
6091 break;
6093 case NE_EXPR:
6094 /* For NE, we can only do this simplification if integer. */
6095 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6096 break;
6097 /* ... fall through ... */
6098 case GT_EXPR:
6099 case LT_EXPR:
6100 return constant_boolean_node (0, type);
6101 default:
6102 abort ();
6106 /* An unsigned comparison against 0 can be simplified. */
6107 if (integer_zerop (arg1)
6108 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6109 || POINTER_TYPE_P (TREE_TYPE (arg1)))
6110 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6112 switch (TREE_CODE (t))
6114 case GT_EXPR:
6115 code = NE_EXPR;
6116 TREE_SET_CODE (t, NE_EXPR);
6117 break;
6118 case LE_EXPR:
6119 code = EQ_EXPR;
6120 TREE_SET_CODE (t, EQ_EXPR);
6121 break;
6122 case GE_EXPR:
6123 return omit_one_operand (type,
6124 convert (type, integer_one_node),
6125 arg0);
6126 case LT_EXPR:
6127 return omit_one_operand (type,
6128 convert (type, integer_zero_node),
6129 arg0);
6130 default:
6131 break;
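/* Examples (illustrative): for unsigned X, `x > 0' becomes `x != 0',
   `x <= 0' becomes `x == 0', `x >= 0' is always 1, and `x < 0' is
   always 0.  */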
6135 /* Comparisons with the highest or lowest possible integer of
6136 the specified size will have known values and an unsigned
6137 <= 0x7fffffff can be simplified. */
6139 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6141 if (TREE_CODE (arg1) == INTEGER_CST
6142 && ! TREE_CONSTANT_OVERFLOW (arg1)
6143 && width <= HOST_BITS_PER_WIDE_INT
6144 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6145 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6147 if (TREE_INT_CST_HIGH (arg1) == 0
6148 && (TREE_INT_CST_LOW (arg1)
6149 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6150 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6151 switch (TREE_CODE (t))
6153 case GT_EXPR:
6154 return omit_one_operand (type,
6155 convert (type, integer_zero_node),
6156 arg0);
6157 case GE_EXPR:
6158 TREE_SET_CODE (t, EQ_EXPR);
6159 break;
6161 case LE_EXPR:
6162 return omit_one_operand (type,
6163 convert (type, integer_one_node),
6164 arg0);
6165 case LT_EXPR:
6166 TREE_SET_CODE (t, NE_EXPR);
6167 break;
6169 default:
6170 break;
6173 else if (TREE_INT_CST_HIGH (arg1) == -1
6174 && (TREE_INT_CST_LOW (arg1)
6175 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
6176 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6177 switch (TREE_CODE (t))
6179 case LT_EXPR:
6180 return omit_one_operand (type,
6181 convert (type, integer_zero_node),
6182 arg0);
6183 case LE_EXPR:
6184 TREE_SET_CODE (t, EQ_EXPR);
6185 break;
6187 case GE_EXPR:
6188 return omit_one_operand (type,
6189 convert (type, integer_one_node),
6190 arg0);
6191 case GT_EXPR:
6192 TREE_SET_CODE (t, NE_EXPR);
6193 break;
6195 default:
6196 break;
6199 else if (TREE_INT_CST_HIGH (arg1) == 0
6200 && (TREE_INT_CST_LOW (arg1)
6201 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6202 && TREE_UNSIGNED (TREE_TYPE (arg1))
6203 /* signed_type does not work on pointer types. */
6204 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6205 switch (TREE_CODE (t))
6207 case LE_EXPR:
6208 return fold (build (GE_EXPR, type,
6209 convert (signed_type (TREE_TYPE (arg0)),
6210 arg0),
6211 convert (signed_type (TREE_TYPE (arg1)),
6212 integer_zero_node)));
6213 case GT_EXPR:
6214 return fold (build (LT_EXPR, type,
6215 convert (signed_type (TREE_TYPE (arg0)),
6216 arg0),
6217 convert (signed_type (TREE_TYPE (arg1)),
6218 integer_zero_node)));
6220 default:
6221 break;
6224 else if (TREE_INT_CST_HIGH (arg1) == 0
6225 && (TREE_INT_CST_LOW (arg1)
6226 == ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1)
6227 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6228 switch (TREE_CODE (t))
6230 case GT_EXPR:
6231 return omit_one_operand (type,
6232 convert (type, integer_zero_node),
6233 arg0);
6234 case GE_EXPR:
6235 TREE_SET_CODE (t, EQ_EXPR);
6236 break;
6238 case LE_EXPR:
6239 return omit_one_operand (type,
6240 convert (type, integer_one_node),
6241 arg0);
6242 case LT_EXPR:
6243 TREE_SET_CODE (t, NE_EXPR);
6244 break;
6246 default:
6247 break;
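/* Examples (illustrative, 32-bit ints): signed `x > 0x7fffffff' is
   always false and signed `x >= 0x7fffffff' becomes
   `x == 0x7fffffff'; unsigned `x <= 0x7fffffff' becomes the signed
   test `(int) x >= 0'.  */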
6252 /* If we are comparing an expression that just has comparisons
6253 of two integer values, arithmetic expressions of those comparisons,
6254 and constants, we can simplify it. There are only three cases
6255 to check: the two values can either be equal, the first can be
6256 greater, or the second can be greater. Fold the expression for
6257 those three values. Since each value must be 0 or 1, we have
6258 eight possibilities, each of which corresponds to the constant 0
6259 or 1 or one of the six possible comparisons.
6261 This handles common cases like (a > b) == 0 but also handles
6262 expressions like ((x > y) - (y > x)) > 0, which supposedly
6263 occur in macroized code. */
6265 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6267 tree cval1 = 0, cval2 = 0;
6268 int save_p = 0;
6270 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6271 /* Don't handle degenerate cases here; they should already
6272 have been handled anyway. */
6273 && cval1 != 0 && cval2 != 0
6274 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6275 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6276 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6277 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6278 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6279 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6280 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6282 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6283 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6285 /* We can't just pass T to eval_subst in case cval1 or cval2
6286 was the same as ARG1. */
6288 tree high_result
6289 = fold (build (code, type,
6290 eval_subst (arg0, cval1, maxval, cval2, minval),
6291 arg1));
6292 tree equal_result
6293 = fold (build (code, type,
6294 eval_subst (arg0, cval1, maxval, cval2, maxval),
6295 arg1));
6296 tree low_result
6297 = fold (build (code, type,
6298 eval_subst (arg0, cval1, minval, cval2, maxval),
6299 arg1));
6301 /* All three of these results should be 0 or 1. Confirm they
6302 are. Then use those values to select the proper code
6303 to use. */
6305 if ((integer_zerop (high_result)
6306 || integer_onep (high_result))
6307 && (integer_zerop (equal_result)
6308 || integer_onep (equal_result))
6309 && (integer_zerop (low_result)
6310 || integer_onep (low_result)))
6312 /* Make a 3-bit mask with the high-order bit being the
6313 value for `>', the next for '=', and the low for '<'. */
6314 switch ((integer_onep (high_result) * 4)
6315 + (integer_onep (equal_result) * 2)
6316 + integer_onep (low_result))
6318 case 0:
6319 /* Always false. */
6320 return omit_one_operand (type, integer_zero_node, arg0);
6321 case 1:
6322 code = LT_EXPR;
6323 break;
6324 case 2:
6325 code = EQ_EXPR;
6326 break;
6327 case 3:
6328 code = LE_EXPR;
6329 break;
6330 case 4:
6331 code = GT_EXPR;
6332 break;
6333 case 5:
6334 code = NE_EXPR;
6335 break;
6336 case 6:
6337 code = GE_EXPR;
6338 break;
6339 case 7:
6340 /* Always true. */
6341 return omit_one_operand (type, integer_one_node, arg0);
6344 t = build (code, type, cval1, cval2);
6345 if (save_p)
6346 return save_expr (t);
6347 else
6348 return fold (t);
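/* Example (illustrative): for `(a > b) == 0' the three trial
   foldings give high_result = 0, equal_result = 1, low_result = 1,
   i.e. mask 3, so the whole expression collapses to `a <= b'.  */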
6353 /* If this is a comparison of a field, we may be able to simplify it. */
6354 if ((TREE_CODE (arg0) == COMPONENT_REF
6355 || TREE_CODE (arg0) == BIT_FIELD_REF)
6356 && (code == EQ_EXPR || code == NE_EXPR)
6357 /* Handle the constant case even without -O
6358 to make sure the warnings are given. */
6359 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6361 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6362 return t1 ? t1 : t;
6365 /* If this is a comparison of complex values and either or both sides
6366 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6367 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6368 This may prevent needless evaluations. */
6369 if ((code == EQ_EXPR || code == NE_EXPR)
6370 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6371 && (TREE_CODE (arg0) == COMPLEX_EXPR
6372 || TREE_CODE (arg1) == COMPLEX_EXPR
6373 || TREE_CODE (arg0) == COMPLEX_CST
6374 || TREE_CODE (arg1) == COMPLEX_CST))
6376 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6377 tree real0, imag0, real1, imag1;
6379 arg0 = save_expr (arg0);
6380 arg1 = save_expr (arg1);
6381 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6382 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6383 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6384 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6386 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6387 : TRUTH_ORIF_EXPR),
6388 type,
6389 fold (build (code, type, real0, real1)),
6390 fold (build (code, type, imag0, imag1))));
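/* Example (illustrative): for complex X and Y, `x == y' splits into
   `Re(x) == Re(y) && Im(x) == Im(y)' (ANDIF), and `x != y' into the
   corresponding ORIF of the part-wise inequalities.  */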
6393 /* Optimize comparisons of strlen vs zero to a compare of the
6394 first character of the string vs zero. To wit,
6395 strlen(ptr) == 0 => *ptr == 0
6396 strlen(ptr) != 0 => *ptr != 0
6397 Other cases should reduce to one of these two (or a constant)
6398 due to the return value of strlen being unsigned. */
6399 if ((code == EQ_EXPR || code == NE_EXPR)
6400 && integer_zerop (arg1)
6401 && TREE_CODE (arg0) == CALL_EXPR
6402 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
6404 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6405 tree arglist;
6407 if (TREE_CODE (fndecl) == FUNCTION_DECL
6408 && DECL_BUILT_IN (fndecl)
6409 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
6410 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
6411 && (arglist = TREE_OPERAND (arg0, 1))
6412 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
6413 && ! TREE_CHAIN (arglist))
6414 return fold (build (code, type,
6415 build1 (INDIRECT_REF, char_type_node,
 6416 TREE_VALUE (arglist)),
6417 integer_zero_node));
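/* Example (illustrative): `strlen (p) == 0' becomes `*p == 0',
   replacing a library call with a single character load and
   compare.  */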
6420 /* From here on, the only cases we handle are when the result is
6421 known to be a constant.
6423 To compute GT, swap the arguments and do LT.
6424 To compute GE, do LT and invert the result.
6425 To compute LE, swap the arguments, do LT and invert the result.
6426 To compute NE, do EQ and invert the result.
6428 Therefore, the code below must handle only EQ and LT. */
6430 if (code == LE_EXPR || code == GT_EXPR)
6432 tem = arg0, arg0 = arg1, arg1 = tem;
6433 code = swap_tree_comparison (code);
6436 /* Note that it is safe to invert for real values here because we
6437 will check below in the one case that it matters. */
6439 t1 = NULL_TREE;
6440 invert = 0;
6441 if (code == NE_EXPR || code == GE_EXPR)
6443 invert = 1;
6444 code = invert_tree_comparison (code);
6447 /* Compute a result for LT or EQ if args permit;
6448 otherwise return T. */
6449 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6451 if (code == EQ_EXPR)
6452 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6453 else
6454 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6455 ? INT_CST_LT_UNSIGNED (arg0, arg1)
 6456 : INT_CST_LT (arg0, arg1)), 0);
6460 #if 0 /* This is no longer useful, but breaks some real code. */
6461 /* Assume a nonexplicit constant cannot equal an explicit one,
6462 since such code would be undefined anyway.
6463 Exception: on sysvr4, using #pragma weak,
6464 a label can come out as 0. */
6465 else if (TREE_CODE (arg1) == INTEGER_CST
6466 && !integer_zerop (arg1)
6467 && TREE_CONSTANT (arg0)
6468 && TREE_CODE (arg0) == ADDR_EXPR
6469 && code == EQ_EXPR)
6470 t1 = build_int_2 (0, 0);
6471 #endif
6472 /* Two real constants can be compared explicitly. */
6473 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6475 /* If either operand is a NaN, the result is false with two
6476 exceptions: First, an NE_EXPR is true on NaNs, but that case
6477 is already handled correctly since we will be inverting the
6478 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6479 or a GE_EXPR into a LT_EXPR, we must return true so that it
6480 will be inverted into false. */
6482 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6483 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6484 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6486 else if (code == EQ_EXPR)
6487 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
 6488 TREE_REAL_CST (arg1)), 0);
6490 else
6491 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
 6492 TREE_REAL_CST (arg1)), 0);
6496 if (t1 == NULL_TREE)
6497 return t;
6499 if (invert)
6500 TREE_INT_CST_LOW (t1) ^= 1;
6502 TREE_TYPE (t1) = type;
6503 if (TREE_CODE (type) == BOOLEAN_TYPE)
6504 return truthvalue_conversion (t1);
6505 return t1;
6507 case COND_EXPR:
6508 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6509 so all simple results must be passed through pedantic_non_lvalue. */
6510 if (TREE_CODE (arg0) == INTEGER_CST)
6511 return pedantic_non_lvalue
6512 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6513 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6514 return pedantic_omit_one_operand (type, arg1, arg0);
6516 /* If the second operand is zero, invert the comparison and swap
6517 the second and third operands. Likewise if the second operand
6518 is constant and the third is not or if the third operand is
6519 equivalent to the first operand of the comparison. */
6521 if (integer_zerop (arg1)
6522 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6523 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6524 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6525 TREE_OPERAND (t, 2),
6526 TREE_OPERAND (arg0, 1))))
6528 /* See if this can be inverted. If it can't, possibly because
6529 it was a floating-point inequality comparison, don't do
6530 anything. */
6531 tem = invert_truthvalue (arg0);
6533 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6535 t = build (code, type, tem,
6536 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6537 arg0 = tem;
6538 /* arg1 should be the first argument of the new T. */
6539 arg1 = TREE_OPERAND (t, 1);
6540 STRIP_NOPS (arg1);
6544 /* If we have A op B ? A : C, we may be able to convert this to a
6545 simpler expression, depending on the operation and the values
6546 of B and C. Signed zeros prevent all of these transformations,
6547 for reasons given above each one. */
6549 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6550 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6551 arg1, TREE_OPERAND (arg0, 1))
6552 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
6554 tree arg2 = TREE_OPERAND (t, 2);
6555 enum tree_code comp_code = TREE_CODE (arg0);
6557 STRIP_NOPS (arg2);
6559 /* If we have A op 0 ? A : -A, consider applying the following
6560 transformations:
6562 A == 0? A : -A same as -A
6563 A != 0? A : -A same as A
6564 A >= 0? A : -A same as abs (A)
6565 A > 0? A : -A same as abs (A)
6566 A <= 0? A : -A same as -abs (A)
6567 A < 0? A : -A same as -abs (A)
6569 None of these transformations work for modes with signed
6570 zeros. If A is +/-0, the first two transformations will
6571 change the sign of the result (from +0 to -0, or vice
6572 versa). The last four will fix the sign of the result,
6573 even though the original expressions could be positive or
6574 negative, depending on the sign of A.
6576 Note that all these transformations are correct if A is
6577 NaN, since the two alternatives (A and -A) are also NaNs. */
6578 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6579 ? real_zerop (TREE_OPERAND (arg0, 1))
6580 : integer_zerop (TREE_OPERAND (arg0, 1)))
6581 && TREE_CODE (arg2) == NEGATE_EXPR
6582 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6583 switch (comp_code)
6585 case EQ_EXPR:
6586 return
6587 pedantic_non_lvalue
6588 (convert (type,
6589 negate_expr
6590 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
6591 arg1))));
6592 case NE_EXPR:
6593 return pedantic_non_lvalue (convert (type, arg1));
6594 case GE_EXPR:
6595 case GT_EXPR:
6596 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6597 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6598 return pedantic_non_lvalue
6599 (convert (type, fold (build1 (ABS_EXPR,
6600 TREE_TYPE (arg1), arg1))));
6601 case LE_EXPR:
6602 case LT_EXPR:
6603 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6604 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6605 return pedantic_non_lvalue
6606 (negate_expr (convert (type,
6607 fold (build1 (ABS_EXPR,
6608 TREE_TYPE (arg1),
6609 arg1)))));
6610 default:
6611 abort ();
6614 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6615 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6616 both transformations are correct when A is NaN: A != 0
6617 is then true, and A == 0 is false. */
6619 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6621 if (comp_code == NE_EXPR)
6622 return pedantic_non_lvalue (convert (type, arg1));
6623 else if (comp_code == EQ_EXPR)
6624 return pedantic_non_lvalue (convert (type, integer_zero_node));
6627 /* Try some transformations of A op B ? A : B.
6629 A == B? A : B same as B
6630 A != B? A : B same as A
6631 A >= B? A : B same as max (A, B)
6632 A > B? A : B same as max (B, A)
6633 A <= B? A : B same as min (A, B)
6634 A < B? A : B same as min (B, A)
6636 As above, these transformations don't work in the presence
6637 of signed zeros. For example, if A and B are zeros of
6638 opposite sign, the first two transformations will change
6639 the sign of the result. In the last four, the original
6640 expressions give different results for (A=+0, B=-0) and
6641 (A=-0, B=+0), but the transformed expressions do not.
6643 The first two transformations are correct if either A or B
6644 is a NaN. In the first transformation, the condition will
6645 be false, and B will indeed be chosen. In the case of the
6646 second transformation, the condition A != B will be true,
6647 and A will be chosen.
6649 The conversions to max() and min() are not correct if B is
6650 a number and A is not. The conditions in the original
6651 expressions will be false, so all four give B. The min()
6652 and max() versions would give a NaN instead. */
6653 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6654 arg2, TREE_OPERAND (arg0, 0)))
6656 tree comp_op0 = TREE_OPERAND (arg0, 0);
6657 tree comp_op1 = TREE_OPERAND (arg0, 1);
6658 tree comp_type = TREE_TYPE (comp_op0);
6660 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
6661 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
6662 comp_type = type;
6664 switch (comp_code)
6666 case EQ_EXPR:
6667 return pedantic_non_lvalue (convert (type, arg2));
6668 case NE_EXPR:
6669 return pedantic_non_lvalue (convert (type, arg1));
6670 case LE_EXPR:
6671 case LT_EXPR:
6672 /* In C++ a ?: expression can be an lvalue, so put the
6673 operand which will be used if they are equal first
6674 so that we can convert this back to the
6675 corresponding COND_EXPR. */
6676 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6677 return pedantic_non_lvalue
6678 (convert (type, fold (build (MIN_EXPR, comp_type,
6679 (comp_code == LE_EXPR
6680 ? comp_op0 : comp_op1),
6681 (comp_code == LE_EXPR
6682 ? comp_op1 : comp_op0)))));
6683 break;
6684 case GE_EXPR:
6685 case GT_EXPR:
6686 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6687 return pedantic_non_lvalue
6688 (convert (type, fold (build (MAX_EXPR, comp_type,
6689 (comp_code == GE_EXPR
6690 ? comp_op0 : comp_op1),
6691 (comp_code == GE_EXPR
6692 ? comp_op1 : comp_op0)))));
6693 break;
6694 default:
6695 abort ();
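/* Examples (illustrative, valid when NaNs need not be honored):
   `a < b ? a : b' folds to MIN_EXPR (a, b) via the LT_EXPR case, and
   `a > b ? a : b' folds to MAX_EXPR (b, a) via the GT_EXPR case.  */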
6699 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6700 we might still be able to simplify this. For example,
6701 if C1 is one less or one more than C2, this might have started
6702 out as a MIN or MAX and been transformed by this function.
6703 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6705 if (INTEGRAL_TYPE_P (type)
6706 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6707 && TREE_CODE (arg2) == INTEGER_CST)
6708 switch (comp_code)
6710 case EQ_EXPR:
6711 /* We can replace A with C1 in this case. */
6712 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6713 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6714 TREE_OPERAND (t, 2));
6715 break;
6717 case LT_EXPR:
6718 /* If C1 is C2 + 1, this is min(A, C2). */
6719 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6720 && operand_equal_p (TREE_OPERAND (arg0, 1),
6721 const_binop (PLUS_EXPR, arg2,
6722 integer_one_node, 0), 1))
6723 return pedantic_non_lvalue
6724 (fold (build (MIN_EXPR, type, arg1, arg2)));
6725 break;
6727 case LE_EXPR:
6728 /* If C1 is C2 - 1, this is min(A, C2). */
6729 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6730 && operand_equal_p (TREE_OPERAND (arg0, 1),
6731 const_binop (MINUS_EXPR, arg2,
6732 integer_one_node, 0), 1))
6733 return pedantic_non_lvalue
6734 (fold (build (MIN_EXPR, type, arg1, arg2)));
6735 break;
6737 case GT_EXPR:
6738 /* If C1 is C2 - 1, this is max(A, C2). */
6739 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6740 && operand_equal_p (TREE_OPERAND (arg0, 1),
6741 const_binop (MINUS_EXPR, arg2,
6742 integer_one_node, 0), 1))
6743 return pedantic_non_lvalue
6744 (fold (build (MAX_EXPR, type, arg1, arg2)));
6745 break;
6747 case GE_EXPR:
6748 /* If C1 is C2 + 1, this is max(A, C2). */
6749 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6750 && operand_equal_p (TREE_OPERAND (arg0, 1),
6751 const_binop (PLUS_EXPR, arg2,
6752 integer_one_node, 0), 1))
6753 return pedantic_non_lvalue
6754 (fold (build (MAX_EXPR, type, arg1, arg2)));
6755 break;
6756 case NE_EXPR:
6757 break;
6758 default:
6759 abort ();
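/* Example (illustrative): `a < 6 ? a : 5' matches the LT_EXPR case
   above (C1 == C2 + 1) and folds to MIN_EXPR (a, 5).  */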
6763 /* If the second operand is simpler than the third, swap them
6764 since that produces better jump optimization results. */
6765 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
6766 || TREE_CODE (arg1) == SAVE_EXPR)
6767 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6768 || DECL_P (TREE_OPERAND (t, 2))
6769 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6771 /* See if this can be inverted. If it can't, possibly because
6772 it was a floating-point inequality comparison, don't do
6773 anything. */
6774 tem = invert_truthvalue (arg0);
6776 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6778 t = build (code, type, tem,
6779 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6780 arg0 = tem;
6781 /* arg1 should be the first argument of the new T. */
6782 arg1 = TREE_OPERAND (t, 1);
6783 STRIP_NOPS (arg1);
6787 /* Convert A ? 1 : 0 to simply A. */
6788 if (integer_onep (TREE_OPERAND (t, 1))
6789 && integer_zerop (TREE_OPERAND (t, 2))
6790 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
6791 call to fold will try to move the conversion inside
6792 a COND, which will recurse. In that case, the COND_EXPR
6793 is probably the best choice, so leave it alone. */
6794 && type == TREE_TYPE (arg0))
6795 return pedantic_non_lvalue (arg0);
6797 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
6798 operation is simply A & 2. */
6800 if (integer_zerop (TREE_OPERAND (t, 2))
6801 && TREE_CODE (arg0) == NE_EXPR
6802 && integer_zerop (TREE_OPERAND (arg0, 1))
6803 && integer_pow2p (arg1)
6804 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
6805 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
6806 arg1, 1))
6807 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
6809 return t;
6811 case COMPOUND_EXPR:
6812 /* When pedantic, a compound expression can be neither an lvalue
6813 nor an integer constant expression. */
6814 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
6815 return t;
 6816 /* Don't let (0, 0) be a null pointer constant. */
6817 if (integer_zerop (arg1))
6818 return build1 (NOP_EXPR, type, arg1);
6819 return convert (type, arg1);
6821 case COMPLEX_EXPR:
6822 if (wins)
6823 return build_complex (type, arg0, arg1);
6824 return t;
6826 case REALPART_EXPR:
6827 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6828 return t;
6829 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6830 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6831 TREE_OPERAND (arg0, 1));
6832 else if (TREE_CODE (arg0) == COMPLEX_CST)
6833 return TREE_REALPART (arg0);
6834 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6835 return fold (build (TREE_CODE (arg0), type,
6836 fold (build1 (REALPART_EXPR, type,
6837 TREE_OPERAND (arg0, 0))),
6838 fold (build1 (REALPART_EXPR,
6839 type, TREE_OPERAND (arg0, 1)))));
6840 return t;
6842 case IMAGPART_EXPR:
6843 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6844 return convert (type, integer_zero_node);
6845 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6846 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6847 TREE_OPERAND (arg0, 0));
6848 else if (TREE_CODE (arg0) == COMPLEX_CST)
6849 return TREE_IMAGPART (arg0);
6850 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6851 return fold (build (TREE_CODE (arg0), type,
6852 fold (build1 (IMAGPART_EXPR, type,
6853 TREE_OPERAND (arg0, 0))),
6854 fold (build1 (IMAGPART_EXPR, type,
6855 TREE_OPERAND (arg0, 1)))));
6856 return t;
6858 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
6859 appropriate. */
6860 case CLEANUP_POINT_EXPR:
6861 if (! has_cleanups (arg0))
6862 return TREE_OPERAND (t, 0);
6865 enum tree_code code0 = TREE_CODE (arg0);
6866 int kind0 = TREE_CODE_CLASS (code0);
6867 tree arg00 = TREE_OPERAND (arg0, 0);
6868 tree arg01;
6870 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
6871 return fold (build1 (code0, type,
6872 fold (build1 (CLEANUP_POINT_EXPR,
6873 TREE_TYPE (arg00), arg00))));
6875 if (kind0 == '<' || kind0 == '2'
6876 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
6877 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
6878 || code0 == TRUTH_XOR_EXPR)
6880 arg01 = TREE_OPERAND (arg0, 1);
6882 if (TREE_CONSTANT (arg00)
6883 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
6884 && ! has_cleanups (arg00)))
6885 return fold (build (code0, type, arg00,
6886 fold (build1 (CLEANUP_POINT_EXPR,
6887 TREE_TYPE (arg01), arg01))));
6889 if (TREE_CONSTANT (arg01))
6890 return fold (build (code0, type,
6891 fold (build1 (CLEANUP_POINT_EXPR,
6892 TREE_TYPE (arg00), arg00)),
6893 arg01));
6896 return t;
6899 case CALL_EXPR:
6900 /* Check for a built-in function. */
6901 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
6902 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
6903 == FUNCTION_DECL)
6904 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
6906 tree tmp = fold_builtin (expr);
6907 if (tmp)
6908 return tmp;
6910 return t;
6912 default:
6913 return t;
6914 } /* switch (code) */
6917 /* Determine if first argument is a multiple of second argument. Return 0 if
 6918 it is not, or we cannot easily determine that it is.
6920 An example of the sort of thing we care about (at this point; this routine
6921 could surely be made more general, and expanded to do what the *_DIV_EXPR's
6922 fold cases do now) is discovering that
6924 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6926 is a multiple of
6928 SAVE_EXPR (J * 8)
6930 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
6932 This code also handles discovering that
6934 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6936 is a multiple of 8 so we don't have to worry about dealing with a
6937 possible remainder.
6939 Note that we *look* inside a SAVE_EXPR only to determine how it was
6940 calculated; it is not safe for fold to do much of anything else with the
6941 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
6942 at run time. For example, the latter example above *cannot* be implemented
6943 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
6944 evaluation time of the original SAVE_EXPR is not necessarily the same at
6945 the time the new expression is evaluated. The only optimization of this
6946 sort that would be valid is changing
6948 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
6950 divided by 8 to
6952 SAVE_EXPR (I) * SAVE_EXPR (J)
6954 (where the same SAVE_EXPR (J) is used in the original and the
6955 transformed version). */
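/* Example (illustrative): for TOP = `i * 24' and BOTTOM = 8, the
   MULT_EXPR case tries each factor and the INTEGER_CST case finds
   24 % 8 == 0, so multiple_of_p returns 1.  */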
6957 static int
6958 multiple_of_p (type, top, bottom)
6959 tree type;
6960 tree top;
6961 tree bottom;
6963 if (operand_equal_p (top, bottom, 0))
6964 return 1;
6966 if (TREE_CODE (type) != INTEGER_TYPE)
6967 return 0;
6969 switch (TREE_CODE (top))
6971 case MULT_EXPR:
6972 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6973 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6975 case PLUS_EXPR:
6976 case MINUS_EXPR:
6977 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6978 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6980 case LSHIFT_EXPR:
6981 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
6983 tree op1, t1;
6985 op1 = TREE_OPERAND (top, 1);
6986 /* const_binop may not detect overflow correctly,
6987 so check for it explicitly here. */
6988 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
6989 > TREE_INT_CST_LOW (op1)
6990 && TREE_INT_CST_HIGH (op1) == 0
6991 && 0 != (t1 = convert (type,
6992 const_binop (LSHIFT_EXPR, size_one_node,
6993 op1, 0)))
6994 && ! TREE_OVERFLOW (t1))
6995 return multiple_of_p (type, t1, bottom);
6997 return 0;
6999 case NOP_EXPR:
7000 /* Can't handle conversions from non-integral or wider integral type. */
7001 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7002 || (TYPE_PRECISION (type)
7003 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7004 return 0;
 7006 /* ... fall through ... */
7008 case SAVE_EXPR:
7009 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7011 case INTEGER_CST:
7012 if (TREE_CODE (bottom) != INTEGER_CST
7013 || (TREE_UNSIGNED (type)
7014 && (tree_int_cst_sgn (top) < 0
7015 || tree_int_cst_sgn (bottom) < 0)))
7016 return 0;
7017 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7018 top, bottom, 0));
7020 default:
7021 return 0;
7025 /* Return true if `t' is known to be non-negative. */
 7028 int tree_expr_nonnegative_p (t)
7029 tree t;
7031 switch (TREE_CODE (t))
7033 case ABS_EXPR:
7034 case FFS_EXPR:
7035 return 1;
7036 case INTEGER_CST:
7037 return tree_int_cst_sgn (t) >= 0;
7038 case TRUNC_DIV_EXPR:
7039 case CEIL_DIV_EXPR:
7040 case FLOOR_DIV_EXPR:
7041 case ROUND_DIV_EXPR:
7042 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7043 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7044 case TRUNC_MOD_EXPR:
7045 case CEIL_MOD_EXPR:
7046 case FLOOR_MOD_EXPR:
7047 case ROUND_MOD_EXPR:
7048 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7049 case COND_EXPR:
7050 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
7051 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
7052 case COMPOUND_EXPR:
7053 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7054 case MIN_EXPR:
7055 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7056 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7057 case MAX_EXPR:
7058 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7059 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
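/* Note (illustrative): MIN needs both operands nonnegative, since
   e.g. min (x, 5) can still be negative, while MAX needs only one,
   since e.g. max (x, 0) never is.  */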
7060 case MODIFY_EXPR:
7061 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7062 case BIND_EXPR:
7063 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7064 case SAVE_EXPR:
7065 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7066 case NON_LVALUE_EXPR:
7067 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7068 case RTL_EXPR:
7069 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
7071 default:
7072 if (truth_value_p (TREE_CODE (t)))
7073 /* Truth values evaluate to 0 or 1, which is nonnegative. */
7074 return 1;
7075 else
7076 /* We don't know sign of `t', so be conservative and return false. */
7077 return 0;
7081 /* Return true if `r' is known to be non-negative.
7082 Only handles constants at the moment. */
 7085 int rtl_expr_nonnegative_p (r)
7086 rtx r;
7088 switch (GET_CODE (r))
7090 case CONST_INT:
7091 return INTVAL (r) >= 0;
7093 case CONST_DOUBLE:
7094 if (GET_MODE (r) == VOIDmode)
7095 return CONST_DOUBLE_HIGH (r) >= 0;
7096 return 0;
7098 case CONST_VECTOR:
7100 int units, i;
7101 rtx elt;
7103 units = CONST_VECTOR_NUNITS (r);
7105 for (i = 0; i < units; ++i)
7107 elt = CONST_VECTOR_ELT (r, i);
7108 if (!rtl_expr_nonnegative_p (elt))
7109 return 0;
7112 return 1;
7115 case SYMBOL_REF:
7116 case LABEL_REF:
7117 /* These are always nonnegative. */
7118 return 1;
7120 default:
7121 return 0;