/* gcc/fold-const.c (from official-gcc.git; blob f9f3a64943363adb4121d8672a0f20fb2ce53745).  */

/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "config.h"
#include "system.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
58 static void encode PARAMS ((HOST_WIDE_INT *,
59 unsigned HOST_WIDE_INT,
60 HOST_WIDE_INT));
61 static void decode PARAMS ((HOST_WIDE_INT *,
62 unsigned HOST_WIDE_INT *,
63 HOST_WIDE_INT *));
64 static tree negate_expr PARAMS ((tree));
65 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
66 tree *, int));
67 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
68 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
69 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
70 static hashval_t size_htab_hash PARAMS ((const void *));
71 static int size_htab_eq PARAMS ((const void *, const void *));
72 static tree fold_convert PARAMS ((tree, tree));
73 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
74 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
75 static int comparison_to_compcode PARAMS ((enum tree_code));
76 static enum tree_code compcode_to_comparison PARAMS ((int));
77 static int truth_value_p PARAMS ((enum tree_code));
78 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
79 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
80 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
81 static tree omit_one_operand PARAMS ((tree, tree, tree));
82 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
83 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
84 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
85 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
86 tree, tree));
87 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
88 HOST_WIDE_INT *,
89 enum machine_mode *, int *,
90 int *, tree *, tree *));
91 static int all_ones_mask_p PARAMS ((tree, int));
92 static tree sign_bit_p PARAMS ((tree, tree));
93 static int simple_operand_p PARAMS ((tree));
94 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
95 tree, int));
96 static tree make_range PARAMS ((tree, int *, tree *, tree *));
97 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
98 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
99 int, tree, tree));
100 static tree fold_range_test PARAMS ((tree));
101 static tree unextend PARAMS ((tree, int, int, tree));
102 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
103 static tree optimize_minmax_comparison PARAMS ((tree));
104 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
105 static tree strip_compound_expr PARAMS ((tree, tree));
106 static int multiple_of_p PARAMS ((tree, tree, tree));
107 static tree constant_boolean_node PARAMS ((int, tree));
108 static int count_cond PARAMS ((tree, int));
109 static tree fold_binary_op_with_conditional_arg
110 PARAMS ((enum tree_code, tree, tree, tree, int));
111 static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));
113 #if defined(HOST_EBCDIC)
114 /* bit 8 is significant in EBCDIC */
115 #define CHARMASK 0xff
116 #else
117 #define CHARMASK 0x7f
118 #endif
120 /* The following constants represent a bit based encoding of GCC's
121 comparison operators. This encoding simplifies transformations
122 on relational comparison operators, such as AND and OR. */
123 #define COMPCODE_FALSE 0
124 #define COMPCODE_LT 1
125 #define COMPCODE_EQ 2
126 #define COMPCODE_LE 3
127 #define COMPCODE_GT 4
128 #define COMPCODE_NE 5
129 #define COMPCODE_GE 6
130 #define COMPCODE_TRUE 7
132 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
133 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
134 and SUM1. Then this yields nonzero if overflow occurred during the
135 addition.
137 Overflow occurs if A and B have the same sign, but A and SUM differ in
138 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
139 sign. */
140 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
142 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
143 We do that by representing the two-word integer in 4 words, with only
144 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
145 number. The value of the word is LOWPART + HIGHPART * BASE. */
147 #define LOWPART(x) \
148 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
149 #define HIGHPART(x) \
150 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
151 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
153 /* Unpack a two-word integer into 4 words.
154 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
155 WORDS points to the array of HOST_WIDE_INTs. */
157 static void
158 encode (words, low, hi)
159 HOST_WIDE_INT *words;
160 unsigned HOST_WIDE_INT low;
161 HOST_WIDE_INT hi;
163 words[0] = LOWPART (low);
164 words[1] = HIGHPART (low);
165 words[2] = LOWPART (hi);
166 words[3] = HIGHPART (hi);
169 /* Pack an array of 4 words into a two-word integer.
170 WORDS points to the array of words.
171 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
173 static void
174 decode (words, low, hi)
175 HOST_WIDE_INT *words;
176 unsigned HOST_WIDE_INT *low;
177 HOST_WIDE_INT *hi;
179 *low = words[0] + words[1] * BASE;
180 *hi = words[2] + words[3] * BASE;
183 /* Make the integer constant T valid for its type by setting to 0 or 1 all
184 the bits in the constant that don't belong in the type.
186 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
187 nonzero, a signed overflow has already occurred in calculating T, so
188 propagate it.
190 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
191 if it exists. */
194 force_fit_type (t, overflow)
195 tree t;
196 int overflow;
198 unsigned HOST_WIDE_INT low;
199 HOST_WIDE_INT high;
200 unsigned int prec;
202 if (TREE_CODE (t) == REAL_CST)
204 #ifdef CHECK_FLOAT_VALUE
205 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
206 overflow);
207 #endif
208 return overflow;
211 else if (TREE_CODE (t) != INTEGER_CST)
212 return overflow;
214 low = TREE_INT_CST_LOW (t);
215 high = TREE_INT_CST_HIGH (t);
217 if (POINTER_TYPE_P (TREE_TYPE (t)))
218 prec = POINTER_SIZE;
219 else
220 prec = TYPE_PRECISION (TREE_TYPE (t));
222 /* First clear all bits that are beyond the type's precision. */
224 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
226 else if (prec > HOST_BITS_PER_WIDE_INT)
227 TREE_INT_CST_HIGH (t)
228 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
229 else
231 TREE_INT_CST_HIGH (t) = 0;
232 if (prec < HOST_BITS_PER_WIDE_INT)
233 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
236 /* Unsigned types do not suffer sign extension or overflow unless they
237 are a sizetype. */
238 if (TREE_UNSIGNED (TREE_TYPE (t))
239 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
240 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
241 return overflow;
243 /* If the value's sign bit is set, extend the sign. */
244 if (prec != 2 * HOST_BITS_PER_WIDE_INT
245 && (prec > HOST_BITS_PER_WIDE_INT
246 ? 0 != (TREE_INT_CST_HIGH (t)
247 & ((HOST_WIDE_INT) 1
248 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
249 : 0 != (TREE_INT_CST_LOW (t)
250 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
252 /* Value is negative:
253 set to 1 all the bits that are outside this type's precision. */
254 if (prec > HOST_BITS_PER_WIDE_INT)
255 TREE_INT_CST_HIGH (t)
256 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
257 else
259 TREE_INT_CST_HIGH (t) = -1;
260 if (prec < HOST_BITS_PER_WIDE_INT)
261 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
265 /* Return nonzero if signed overflow occurred. */
266 return
267 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
268 != 0);
271 /* Add two doubleword integers with doubleword result.
272 Each argument is given as two `HOST_WIDE_INT' pieces.
273 One argument is L1 and H1; the other, L2 and H2.
274 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
277 add_double (l1, h1, l2, h2, lv, hv)
278 unsigned HOST_WIDE_INT l1, l2;
279 HOST_WIDE_INT h1, h2;
280 unsigned HOST_WIDE_INT *lv;
281 HOST_WIDE_INT *hv;
283 unsigned HOST_WIDE_INT l;
284 HOST_WIDE_INT h;
286 l = l1 + l2;
287 h = h1 + h2 + (l < l1);
289 *lv = l;
290 *hv = h;
291 return OVERFLOW_SUM_SIGN (h1, h2, h);
294 /* Negate a doubleword integer with doubleword result.
295 Return nonzero if the operation overflows, assuming it's signed.
296 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
297 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
300 neg_double (l1, h1, lv, hv)
301 unsigned HOST_WIDE_INT l1;
302 HOST_WIDE_INT h1;
303 unsigned HOST_WIDE_INT *lv;
304 HOST_WIDE_INT *hv;
306 if (l1 == 0)
308 *lv = 0;
309 *hv = - h1;
310 return (*hv & h1) < 0;
312 else
314 *lv = -l1;
315 *hv = ~h1;
316 return 0;
320 /* Multiply two doubleword integers with doubleword result.
321 Return nonzero if the operation overflows, assuming it's signed.
322 Each argument is given as two `HOST_WIDE_INT' pieces.
323 One argument is L1 and H1; the other, L2 and H2.
324 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
327 mul_double (l1, h1, l2, h2, lv, hv)
328 unsigned HOST_WIDE_INT l1, l2;
329 HOST_WIDE_INT h1, h2;
330 unsigned HOST_WIDE_INT *lv;
331 HOST_WIDE_INT *hv;
333 HOST_WIDE_INT arg1[4];
334 HOST_WIDE_INT arg2[4];
335 HOST_WIDE_INT prod[4 * 2];
336 unsigned HOST_WIDE_INT carry;
337 int i, j, k;
338 unsigned HOST_WIDE_INT toplow, neglow;
339 HOST_WIDE_INT tophigh, neghigh;
341 encode (arg1, l1, h1);
342 encode (arg2, l2, h2);
344 memset ((char *) prod, 0, sizeof prod);
346 for (i = 0; i < 4; i++)
348 carry = 0;
349 for (j = 0; j < 4; j++)
351 k = i + j;
352 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
353 carry += arg1[i] * arg2[j];
354 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
355 carry += prod[k];
356 prod[k] = LOWPART (carry);
357 carry = HIGHPART (carry);
359 prod[i + 4] = carry;
362 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
364 /* Check for overflow by calculating the top half of the answer in full;
365 it should agree with the low half's sign bit. */
366 decode (prod + 4, &toplow, &tophigh);
367 if (h1 < 0)
369 neg_double (l2, h2, &neglow, &neghigh);
370 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
372 if (h2 < 0)
374 neg_double (l1, h1, &neglow, &neghigh);
375 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
377 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
380 /* Shift the doubleword integer in L1, H1 left by COUNT places
381 keeping only PREC bits of result.
382 Shift right if COUNT is negative.
383 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
384 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
386 void
387 lshift_double (l1, h1, count, prec, lv, hv, arith)
388 unsigned HOST_WIDE_INT l1;
389 HOST_WIDE_INT h1, count;
390 unsigned int prec;
391 unsigned HOST_WIDE_INT *lv;
392 HOST_WIDE_INT *hv;
393 int arith;
395 unsigned HOST_WIDE_INT signmask;
397 if (count < 0)
399 rshift_double (l1, h1, -count, prec, lv, hv, arith);
400 return;
403 #ifdef SHIFT_COUNT_TRUNCATED
404 if (SHIFT_COUNT_TRUNCATED)
405 count %= prec;
406 #endif
408 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
410 /* Shifting by the host word size is undefined according to the
411 ANSI standard, so we must handle this as a special case. */
412 *hv = 0;
413 *lv = 0;
415 else if (count >= HOST_BITS_PER_WIDE_INT)
417 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
418 *lv = 0;
420 else
422 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
423 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
424 *lv = l1 << count;
427 /* Sign extend all bits that are beyond the precision. */
429 signmask = -((prec > HOST_BITS_PER_WIDE_INT
430 ? ((unsigned HOST_WIDE_INT) *hv
431 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
432 : (*lv >> (prec - 1))) & 1);
434 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
436 else if (prec >= HOST_BITS_PER_WIDE_INT)
438 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
439 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
441 else
443 *hv = signmask;
444 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
445 *lv |= signmask << prec;
449 /* Shift the doubleword integer in L1, H1 right by COUNT places
450 keeping only PREC bits of result. COUNT must be positive.
451 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
452 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
454 void
455 rshift_double (l1, h1, count, prec, lv, hv, arith)
456 unsigned HOST_WIDE_INT l1;
457 HOST_WIDE_INT h1, count;
458 unsigned int prec;
459 unsigned HOST_WIDE_INT *lv;
460 HOST_WIDE_INT *hv;
461 int arith;
463 unsigned HOST_WIDE_INT signmask;
465 signmask = (arith
466 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
467 : 0);
469 #ifdef SHIFT_COUNT_TRUNCATED
470 if (SHIFT_COUNT_TRUNCATED)
471 count %= prec;
472 #endif
474 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
476 /* Shifting by the host word size is undefined according to the
477 ANSI standard, so we must handle this as a special case. */
478 *hv = 0;
479 *lv = 0;
481 else if (count >= HOST_BITS_PER_WIDE_INT)
483 *hv = 0;
484 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
486 else
488 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
489 *lv = ((l1 >> count)
490 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
493 /* Zero / sign extend all bits that are beyond the precision. */
495 if (count >= (HOST_WIDE_INT)prec)
497 *hv = signmask;
498 *lv = signmask;
500 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
502 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
504 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
505 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
507 else
509 *hv = signmask;
510 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
511 *lv |= signmask << (prec - count);
515 /* Rotate the doubleword integer in L1, H1 left by COUNT places
516 keeping only PREC bits of result.
517 Rotate right if COUNT is negative.
518 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
520 void
521 lrotate_double (l1, h1, count, prec, lv, hv)
522 unsigned HOST_WIDE_INT l1;
523 HOST_WIDE_INT h1, count;
524 unsigned int prec;
525 unsigned HOST_WIDE_INT *lv;
526 HOST_WIDE_INT *hv;
528 unsigned HOST_WIDE_INT s1l, s2l;
529 HOST_WIDE_INT s1h, s2h;
531 count %= prec;
532 if (count < 0)
533 count += prec;
535 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
536 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
537 *lv = s1l | s2l;
538 *hv = s1h | s2h;
541 /* Rotate the doubleword integer in L1, H1 left by COUNT places
542 keeping only PREC bits of result. COUNT must be positive.
543 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
545 void
546 rrotate_double (l1, h1, count, prec, lv, hv)
547 unsigned HOST_WIDE_INT l1;
548 HOST_WIDE_INT h1, count;
549 unsigned int prec;
550 unsigned HOST_WIDE_INT *lv;
551 HOST_WIDE_INT *hv;
553 unsigned HOST_WIDE_INT s1l, s2l;
554 HOST_WIDE_INT s1h, s2h;
556 count %= prec;
557 if (count < 0)
558 count += prec;
560 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
561 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
562 *lv = s1l | s2l;
563 *hv = s1h | s2h;
566 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
567 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
568 CODE is a tree code for a kind of division, one of
569 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
570 or EXACT_DIV_EXPR
571 It controls how the quotient is rounded to an integer.
572 Return nonzero if the operation overflows.
573 UNS nonzero says do unsigned division. */
576 div_and_round_double (code, uns,
577 lnum_orig, hnum_orig, lden_orig, hden_orig,
578 lquo, hquo, lrem, hrem)
579 enum tree_code code;
580 int uns;
581 unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
582 HOST_WIDE_INT hnum_orig;
583 unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
584 HOST_WIDE_INT hden_orig;
585 unsigned HOST_WIDE_INT *lquo, *lrem;
586 HOST_WIDE_INT *hquo, *hrem;
588 int quo_neg = 0;
589 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
590 HOST_WIDE_INT den[4], quo[4];
591 int i, j;
592 unsigned HOST_WIDE_INT work;
593 unsigned HOST_WIDE_INT carry = 0;
594 unsigned HOST_WIDE_INT lnum = lnum_orig;
595 HOST_WIDE_INT hnum = hnum_orig;
596 unsigned HOST_WIDE_INT lden = lden_orig;
597 HOST_WIDE_INT hden = hden_orig;
598 int overflow = 0;
600 if (hden == 0 && lden == 0)
601 overflow = 1, lden = 1;
603 /* calculate quotient sign and convert operands to unsigned. */
604 if (!uns)
606 if (hnum < 0)
608 quo_neg = ~ quo_neg;
609 /* (minimum integer) / (-1) is the only overflow case. */
610 if (neg_double (lnum, hnum, &lnum, &hnum)
611 && ((HOST_WIDE_INT) lden & hden) == -1)
612 overflow = 1;
614 if (hden < 0)
616 quo_neg = ~ quo_neg;
617 neg_double (lden, hden, &lden, &hden);
621 if (hnum == 0 && hden == 0)
622 { /* single precision */
623 *hquo = *hrem = 0;
624 /* This unsigned division rounds toward zero. */
625 *lquo = lnum / lden;
626 goto finish_up;
629 if (hnum == 0)
630 { /* trivial case: dividend < divisor */
631 /* hden != 0 already checked. */
632 *hquo = *lquo = 0;
633 *hrem = hnum;
634 *lrem = lnum;
635 goto finish_up;
638 memset ((char *) quo, 0, sizeof quo);
640 memset ((char *) num, 0, sizeof num); /* to zero 9th element */
641 memset ((char *) den, 0, sizeof den);
643 encode (num, lnum, hnum);
644 encode (den, lden, hden);
646 /* Special code for when the divisor < BASE. */
647 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
649 /* hnum != 0 already checked. */
650 for (i = 4 - 1; i >= 0; i--)
652 work = num[i] + carry * BASE;
653 quo[i] = work / lden;
654 carry = work % lden;
657 else
659 /* Full double precision division,
660 with thanks to Don Knuth's "Seminumerical Algorithms". */
661 int num_hi_sig, den_hi_sig;
662 unsigned HOST_WIDE_INT quo_est, scale;
664 /* Find the highest non-zero divisor digit. */
665 for (i = 4 - 1;; i--)
666 if (den[i] != 0)
668 den_hi_sig = i;
669 break;
672 /* Insure that the first digit of the divisor is at least BASE/2.
673 This is required by the quotient digit estimation algorithm. */
675 scale = BASE / (den[den_hi_sig] + 1);
676 if (scale > 1)
677 { /* scale divisor and dividend */
678 carry = 0;
679 for (i = 0; i <= 4 - 1; i++)
681 work = (num[i] * scale) + carry;
682 num[i] = LOWPART (work);
683 carry = HIGHPART (work);
686 num[4] = carry;
687 carry = 0;
688 for (i = 0; i <= 4 - 1; i++)
690 work = (den[i] * scale) + carry;
691 den[i] = LOWPART (work);
692 carry = HIGHPART (work);
693 if (den[i] != 0) den_hi_sig = i;
697 num_hi_sig = 4;
699 /* Main loop */
700 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
702 /* Guess the next quotient digit, quo_est, by dividing the first
703 two remaining dividend digits by the high order quotient digit.
704 quo_est is never low and is at most 2 high. */
705 unsigned HOST_WIDE_INT tmp;
707 num_hi_sig = i + den_hi_sig + 1;
708 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
709 if (num[num_hi_sig] != den[den_hi_sig])
710 quo_est = work / den[den_hi_sig];
711 else
712 quo_est = BASE - 1;
714 /* Refine quo_est so it's usually correct, and at most one high. */
715 tmp = work - quo_est * den[den_hi_sig];
716 if (tmp < BASE
717 && (den[den_hi_sig - 1] * quo_est
718 > (tmp * BASE + num[num_hi_sig - 2])))
719 quo_est--;
721 /* Try QUO_EST as the quotient digit, by multiplying the
722 divisor by QUO_EST and subtracting from the remaining dividend.
723 Keep in mind that QUO_EST is the I - 1st digit. */
725 carry = 0;
726 for (j = 0; j <= den_hi_sig; j++)
728 work = quo_est * den[j] + carry;
729 carry = HIGHPART (work);
730 work = num[i + j] - LOWPART (work);
731 num[i + j] = LOWPART (work);
732 carry += HIGHPART (work) != 0;
735 /* If quo_est was high by one, then num[i] went negative and
736 we need to correct things. */
737 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
739 quo_est--;
740 carry = 0; /* add divisor back in */
741 for (j = 0; j <= den_hi_sig; j++)
743 work = num[i + j] + den[j] + carry;
744 carry = HIGHPART (work);
745 num[i + j] = LOWPART (work);
748 num [num_hi_sig] += carry;
751 /* Store the quotient digit. */
752 quo[i] = quo_est;
756 decode (quo, lquo, hquo);
758 finish_up:
759 /* if result is negative, make it so. */
760 if (quo_neg)
761 neg_double (*lquo, *hquo, lquo, hquo);
763 /* compute trial remainder: rem = num - (quo * den) */
764 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
765 neg_double (*lrem, *hrem, lrem, hrem);
766 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
768 switch (code)
770 case TRUNC_DIV_EXPR:
771 case TRUNC_MOD_EXPR: /* round toward zero */
772 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
773 return overflow;
775 case FLOOR_DIV_EXPR:
776 case FLOOR_MOD_EXPR: /* round toward negative infinity */
777 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
779 /* quo = quo - 1; */
780 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
781 lquo, hquo);
783 else
784 return overflow;
785 break;
787 case CEIL_DIV_EXPR:
788 case CEIL_MOD_EXPR: /* round toward positive infinity */
789 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
791 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
792 lquo, hquo);
794 else
795 return overflow;
796 break;
798 case ROUND_DIV_EXPR:
799 case ROUND_MOD_EXPR: /* round to closest integer */
801 unsigned HOST_WIDE_INT labs_rem = *lrem;
802 HOST_WIDE_INT habs_rem = *hrem;
803 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
804 HOST_WIDE_INT habs_den = hden, htwice;
806 /* Get absolute values */
807 if (*hrem < 0)
808 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
809 if (hden < 0)
810 neg_double (lden, hden, &labs_den, &habs_den);
812 /* If (2 * abs (lrem) >= abs (lden)) */
813 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
814 labs_rem, habs_rem, &ltwice, &htwice);
816 if (((unsigned HOST_WIDE_INT) habs_den
817 < (unsigned HOST_WIDE_INT) htwice)
818 || (((unsigned HOST_WIDE_INT) habs_den
819 == (unsigned HOST_WIDE_INT) htwice)
820 && (labs_den < ltwice)))
822 if (*hquo < 0)
823 /* quo = quo - 1; */
824 add_double (*lquo, *hquo,
825 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
826 else
827 /* quo = quo + 1; */
828 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
829 lquo, hquo);
831 else
832 return overflow;
834 break;
836 default:
837 abort ();
840 /* compute true remainder: rem = num - (quo * den) */
841 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
842 neg_double (*lrem, *hrem, lrem, hrem);
843 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
844 return overflow;
847 /* Given T, an expression, return the negation of T. Allow for T to be
848 null, in which case return null. */
850 static tree
851 negate_expr (t)
852 tree t;
854 tree type;
855 tree tem;
857 if (t == 0)
858 return 0;
860 type = TREE_TYPE (t);
861 STRIP_SIGN_NOPS (t);
863 switch (TREE_CODE (t))
865 case INTEGER_CST:
866 case REAL_CST:
867 if (! TREE_UNSIGNED (type)
868 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
869 && ! TREE_OVERFLOW (tem))
870 return tem;
871 break;
873 case NEGATE_EXPR:
874 return convert (type, TREE_OPERAND (t, 0));
876 case MINUS_EXPR:
877 /* - (A - B) -> B - A */
878 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
879 return convert (type,
880 fold (build (MINUS_EXPR, TREE_TYPE (t),
881 TREE_OPERAND (t, 1),
882 TREE_OPERAND (t, 0))));
883 break;
885 default:
886 break;
889 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
892 /* Split a tree IN into a constant, literal and variable parts that could be
893 combined with CODE to make IN. "constant" means an expression with
894 TREE_CONSTANT but that isn't an actual constant. CODE must be a
895 commutative arithmetic operation. Store the constant part into *CONP,
896 the literal in *LITP and return the variable part. If a part isn't
897 present, set it to null. If the tree does not decompose in this way,
898 return the entire tree as the variable part and the other parts as null.
900 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
901 case, we negate an operand that was subtracted. Except if it is a
902 literal for which we use *MINUS_LITP instead.
904 If NEGATE_P is true, we are negating all of IN, again except a literal
905 for which we use *MINUS_LITP instead.
907 If IN is itself a literal or constant, return it as appropriate.
909 Note that we do not guarantee that any of the three values will be the
910 same type as IN, but they will have the same signedness and mode. */
912 static tree
913 split_tree (in, code, conp, litp, minus_litp, negate_p)
914 tree in;
915 enum tree_code code;
916 tree *conp, *litp, *minus_litp;
917 int negate_p;
919 tree var = 0;
921 *conp = 0;
922 *litp = 0;
923 *minus_litp = 0;
925 /* Strip any conversions that don't change the machine mode or signedness. */
926 STRIP_SIGN_NOPS (in);
928 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
929 *litp = in;
930 else if (TREE_CODE (in) == code
931 || (! FLOAT_TYPE_P (TREE_TYPE (in))
932 /* We can associate addition and subtraction together (even
933 though the C standard doesn't say so) for integers because
934 the value is not affected. For reals, the value might be
935 affected, so we can't. */
936 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
937 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
939 tree op0 = TREE_OPERAND (in, 0);
940 tree op1 = TREE_OPERAND (in, 1);
941 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
942 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
944 /* First see if either of the operands is a literal, then a constant. */
945 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
946 *litp = op0, op0 = 0;
947 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
948 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
950 if (op0 != 0 && TREE_CONSTANT (op0))
951 *conp = op0, op0 = 0;
952 else if (op1 != 0 && TREE_CONSTANT (op1))
953 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
955 /* If we haven't dealt with either operand, this is not a case we can
956 decompose. Otherwise, VAR is either of the ones remaining, if any. */
957 if (op0 != 0 && op1 != 0)
958 var = in;
959 else if (op0 != 0)
960 var = op0;
961 else
962 var = op1, neg_var_p = neg1_p;
964 /* Now do any needed negations. */
965 if (neg_litp_p)
966 *minus_litp = *litp, *litp = 0;
967 if (neg_conp_p)
968 *conp = negate_expr (*conp);
969 if (neg_var_p)
970 var = negate_expr (var);
972 else if (TREE_CONSTANT (in))
973 *conp = in;
974 else
975 var = in;
977 if (negate_p)
979 if (*litp)
980 *minus_litp = *litp, *litp = 0;
981 else if (*minus_litp)
982 *litp = *minus_litp, *minus_litp = 0;
983 *conp = negate_expr (*conp);
984 var = negate_expr (var);
987 return var;
990 /* Re-associate trees split by the above function. T1 and T2 are either
991 expressions to associate or null. Return the new expression, if any. If
992 we build an operation, do it in TYPE and with CODE. */
994 static tree
995 associate_trees (t1, t2, code, type)
996 tree t1, t2;
997 enum tree_code code;
998 tree type;
1000 if (t1 == 0)
1001 return t2;
1002 else if (t2 == 0)
1003 return t1;
1005 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1006 try to fold this since we will have infinite recursion. But do
1007 deal with any NEGATE_EXPRs. */
1008 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1009 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1011 if (TREE_CODE (t1) == NEGATE_EXPR)
1012 return build (MINUS_EXPR, type, convert (type, t2),
1013 convert (type, TREE_OPERAND (t1, 0)));
1014 else if (TREE_CODE (t2) == NEGATE_EXPR)
1015 return build (MINUS_EXPR, type, convert (type, t1),
1016 convert (type, TREE_OPERAND (t2, 0)));
1017 else
1018 return build (code, type, convert (type, t1), convert (type, t2));
1021 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1024 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1025 to produce a new constant.
1027 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1029 static tree
1030 int_const_binop (code, arg1, arg2, notrunc)
1031 enum tree_code code;
1032 tree arg1, arg2;
1033 int notrunc;
/* Each double-word value is handled as a (low, high) pair of
   HOST_WIDE_INTs.  */
1035 unsigned HOST_WIDE_INT int1l, int2l;
1036 HOST_WIDE_INT int1h, int2h;
1037 unsigned HOST_WIDE_INT low;
1038 HOST_WIDE_INT hi;
/* Scratch outputs for the half of div/mod we do not need.  */
1039 unsigned HOST_WIDE_INT garbagel;
1040 HOST_WIDE_INT garbageh;
1041 tree t;
1042 tree type = TREE_TYPE (arg1);
1043 int uns = TREE_UNSIGNED (type);
1044 int is_sizetype
1045 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1046 int overflow = 0;
1047 int no_overflow = 0;
/* Split both operands into their low and high words.  */
1049 int1l = TREE_INT_CST_LOW (arg1);
1050 int1h = TREE_INT_CST_HIGH (arg1);
1051 int2l = TREE_INT_CST_LOW (arg2);
1052 int2h = TREE_INT_CST_HIGH (arg2);
1054 switch (code)
1056 case BIT_IOR_EXPR:
1057 low = int1l | int2l, hi = int1h | int2h;
1058 break;
1060 case BIT_XOR_EXPR:
1061 low = int1l ^ int2l, hi = int1h ^ int2h;
1062 break;
1064 case BIT_AND_EXPR:
1065 low = int1l & int2l, hi = int1h & int2h;
1066 break;
1068 case BIT_ANDTC_EXPR:
/* AND with complement: int1 & ~int2.  */
1069 low = int1l & ~int2l, hi = int1h & ~int2h;
1070 break;
1072 case RSHIFT_EXPR:
/* Express a right shift as a left shift by a negated count and
   fall through to the LSHIFT_EXPR handling.  */
1073 int2l = -int2l;
1074 case LSHIFT_EXPR:
1075 /* It's unclear from the C standard whether shifts can overflow.
1076 The following code ignores overflow; perhaps a C standard
1077 interpretation ruling is needed. */
1078 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1079 &low, &hi, !uns);
1080 no_overflow = 1;
1081 break;
1083 case RROTATE_EXPR:
/* Likewise, a right rotate is a left rotate by a negated count;
   fall through.  */
1084 int2l = - int2l;
1085 case LROTATE_EXPR:
1086 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1087 &low, &hi);
1088 break;
1090 case PLUS_EXPR:
1091 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1092 break;
1094 case MINUS_EXPR:
/* Compute int1 - int2 as int1 + (-int2); the sign rule below
   detects overflow of the subtraction.  */
1095 neg_double (int2l, int2h, &low, &hi);
1096 add_double (int1l, int1h, low, hi, &low, &hi);
1097 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1098 break;
1100 case MULT_EXPR:
1101 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1102 break;
1104 case TRUNC_DIV_EXPR:
1105 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1106 case EXACT_DIV_EXPR:
1107 /* This is a shortcut for a common special case. */
1108 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1109 && ! TREE_CONSTANT_OVERFLOW (arg1)
1110 && ! TREE_CONSTANT_OVERFLOW (arg2)
1111 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
/* Both operands fit in one nonnegative host word, so a single
   host division suffices.  Bias the dividend upward first so
   CEIL_DIV_EXPR rounds toward +infinity.  */
1113 if (code == CEIL_DIV_EXPR)
1114 int1l += int2l - 1;
1116 low = int1l / int2l, hi = 0;
1117 break;
1120 /* ... fall through ... */
1122 case ROUND_DIV_EXPR:
/* Division by 1, and x/x (for nonzero x), need no real work.  */
1123 if (int2h == 0 && int2l == 1)
1125 low = int1l, hi = int1h;
1126 break;
1128 if (int1l == int2l && int1h == int2h
1129 && ! (int1l == 0 && int1h == 0))
1131 low = 1, hi = 0;
1132 break;
/* General case: double-word division with the requested rounding;
   the remainder is discarded.  */
1134 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1135 &low, &hi, &garbagel, &garbageh);
1136 break;
1138 case TRUNC_MOD_EXPR:
1139 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1140 /* This is a shortcut for a common special case. */
1141 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1142 && ! TREE_CONSTANT_OVERFLOW (arg1)
1143 && ! TREE_CONSTANT_OVERFLOW (arg2)
1144 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1146 if (code == CEIL_MOD_EXPR)
1147 int1l += int2l - 1;
1148 low = int1l % int2l, hi = 0;
1149 break;
1152 /* ... fall through ... */
1154 case ROUND_MOD_EXPR:
/* Here the remainder comes back in the second result pair and the
   quotient is discarded.  */
1155 overflow = div_and_round_double (code, uns,
1156 int1l, int1h, int2l, int2h,
1157 &garbagel, &garbageh, &low, &hi);
1158 break;
1160 case MIN_EXPR:
1161 case MAX_EXPR:
/* Set LOW to 1 iff int1 < int2, comparing with the signedness of
   the type, then select whichever operand the code asks for.  */
1162 if (uns)
1163 low = (((unsigned HOST_WIDE_INT) int1h
1164 < (unsigned HOST_WIDE_INT) int2h)
1165 || (((unsigned HOST_WIDE_INT) int1h
1166 == (unsigned HOST_WIDE_INT) int2h)
1167 && int1l < int2l));
1168 else
1169 low = (int1h < int2h
1170 || (int1h == int2h && int1l < int2l));
1172 if (low == (code == MIN_EXPR))
1173 low = int1l, hi = int1h;
1174 else
1175 low = int2l, hi = int2h;
1176 break;
1178 default:
1179 abort ();
1182 /* If this is for a sizetype, can be represented as one (signed)
1183 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1184 constants. */
1185 if (is_sizetype
1186 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1187 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1188 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1189 return size_int_type_wide (low, type);
1190 else
1192 t = build_int_2 (low, hi);
1193 TREE_TYPE (t) = TREE_TYPE (arg1);
/* Propagate overflow: when NOTRUNC, only a signed or sizetype
   overflow counts; otherwise let force_fit_type truncate and
   report, ignoring operations flagged as never overflowing.  */
1196 TREE_OVERFLOW (t)
1197 = ((notrunc
1198 ? (!uns || is_sizetype) && overflow
1199 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1200 && ! no_overflow))
1201 | TREE_OVERFLOW (arg1)
1202 | TREE_OVERFLOW (arg2));
1204 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1205 So check if force_fit_type truncated the value. */
1206 if (is_sizetype
1207 && ! TREE_OVERFLOW (t)
1208 && (TREE_INT_CST_HIGH (t) != hi
1209 || TREE_INT_CST_LOW (t) != low))
1210 TREE_OVERFLOW (t) = 1;
1212 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1213 | TREE_CONSTANT_OVERFLOW (arg1)
1214 | TREE_CONSTANT_OVERFLOW (arg2));
1215 return t;
1218 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1219 constant. We assume ARG1 and ARG2 have the same data type, or at least
1220 are the same kind of constant and the same machine mode.
1222 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1224 static tree
1225 const_binop (code, arg1, arg2, notrunc)
1226 enum tree_code code;
1227 tree arg1, arg2;
1228 int notrunc;
1230 STRIP_NOPS (arg1);
1231 STRIP_NOPS (arg2);
/* Integer constants are handled by the specialized routine above.  */
1233 if (TREE_CODE (arg1) == INTEGER_CST)
1234 return int_const_binop (code, arg1, arg2, notrunc);
1236 if (TREE_CODE (arg1) == REAL_CST)
1238 REAL_VALUE_TYPE d1;
1239 REAL_VALUE_TYPE d2;
1240 REAL_VALUE_TYPE value;
1241 tree t;
1243 d1 = TREE_REAL_CST (arg1);
1244 d2 = TREE_REAL_CST (arg2);
1246 /* If either operand is a NaN, just return it. Otherwise, set up
1247 for floating-point trap; we return an overflow. */
1248 if (REAL_VALUE_ISNAN (d1))
1249 return arg1;
1250 else if (REAL_VALUE_ISNAN (d2))
1251 return arg2;
1253 REAL_ARITHMETIC (value, code, d1, d2);
/* Truncate the result to the precision of the target mode and record
   any overflow in the new node along with the operands' flags.  */
1255 t = build_real (TREE_TYPE (arg1),
1256 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
1257 value));
1259 TREE_OVERFLOW (t)
1260 = (force_fit_type (t, 0)
1261 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1262 TREE_CONSTANT_OVERFLOW (t)
1263 = TREE_OVERFLOW (t)
1264 | TREE_CONSTANT_OVERFLOW (arg1)
1265 | TREE_CONSTANT_OVERFLOW (arg2);
1266 return t;
1268 if (TREE_CODE (arg1) == COMPLEX_CST)
1270 tree type = TREE_TYPE (arg1);
1271 tree r1 = TREE_REALPART (arg1);
1272 tree i1 = TREE_IMAGPART (arg1);
1273 tree r2 = TREE_REALPART (arg2);
1274 tree i2 = TREE_IMAGPART (arg2);
1275 tree t;
1277 switch (code)
1279 case PLUS_EXPR:
/* Complex addition/subtraction work component-wise.  */
1280 t = build_complex (type,
1281 const_binop (PLUS_EXPR, r1, r2, notrunc),
1282 const_binop (PLUS_EXPR, i1, i2, notrunc));
1283 break;
1285 case MINUS_EXPR:
1286 t = build_complex (type,
1287 const_binop (MINUS_EXPR, r1, r2, notrunc),
1288 const_binop (MINUS_EXPR, i1, i2, notrunc));
1289 break;
1291 case MULT_EXPR:
/* (r1 + i1*i) * (r2 + i2*i)
   = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
1292 t = build_complex (type,
1293 const_binop (MINUS_EXPR,
1294 const_binop (MULT_EXPR,
1295 r1, r2, notrunc),
1296 const_binop (MULT_EXPR,
1297 i1, i2, notrunc),
1298 notrunc),
1299 const_binop (PLUS_EXPR,
1300 const_binop (MULT_EXPR,
1301 r1, i2, notrunc),
1302 const_binop (MULT_EXPR,
1303 i1, r2, notrunc),
1304 notrunc));
1305 break;
1307 case RDIV_EXPR:
/* Divide by multiplying with the conjugate of the divisor and
   scaling both components by |r2 + i2*i|^2 = r2*r2 + i2*i2.  */
1309 tree magsquared
1310 = const_binop (PLUS_EXPR,
1311 const_binop (MULT_EXPR, r2, r2, notrunc),
1312 const_binop (MULT_EXPR, i2, i2, notrunc),
1313 notrunc);
1315 t = build_complex (type,
1316 const_binop
1317 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1318 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1319 const_binop (PLUS_EXPR,
1320 const_binop (MULT_EXPR, r1, r2,
1321 notrunc),
1322 const_binop (MULT_EXPR, i1, i2,
1323 notrunc),
1324 notrunc),
1325 magsquared, notrunc),
1326 const_binop
1327 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1328 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1329 const_binop (MINUS_EXPR,
1330 const_binop (MULT_EXPR, i1, r2,
1331 notrunc),
1332 const_binop (MULT_EXPR, r1, i2,
1333 notrunc),
1334 notrunc),
1335 magsquared, notrunc));
1337 break;
1339 default:
1340 abort ();
1342 return t;
/* Not a constant kind we know how to fold; caller gets 0.  */
1344 return 0;
1347 /* These are the hash table functions for the hash table of INTEGER_CST
1348 nodes of a sizetype. */
1350 /* Return the hash code code X, an INTEGER_CST. */
1352 static hashval_t
1353 size_htab_hash (x)
1354 const void *x;
1356 tree t = (tree) x;
1358 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1359 ^ (hashval_t) ((long) TREE_TYPE (t) >> 3)
1360 ^ (TREE_OVERFLOW (t) << 20));
1363 /* Return non-zero if the value represented by *X (an INTEGER_CST tree node)
1364 is the same as that given by *Y, which is the same. */
1366 static int
1367 size_htab_eq (x, y)
1368 const void *x;
1369 const void *y;
1371 tree xt = (tree) x;
1372 tree yt = (tree) y;
1374 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1375 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1376 && TREE_TYPE (xt) == TREE_TYPE (yt)
1377 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1380 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1381 bits are given by NUMBER and of the sizetype represented by KIND. */
1383 tree
1384 size_int_wide (number, kind)
1385 HOST_WIDE_INT number;
1386 enum size_type_kind kind;
1388 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1391 /* Likewise, but the desired type is specified explicitly. */
/* Scratch INTEGER_CST node, reused across calls until it gets entered
   into the hash table; rooted for the garbage collector via GTY.  */
1393 static GTY (()) tree new_const;
/* Cache of sizetype INTEGER_CSTs; entries survive collection only
   while otherwise marked (if_marked "ggc_marked_p").  */
1394 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1395 htab_t size_htab;
1397 tree
1398 size_int_type_wide (number, type)
1399 HOST_WIDE_INT number;
1400 tree type;
1402 PTR *slot;
/* Create the hash table and the first scratch node lazily.  */
1404 if (size_htab == 0)
1406 size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
1407 new_const = make_node (INTEGER_CST);
1410 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1411 hash table, we return the value from the hash table. Otherwise, we
1412 place that in the hash table and make a new node for the next time. */
1413 TREE_INT_CST_LOW (new_const) = number;
/* Sign-extend NUMBER into the high word.  */
1414 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1415 TREE_TYPE (new_const) = type;
1416 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1417 = force_fit_type (new_const, 0);
1419 slot = htab_find_slot (size_htab, new_const, INSERT);
1420 if (*slot == 0)
/* Not cached yet: enter NEW_CONST and allocate a fresh scratch node
   for the next call.  */
1422 tree t = new_const;
1424 *slot = (PTR) new_const;
1425 new_const = make_node (INTEGER_CST);
1426 return t;
1428 else
1429 return (tree) *slot;
1432 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1433 is a tree code. The type of the result is taken from the operands.
1434 Both must be the same type integer type and it must be a size type.
1435 If the operands are constant, so is the result. */
1437 tree
1438 size_binop (code, arg0, arg1)
1439 enum tree_code code;
1440 tree arg0, arg1;
1442 tree type = TREE_TYPE (arg0);
/* Both operands must share a single sizetype.  */
1444 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1445 || type != TREE_TYPE (arg1))
1446 abort ();
1448 /* Handle the special case of two integer constants faster. */
1449 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1451 /* And some specific cases even faster than that. */
1452 if (code == PLUS_EXPR && integer_zerop (arg0))
1453 return arg1;
1454 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1455 && integer_zerop (arg1))
1456 return arg0;
1457 else if (code == MULT_EXPR && integer_onep (arg0))
1458 return arg1;
1460 /* Handle general case of two integer constants. */
1461 return int_const_binop (code, arg0, arg1, 0);
1464 if (arg0 == error_mark_node || arg1 == error_mark_node)
1465 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify
   whatever it can.  */
1467 return fold (build (code, type, arg0, arg1));
1470 /* Given two values, either both of sizetype or both of bitsizetype,
1471 compute the difference between the two values. Return the value
1472 in signed type corresponding to the type of the operands. */
1474 tree
1475 size_diffop (arg0, arg1)
1476 tree arg0, arg1;
1478 tree type = TREE_TYPE (arg0);
1479 tree ctype;
/* Both operands must share a single sizetype.  */
1481 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1482 || type != TREE_TYPE (arg1))
1483 abort ();
1485 /* If the type is already signed, just do the simple thing. */
1486 if (! TREE_UNSIGNED (type))
1487 return size_binop (MINUS_EXPR, arg0, arg1);
/* CTYPE is the signed counterpart of TYPE.  */
1489 ctype = (type == bitsizetype || type == ubitsizetype
1490 ? sbitsizetype : ssizetype);
1492 /* If either operand is not a constant, do the conversions to the signed
1493 type and subtract. The hardware will do the right thing with any
1494 overflow in the subtraction. */
1495 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1496 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1497 convert (ctype, arg1));
1499 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1500 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1501 overflow) and negate (which can't either). Special-case a result
1502 of zero while we're here. */
1503 if (tree_int_cst_equal (arg0, arg1))
1504 return convert (ctype, integer_zero_node)
1505 else if (tree_int_cst_lt (arg1, arg0))
1506 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1507 else
1508 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1509 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1513 /* Given T, a tree representing type conversion of ARG1, a constant,
1514 return a constant tree representing the result of conversion. */
1516 static tree
1517 fold_convert (t, arg1)
1518 tree t;
1519 tree arg1;
1521 tree type = TREE_TYPE (t);
1522 int overflow = 0;
1524 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1526 if (TREE_CODE (arg1) == INTEGER_CST)
1528 /* If we would build a constant wider than GCC supports,
1529 leave the conversion unfolded. */
1530 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1531 return t;
1533 /* If we are trying to make a sizetype for a small integer, use
1534 size_int to pick up cached types to reduce duplicate nodes. */
1535 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1536 && !TREE_CONSTANT_OVERFLOW (arg1)
1537 && compare_tree_int (arg1, 10000) < 0)
1538 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1540 /* Given an integer constant, make new constant with new type,
1541 appropriately sign-extended or truncated. */
1542 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1543 TREE_INT_CST_HIGH (arg1));
1544 TREE_TYPE (t) = type;
1545 /* Indicate an overflow if (1) ARG1 already overflowed,
1546 or (2) force_fit_type indicates an overflow.
1547 Tell force_fit_type that an overflow has already occurred
1548 if ARG1 is a too-large unsigned value and T is signed.
1549 But don't indicate an overflow if converting a pointer. */
1550 TREE_OVERFLOW (t)
1551 = ((force_fit_type (t,
1552 (TREE_INT_CST_HIGH (arg1) < 0
1553 && (TREE_UNSIGNED (type)
1554 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1555 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1556 || TREE_OVERFLOW (arg1));
1557 TREE_CONSTANT_OVERFLOW (t)
1558 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1560 else if (TREE_CODE (arg1) == REAL_CST)
1562 /* Don't initialize these, use assignments.
1563 Initialized local aggregates don't work on old compilers. */
1564 REAL_VALUE_TYPE x;
1565 REAL_VALUE_TYPE l;
1566 REAL_VALUE_TYPE u;
1567 tree type1 = TREE_TYPE (arg1);
1568 int no_upper_bound;
1570 x = TREE_REAL_CST (arg1);
1571 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
/* Types such as pointers-converted-to-integers may lack a
   TYPE_MAX_VALUE; remember that so U stays unused.  */
1573 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1574 if (!no_upper_bound)
1575 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1577 /* See if X will be in range after truncation towards 0.
1578 To compensate for truncation, move the bounds away from 0,
1579 but reject if X exactly equals the adjusted bounds. */
1580 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1581 if (!no_upper_bound)
1582 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1583 /* If X is a NaN, use zero instead and show we have an overflow.
1584 Otherwise, range check. */
1585 if (REAL_VALUE_ISNAN (x))
1586 overflow = 1, x = dconst0;
/* NOTE(review): when NO_UPPER_BOUND is set, the "!no_upper_bound"
   conjunct makes this condition always true, so OVERFLOW is set
   even for in-range X.  Looks suspicious -- confirm whether the
   intent was to skip only the upper-bound comparison.  */
1587 else if (! (REAL_VALUES_LESS (l, x)
1588 && !no_upper_bound
1589 && REAL_VALUES_LESS (x, u)))
1590 overflow = 1;
1593 HOST_WIDE_INT low, high;
1594 REAL_VALUE_TO_INT (&low, &high, x);
1595 t = build_int_2 (low, high);
1597 TREE_TYPE (t) = type;
1598 TREE_OVERFLOW (t)
1599 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1600 TREE_CONSTANT_OVERFLOW (t)
1601 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1603 TREE_TYPE (t) = type;
1605 else if (TREE_CODE (type) == REAL_TYPE)
1607 if (TREE_CODE (arg1) == INTEGER_CST)
1608 return build_real_from_int_cst (type, arg1);
1609 if (TREE_CODE (arg1) == REAL_CST)
1611 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1613 /* We make a copy of ARG1 so that we don't modify an
1614 existing constant tree. */
1615 t = copy_node (arg1);
1616 TREE_TYPE (t) = type;
1617 return t;
/* Narrow or widen the real constant to the target mode.  */
1620 t = build_real (type,
1621 real_value_truncate (TYPE_MODE (type),
1622 TREE_REAL_CST (arg1)));
1624 TREE_OVERFLOW (t)
1625 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1626 TREE_CONSTANT_OVERFLOW (t)
1627 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1628 return t;
1631 TREE_CONSTANT (t) = 1;
1632 return t;
1635 /* Return an expr equal to X but certainly not valid as an lvalue. */
1637 tree
1638 non_lvalue (x)
1639 tree x;
1641 tree result;
1643 /* These things are certainly not lvalues. */
1644 if (TREE_CODE (x) == NON_LVALUE_EXPR
1645 || TREE_CODE (x) == INTEGER_CST
1646 || TREE_CODE (x) == REAL_CST
1647 || TREE_CODE (x) == STRING_CST
1648 || TREE_CODE (x) == ADDR_EXPR)
1649 return x;
1651 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1652 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1653 return result;
1656 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1657 Zero means allow extended lvalues. */
/* Consulted by pedantic_non_lvalue below.  */
1659 int pedantic_lvalues;
1661 /* When pedantic, return an expr equal to X but certainly not valid as a
1662 pedantic lvalue. Otherwise, return X. */
1664 tree
1665 pedantic_non_lvalue (x)
1666 tree x;
1668 if (pedantic_lvalues)
1669 return non_lvalue (x);
1670 else
1671 return x;
1674 /* Given a tree comparison code, return the code that is the logical inverse
1675 of the given code. It is not safe to do this for floating-point
1676 comparisons, except for NE_EXPR and EQ_EXPR. */
1678 static enum tree_code
1679 invert_tree_comparison (code)
1680 enum tree_code code;
1682 switch (code)
1684 case EQ_EXPR:
1685 return NE_EXPR;
1686 case NE_EXPR:
1687 return EQ_EXPR;
1688 case GT_EXPR:
1689 return LE_EXPR;
1690 case GE_EXPR:
1691 return LT_EXPR;
1692 case LT_EXPR:
1693 return GE_EXPR;
1694 case LE_EXPR:
1695 return GT_EXPR;
1696 default:
1697 abort ();
1701 /* Similar, but return the comparison that results if the operands are
1702 swapped. This is safe for floating-point. */
1704 static enum tree_code
1705 swap_tree_comparison (code)
1706 enum tree_code code;
1708 switch (code)
1710 case EQ_EXPR:
1711 case NE_EXPR:
1712 return code;
1713 case GT_EXPR:
1714 return LT_EXPR;
1715 case GE_EXPR:
1716 return LE_EXPR;
1717 case LT_EXPR:
1718 return GT_EXPR;
1719 case LE_EXPR:
1720 return GE_EXPR;
1721 default:
1722 abort ();
1727 /* Convert a comparison tree code from an enum tree_code representation
1728 into a compcode bit-based encoding. This function is the inverse of
1729 compcode_to_comparison. */
1731 static int
1732 comparison_to_compcode (code)
1733 enum tree_code code;
1735 switch (code)
1737 case LT_EXPR:
1738 return COMPCODE_LT;
1739 case EQ_EXPR:
1740 return COMPCODE_EQ;
1741 case LE_EXPR:
1742 return COMPCODE_LE;
1743 case GT_EXPR:
1744 return COMPCODE_GT;
1745 case NE_EXPR:
1746 return COMPCODE_NE;
1747 case GE_EXPR:
1748 return COMPCODE_GE;
1749 default:
1750 abort ();
1754 /* Convert a compcode bit-based encoding of a comparison operator back
1755 to GCC's enum tree_code representation. This function is the
1756 inverse of comparison_to_compcode. */
1758 static enum tree_code
1759 compcode_to_comparison (code)
1760 int code;
1762 switch (code)
1764 case COMPCODE_LT:
1765 return LT_EXPR;
1766 case COMPCODE_EQ:
1767 return EQ_EXPR;
1768 case COMPCODE_LE:
1769 return LE_EXPR;
1770 case COMPCODE_GT:
1771 return GT_EXPR;
1772 case COMPCODE_NE:
1773 return NE_EXPR;
1774 case COMPCODE_GE:
1775 return GE_EXPR;
1776 default:
1777 abort ();
1781 /* Return nonzero if CODE is a tree code that represents a truth value. */
1783 static int
1784 truth_value_p (code)
1785 enum tree_code code;
1787 return (TREE_CODE_CLASS (code) == '<'
1788 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1789 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1790 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1793 /* Return nonzero if two operands are necessarily equal.
1794 If ONLY_CONST is non-zero, only return non-zero for constants.
1795 This function tests whether the operands are indistinguishable;
1796 it does not test whether they are equal using C's == operation.
1797 The distinction is important for IEEE floating point, because
1798 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1799 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1802 operand_equal_p (arg0, arg1, only_const)
1803 tree arg0, arg1;
1804 int only_const;
1806 /* If both types don't have the same signedness, then we can't consider
1807 them equal. We must check this before the STRIP_NOPS calls
1808 because they may change the signedness of the arguments. */
1809 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1810 return 0;
1812 STRIP_NOPS (arg0);
1813 STRIP_NOPS (arg1);
1815 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1816 /* This is needed for conversions and for COMPONENT_REF.
1817 Might as well play it safe and always test this. */
1818 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1819 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1820 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1821 return 0;
1823 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1824 We don't care about side effects in that case because the SAVE_EXPR
1825 takes care of that for us. In all other cases, two expressions are
1826 equal if they have no side effects. If we have two identical
1827 expressions with side effects that should be treated the same due
1828 to the only side effects being identical SAVE_EXPR's, that will
1829 be detected in the recursive calls below. */
1830 if (arg0 == arg1 && ! only_const
1831 && (TREE_CODE (arg0) == SAVE_EXPR
1832 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1833 return 1;
1835 /* Next handle constant cases, those for which we can return 1 even
1836 if ONLY_CONST is set. */
1837 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1838 switch (TREE_CODE (arg0))
1840 case INTEGER_CST:
/* Constants that overflowed are never considered equal.  */
1841 return (! TREE_CONSTANT_OVERFLOW (arg0)
1842 && ! TREE_CONSTANT_OVERFLOW (arg1)
1843 && tree_int_cst_equal (arg0, arg1));
1845 case REAL_CST:
1846 return (! TREE_CONSTANT_OVERFLOW (arg0)
1847 && ! TREE_CONSTANT_OVERFLOW (arg1)
1848 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1849 TREE_REAL_CST (arg1)));
1851 case VECTOR_CST:
1853 tree v1, v2;
1855 if (TREE_CONSTANT_OVERFLOW (arg0)
1856 || TREE_CONSTANT_OVERFLOW (arg1))
1857 return 0;
/* Walk both element chains in parallel, comparing element-wise.  */
1859 v1 = TREE_VECTOR_CST_ELTS (arg0);
1860 v2 = TREE_VECTOR_CST_ELTS (arg1);
1861 while (v1 && v2)
1863 if (!operand_equal_p (v1, v2, only_const))
1864 return 0;
1865 v1 = TREE_CHAIN (v1);
1866 v2 = TREE_CHAIN (v2);
1869 return 1;
1872 case COMPLEX_CST:
1873 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1874 only_const)
1875 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1876 only_const));
1878 case STRING_CST:
1879 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1880 && ! memcmp (TREE_STRING_POINTER (arg0),
1881 TREE_STRING_POINTER (arg1),
1882 TREE_STRING_LENGTH (arg0)));
1884 case ADDR_EXPR:
1885 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1887 default:
1888 break;
1891 if (only_const)
1892 return 0;
/* Non-constant operands: recurse by tree-code class.  */
1894 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1896 case '1':
1897 /* Two conversions are equal only if signedness and modes match. */
1898 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1899 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1900 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1901 return 0;
1903 return operand_equal_p (TREE_OPERAND (arg0, 0),
1904 TREE_OPERAND (arg1, 0), 0);
1906 case '<':
1907 case '2':
1908 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1909 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1911 return 1;
1913 /* For commutative ops, allow the other order. */
1914 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1915 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1916 || TREE_CODE (arg0) == BIT_IOR_EXPR
1917 || TREE_CODE (arg0) == BIT_XOR_EXPR
1918 || TREE_CODE (arg0) == BIT_AND_EXPR
1919 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1920 && operand_equal_p (TREE_OPERAND (arg0, 0),
1921 TREE_OPERAND (arg1, 1), 0)
1922 && operand_equal_p (TREE_OPERAND (arg0, 1),
1923 TREE_OPERAND (arg1, 0), 0));
1925 case 'r':
1926 /* If either of the pointer (or reference) expressions we are dereferencing
1927 contain a side effect, these cannot be equal. */
1928 if (TREE_SIDE_EFFECTS (arg0)
1929 || TREE_SIDE_EFFECTS (arg1))
1930 return 0;
1932 switch (TREE_CODE (arg0))
1934 case INDIRECT_REF:
1935 return operand_equal_p (TREE_OPERAND (arg0, 0),
1936 TREE_OPERAND (arg1, 0), 0);
1938 case COMPONENT_REF:
1939 case ARRAY_REF:
1940 case ARRAY_RANGE_REF:
1941 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1942 TREE_OPERAND (arg1, 0), 0)
1943 && operand_equal_p (TREE_OPERAND (arg0, 1),
1944 TREE_OPERAND (arg1, 1), 0));
1946 case BIT_FIELD_REF:
/* All three operands (object, size, position) must match.  */
1947 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1948 TREE_OPERAND (arg1, 0), 0)
1949 && operand_equal_p (TREE_OPERAND (arg0, 1),
1950 TREE_OPERAND (arg1, 1), 0)
1951 && operand_equal_p (TREE_OPERAND (arg0, 2),
1952 TREE_OPERAND (arg1, 2), 0));
1953 default:
1954 return 0;
1957 case 'e':
1958 if (TREE_CODE (arg0) == RTL_EXPR)
1959 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1960 return 0;
1962 default:
1963 return 0;
1967 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1968 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1970 When in doubt, return 0. */
1972 static int
1973 operand_equal_for_comparison_p (arg0, arg1, other)
1974 tree arg0, arg1;
1975 tree other;
1977 int unsignedp1, unsignedpo;
1978 tree primarg0, primarg1, primother;
1979 unsigned int correct_width;
/* The cheap exact test first.  */
1981 if (operand_equal_p (arg0, arg1, 0))
1982 return 1;
/* The shortening transformation only applies to integral types.  */
1984 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1985 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1986 return 0;
1988 /* Discard any conversions that don't change the modes of ARG0 and ARG1
1989 and see if the inner values are the same. This removes any
1990 signedness comparison, which doesn't matter here. */
1991 primarg0 = arg0, primarg1 = arg1;
1992 STRIP_NOPS (primarg0);
1993 STRIP_NOPS (primarg1);
1994 if (operand_equal_p (primarg0, primarg1, 0))
1995 return 1;
1997 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1998 actual comparison operand, ARG0.
2000 First throw away any conversions to wider types
2001 already present in the operands. */
2003 primarg1 = get_narrower (arg1, &unsignedp1);
2004 primother = get_narrower (other, &unsignedpo);
2006 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2007 if (unsignedp1 == unsignedpo
2008 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2009 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2011 tree type = TREE_TYPE (arg0);
2013 /* Make sure shorter operand is extended the right way
2014 to match the longer operand. */
2015 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2016 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2018 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2019 return 1;
2022 return 0;
2025 /* See if ARG is an expression that is either a comparison or is performing
2026 arithmetic on comparisons. The comparisons must only be comparing
2027 two different values, which will be stored in *CVAL1 and *CVAL2; if
2028 they are non-zero it means that some operands have already been found.
2029 No variables may be used anywhere else in the expression except in the
2030 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2031 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2033 If this is true, return 1. Otherwise, return zero. */
2035 static int
2036 twoval_comparison_p (arg, cval1, cval2, save_p)
2037 tree arg;
2038 tree *cval1, *cval2;
2039 int *save_p;
2041 enum tree_code code = TREE_CODE (arg);
2042 char class = TREE_CODE_CLASS (code);
2044 /* We can handle some of the 'e' cases here. */
2045 if (class == 'e' && code == TRUTH_NOT_EXPR)
2046 class = '1';
2047 else if (class == 'e'
2048 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2049 || code == COMPOUND_EXPR))
2050 class = '2';
/* A side-effect-free SAVE_EXPR not yet expanded to RTL can be looked
   through; record that the caller must re-wrap via save_expr.  */
2052 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2053 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2055 /* If we've already found a CVAL1 or CVAL2, this expression is
2056 too complex to handle. */
2057 if (*cval1 || *cval2)
2058 return 0;
2060 class = '1';
2061 *save_p = 1;
2064 switch (class)
2066 case '1':
2067 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2069 case '2':
2070 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2071 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2072 cval1, cval2, save_p));
2074 case 'c':
/* Constants are always acceptable.  */
2075 return 1;
2077 case 'e':
2078 if (code == COND_EXPR)
2079 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2080 cval1, cval2, save_p)
2081 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2082 cval1, cval2, save_p)
2083 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2084 cval1, cval2, save_p));
2085 return 0;
2087 case '<':
2088 /* First see if we can handle the first operand, then the second. For
2089 the second operand, we know *CVAL1 can't be zero. It must be that
2090 one side of the comparison is each of the values; test for the
2091 case where this isn't true by failing if the two operands
2092 are the same. */
2094 if (operand_equal_p (TREE_OPERAND (arg, 0),
2095 TREE_OPERAND (arg, 1), 0))
2096 return 0;
2098 if (*cval1 == 0)
2099 *cval1 = TREE_OPERAND (arg, 0);
2100 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2102 else if (*cval2 == 0)
2103 *cval2 = TREE_OPERAND (arg, 0);
2104 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2106 else
2107 return 0;
2109 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2111 else if (*cval2 == 0)
2112 *cval2 = TREE_OPERAND (arg, 1);
2113 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2115 else
2116 return 0;
2118 return 1;
2120 default:
2121 return 0;
2125 /* ARG is a tree that is known to contain just arithmetic operations and
2126 comparisons. Evaluate the operations in the tree substituting NEW0 for
2127 any occurrence of OLD0 as an operand of a comparison and likewise for
2128 NEW1 and OLD1. */
2130 static tree
2131 eval_subst (arg, old0, new0, old1, new1)
2132 tree arg;
2133 tree old0, new0, old1, new1;
2135 tree type = TREE_TYPE (arg);
2136 enum tree_code code = TREE_CODE (arg);
2137 char class = TREE_CODE_CLASS (code);
2139 /* We can handle some of the 'e' cases here. */
2140 if (class == 'e' && code == TRUTH_NOT_EXPR)
2141 class = '1';
2142 else if (class == 'e'
2143 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2144 class = '2';
2146 switch (class)
2148 case '1':
/* Unary: rebuild with the substituted operand and refold.  */
2149 return fold (build1 (code, type,
2150 eval_subst (TREE_OPERAND (arg, 0),
2151 old0, new0, old1, new1)));
2153 case '2':
2154 return fold (build (code, type,
2155 eval_subst (TREE_OPERAND (arg, 0),
2156 old0, new0, old1, new1),
2157 eval_subst (TREE_OPERAND (arg, 1),
2158 old0, new0, old1, new1)));
2160 case 'e':
2161 switch (code)
2163 case SAVE_EXPR:
2164 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2166 case COMPOUND_EXPR:
/* Only the value (second) operand matters for evaluation.  */
2167 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2169 case COND_EXPR:
2170 return fold (build (code, type,
2171 eval_subst (TREE_OPERAND (arg, 0),
2172 old0, new0, old1, new1),
2173 eval_subst (TREE_OPERAND (arg, 1),
2174 old0, new0, old1, new1),
2175 eval_subst (TREE_OPERAND (arg, 2),
2176 old0, new0, old1, new1)));
2177 default:
2178 break;
2180 /* fall through - ??? */
2182 case '<':
2184 tree arg0 = TREE_OPERAND (arg, 0);
2185 tree arg1 = TREE_OPERAND (arg, 1);
2187 /* We need to check both for exact equality and tree equality. The
2188 former will be true if the operand has a side-effect. In that
2189 case, we know the operand occurred exactly once. */
2191 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2192 arg0 = new0;
2193 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2194 arg0 = new1;
2196 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2197 arg1 = new0;
2198 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2199 arg1 = new1;
2201 return fold (build (code, type, arg0, arg1));
2204 default:
/* Anything else is returned unchanged.  */
2205 return arg;
2209 /* Return a tree for the case when the result of an expression is RESULT
2210 converted to TYPE and OMITTED was previously an operand of the expression
2211 but is now not needed (e.g., we folded OMITTED * 0).
2213 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2214 the conversion of RESULT to TYPE. */
2216 static tree
2217 omit_one_operand (type, result, omitted)
2218 tree type, result, omitted;
2220 tree t = convert (type, result);
2222 if (TREE_SIDE_EFFECTS (omitted))
2223 return build (COMPOUND_EXPR, type, omitted, t);
2225 return non_lvalue (t);
2228 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2230 static tree
2231 pedantic_omit_one_operand (type, result, omitted)
2232 tree type, result, omitted;
2234 tree t = convert (type, result);
2236 if (TREE_SIDE_EFFECTS (omitted))
2237 return build (COMPOUND_EXPR, type, omitted, t);
2239 return pedantic_non_lvalue (t);
2242 /* Return a simplified tree node for the truth-negation of ARG. This
2243 never alters ARG itself. We assume that ARG is an operation that
2244 returns a truth value (0 or 1). */
2246 tree
2247 invert_truthvalue (arg)
2248 tree arg;
2250 tree type = TREE_TYPE (arg);
2251 enum tree_code code = TREE_CODE (arg);
2253 if (code == ERROR_MARK)
2254 return arg;
2256 /* If this is a comparison, we can simply invert it, except for
2257 floating-point non-equality comparisons, in which case we just
2258 enclose a TRUTH_NOT_EXPR around what we have. */
2260 if (TREE_CODE_CLASS (code) == '<')
2262 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2263 && !flag_unsafe_math_optimizations
2264 && code != NE_EXPR
2265 && code != EQ_EXPR)
2266 return build1 (TRUTH_NOT_EXPR, type, arg);
2267 else
2268 return build (invert_tree_comparison (code), type,
2269 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2272 switch (code)
2274 case INTEGER_CST:
2275 return convert (type, build_int_2 (integer_zerop (arg), 0));
2277 case TRUTH_AND_EXPR:
2278 return build (TRUTH_OR_EXPR, type,
2279 invert_truthvalue (TREE_OPERAND (arg, 0)),
2280 invert_truthvalue (TREE_OPERAND (arg, 1)));
2282 case TRUTH_OR_EXPR:
2283 return build (TRUTH_AND_EXPR, type,
2284 invert_truthvalue (TREE_OPERAND (arg, 0)),
2285 invert_truthvalue (TREE_OPERAND (arg, 1)));
2287 case TRUTH_XOR_EXPR:
2288 /* Here we can invert either operand. We invert the first operand
2289 unless the second operand is a TRUTH_NOT_EXPR in which case our
2290 result is the XOR of the first operand with the inside of the
2291 negation of the second operand. */
2293 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2294 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2295 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2296 else
2297 return build (TRUTH_XOR_EXPR, type,
2298 invert_truthvalue (TREE_OPERAND (arg, 0)),
2299 TREE_OPERAND (arg, 1));
2301 case TRUTH_ANDIF_EXPR:
2302 return build (TRUTH_ORIF_EXPR, type,
2303 invert_truthvalue (TREE_OPERAND (arg, 0)),
2304 invert_truthvalue (TREE_OPERAND (arg, 1)));
2306 case TRUTH_ORIF_EXPR:
2307 return build (TRUTH_ANDIF_EXPR, type,
2308 invert_truthvalue (TREE_OPERAND (arg, 0)),
2309 invert_truthvalue (TREE_OPERAND (arg, 1)));
2311 case TRUTH_NOT_EXPR:
2312 return TREE_OPERAND (arg, 0);
2314 case COND_EXPR:
2315 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2316 invert_truthvalue (TREE_OPERAND (arg, 1)),
2317 invert_truthvalue (TREE_OPERAND (arg, 2)));
2319 case COMPOUND_EXPR:
2320 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2321 invert_truthvalue (TREE_OPERAND (arg, 1)));
2323 case WITH_RECORD_EXPR:
2324 return build (WITH_RECORD_EXPR, type,
2325 invert_truthvalue (TREE_OPERAND (arg, 0)),
2326 TREE_OPERAND (arg, 1));
2328 case NON_LVALUE_EXPR:
2329 return invert_truthvalue (TREE_OPERAND (arg, 0));
2331 case NOP_EXPR:
2332 case CONVERT_EXPR:
2333 case FLOAT_EXPR:
2334 return build1 (TREE_CODE (arg), type,
2335 invert_truthvalue (TREE_OPERAND (arg, 0)));
2337 case BIT_AND_EXPR:
2338 if (!integer_onep (TREE_OPERAND (arg, 1)))
2339 break;
2340 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2342 case SAVE_EXPR:
2343 return build1 (TRUTH_NOT_EXPR, type, arg);
2345 case CLEANUP_POINT_EXPR:
2346 return build1 (CLEANUP_POINT_EXPR, type,
2347 invert_truthvalue (TREE_OPERAND (arg, 0)));
2349 default:
2350 break;
2352 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2353 abort ();
2354 return build1 (TRUTH_NOT_EXPR, type, arg);
2357 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2358 operands are another bit-wise operation with a common input. If so,
2359 distribute the bit operations to save an operation and possibly two if
2360 constants are involved. For example, convert
2361 (A | B) & (A | C) into A | (B & C)
2362 Further simplification will occur if B and C are constants.
2364 If this optimization cannot be done, 0 will be returned. */
2366 static tree
2367 distribute_bit_expr (code, type, arg0, arg1)
2368 enum tree_code code;
2369 tree type;
2370 tree arg0, arg1;
2372 tree common;
2373 tree left, right;
2375 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2376 || TREE_CODE (arg0) == code
2377 || (TREE_CODE (arg0) != BIT_AND_EXPR
2378 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2379 return 0;
2381 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2383 common = TREE_OPERAND (arg0, 0);
2384 left = TREE_OPERAND (arg0, 1);
2385 right = TREE_OPERAND (arg1, 1);
2387 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2389 common = TREE_OPERAND (arg0, 0);
2390 left = TREE_OPERAND (arg0, 1);
2391 right = TREE_OPERAND (arg1, 0);
2393 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2395 common = TREE_OPERAND (arg0, 1);
2396 left = TREE_OPERAND (arg0, 0);
2397 right = TREE_OPERAND (arg1, 1);
2399 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2401 common = TREE_OPERAND (arg0, 1);
2402 left = TREE_OPERAND (arg0, 0);
2403 right = TREE_OPERAND (arg1, 0);
2405 else
2406 return 0;
2408 return fold (build (TREE_CODE (arg0), type, common,
2409 fold (build (code, type, left, right))));
2412 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2413 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2415 static tree
2416 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2417 tree inner;
2418 tree type;
2419 int bitsize, bitpos;
2420 int unsignedp;
2422 tree result = build (BIT_FIELD_REF, type, inner,
2423 size_int (bitsize), bitsize_int (bitpos));
2425 TREE_UNSIGNED (result) = unsignedp;
2427 return result;
2430 /* Optimize a bit-field compare.
2432 There are two cases: First is a compare against a constant and the
2433 second is a comparison of two items where the fields are at the same
2434 bit position relative to the start of a chunk (byte, halfword, word)
2435 large enough to contain it. In these cases we can avoid the shift
2436 implicit in bitfield extractions.
2438 For constants, we emit a compare of the shifted constant with the
2439 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2440 compared. For two fields at the same position, we do the ANDs with the
2441 similar mask and compare the result of the ANDs.
2443 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2444 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2445 are the left and right operands of the comparison, respectively.
2447 If the optimization described above can be done, we return the resulting
2448 tree. Otherwise we return zero. */
2450 static tree
2451 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2452 enum tree_code code;
2453 tree compare_type;
2454 tree lhs, rhs;
2456 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2457 tree type = TREE_TYPE (lhs);
2458 tree signed_type, unsigned_type;
2459 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2460 enum machine_mode lmode, rmode, nmode;
2461 int lunsignedp, runsignedp;
2462 int lvolatilep = 0, rvolatilep = 0;
2463 tree linner, rinner = NULL_TREE;
2464 tree mask;
2465 tree offset;
2467 /* Get all the information about the extractions being done. If the bit size
2468 if the same as the size of the underlying object, we aren't doing an
2469 extraction at all and so can do nothing. We also don't want to
2470 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2471 then will no longer be able to replace it. */
2472 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2473 &lunsignedp, &lvolatilep);
2474 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2475 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2476 return 0;
2478 if (!const_p)
2480 /* If this is not a constant, we can only do something if bit positions,
2481 sizes, and signedness are the same. */
2482 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2483 &runsignedp, &rvolatilep);
2485 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2486 || lunsignedp != runsignedp || offset != 0
2487 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2488 return 0;
2491 /* See if we can find a mode to refer to this field. We should be able to,
2492 but fail if we can't. */
2493 nmode = get_best_mode (lbitsize, lbitpos,
2494 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2495 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2496 TYPE_ALIGN (TREE_TYPE (rinner))),
2497 word_mode, lvolatilep || rvolatilep);
2498 if (nmode == VOIDmode)
2499 return 0;
2501 /* Set signed and unsigned types of the precision of this mode for the
2502 shifts below. */
2503 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2504 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2506 /* Compute the bit position and size for the new reference and our offset
2507 within it. If the new reference is the same size as the original, we
2508 won't optimize anything, so return zero. */
2509 nbitsize = GET_MODE_BITSIZE (nmode);
2510 nbitpos = lbitpos & ~ (nbitsize - 1);
2511 lbitpos -= nbitpos;
2512 if (nbitsize == lbitsize)
2513 return 0;
2515 if (BYTES_BIG_ENDIAN)
2516 lbitpos = nbitsize - lbitsize - lbitpos;
2518 /* Make the mask to be used against the extracted field. */
2519 mask = build_int_2 (~0, ~0);
2520 TREE_TYPE (mask) = unsigned_type;
2521 force_fit_type (mask, 0);
2522 mask = convert (unsigned_type, mask);
2523 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2524 mask = const_binop (RSHIFT_EXPR, mask,
2525 size_int (nbitsize - lbitsize - lbitpos), 0);
2527 if (! const_p)
2528 /* If not comparing with constant, just rework the comparison
2529 and return. */
2530 return build (code, compare_type,
2531 build (BIT_AND_EXPR, unsigned_type,
2532 make_bit_field_ref (linner, unsigned_type,
2533 nbitsize, nbitpos, 1),
2534 mask),
2535 build (BIT_AND_EXPR, unsigned_type,
2536 make_bit_field_ref (rinner, unsigned_type,
2537 nbitsize, nbitpos, 1),
2538 mask));
2540 /* Otherwise, we are handling the constant case. See if the constant is too
2541 big for the field. Warn and return a tree of for 0 (false) if so. We do
2542 this not only for its own sake, but to avoid having to test for this
2543 error case below. If we didn't, we might generate wrong code.
2545 For unsigned fields, the constant shifted right by the field length should
2546 be all zero. For signed fields, the high-order bits should agree with
2547 the sign bit. */
2549 if (lunsignedp)
2551 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2552 convert (unsigned_type, rhs),
2553 size_int (lbitsize), 0)))
2555 warning ("comparison is always %d due to width of bit-field",
2556 code == NE_EXPR);
2557 return convert (compare_type,
2558 (code == NE_EXPR
2559 ? integer_one_node : integer_zero_node));
2562 else
2564 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2565 size_int (lbitsize - 1), 0);
2566 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2568 warning ("comparison is always %d due to width of bit-field",
2569 code == NE_EXPR);
2570 return convert (compare_type,
2571 (code == NE_EXPR
2572 ? integer_one_node : integer_zero_node));
2576 /* Single-bit compares should always be against zero. */
2577 if (lbitsize == 1 && ! integer_zerop (rhs))
2579 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2580 rhs = convert (type, integer_zero_node);
2583 /* Make a new bitfield reference, shift the constant over the
2584 appropriate number of bits and mask it with the computed mask
2585 (in case this was a signed field). If we changed it, make a new one. */
2586 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2587 if (lvolatilep)
2589 TREE_SIDE_EFFECTS (lhs) = 1;
2590 TREE_THIS_VOLATILE (lhs) = 1;
2593 rhs = fold (const_binop (BIT_AND_EXPR,
2594 const_binop (LSHIFT_EXPR,
2595 convert (unsigned_type, rhs),
2596 size_int (lbitpos), 0),
2597 mask, 0));
2599 return build (code, compare_type,
2600 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2601 rhs);
2604 /* Subroutine for fold_truthop: decode a field reference.
2606 If EXP is a comparison reference, we return the innermost reference.
2608 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2609 set to the starting bit number.
2611 If the innermost field can be completely contained in a mode-sized
2612 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2614 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2615 otherwise it is not changed.
2617 *PUNSIGNEDP is set to the signedness of the field.
2619 *PMASK is set to the mask used. This is either contained in a
2620 BIT_AND_EXPR or derived from the width of the field.
2622 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2624 Return 0 if this is not a component reference or is one that we can't
2625 do anything with. */
2627 static tree
2628 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2629 pvolatilep, pmask, pand_mask)
2630 tree exp;
2631 HOST_WIDE_INT *pbitsize, *pbitpos;
2632 enum machine_mode *pmode;
2633 int *punsignedp, *pvolatilep;
2634 tree *pmask;
2635 tree *pand_mask;
2637 tree and_mask = 0;
2638 tree mask, inner, offset;
2639 tree unsigned_type;
2640 unsigned int precision;
2642 /* All the optimizations using this function assume integer fields.
2643 There are problems with FP fields since the type_for_size call
2644 below can fail for, e.g., XFmode. */
2645 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2646 return 0;
2648 STRIP_NOPS (exp);
2650 if (TREE_CODE (exp) == BIT_AND_EXPR)
2652 and_mask = TREE_OPERAND (exp, 1);
2653 exp = TREE_OPERAND (exp, 0);
2654 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2655 if (TREE_CODE (and_mask) != INTEGER_CST)
2656 return 0;
2659 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2660 punsignedp, pvolatilep);
2661 if ((inner == exp && and_mask == 0)
2662 || *pbitsize < 0 || offset != 0
2663 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2664 return 0;
2666 /* Compute the mask to access the bitfield. */
2667 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2668 precision = TYPE_PRECISION (unsigned_type);
2670 mask = build_int_2 (~0, ~0);
2671 TREE_TYPE (mask) = unsigned_type;
2672 force_fit_type (mask, 0);
2673 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2674 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2676 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2677 if (and_mask != 0)
2678 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2679 convert (unsigned_type, and_mask), mask));
2681 *pmask = mask;
2682 *pand_mask = and_mask;
2683 return inner;
2686 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2687 bit positions. */
2689 static int
2690 all_ones_mask_p (mask, size)
2691 tree mask;
2692 int size;
2694 tree type = TREE_TYPE (mask);
2695 unsigned int precision = TYPE_PRECISION (type);
2696 tree tmask;
2698 tmask = build_int_2 (~0, ~0);
2699 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2700 force_fit_type (tmask, 0);
2701 return
2702 tree_int_cst_equal (mask,
2703 const_binop (RSHIFT_EXPR,
2704 const_binop (LSHIFT_EXPR, tmask,
2705 size_int (precision - size),
2707 size_int (precision - size), 0));
2710 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
2711 represents the sign bit of EXP's type. If EXP represents a sign
2712 or zero extension, also test VAL against the unextended type.
2713 The return value is the (sub)expression whose sign bit is VAL,
2714 or NULL_TREE otherwise. */
2716 static tree
2717 sign_bit_p (exp, val)
2718 tree exp;
2719 tree val;
2721 unsigned HOST_WIDE_INT lo;
2722 HOST_WIDE_INT hi;
2723 int width;
2724 tree t;
2726 /* Tree EXP must have a integral type. */
2727 t = TREE_TYPE (exp);
2728 if (! INTEGRAL_TYPE_P (t))
2729 return NULL_TREE;
2731 /* Tree VAL must be an integer constant. */
2732 if (TREE_CODE (val) != INTEGER_CST
2733 || TREE_CONSTANT_OVERFLOW (val))
2734 return NULL_TREE;
2736 width = TYPE_PRECISION (t);
2737 if (width > HOST_BITS_PER_WIDE_INT)
2739 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2740 lo = 0;
2742 else
2744 hi = 0;
2745 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2748 if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
2749 return exp;
2751 /* Handle extension from a narrower type. */
2752 if (TREE_CODE (exp) == NOP_EXPR
2753 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2754 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2756 return NULL_TREE;
2759 /* Subroutine for fold_truthop: determine if an operand is simple enough
2760 to be evaluated unconditionally. */
2762 static int
2763 simple_operand_p (exp)
2764 tree exp;
2766 /* Strip any conversions that don't change the machine mode. */
2767 while ((TREE_CODE (exp) == NOP_EXPR
2768 || TREE_CODE (exp) == CONVERT_EXPR)
2769 && (TYPE_MODE (TREE_TYPE (exp))
2770 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2771 exp = TREE_OPERAND (exp, 0);
2773 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2774 || (DECL_P (exp)
2775 && ! TREE_ADDRESSABLE (exp)
2776 && ! TREE_THIS_VOLATILE (exp)
2777 && ! DECL_NONLOCAL (exp)
2778 /* Don't regard global variables as simple. They may be
2779 allocated in ways unknown to the compiler (shared memory,
2780 #pragma weak, etc). */
2781 && ! TREE_PUBLIC (exp)
2782 && ! DECL_EXTERNAL (exp)
2783 /* Loading a static variable is unduly expensive, but global
2784 registers aren't expensive. */
2785 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2788 /* The following functions are subroutines to fold_range_test and allow it to
2789 try to change a logical combination of comparisons into a range test.
2791 For example, both
2792 X == 2 || X == 3 || X == 4 || X == 5
2794 X >= 2 && X <= 5
2795 are converted to
2796 (unsigned) (X - 2) <= 3
2798 We describe each set of comparisons as being either inside or outside
2799 a range, using a variable named like IN_P, and then describe the
2800 range with a lower and upper bound. If one of the bounds is omitted,
2801 it represents either the highest or lowest value of the type.
2803 In the comments below, we represent a range by two numbers in brackets
2804 preceded by a "+" to designate being inside that range, or a "-" to
2805 designate being outside that range, so the condition can be inverted by
2806 flipping the prefix. An omitted bound is represented by a "-". For
2807 example, "- [-, 10]" means being outside the range starting at the lowest
2808 possible value and ending at 10, in other words, being greater than 10.
2809 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2810 always false.
2812 We set up things so that the missing bounds are handled in a consistent
2813 manner so neither a missing bound nor "true" and "false" need to be
2814 handled using a special case. */
2816 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2817 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2818 and UPPER1_P are nonzero if the respective argument is an upper bound
2819 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2820 must be specified for a comparison. ARG1 will be converted to ARG0's
2821 type if both are specified. */
2823 static tree
2824 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2825 enum tree_code code;
2826 tree type;
2827 tree arg0, arg1;
2828 int upper0_p, upper1_p;
2830 tree tem;
2831 int result;
2832 int sgn0, sgn1;
2834 /* If neither arg represents infinity, do the normal operation.
2835 Else, if not a comparison, return infinity. Else handle the special
2836 comparison rules. Note that most of the cases below won't occur, but
2837 are handled for consistency. */
2839 if (arg0 != 0 && arg1 != 0)
2841 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2842 arg0, convert (TREE_TYPE (arg0), arg1)));
2843 STRIP_NOPS (tem);
2844 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2847 if (TREE_CODE_CLASS (code) != '<')
2848 return 0;
2850 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2851 for neither. In real maths, we cannot assume open ended ranges are
2852 the same. But, this is computer arithmetic, where numbers are finite.
2853 We can therefore make the transformation of any unbounded range with
2854 the value Z, Z being greater than any representable number. This permits
2855 us to treat unbounded ranges as equal. */
2856 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2857 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2858 switch (code)
2860 case EQ_EXPR:
2861 result = sgn0 == sgn1;
2862 break;
2863 case NE_EXPR:
2864 result = sgn0 != sgn1;
2865 break;
2866 case LT_EXPR:
2867 result = sgn0 < sgn1;
2868 break;
2869 case LE_EXPR:
2870 result = sgn0 <= sgn1;
2871 break;
2872 case GT_EXPR:
2873 result = sgn0 > sgn1;
2874 break;
2875 case GE_EXPR:
2876 result = sgn0 >= sgn1;
2877 break;
2878 default:
2879 abort ();
2882 return convert (type, result ? integer_one_node : integer_zero_node);
2885 /* Given EXP, a logical expression, set the range it is testing into
2886 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2887 actually being tested. *PLOW and *PHIGH will be made of the same type
2888 as the returned expression. If EXP is not a comparison, we will most
2889 likely not be returning a useful value and range. */
2891 static tree
2892 make_range (exp, pin_p, plow, phigh)
2893 tree exp;
2894 int *pin_p;
2895 tree *plow, *phigh;
2897 enum tree_code code;
2898 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2899 tree orig_type = NULL_TREE;
2900 int in_p, n_in_p;
2901 tree low, high, n_low, n_high;
2903 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2904 and see if we can refine the range. Some of the cases below may not
2905 happen, but it doesn't seem worth worrying about this. We "continue"
2906 the outer loop when we've changed something; otherwise we "break"
2907 the switch, which will "break" the while. */
2909 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2911 while (1)
2913 code = TREE_CODE (exp);
2915 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2917 arg0 = TREE_OPERAND (exp, 0);
2918 if (TREE_CODE_CLASS (code) == '<'
2919 || TREE_CODE_CLASS (code) == '1'
2920 || TREE_CODE_CLASS (code) == '2')
2921 type = TREE_TYPE (arg0);
2922 if (TREE_CODE_CLASS (code) == '2'
2923 || TREE_CODE_CLASS (code) == '<'
2924 || (TREE_CODE_CLASS (code) == 'e'
2925 && TREE_CODE_LENGTH (code) > 1))
2926 arg1 = TREE_OPERAND (exp, 1);
2929 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2930 lose a cast by accident. */
2931 if (type != NULL_TREE && orig_type == NULL_TREE)
2932 orig_type = type;
2934 switch (code)
2936 case TRUTH_NOT_EXPR:
2937 in_p = ! in_p, exp = arg0;
2938 continue;
2940 case EQ_EXPR: case NE_EXPR:
2941 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2942 /* We can only do something if the range is testing for zero
2943 and if the second operand is an integer constant. Note that
2944 saying something is "in" the range we make is done by
2945 complementing IN_P since it will set in the initial case of
2946 being not equal to zero; "out" is leaving it alone. */
2947 if (low == 0 || high == 0
2948 || ! integer_zerop (low) || ! integer_zerop (high)
2949 || TREE_CODE (arg1) != INTEGER_CST)
2950 break;
2952 switch (code)
2954 case NE_EXPR: /* - [c, c] */
2955 low = high = arg1;
2956 break;
2957 case EQ_EXPR: /* + [c, c] */
2958 in_p = ! in_p, low = high = arg1;
2959 break;
2960 case GT_EXPR: /* - [-, c] */
2961 low = 0, high = arg1;
2962 break;
2963 case GE_EXPR: /* + [c, -] */
2964 in_p = ! in_p, low = arg1, high = 0;
2965 break;
2966 case LT_EXPR: /* - [c, -] */
2967 low = arg1, high = 0;
2968 break;
2969 case LE_EXPR: /* + [-, c] */
2970 in_p = ! in_p, low = 0, high = arg1;
2971 break;
2972 default:
2973 abort ();
2976 exp = arg0;
2978 /* If this is an unsigned comparison, we also know that EXP is
2979 greater than or equal to zero. We base the range tests we make
2980 on that fact, so we record it here so we can parse existing
2981 range tests. */
2982 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2984 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2985 1, convert (type, integer_zero_node),
2986 NULL_TREE))
2987 break;
2989 in_p = n_in_p, low = n_low, high = n_high;
2991 /* If the high bound is missing, but we
2992 have a low bound, reverse the range so
2993 it goes from zero to the low bound minus 1. */
2994 if (high == 0 && low)
2996 in_p = ! in_p;
2997 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2998 integer_one_node, 0);
2999 low = convert (type, integer_zero_node);
3002 continue;
3004 case NEGATE_EXPR:
3005 /* (-x) IN [a,b] -> x in [-b, -a] */
3006 n_low = range_binop (MINUS_EXPR, type,
3007 convert (type, integer_zero_node), 0, high, 1);
3008 n_high = range_binop (MINUS_EXPR, type,
3009 convert (type, integer_zero_node), 0, low, 0);
3010 low = n_low, high = n_high;
3011 exp = arg0;
3012 continue;
3014 case BIT_NOT_EXPR:
3015 /* ~ X -> -X - 1 */
3016 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3017 convert (type, integer_one_node));
3018 continue;
3020 case PLUS_EXPR: case MINUS_EXPR:
3021 if (TREE_CODE (arg1) != INTEGER_CST)
3022 break;
3024 /* If EXP is signed, any overflow in the computation is undefined,
3025 so we don't worry about it so long as our computations on
3026 the bounds don't overflow. For unsigned, overflow is defined
3027 and this is exactly the right thing. */
3028 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3029 type, low, 0, arg1, 0);
3030 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3031 type, high, 1, arg1, 0);
3032 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3033 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3034 break;
3036 /* Check for an unsigned range which has wrapped around the maximum
3037 value thus making n_high < n_low, and normalize it. */
3038 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3040 low = range_binop (PLUS_EXPR, type, n_high, 0,
3041 integer_one_node, 0);
3042 high = range_binop (MINUS_EXPR, type, n_low, 0,
3043 integer_one_node, 0);
3045 /* If the range is of the form +/- [ x+1, x ], we won't
3046 be able to normalize it. But then, it represents the
3047 whole range or the empty set, so make it
3048 +/- [ -, - ]. */
3049 if (tree_int_cst_equal (n_low, low)
3050 && tree_int_cst_equal (n_high, high))
3051 low = high = 0;
3052 else
3053 in_p = ! in_p;
3055 else
3056 low = n_low, high = n_high;
3058 exp = arg0;
3059 continue;
3061 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3062 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3063 break;
3065 if (! INTEGRAL_TYPE_P (type)
3066 || (low != 0 && ! int_fits_type_p (low, type))
3067 || (high != 0 && ! int_fits_type_p (high, type)))
3068 break;
3070 n_low = low, n_high = high;
3072 if (n_low != 0)
3073 n_low = convert (type, n_low);
3075 if (n_high != 0)
3076 n_high = convert (type, n_high);
3078 /* If we're converting from an unsigned to a signed type,
3079 we will be doing the comparison as unsigned. The tests above
3080 have already verified that LOW and HIGH are both positive.
3082 So we have to make sure that the original unsigned value will
3083 be interpreted as positive. */
3084 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3086 tree equiv_type = (*lang_hooks.types.type_for_mode)
3087 (TYPE_MODE (type), 1);
3088 tree high_positive;
3090 /* A range without an upper bound is, naturally, unbounded.
3091 Since convert would have cropped a very large value, use
3092 the max value for the destination type. */
3093 high_positive
3094 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3095 : TYPE_MAX_VALUE (type);
3097 high_positive = fold (build (RSHIFT_EXPR, type,
3098 convert (type, high_positive),
3099 convert (type, integer_one_node)));
3101 /* If the low bound is specified, "and" the range with the
3102 range for which the original unsigned value will be
3103 positive. */
3104 if (low != 0)
3106 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3107 1, n_low, n_high,
3108 1, convert (type, integer_zero_node),
3109 high_positive))
3110 break;
3112 in_p = (n_in_p == in_p);
3114 else
3116 /* Otherwise, "or" the range with the range of the input
3117 that will be interpreted as negative. */
3118 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3119 0, n_low, n_high,
3120 1, convert (type, integer_zero_node),
3121 high_positive))
3122 break;
3124 in_p = (in_p != n_in_p);
3128 exp = arg0;
3129 low = n_low, high = n_high;
3130 continue;
3132 default:
3133 break;
3136 break;
3139 /* If EXP is a constant, we can evaluate whether this is true or false. */
3140 if (TREE_CODE (exp) == INTEGER_CST)
3142 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3143 exp, 0, low, 0))
3144 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3145 exp, 1, high, 1)));
3146 low = high = 0;
3147 exp = 0;
3150 *pin_p = in_p, *plow = low, *phigh = high;
3151 return exp;
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.

   A null LOW or HIGH represents an unbounded end of the range (so a null
   LOW and HIGH means "always true").  Return 0 if no test expression can
   be built.  */

static tree
build_range_check (type, exp, in_p, low, high)
     tree type;
     tree exp;
     int in_p;
     tree low, high;
{
  tree etype = TREE_TYPE (exp);
  tree value;

  /* For an "out of range" test, build the "in range" test and invert it.  */
  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  /* Unbounded on both ends: trivially true.  */
  if (low == 0 && high == 0)
    return convert (type, integer_one_node);

  /* Only one bound: a single comparison suffices.  */
  if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  /* A degenerate range [X, X] is an equality test.  */
  if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  /* A range starting at zero is a single unsigned comparison against HIGH;
     force EXP to an unsigned type first so negative values fall above HIGH.  */
  if (integer_zerop (low))
    {
      if (! TREE_UNSIGNED (etype))
	{
	  etype = (*lang_hooks.types.unsigned_type) (etype);
	  high = convert (etype, high);
	  exp = convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Compute the two-word constant for the maximum signed value of
	 ETYPE's precision, i.e. 2**(prec-1) - 1.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      /* If HIGH is exactly that maximum, [1, max] is equivalent to
	 "signed value > 0".  */
      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TREE_UNSIGNED (etype))
	    {
	      etype = (*lang_hooks.types.signed_type) (etype);
	      exp = convert (etype, exp);
	    }

	  return fold (build (GT_EXPR, type, exp,
			      convert (etype, integer_zero_node)));
	}
    }

  /* General case: shift the range to start at zero by subtracting LOW,
     then recurse; only valid if HIGH - LOW folds without overflow.  */
  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold (build (MINUS_EXPR, etype, exp, low)),
			      1, convert (etype, integer_zero_node), value);

  return 0;
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.

   Each input range is (LOWn, HIGHn) with flag INn_P: nonzero means the
   test is "inside the range", zero means "outside".  A null bound is an
   unbounded end, as in build_range_check.  On success the merged range is
   stored through *PIN_P, *PLOW, *PHIGH.  */

static int
merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
     int *pin_p;
     tree *plow, *phigh;
     int in0_p, in1_p;
     tree low0, high0, low1, high1;
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Whether the two ranges share a lower bound (both unbounded counts).  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  /* Likewise for the upper bound.  */
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the first range to the end of the second.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  in_p = 1, high = high0;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
			     integer_one_node, 0);
	}
      else if (! subset || highequal)
	{
	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  /* Adjacency test: HIGH0 + 1 == LOW1 means the two excluded
	     ranges butt up against each other and merge cleanly.  */
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,
						      high0, 1,
						      integer_one_node, 1),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    return 0;
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.

   EXP must be a TRUTH_{AND,OR}[IF]_EXPR; both operands are converted to
   ranges via make_range, merged via merge_ranges, and rebuilt with
   build_range_check.  Returns 0 when no simplification applies.  */

static tree
fold_range_test (exp)
     tree exp;
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  (De Morgan: A || B == !(!A && !B), and
     merge_ranges effectively combines with AND semantics.)  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (BRANCH_COST >= 2
	   && lhs != 0 && rhs != 0
	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
		      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
		      TREE_OPERAND (exp, 1));

      else if ((*lang_hooks.decls.global_bindings_p) () == 0
	       && ! contains_placeholder_p (lhs))
	{
	  /* Evaluate the common operand once; both rebuilt range checks
	     share the SAVE_EXPR.  */
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
			  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			  TREE_TYPE (exp), lhs, rhs);
	}
    }

  return 0;
}
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (c, p, unsignedp, mask)
     tree c;
     int p;
     int unsignedp;
     tree mask;
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  /* Nothing to do if the value already fills its mode or is unsigned
     (no sign extension happens in either case).  */
  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = convert ((*lang_hooks.types.signed_type) (type), temp);

  /* Propagate the sign bit across the upper MODESIZE - P bits:
     shift it to the top, then arithmetic-shift it back down.  */
  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = convert (type, temp);

  return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made this by
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (code, truth_type, lhs, rhs)
     enum tree_code code;
     tree truth_type, lhs, rhs;
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  /* Naming convention for the locals below: the first letter is the
     comparison (l = LHS comparison, r = RHS comparison), the second is
     the operand within that comparison (l = left, r = right).  So
     "rl_arg" is the left operand of the right-hand comparison.  */
  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);

  /* Both operands must now be comparisons ('<' tree-code class).  */
  if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
    return 0;

  /* Canonicalize the short-circuit codes to their plain forms.  */
  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg)
      && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
    {
      int compcode;

      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  /* Same operands in the same order: combine the comparison
	     codes as bit masks (AND intersects, OR unions).  */
	  int lcompcode, rcompcode;

	  lcompcode = comparison_to_compcode (lcode);
	  rcompcode = comparison_to_compcode (rcode);
	  compcode = (code == TRUTH_AND_EXPR)
		     ? lcompcode & rcompcode
		     : lcompcode | rcompcode;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  /* Same operands but swapped in the RHS comparison: swap the
	     RHS code first, then combine as above.  */
	  int lcompcode, rcompcode;

	  rcode = swap_tree_comparison (rcode);
	  lcompcode = comparison_to_compcode (lcode);
	  rcompcode = comparison_to_compcode (rcode);
	  compcode = (code == TRUTH_AND_EXPR)
		     ? lcompcode & rcompcode
		     : lcompcode | rcompcode;
	}
      else
	compcode = -1;

      if (compcode == COMPCODE_TRUE)
	return convert (truth_type, integer_one_node);
      else if (compcode == COMPCODE_FALSE)
	return convert (truth_type, integer_zero_node);
      else if (compcode != -1)
	return build (compcode_to_comparison (compcode),
		      truth_type, ll_arg, lr_arg);
    }

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build (NE_EXPR, truth_type,
		      build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			     ll_arg, rl_arg),
		      integer_zero_node);

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build (EQ_EXPR, truth_type,
		      build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			     ll_arg, rl_arg),
		      integer_zero_node);

      /* Otherwise just drop the short-circuit: CODE was canonicalized
	 to the non-short-circuit form above.  */
      return build (code, truth_type, lhs, rhs);
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      /* On big-endian targets, bit positions count from the other end
	 of the containing field, so mirror them.  */
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      /* If the constant has bits outside the field's mask, the comparison
	 can never vary: warn and fold to the constant result.  */
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, ll_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return convert (truth_type,
			  wanted_code == NE_EXPR
			  ? integer_one_node : integer_zero_node);
	}
    }
  if (r_const)
    {
      r_const = convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, rl_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return convert (truth_type,
			  wanted_code == NE_EXPR
			  ? integer_one_node : integer_zero_node);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = convert (rntype, lhs);
		  ll_mask = convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = convert (lntype, rhs);
		  lr_mask = convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("`or' of unmatched not-equal tests is always 1");
	  return convert (truth_type, integer_one_node);
	}
      else
	{
	  warning ("`and' of mutually exclusive equal-tests is always 0");
	  return convert (truth_type, integer_zero_node);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build (BIT_AND_EXPR, lntype, result, ll_mask);

  return build (wanted_code, truth_type, result,
		const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.

   Returns the simplified tree, or T unchanged if the operands do not
   have the required INTEGER_CST / MIN / MAX shape.  */

static tree
optimize_minmax_comparison (t)
     tree t;
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);	/* Constant being compared to.  */
  tree minmax_const;				/* Constant inside MIN/MAX.  */
  int consts_equal, consts_lt;
  tree inner;					/* Variable operand of MIN/MAX.  */

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      /* !=, <, <= are the inverses of ==, >=, > respectively, so invert,
	 recurse, and invert the result back.  */
      return
	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      /* X >= C is (X == C) || (X > C); recurse on each half.  */
      return
	fold (build (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (build (EQ_EXPR, type, arg0, comp_const)),
		     optimize_minmax_comparison
		     (build (GT_EXPR, type, arg0, comp_const))));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold (build (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold (build (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold (build (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold (build (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold (build (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold (build (GT_EXPR, type, inner, comp_const));

    default:
      return t;
    }
}
4026 /* T is an integer expression that is being multiplied, divided, or taken a
4027 modulus (CODE says which and what kind of divide or modulus) by a
4028 constant C. See if we can eliminate that operation by folding it with
4029 other operations already in T. WIDE_TYPE, if non-null, is a type that
4030 should be used for the computation if wider than our type.
4032 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4033 (X * 2) + (Y * 4). We must, however, be assured that either the original
4034 expression would not overflow or that overflow is undefined for the type
4035 in the language in question.
4037 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4038 the machine has a multiply-accumulate insn or that this is part of an
4039 addressing calculation.
4041 If we return a non-null expression, it is an equivalent form of the
4042 original computation, but need not be in the original type. */
4044 static tree
4045 extract_muldiv (t, c, code, wide_type)
4046 tree t;
4047 tree c;
4048 enum tree_code code;
4049 tree wide_type;
4051 tree type = TREE_TYPE (t);
4052 enum tree_code tcode = TREE_CODE (t);
4053 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4054 > GET_MODE_SIZE (TYPE_MODE (type)))
4055 ? wide_type : type);
4056 tree t1, t2;
4057 int same_p = tcode == code;
4058 tree op0 = NULL_TREE, op1 = NULL_TREE;
4060 /* Don't deal with constants of zero here; they confuse the code below. */
4061 if (integer_zerop (c))
4062 return NULL_TREE;
4064 if (TREE_CODE_CLASS (tcode) == '1')
4065 op0 = TREE_OPERAND (t, 0);
4067 if (TREE_CODE_CLASS (tcode) == '2')
4068 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4070 /* Note that we need not handle conditional operations here since fold
4071 already handles those cases. So just do arithmetic here. */
4072 switch (tcode)
4074 case INTEGER_CST:
4075 /* For a constant, we can always simplify if we are a multiply
4076 or (for divide and modulus) if it is a multiple of our constant. */
4077 if (code == MULT_EXPR
4078 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4079 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4080 break;
4082 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4083 /* If op0 is an expression ... */
4084 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4085 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4086 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4087 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4088 /* ... and is unsigned, and its type is smaller than ctype,
4089 then we cannot pass through as widening. */
4090 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4091 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4092 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4093 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4094 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4095 /* ... or its type is larger than ctype,
4096 then we cannot pass through this truncation. */
4097 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4098 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))))
4099 break;
4101 /* Pass the constant down and see if we can make a simplification. If
4102 we can, replace this expression with the inner simplification for
4103 possible later conversion to our or some other type. */
4104 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
4105 code == MULT_EXPR ? ctype : NULL_TREE)))
4106 return t1;
4107 break;
4109 case NEGATE_EXPR: case ABS_EXPR:
4110 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4111 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4112 break;
4114 case MIN_EXPR: case MAX_EXPR:
4115 /* If widening the type changes the signedness, then we can't perform
4116 this optimization as that changes the result. */
4117 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4118 break;
4120 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4121 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4122 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4124 if (tree_int_cst_sgn (c) < 0)
4125 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4127 return fold (build (tcode, ctype, convert (ctype, t1),
4128 convert (ctype, t2)));
4130 break;
4132 case WITH_RECORD_EXPR:
4133 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4134 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4135 TREE_OPERAND (t, 1));
4136 break;
4138 case SAVE_EXPR:
4139 /* If this has not been evaluated and the operand has no side effects,
4140 we can see if we can do something inside it and make a new one.
4141 Note that this test is overly conservative since we can do this
4142 if the only reason it had side effects is that it was another
4143 similar SAVE_EXPR, but that isn't worth bothering with. */
4144 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4145 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4146 wide_type)))
4148 t1 = save_expr (t1);
4149 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
4150 SAVE_EXPR_PERSISTENT_P (t1) = 1;
4151 if (is_pending_size (t))
4152 put_pending_size (t1);
4153 return t1;
4155 break;
4157 case LSHIFT_EXPR: case RSHIFT_EXPR:
4158 /* If the second operand is constant, this is a multiplication
4159 or floor division, by a power of two, so we can treat it that
4160 way unless the multiplier or divisor overflows. */
4161 if (TREE_CODE (op1) == INTEGER_CST
4162 /* const_binop may not detect overflow correctly,
4163 so check for it explicitly here. */
4164 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4165 && TREE_INT_CST_HIGH (op1) == 0
4166 && 0 != (t1 = convert (ctype,
4167 const_binop (LSHIFT_EXPR, size_one_node,
4168 op1, 0)))
4169 && ! TREE_OVERFLOW (t1))
4170 return extract_muldiv (build (tcode == LSHIFT_EXPR
4171 ? MULT_EXPR : FLOOR_DIV_EXPR,
4172 ctype, convert (ctype, op0), t1),
4173 c, code, wide_type);
4174 break;
4176 case PLUS_EXPR: case MINUS_EXPR:
4177 /* See if we can eliminate the operation on both sides. If we can, we
4178 can return a new PLUS or MINUS. If we can't, the only remaining
4179 cases where we can do anything are if the second operand is a
4180 constant. */
4181 t1 = extract_muldiv (op0, c, code, wide_type);
4182 t2 = extract_muldiv (op1, c, code, wide_type);
4183 if (t1 != 0 && t2 != 0
4184 && (code == MULT_EXPR
4185 /* If not multiplication, we can only do this if either operand
4186 is divisible by c. */
4187 || multiple_of_p (ctype, op0, c)
4188 || multiple_of_p (ctype, op1, c)))
4189 return fold (build (tcode, ctype, convert (ctype, t1),
4190 convert (ctype, t2)));
4192 /* If this was a subtraction, negate OP1 and set it to be an addition.
4193 This simplifies the logic below. */
4194 if (tcode == MINUS_EXPR)
4195 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4197 if (TREE_CODE (op1) != INTEGER_CST)
4198 break;
4200 /* If either OP1 or C are negative, this optimization is not safe for
4201 some of the division and remainder types while for others we need
4202 to change the code. */
4203 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4205 if (code == CEIL_DIV_EXPR)
4206 code = FLOOR_DIV_EXPR;
4207 else if (code == FLOOR_DIV_EXPR)
4208 code = CEIL_DIV_EXPR;
4209 else if (code != MULT_EXPR
4210 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4211 break;
4214 /* If it's a multiply or a division/modulus operation of a multiple
4215 of our constant, do the operation and verify it doesn't overflow. */
4216 if (code == MULT_EXPR
4217 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4219 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4220 if (op1 == 0 || TREE_OVERFLOW (op1))
4221 break;
4223 else
4224 break;
4226 /* If we have an unsigned type that is not a sizetype, we cannot widen
4227 the operation since it will change the result if the original
4228 computation overflowed. */
4229 if (TREE_UNSIGNED (ctype)
4230 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4231 && ctype != type)
4232 break;
4234 /* If we were able to eliminate our operation from the first side,
4235 apply our operation to the second side and reform the PLUS. */
4236 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4237 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4239 /* The last case is if we are a multiply. In that case, we can
4240 apply the distributive law to commute the multiply and addition
4241 if the multiplication of the constants doesn't overflow. */
4242 if (code == MULT_EXPR)
4243 return fold (build (tcode, ctype, fold (build (code, ctype,
4244 convert (ctype, op0),
4245 convert (ctype, c))),
4246 op1));
4248 break;
4250 case MULT_EXPR:
4251 /* We have a special case here if we are doing something like
4252 (C * 8) % 4 since we know that's zero. */
4253 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4254 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4255 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4256 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4257 return omit_one_operand (type, integer_zero_node, op0);
4259 /* ... fall through ... */
4261 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4262 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4263 /* If we can extract our operation from the LHS, do so and return a
4264 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4265 do something only if the second operand is a constant. */
4266 if (same_p
4267 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4268 return fold (build (tcode, ctype, convert (ctype, t1),
4269 convert (ctype, op1)));
4270 else if (tcode == MULT_EXPR && code == MULT_EXPR
4271 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4272 return fold (build (tcode, ctype, convert (ctype, op0),
4273 convert (ctype, t1)));
4274 else if (TREE_CODE (op1) != INTEGER_CST)
4275 return 0;
4277 /* If these are the same operation types, we can associate them
4278 assuming no overflow. */
4279 if (tcode == code
4280 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4281 convert (ctype, c), 0))
4282 && ! TREE_OVERFLOW (t1))
4283 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4285 /* If these operations "cancel" each other, we have the main
4286 optimizations of this pass, which occur when either constant is a
4287 multiple of the other, in which case we replace this with either an
4288 operation or CODE or TCODE.
4290 If we have an unsigned type that is not a sizetype, we cannot do
4291 this since it will change the result if the original computation
4292 overflowed. */
4293 if ((! TREE_UNSIGNED (ctype)
4294 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4295 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4296 || (tcode == MULT_EXPR
4297 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4298 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4300 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4301 return fold (build (tcode, ctype, convert (ctype, op0),
4302 convert (ctype,
4303 const_binop (TRUNC_DIV_EXPR,
4304 op1, c, 0))));
4305 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4306 return fold (build (code, ctype, convert (ctype, op0),
4307 convert (ctype,
4308 const_binop (TRUNC_DIV_EXPR,
4309 c, op1, 0))));
4311 break;
4313 default:
4314 break;
4317 return 0;
4320 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4321 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4322 that we may sometimes modify the tree. */
4324 static tree
4325 strip_compound_expr (t, s)
4326 tree t;
4327 tree s;
4329 enum tree_code code = TREE_CODE (t);
4331 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4332 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4333 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4334 return TREE_OPERAND (t, 1);
4336 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4337 don't bother handling any other types. */
4338 else if (code == COND_EXPR)
4340 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4341 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4342 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4344 else if (TREE_CODE_CLASS (code) == '1')
4345 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4346 else if (TREE_CODE_CLASS (code) == '<'
4347 || TREE_CODE_CLASS (code) == '2')
4349 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4350 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4353 return t;
4356 /* Return a node which has the indicated constant VALUE (either 0 or
4357 1), and is of the indicated TYPE. */
4359 static tree
4360 constant_boolean_node (value, type)
4361 int value;
4362 tree type;
4364 if (type == integer_type_node)
4365 return value ? integer_one_node : integer_zero_node;
4366 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4367 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4368 integer_zero_node);
4369 else
4371 tree t = build_int_2 (value, 0);
4373 TREE_TYPE (t) = type;
4374 return t;
4378 /* Utility function for the following routine, to see how complex a nesting of
4379 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4380 we don't care (to avoid spending too much time on complex expressions.). */
4382 static int
4383 count_cond (expr, lim)
4384 tree expr;
4385 int lim;
4387 int ctrue, cfalse;
4389 if (TREE_CODE (expr) != COND_EXPR)
4390 return 0;
4391 else if (lim <= 0)
4392 return 0;
4394 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4395 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4396 return MIN (lim, 1 + ctrue + cfalse);
4399 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4400 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4401 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4402 expression, and ARG to `a'. If COND_FIRST_P is non-zero, then the
4403 COND is the first argument to CODE; otherwise (as in the example
4404 given here), it is the second argument. TYPE is the type of the
4405 original expression. */
4407 static tree
4408 fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4409 enum tree_code code;
4410 tree type;
4411 tree cond;
4412 tree arg;
4413 int cond_first_p;
4415 tree test, true_value, false_value;
4416 tree lhs = NULL_TREE;
4417 tree rhs = NULL_TREE;
4418 /* In the end, we'll produce a COND_EXPR. Both arms of the
4419 conditional expression will be binary operations. The left-hand
4420 side of the expression to be executed if the condition is true
4421 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4422 of the expression to be executed if the condition is true will be
4423 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4424 but apply to the expression to be executed if the conditional is
4425 false. */
4426 tree *true_lhs;
4427 tree *true_rhs;
4428 tree *false_lhs;
4429 tree *false_rhs;
4430 /* These are the codes to use for the left-hand side and right-hand
4431 side of the COND_EXPR. Normally, they are the same as CODE. */
4432 enum tree_code lhs_code = code;
4433 enum tree_code rhs_code = code;
4434 /* And these are the types of the expressions. */
4435 tree lhs_type = type;
4436 tree rhs_type = type;
4438 if (cond_first_p)
4440 true_rhs = false_rhs = &arg;
4441 true_lhs = &true_value;
4442 false_lhs = &false_value;
4444 else
4446 true_lhs = false_lhs = &arg;
4447 true_rhs = &true_value;
4448 false_rhs = &false_value;
4451 if (TREE_CODE (cond) == COND_EXPR)
4453 test = TREE_OPERAND (cond, 0);
4454 true_value = TREE_OPERAND (cond, 1);
4455 false_value = TREE_OPERAND (cond, 2);
4456 /* If this operand throws an expression, then it does not make
4457 sense to try to perform a logical or arithmetic operation
4458 involving it. Instead of building `a + throw 3' for example,
4459 we simply build `a, throw 3'. */
4460 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4462 lhs_code = COMPOUND_EXPR;
4463 if (!cond_first_p)
4464 lhs_type = void_type_node;
4466 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4468 rhs_code = COMPOUND_EXPR;
4469 if (!cond_first_p)
4470 rhs_type = void_type_node;
4473 else
4475 tree testtype = TREE_TYPE (cond);
4476 test = cond;
4477 true_value = convert (testtype, integer_one_node);
4478 false_value = convert (testtype, integer_zero_node);
4481 /* If ARG is complex we want to make sure we only evaluate
4482 it once. Though this is only required if it is volatile, it
4483 might be more efficient even if it is not. However, if we
4484 succeed in folding one part to a constant, we do not need
4485 to make this SAVE_EXPR. Since we do this optimization
4486 primarily to see if we do end up with constant and this
4487 SAVE_EXPR interferes with later optimizations, suppressing
4488 it when we can is important.
4490 If we are not in a function, we can't make a SAVE_EXPR, so don't
4491 try to do so. Don't try to see if the result is a constant
4492 if an arm is a COND_EXPR since we get exponential behavior
4493 in that case. */
4495 if (TREE_CODE (arg) != SAVE_EXPR && ! TREE_CONSTANT (arg)
4496 && (*lang_hooks.decls.global_bindings_p) () == 0
4497 && ((TREE_CODE (arg) != VAR_DECL
4498 && TREE_CODE (arg) != PARM_DECL)
4499 || TREE_SIDE_EFFECTS (arg)))
4501 if (TREE_CODE (true_value) != COND_EXPR)
4502 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4504 if (TREE_CODE (false_value) != COND_EXPR)
4505 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4507 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4508 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4509 arg = save_expr (arg), lhs = rhs = 0;
4512 if (lhs == 0)
4513 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4514 if (rhs == 0)
4515 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4517 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4519 if (TREE_CODE (arg) == SAVE_EXPR)
4520 return build (COMPOUND_EXPR, type,
4521 convert (void_type_node, arg),
4522 strip_compound_expr (test, arg));
4523 else
4524 return convert (type, test);
4528 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4530 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4531 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4532 ADDEND is the same as X.
4534 X + 0 and X - 0 both give X when X is NaN, infinite, or non-zero
4535 and finite. The problematic cases are when X is zero, and its mode
4536 has signed zeros. In the case of rounding towards -infinity,
4537 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4538 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4540 static bool
4541 fold_real_zero_addition_p (type, addend, negate)
4542 tree type, addend;
4543 int negate;
4545 if (!real_zerop (addend))
4546 return false;
4548 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4549 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4550 return true;
4552 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4553 if (TREE_CODE (addend) == REAL_CST
4554 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4555 negate = !negate;
4557 /* The mode has signed zeros, and we have to honor their sign.
4558 In this situation, there is only one case we can return true for.
4559 X - 0 is the same as X unless rounding towards -infinity is
4560 supported. */
4561 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4565 /* Perform constant folding and related simplification of EXPR.
4566 The related simplifications include x*1 => x, x*0 => 0, etc.,
4567 and application of the associative law.
4568 NOP_EXPR conversions may be removed freely (as long as we
4569 are careful not to change the C type of the overall expression)
4570 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4571 but we can constant-fold them if they have constant operands. */
4573 tree
4574 fold (expr)
4575 tree expr;
4577 tree t = expr;
4578 tree t1 = NULL_TREE;
4579 tree tem;
4580 tree type = TREE_TYPE (expr);
4581 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4582 enum tree_code code = TREE_CODE (t);
4583 int kind = TREE_CODE_CLASS (code);
4584 int invert;
4585 /* WINS will be nonzero when the switch is done
4586 if all operands are constant. */
4587 int wins = 1;
4589 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4590 Likewise for a SAVE_EXPR that's already been evaluated. */
4591 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4592 return t;
4594 /* Return right away if a constant. */
4595 if (kind == 'c')
4596 return t;
4598 #ifdef MAX_INTEGER_COMPUTATION_MODE
4599 check_max_integer_computation_mode (expr);
4600 #endif
4602 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4604 tree subop;
4606 /* Special case for conversion ops that can have fixed point args. */
4607 arg0 = TREE_OPERAND (t, 0);
4609 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4610 if (arg0 != 0)
4611 STRIP_SIGN_NOPS (arg0);
4613 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4614 subop = TREE_REALPART (arg0);
4615 else
4616 subop = arg0;
4618 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4619 && TREE_CODE (subop) != REAL_CST
4621 /* Note that TREE_CONSTANT isn't enough:
4622 static var addresses are constant but we can't
4623 do arithmetic on them. */
4624 wins = 0;
4626 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4628 int len = first_rtl_op (code);
4629 int i;
4630 for (i = 0; i < len; i++)
4632 tree op = TREE_OPERAND (t, i);
4633 tree subop;
4635 if (op == 0)
4636 continue; /* Valid for CALL_EXPR, at least. */
4638 if (kind == '<' || code == RSHIFT_EXPR)
4640 /* Signedness matters here. Perhaps we can refine this
4641 later. */
4642 STRIP_SIGN_NOPS (op);
4644 else
4645 /* Strip any conversions that don't change the mode. */
4646 STRIP_NOPS (op);
4648 if (TREE_CODE (op) == COMPLEX_CST)
4649 subop = TREE_REALPART (op);
4650 else
4651 subop = op;
4653 if (TREE_CODE (subop) != INTEGER_CST
4654 && TREE_CODE (subop) != REAL_CST)
4655 /* Note that TREE_CONSTANT isn't enough:
4656 static var addresses are constant but we can't
4657 do arithmetic on them. */
4658 wins = 0;
4660 if (i == 0)
4661 arg0 = op;
4662 else if (i == 1)
4663 arg1 = op;
4667 /* If this is a commutative operation, and ARG0 is a constant, move it
4668 to ARG1 to reduce the number of tests below. */
4669 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4670 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4671 || code == BIT_AND_EXPR)
4672 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4674 tem = arg0; arg0 = arg1; arg1 = tem;
4676 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4677 TREE_OPERAND (t, 1) = tem;
4680 /* Now WINS is set as described above,
4681 ARG0 is the first operand of EXPR,
4682 and ARG1 is the second operand (if it has more than one operand).
4684 First check for cases where an arithmetic operation is applied to a
4685 compound, conditional, or comparison operation. Push the arithmetic
4686 operation inside the compound or conditional to see if any folding
4687 can then be done. Convert comparison to conditional for this purpose.
4688 This also optimizes non-constant cases that used to be done in
4689 expand_expr.
4691 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
4692 one of the operands is a comparison and the other is a comparison, a
4693 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4694 code below would make the expression more complex. Change it to a
4695 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4696 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4698 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4699 || code == EQ_EXPR || code == NE_EXPR)
4700 && ((truth_value_p (TREE_CODE (arg0))
4701 && (truth_value_p (TREE_CODE (arg1))
4702 || (TREE_CODE (arg1) == BIT_AND_EXPR
4703 && integer_onep (TREE_OPERAND (arg1, 1)))))
4704 || (truth_value_p (TREE_CODE (arg1))
4705 && (truth_value_p (TREE_CODE (arg0))
4706 || (TREE_CODE (arg0) == BIT_AND_EXPR
4707 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4709 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4710 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4711 : TRUTH_XOR_EXPR,
4712 type, arg0, arg1));
4714 if (code == EQ_EXPR)
4715 t = invert_truthvalue (t);
4717 return t;
4720 if (TREE_CODE_CLASS (code) == '1')
4722 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4723 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4724 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4725 else if (TREE_CODE (arg0) == COND_EXPR)
4727 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4728 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4729 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4731 /* If this was a conversion, and all we did was to move into
4732 inside the COND_EXPR, bring it back out. But leave it if
4733 it is a conversion from integer to integer and the
4734 result precision is no wider than a word since such a
4735 conversion is cheap and may be optimized away by combine,
4736 while it couldn't if it were outside the COND_EXPR. Then return
4737 so we don't get into an infinite recursion loop taking the
4738 conversion out and then back in. */
4740 if ((code == NOP_EXPR || code == CONVERT_EXPR
4741 || code == NON_LVALUE_EXPR)
4742 && TREE_CODE (t) == COND_EXPR
4743 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4744 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4745 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4746 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4747 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4748 && (INTEGRAL_TYPE_P
4749 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4750 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4751 t = build1 (code, type,
4752 build (COND_EXPR,
4753 TREE_TYPE (TREE_OPERAND
4754 (TREE_OPERAND (t, 1), 0)),
4755 TREE_OPERAND (t, 0),
4756 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4757 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4758 return t;
4760 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4761 return fold (build (COND_EXPR, type, arg0,
4762 fold (build1 (code, type, integer_one_node)),
4763 fold (build1 (code, type, integer_zero_node))));
4765 else if (TREE_CODE_CLASS (code) == '2'
4766 || TREE_CODE_CLASS (code) == '<')
4768 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4769 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4770 fold (build (code, type,
4771 arg0, TREE_OPERAND (arg1, 1))));
4772 else if ((TREE_CODE (arg1) == COND_EXPR
4773 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4774 && TREE_CODE_CLASS (code) != '<'))
4775 && (TREE_CODE (arg0) != COND_EXPR
4776 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4777 && (! TREE_SIDE_EFFECTS (arg0)
4778 || ((*lang_hooks.decls.global_bindings_p) () == 0
4779 && ! contains_placeholder_p (arg0))))
4780 return
4781 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4782 /*cond_first_p=*/0);
4783 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4784 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4785 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4786 else if ((TREE_CODE (arg0) == COND_EXPR
4787 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4788 && TREE_CODE_CLASS (code) != '<'))
4789 && (TREE_CODE (arg1) != COND_EXPR
4790 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4791 && (! TREE_SIDE_EFFECTS (arg1)
4792 || ((*lang_hooks.decls.global_bindings_p) () == 0
4793 && ! contains_placeholder_p (arg1))))
4794 return
4795 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4796 /*cond_first_p=*/1);
4798 else if (TREE_CODE_CLASS (code) == '<'
4799 && TREE_CODE (arg0) == COMPOUND_EXPR)
4800 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4801 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4802 else if (TREE_CODE_CLASS (code) == '<'
4803 && TREE_CODE (arg1) == COMPOUND_EXPR)
4804 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4805 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4807 switch (code)
4809 case INTEGER_CST:
4810 case REAL_CST:
4811 case VECTOR_CST:
4812 case STRING_CST:
4813 case COMPLEX_CST:
4814 case CONSTRUCTOR:
4815 return t;
4817 case CONST_DECL:
4818 return fold (DECL_INITIAL (t));
4820 case NOP_EXPR:
4821 case FLOAT_EXPR:
4822 case CONVERT_EXPR:
4823 case FIX_TRUNC_EXPR:
4824 /* Other kinds of FIX are not handled properly by fold_convert. */
4826 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4827 return TREE_OPERAND (t, 0);
4829 /* Handle cases of two conversions in a row. */
4830 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4831 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4833 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4834 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4835 tree final_type = TREE_TYPE (t);
4836 int inside_int = INTEGRAL_TYPE_P (inside_type);
4837 int inside_ptr = POINTER_TYPE_P (inside_type);
4838 int inside_float = FLOAT_TYPE_P (inside_type);
4839 unsigned int inside_prec = TYPE_PRECISION (inside_type);
4840 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4841 int inter_int = INTEGRAL_TYPE_P (inter_type);
4842 int inter_ptr = POINTER_TYPE_P (inter_type);
4843 int inter_float = FLOAT_TYPE_P (inter_type);
4844 unsigned int inter_prec = TYPE_PRECISION (inter_type);
4845 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4846 int final_int = INTEGRAL_TYPE_P (final_type);
4847 int final_ptr = POINTER_TYPE_P (final_type);
4848 int final_float = FLOAT_TYPE_P (final_type);
4849 unsigned int final_prec = TYPE_PRECISION (final_type);
4850 int final_unsignedp = TREE_UNSIGNED (final_type);
4852 /* In addition to the cases of two conversions in a row
4853 handled below, if we are converting something to its own
4854 type via an object of identical or wider precision, neither
4855 conversion is needed. */
4856 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4857 && ((inter_int && final_int) || (inter_float && final_float))
4858 && inter_prec >= final_prec)
4859 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4861 /* Likewise, if the intermediate and final types are either both
4862 float or both integer, we don't need the middle conversion if
4863 it is wider than the final type and doesn't change the signedness
4864 (for integers). Avoid this if the final type is a pointer
4865 since then we sometimes need the inner conversion. Likewise if
4866 the outer has a precision not equal to the size of its mode. */
4867 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4868 || (inter_float && inside_float))
4869 && inter_prec >= inside_prec
4870 && (inter_float || inter_unsignedp == inside_unsignedp)
4871 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4872 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4873 && ! final_ptr)
4874 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4876 /* If we have a sign-extension of a zero-extended value, we can
4877 replace that by a single zero-extension. */
4878 if (inside_int && inter_int && final_int
4879 && inside_prec < inter_prec && inter_prec < final_prec
4880 && inside_unsignedp && !inter_unsignedp)
4881 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4883 /* Two conversions in a row are not needed unless:
4884 - some conversion is floating-point (overstrict for now), or
4885 - the intermediate type is narrower than both initial and
4886 final, or
4887 - the intermediate type and innermost type differ in signedness,
4888 and the outermost type is wider than the intermediate, or
4889 - the initial type is a pointer type and the precisions of the
4890 intermediate and final types differ, or
4891 - the final type is a pointer type and the precisions of the
4892 initial and intermediate types differ. */
4893 if (! inside_float && ! inter_float && ! final_float
4894 && (inter_prec > inside_prec || inter_prec > final_prec)
4895 && ! (inside_int && inter_int
4896 && inter_unsignedp != inside_unsignedp
4897 && inter_prec < final_prec)
4898 && ((inter_unsignedp && inter_prec > inside_prec)
4899 == (final_unsignedp && final_prec > inter_prec))
4900 && ! (inside_ptr && inter_prec != final_prec)
4901 && ! (final_ptr && inside_prec != inter_prec)
4902 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4903 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4904 && ! final_ptr)
4905 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4908 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4909 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4910 /* Detect assigning a bitfield. */
4911 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4912 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4914 /* Don't leave an assignment inside a conversion
4915 unless assigning a bitfield. */
4916 tree prev = TREE_OPERAND (t, 0);
4917 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4918 /* First do the assignment, then return converted constant. */
4919 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4920 TREE_USED (t) = 1;
4921 return t;
4924 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
4925 constants (if x has signed type, the sign bit cannot be set
4926 in c). This folds extension into the BIT_AND_EXPR. */
4927 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
4928 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
4929 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
4930 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
4932 tree and = TREE_OPERAND (t, 0);
4933 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
4934 int change = 0;
4936 if (TREE_UNSIGNED (TREE_TYPE (and))
4937 || (TYPE_PRECISION (TREE_TYPE (t))
4938 <= TYPE_PRECISION (TREE_TYPE (and))))
4939 change = 1;
4940 else if (TYPE_PRECISION (TREE_TYPE (and1))
4941 <= HOST_BITS_PER_WIDE_INT
4942 && host_integerp (and1, 1))
4944 unsigned HOST_WIDE_INT cst;
4946 cst = tree_low_cst (and1, 1);
4947 cst &= (HOST_WIDE_INT) -1
4948 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
4949 change = (cst == 0);
4950 #ifdef LOAD_EXTEND_OP
4951 if (change
4952 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
4953 == ZERO_EXTEND))
4955 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
4956 and0 = convert (uns, and0);
4957 and1 = convert (uns, and1);
4959 #endif
4961 if (change)
4962 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
4963 convert (TREE_TYPE (t), and0),
4964 convert (TREE_TYPE (t), and1)));
4967 if (!wins)
4969 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4970 return t;
4972 return fold_convert (t, arg0);
4974 case VIEW_CONVERT_EXPR:
4975 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
4976 return build1 (VIEW_CONVERT_EXPR, type,
4977 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4978 return t;
4980 case COMPONENT_REF:
4981 if (TREE_CODE (arg0) == CONSTRUCTOR)
4983 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4984 if (m)
4985 t = TREE_VALUE (m);
4987 return t;
4989 case RANGE_EXPR:
4990 TREE_CONSTANT (t) = wins;
4991 return t;
4993 case NEGATE_EXPR:
4994 if (wins)
4996 if (TREE_CODE (arg0) == INTEGER_CST)
4998 unsigned HOST_WIDE_INT low;
4999 HOST_WIDE_INT high;
5000 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5001 TREE_INT_CST_HIGH (arg0),
5002 &low, &high);
5003 t = build_int_2 (low, high);
5004 TREE_TYPE (t) = type;
5005 TREE_OVERFLOW (t)
5006 = (TREE_OVERFLOW (arg0)
5007 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5008 TREE_CONSTANT_OVERFLOW (t)
5009 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5011 else if (TREE_CODE (arg0) == REAL_CST)
5012 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5014 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5015 return TREE_OPERAND (arg0, 0);
5017 /* Convert - (a - b) to (b - a) for non-floating-point. */
5018 else if (TREE_CODE (arg0) == MINUS_EXPR
5019 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5020 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5021 TREE_OPERAND (arg0, 0));
5023 return t;
5025 case ABS_EXPR:
5026 if (wins)
5028 if (TREE_CODE (arg0) == INTEGER_CST)
5030 /* If the value is unsigned, then the absolute value is
5031 the same as the ordinary value. */
5032 if (TREE_UNSIGNED (type))
5033 return arg0;
5034 /* Similarly, if the value is non-negative. */
5035 else if (INT_CST_LT (integer_minus_one_node, arg0))
5036 return arg0;
5037 /* If the value is negative, then the absolute value is
5038 its negation. */
5039 else
5041 unsigned HOST_WIDE_INT low;
5042 HOST_WIDE_INT high;
5043 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5044 TREE_INT_CST_HIGH (arg0),
5045 &low, &high);
5046 t = build_int_2 (low, high);
5047 TREE_TYPE (t) = type;
5048 TREE_OVERFLOW (t)
5049 = (TREE_OVERFLOW (arg0)
5050 | force_fit_type (t, overflow));
5051 TREE_CONSTANT_OVERFLOW (t)
5052 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5055 else if (TREE_CODE (arg0) == REAL_CST)
5057 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5058 t = build_real (type,
5059 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5062 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5063 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5064 return t;
5066 case CONJ_EXPR:
5067 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5068 return convert (type, arg0);
5069 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5070 return build (COMPLEX_EXPR, type,
5071 TREE_OPERAND (arg0, 0),
5072 negate_expr (TREE_OPERAND (arg0, 1)));
5073 else if (TREE_CODE (arg0) == COMPLEX_CST)
5074 return build_complex (type, TREE_REALPART (arg0),
5075 negate_expr (TREE_IMAGPART (arg0)));
5076 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5077 return fold (build (TREE_CODE (arg0), type,
5078 fold (build1 (CONJ_EXPR, type,
5079 TREE_OPERAND (arg0, 0))),
5080 fold (build1 (CONJ_EXPR,
5081 type, TREE_OPERAND (arg0, 1)))));
5082 else if (TREE_CODE (arg0) == CONJ_EXPR)
5083 return TREE_OPERAND (arg0, 0);
5084 return t;
5086 case BIT_NOT_EXPR:
5087 if (wins)
5089 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5090 ~ TREE_INT_CST_HIGH (arg0));
5091 TREE_TYPE (t) = type;
5092 force_fit_type (t, 0);
5093 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5094 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5096 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5097 return TREE_OPERAND (arg0, 0);
5098 return t;
5100 case PLUS_EXPR:
5101 /* A + (-B) -> A - B */
5102 if (TREE_CODE (arg1) == NEGATE_EXPR)
5103 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5104 /* (-A) + B -> B - A */
5105 if (TREE_CODE (arg0) == NEGATE_EXPR)
5106 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5107 else if (! FLOAT_TYPE_P (type))
5109 if (integer_zerop (arg1))
5110 return non_lvalue (convert (type, arg0));
5112 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5113 with a constant, and the two constants have no bits in common,
5114 we should treat this as a BIT_IOR_EXPR since this may produce more
5115 simplifications. */
5116 if (TREE_CODE (arg0) == BIT_AND_EXPR
5117 && TREE_CODE (arg1) == BIT_AND_EXPR
5118 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5119 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5120 && integer_zerop (const_binop (BIT_AND_EXPR,
5121 TREE_OPERAND (arg0, 1),
5122 TREE_OPERAND (arg1, 1), 0)))
5124 code = BIT_IOR_EXPR;
5125 goto bit_ior;
5128 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5129 (plus (plus (mult) (mult)) (foo)) so that we can
5130 take advantage of the factoring cases below. */
5131 if ((TREE_CODE (arg0) == PLUS_EXPR
5132 && TREE_CODE (arg1) == MULT_EXPR)
5133 || (TREE_CODE (arg1) == PLUS_EXPR
5134 && TREE_CODE (arg0) == MULT_EXPR))
5136 tree parg0, parg1, parg, marg;
5138 if (TREE_CODE (arg0) == PLUS_EXPR)
5139 parg = arg0, marg = arg1;
5140 else
5141 parg = arg1, marg = arg0;
5142 parg0 = TREE_OPERAND (parg, 0);
5143 parg1 = TREE_OPERAND (parg, 1);
5144 STRIP_NOPS (parg0);
5145 STRIP_NOPS (parg1);
5147 if (TREE_CODE (parg0) == MULT_EXPR
5148 && TREE_CODE (parg1) != MULT_EXPR)
5149 return fold (build (PLUS_EXPR, type,
5150 fold (build (PLUS_EXPR, type, parg0, marg)),
5151 parg1));
5152 if (TREE_CODE (parg0) != MULT_EXPR
5153 && TREE_CODE (parg1) == MULT_EXPR)
5154 return fold (build (PLUS_EXPR, type,
5155 fold (build (PLUS_EXPR, type, parg1, marg)),
5156 parg0));
5159 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5161 tree arg00, arg01, arg10, arg11;
5162 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5164 /* (A * C) + (B * C) -> (A+B) * C.
5165 We are most concerned about the case where C is a constant,
5166 but other combinations show up during loop reduction. Since
5167 it is not difficult, try all four possibilities. */
5169 arg00 = TREE_OPERAND (arg0, 0);
5170 arg01 = TREE_OPERAND (arg0, 1);
5171 arg10 = TREE_OPERAND (arg1, 0);
5172 arg11 = TREE_OPERAND (arg1, 1);
5173 same = NULL_TREE;
5175 if (operand_equal_p (arg01, arg11, 0))
5176 same = arg01, alt0 = arg00, alt1 = arg10;
5177 else if (operand_equal_p (arg00, arg10, 0))
5178 same = arg00, alt0 = arg01, alt1 = arg11;
5179 else if (operand_equal_p (arg00, arg11, 0))
5180 same = arg00, alt0 = arg01, alt1 = arg10;
5181 else if (operand_equal_p (arg01, arg10, 0))
5182 same = arg01, alt0 = arg00, alt1 = arg11;
5184 /* No identical multiplicands; see if we can find a common
5185 power-of-two factor in non-power-of-two multiplies. This
5186 can help in multi-dimensional array access. */
5187 else if (TREE_CODE (arg01) == INTEGER_CST
5188 && TREE_CODE (arg11) == INTEGER_CST
5189 && TREE_INT_CST_HIGH (arg01) == 0
5190 && TREE_INT_CST_HIGH (arg11) == 0)
5192 HOST_WIDE_INT int01, int11, tmp;
5193 int01 = TREE_INT_CST_LOW (arg01);
5194 int11 = TREE_INT_CST_LOW (arg11);
5196 /* Move min of absolute values to int11. */
5197 if ((int01 >= 0 ? int01 : -int01)
5198 < (int11 >= 0 ? int11 : -int11))
5200 tmp = int01, int01 = int11, int11 = tmp;
5201 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5202 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5205 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5207 alt0 = fold (build (MULT_EXPR, type, arg00,
5208 build_int_2 (int01 / int11, 0)));
5209 alt1 = arg10;
5210 same = arg11;
5214 if (same)
5215 return fold (build (MULT_EXPR, type,
5216 fold (build (PLUS_EXPR, type, alt0, alt1)),
5217 same));
5221 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5222 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5223 return non_lvalue (convert (type, arg0));
5225 /* Likewise if the operands are reversed. */
5226 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5227 return non_lvalue (convert (type, arg1));
5229 bit_rotate:
5230 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5231 is a rotate of A by C1 bits. */
5232 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5233 is a rotate of A by B bits. */
5235 enum tree_code code0, code1;
5236 code0 = TREE_CODE (arg0);
5237 code1 = TREE_CODE (arg1);
5238 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5239 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5240 && operand_equal_p (TREE_OPERAND (arg0, 0),
5241 TREE_OPERAND (arg1, 0), 0)
5242 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5244 tree tree01, tree11;
5245 enum tree_code code01, code11;
5247 tree01 = TREE_OPERAND (arg0, 1);
5248 tree11 = TREE_OPERAND (arg1, 1);
5249 STRIP_NOPS (tree01);
5250 STRIP_NOPS (tree11);
5251 code01 = TREE_CODE (tree01);
5252 code11 = TREE_CODE (tree11);
5253 if (code01 == INTEGER_CST
5254 && code11 == INTEGER_CST
5255 && TREE_INT_CST_HIGH (tree01) == 0
5256 && TREE_INT_CST_HIGH (tree11) == 0
5257 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5258 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5259 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5260 code0 == LSHIFT_EXPR ? tree01 : tree11);
5261 else if (code11 == MINUS_EXPR)
5263 tree tree110, tree111;
5264 tree110 = TREE_OPERAND (tree11, 0);
5265 tree111 = TREE_OPERAND (tree11, 1);
5266 STRIP_NOPS (tree110);
5267 STRIP_NOPS (tree111);
5268 if (TREE_CODE (tree110) == INTEGER_CST
5269 && 0 == compare_tree_int (tree110,
5270 TYPE_PRECISION
5271 (TREE_TYPE (TREE_OPERAND
5272 (arg0, 0))))
5273 && operand_equal_p (tree01, tree111, 0))
5274 return build ((code0 == LSHIFT_EXPR
5275 ? LROTATE_EXPR
5276 : RROTATE_EXPR),
5277 type, TREE_OPERAND (arg0, 0), tree01);
5279 else if (code01 == MINUS_EXPR)
5281 tree tree010, tree011;
5282 tree010 = TREE_OPERAND (tree01, 0);
5283 tree011 = TREE_OPERAND (tree01, 1);
5284 STRIP_NOPS (tree010);
5285 STRIP_NOPS (tree011);
5286 if (TREE_CODE (tree010) == INTEGER_CST
5287 && 0 == compare_tree_int (tree010,
5288 TYPE_PRECISION
5289 (TREE_TYPE (TREE_OPERAND
5290 (arg0, 0))))
5291 && operand_equal_p (tree11, tree011, 0))
5292 return build ((code0 != LSHIFT_EXPR
5293 ? LROTATE_EXPR
5294 : RROTATE_EXPR),
5295 type, TREE_OPERAND (arg0, 0), tree11);
5300 associate:
5301 /* In most languages, can't associate operations on floats through
5302 parentheses. Rather than remember where the parentheses were, we
5303 don't associate floats at all. It shouldn't matter much. However,
5304 associating multiplications is only very slightly inaccurate, so do
5305 that if -funsafe-math-optimizations is specified. */
5307 if (! wins
5308 && (! FLOAT_TYPE_P (type)
5309 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5311 tree var0, con0, lit0, minus_lit0;
5312 tree var1, con1, lit1, minus_lit1;
5314 /* Split both trees into variables, constants, and literals. Then
5315 associate each group together, the constants with literals,
5316 then the result with variables. This increases the chances of
5317 literals being recombined later and of generating relocatable
5318 expressions for the sum of a constant and literal. */
5319 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5320 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5321 code == MINUS_EXPR);
5323 /* Only do something if we found more than two objects. Otherwise,
5324 nothing has changed and we risk infinite recursion. */
5325 if (2 < ((var0 != 0) + (var1 != 0)
5326 + (con0 != 0) + (con1 != 0)
5327 + (lit0 != 0) + (lit1 != 0)
5328 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5330 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5331 if (code == MINUS_EXPR)
5332 code = PLUS_EXPR;
5334 var0 = associate_trees (var0, var1, code, type);
5335 con0 = associate_trees (con0, con1, code, type);
5336 lit0 = associate_trees (lit0, lit1, code, type);
5337 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5339 /* Preserve the MINUS_EXPR if the negative part of the literal is
5340 greater than the positive part. Otherwise, the multiplicative
5341	      folding code (i.e. extract_muldiv) may be fooled in case
5342	      unsigned constants are subtracted, like in the following
5343	      example: ((X*2 + 4) - 8U)/2.  */
5344 if (minus_lit0 && lit0)
5346 if (tree_int_cst_lt (lit0, minus_lit0))
5348 minus_lit0 = associate_trees (minus_lit0, lit0,
5349 MINUS_EXPR, type);
5350 lit0 = 0;
5352 else
5354 lit0 = associate_trees (lit0, minus_lit0,
5355 MINUS_EXPR, type);
5356 minus_lit0 = 0;
5359 if (minus_lit0)
5361 if (con0 == 0)
5362 return convert (type, associate_trees (var0, minus_lit0,
5363 MINUS_EXPR, type));
5364 else
5366 con0 = associate_trees (con0, minus_lit0,
5367 MINUS_EXPR, type);
5368 return convert (type, associate_trees (var0, con0,
5369 PLUS_EXPR, type));
5373 con0 = associate_trees (con0, lit0, code, type);
5374 return convert (type, associate_trees (var0, con0, code, type));
5378 binary:
5379 if (wins)
5380 t1 = const_binop (code, arg0, arg1, 0);
5381 if (t1 != NULL_TREE)
5383 /* The return value should always have
5384 the same type as the original expression. */
5385 if (TREE_TYPE (t1) != TREE_TYPE (t))
5386 t1 = convert (TREE_TYPE (t), t1);
5388 return t1;
5390 return t;
5392 case MINUS_EXPR:
5393 /* A - (-B) -> A + B */
5394 if (TREE_CODE (arg1) == NEGATE_EXPR)
5395 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5396 /* (-A) - CST -> (-CST) - A for floating point (what about ints ?) */
5397 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5398 return
5399 fold (build (MINUS_EXPR, type,
5400 build_real (TREE_TYPE (arg1),
5401 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5402 TREE_OPERAND (arg0, 0)));
5404 if (! FLOAT_TYPE_P (type))
5406 if (! wins && integer_zerop (arg0))
5407 return negate_expr (convert (type, arg1));
5408 if (integer_zerop (arg1))
5409 return non_lvalue (convert (type, arg0));
5411 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5412 about the case where C is a constant, just try one of the
5413 four possibilities. */
5415 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5416 && operand_equal_p (TREE_OPERAND (arg0, 1),
5417 TREE_OPERAND (arg1, 1), 0))
5418 return fold (build (MULT_EXPR, type,
5419 fold (build (MINUS_EXPR, type,
5420 TREE_OPERAND (arg0, 0),
5421 TREE_OPERAND (arg1, 0))),
5422 TREE_OPERAND (arg0, 1)));
5425 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5426 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5427 return non_lvalue (convert (type, arg0));
5429 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5430 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5431 (-ARG1 + ARG0) reduces to -ARG1. */
5432 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5433 return negate_expr (convert (type, arg1));
5435 /* Fold &x - &x. This can happen from &x.foo - &x.
5436 This is unsafe for certain floats even in non-IEEE formats.
5437 In IEEE, it is unsafe because it does wrong for NaNs.
5438 Also note that operand_equal_p is always false if an operand
5439 is volatile. */
5441 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5442 && operand_equal_p (arg0, arg1, 0))
5443 return convert (type, integer_zero_node);
5445 goto associate;
5447 case MULT_EXPR:
5448 /* (-A) * (-B) -> A * B */
5449 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5450 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5451 TREE_OPERAND (arg1, 0)));
5453 if (! FLOAT_TYPE_P (type))
5455 if (integer_zerop (arg1))
5456 return omit_one_operand (type, arg1, arg0);
5457 if (integer_onep (arg1))
5458 return non_lvalue (convert (type, arg0));
5460 /* (a * (1 << b)) is (a << b) */
5461 if (TREE_CODE (arg1) == LSHIFT_EXPR
5462 && integer_onep (TREE_OPERAND (arg1, 0)))
5463 return fold (build (LSHIFT_EXPR, type, arg0,
5464 TREE_OPERAND (arg1, 1)));
5465 if (TREE_CODE (arg0) == LSHIFT_EXPR
5466 && integer_onep (TREE_OPERAND (arg0, 0)))
5467 return fold (build (LSHIFT_EXPR, type, arg1,
5468 TREE_OPERAND (arg0, 1)));
5470 if (TREE_CODE (arg1) == INTEGER_CST
5471 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5472 code, NULL_TREE)))
5473 return convert (type, tem);
5476 else
5478 /* Maybe fold x * 0 to 0. The expressions aren't the same
5479 when x is NaN, since x * 0 is also NaN. Nor are they the
5480 same in modes with signed zeros, since multiplying a
5481 negative value by 0 gives -0, not +0. */
5482 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5483 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5484 && real_zerop (arg1))
5485 return omit_one_operand (type, arg1, arg0);
5486 /* In IEEE floating point, x*1 is not equivalent to x for snans.
5487 However, ANSI says we can drop signals,
5488 so we can do this anyway. */
5489 if (real_onep (arg1))
5490 return non_lvalue (convert (type, arg0));
5492 /* Transform x * -1.0 into -x. This should be safe for NaNs,
5493 signed zeros and signed infinities, but is currently
5494 restricted to "unsafe math optimizations" just in case. */
5495 if (flag_unsafe_math_optimizations
5496 && real_minus_onep (arg1))
5497 return fold (build1 (NEGATE_EXPR, type, arg0));
5499 /* x*2 is x+x */
5500 if (! wins && real_twop (arg1)
5501 && (*lang_hooks.decls.global_bindings_p) () == 0
5502 && ! contains_placeholder_p (arg0))
5504 tree arg = save_expr (arg0);
5505 return build (PLUS_EXPR, type, arg, arg);
5508 goto associate;
5510 case BIT_IOR_EXPR:
5511 bit_ior:
5512 if (integer_all_onesp (arg1))
5513 return omit_one_operand (type, arg1, arg0);
5514 if (integer_zerop (arg1))
5515 return non_lvalue (convert (type, arg0));
5516 t1 = distribute_bit_expr (code, type, arg0, arg1);
5517 if (t1 != NULL_TREE)
5518 return t1;
5520 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5522 This results in more efficient code for machines without a NAND
5523 instruction. Combine will canonicalize to the first form
5524 which will allow use of NAND instructions provided by the
5525 backend if they exist. */
5526 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5527 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5529 return fold (build1 (BIT_NOT_EXPR, type,
5530 build (BIT_AND_EXPR, type,
5531 TREE_OPERAND (arg0, 0),
5532 TREE_OPERAND (arg1, 0))));
5535 /* See if this can be simplified into a rotate first. If that
5536 is unsuccessful continue in the association code. */
5537 goto bit_rotate;
5539 case BIT_XOR_EXPR:
5540 if (integer_zerop (arg1))
5541 return non_lvalue (convert (type, arg0));
5542 if (integer_all_onesp (arg1))
5543 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5545 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5546 with a constant, and the two constants have no bits in common,
5547 we should treat this as a BIT_IOR_EXPR since this may produce more
5548 simplifications. */
5549 if (TREE_CODE (arg0) == BIT_AND_EXPR
5550 && TREE_CODE (arg1) == BIT_AND_EXPR
5551 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5552 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5553 && integer_zerop (const_binop (BIT_AND_EXPR,
5554 TREE_OPERAND (arg0, 1),
5555 TREE_OPERAND (arg1, 1), 0)))
5557 code = BIT_IOR_EXPR;
5558 goto bit_ior;
5561 /* See if this can be simplified into a rotate first. If that
5562 is unsuccessful continue in the association code. */
5563 goto bit_rotate;
5565 case BIT_AND_EXPR:
5566 bit_and:
5567 if (integer_all_onesp (arg1))
5568 return non_lvalue (convert (type, arg0));
5569 if (integer_zerop (arg1))
5570 return omit_one_operand (type, arg1, arg0);
5571 t1 = distribute_bit_expr (code, type, arg0, arg1);
5572 if (t1 != NULL_TREE)
5573 return t1;
5574 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
5575 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5576 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5578 unsigned int prec
5579 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5581 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5582 && (~TREE_INT_CST_LOW (arg1)
5583 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5584 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5587 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5589 This results in more efficient code for machines without a NOR
5590 instruction. Combine will canonicalize to the first form
5591 which will allow use of NOR instructions provided by the
5592 backend if they exist. */
5593 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5594 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5596 return fold (build1 (BIT_NOT_EXPR, type,
5597 build (BIT_IOR_EXPR, type,
5598 TREE_OPERAND (arg0, 0),
5599 TREE_OPERAND (arg1, 0))));
5602 goto associate;
5604 case BIT_ANDTC_EXPR:
5605 if (integer_all_onesp (arg0))
5606 return non_lvalue (convert (type, arg1));
5607 if (integer_zerop (arg0))
5608 return omit_one_operand (type, arg0, arg1);
5609 if (TREE_CODE (arg1) == INTEGER_CST)
5611 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5612 code = BIT_AND_EXPR;
5613 goto bit_and;
5615 goto binary;
5617 case RDIV_EXPR:
5618 /* Don't touch a floating-point divide by zero unless the mode
5619 of the constant can represent infinity. */
5620 if (TREE_CODE (arg1) == REAL_CST
5621 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
5622 && real_zerop (arg1))
5623 return t;
5625 /* (-A) / (-B) -> A / B */
5626 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5627 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5628 TREE_OPERAND (arg1, 0)));
5630 /* In IEEE floating point, x/1 is not equivalent to x for snans.
5631 However, ANSI says we can drop signals, so we can do this anyway. */
5632 if (real_onep (arg1))
5633 return non_lvalue (convert (type, arg0));
5635 /* If ARG1 is a constant, we can convert this to a multiply by the
5636 reciprocal. This does not have the same rounding properties,
5637 so only do this if -funsafe-math-optimizations. We can actually
5638 always safely do it if ARG1 is a power of two, but it's hard to
5639 tell if it is or not in a portable manner. */
5640 if (TREE_CODE (arg1) == REAL_CST)
5642 if (flag_unsafe_math_optimizations
5643 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5644 arg1, 0)))
5645 return fold (build (MULT_EXPR, type, arg0, tem));
5646 /* Find the reciprocal if optimizing and the result is exact. */
5647 else if (optimize)
5649 REAL_VALUE_TYPE r;
5650 r = TREE_REAL_CST (arg1);
5651 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
5653 tem = build_real (type, r);
5654 return fold (build (MULT_EXPR, type, arg0, tem));
5658 /* Convert A/B/C to A/(B*C). */
5659 if (flag_unsafe_math_optimizations
5660 && TREE_CODE (arg0) == RDIV_EXPR)
5662 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5663 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
5664 arg1)));
5666 /* Convert A/(B/C) to (A/B)*C. */
5667 if (flag_unsafe_math_optimizations
5668 && TREE_CODE (arg1) == RDIV_EXPR)
5670 return fold (build (MULT_EXPR, type,
5671 build (RDIV_EXPR, type, arg0,
5672 TREE_OPERAND (arg1, 0)),
5673 TREE_OPERAND (arg1, 1)));
5675 goto binary;
5677 case TRUNC_DIV_EXPR:
5678 case ROUND_DIV_EXPR:
5679 case FLOOR_DIV_EXPR:
5680 case CEIL_DIV_EXPR:
5681 case EXACT_DIV_EXPR:
5682 if (integer_onep (arg1))
5683 return non_lvalue (convert (type, arg0));
5684 if (integer_zerop (arg1))
5685 return t;
5687 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5688 operation, EXACT_DIV_EXPR.
5690 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5691	 At one time others generated faster code, it's not clear if they do
5692	 after the last round of changes to the DIV code in expmed.c.  */
5693 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5694 && multiple_of_p (type, arg0, arg1))
5695 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5697 if (TREE_CODE (arg1) == INTEGER_CST
5698 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5699 code, NULL_TREE)))
5700 return convert (type, tem);
5702 goto binary;
5704 case CEIL_MOD_EXPR:
5705 case FLOOR_MOD_EXPR:
5706 case ROUND_MOD_EXPR:
5707 case TRUNC_MOD_EXPR:
5708 if (integer_onep (arg1))
5709 return omit_one_operand (type, integer_zero_node, arg0);
5710 if (integer_zerop (arg1))
5711 return t;
5713 if (TREE_CODE (arg1) == INTEGER_CST
5714 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5715 code, NULL_TREE)))
5716 return convert (type, tem);
5718 goto binary;
5720 case LSHIFT_EXPR:
5721 case RSHIFT_EXPR:
5722 case LROTATE_EXPR:
5723 case RROTATE_EXPR:
5724 if (integer_zerop (arg1))
5725 return non_lvalue (convert (type, arg0));
5726 /* Since negative shift count is not well-defined,
5727 don't try to compute it in the compiler. */
5728 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5729 return t;
5730 /* Rewrite an LROTATE_EXPR by a constant into an
5731 RROTATE_EXPR by a new constant. */
5732 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5734 TREE_SET_CODE (t, RROTATE_EXPR);
5735 code = RROTATE_EXPR;
5736 TREE_OPERAND (t, 1) = arg1
5737 = const_binop
5738 (MINUS_EXPR,
5739 convert (TREE_TYPE (arg1),
5740 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5741 arg1, 0);
5742 if (tree_int_cst_sgn (arg1) < 0)
5743 return t;
5746 /* If we have a rotate of a bit operation with the rotate count and
5747 the second operand of the bit operation both constant,
5748 permute the two operations. */
5749 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5750 && (TREE_CODE (arg0) == BIT_AND_EXPR
5751 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5752 || TREE_CODE (arg0) == BIT_IOR_EXPR
5753 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5754 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5755 return fold (build (TREE_CODE (arg0), type,
5756 fold (build (code, type,
5757 TREE_OPERAND (arg0, 0), arg1)),
5758 fold (build (code, type,
5759 TREE_OPERAND (arg0, 1), arg1))));
5761 /* Two consecutive rotates adding up to the width of the mode can
5762 be ignored. */
5763 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5764 && TREE_CODE (arg0) == RROTATE_EXPR
5765 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5766 && TREE_INT_CST_HIGH (arg1) == 0
5767 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5768 && ((TREE_INT_CST_LOW (arg1)
5769 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5770 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5771 return TREE_OPERAND (arg0, 0);
5773 goto binary;
5775 case MIN_EXPR:
5776 if (operand_equal_p (arg0, arg1, 0))
5777 return omit_one_operand (type, arg0, arg1);
5778 if (INTEGRAL_TYPE_P (type)
5779 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5780 return omit_one_operand (type, arg1, arg0);
5781 goto associate;
5783 case MAX_EXPR:
5784 if (operand_equal_p (arg0, arg1, 0))
5785 return omit_one_operand (type, arg0, arg1);
5786 if (INTEGRAL_TYPE_P (type)
5787 && TYPE_MAX_VALUE (type)
5788 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5789 return omit_one_operand (type, arg1, arg0);
5790 goto associate;
5792 case TRUTH_NOT_EXPR:
5793 /* Note that the operand of this must be an int
5794 and its values must be 0 or 1.
5795 ("true" is a fixed value perhaps depending on the language,
5796 but we don't handle values other than 1 correctly yet.) */
5797 tem = invert_truthvalue (arg0);
5798 /* Avoid infinite recursion. */
5799 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5800 return t;
5801 return convert (type, tem);
5803 case TRUTH_ANDIF_EXPR:
5804 /* Note that the operands of this must be ints
5805 and their values must be 0 or 1.
5806 ("true" is a fixed value perhaps depending on the language.) */
5807 /* If first arg is constant zero, return it. */
5808 if (integer_zerop (arg0))
5809 return convert (type, arg0);
5810 case TRUTH_AND_EXPR:
5811 /* If either arg is constant true, drop it. */
5812 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5813 return non_lvalue (convert (type, arg1));
5814 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
5815 /* Preserve sequence points. */
5816 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5817 return non_lvalue (convert (type, arg0));
5818 /* If second arg is constant zero, result is zero, but first arg
5819 must be evaluated. */
5820 if (integer_zerop (arg1))
5821 return omit_one_operand (type, arg1, arg0);
5822 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5823 case will be handled here. */
5824 if (integer_zerop (arg0))
5825 return omit_one_operand (type, arg0, arg1);
5827 truth_andor:
5828 /* We only do these simplifications if we are optimizing. */
5829 if (!optimize)
5830 return t;
5832 /* Check for things like (A || B) && (A || C). We can convert this
5833 to A || (B && C). Note that either operator can be any of the four
5834 truth and/or operations and the transformation will still be
5835 valid. Also note that we only care about order for the
5836 ANDIF and ORIF operators. If B contains side effects, this
5837 might change the truth-value of A. */
5838 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5839 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5840 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5841 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5842 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5843 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5845 tree a00 = TREE_OPERAND (arg0, 0);
5846 tree a01 = TREE_OPERAND (arg0, 1);
5847 tree a10 = TREE_OPERAND (arg1, 0);
5848 tree a11 = TREE_OPERAND (arg1, 1);
5849 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5850 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5851 && (code == TRUTH_AND_EXPR
5852 || code == TRUTH_OR_EXPR));
5854 if (operand_equal_p (a00, a10, 0))
5855 return fold (build (TREE_CODE (arg0), type, a00,
5856 fold (build (code, type, a01, a11))));
5857 else if (commutative && operand_equal_p (a00, a11, 0))
5858 return fold (build (TREE_CODE (arg0), type, a00,
5859 fold (build (code, type, a01, a10))));
5860 else if (commutative && operand_equal_p (a01, a10, 0))
5861 return fold (build (TREE_CODE (arg0), type, a01,
5862 fold (build (code, type, a00, a11))));
5864	  /* This case is tricky because we must either have commutative
5865	     operators or else A10 must not have side-effects.  */
5867 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5868 && operand_equal_p (a01, a11, 0))
5869 return fold (build (TREE_CODE (arg0), type,
5870 fold (build (code, type, a00, a10)),
5871 a01));
5874 /* See if we can build a range comparison. */
5875 if (0 != (tem = fold_range_test (t)))
5876 return tem;
5878 /* Check for the possibility of merging component references. If our
5879 lhs is another similar operation, try to merge its rhs with our
5880 rhs. Then try to merge our lhs and rhs. */
5881 if (TREE_CODE (arg0) == code
5882 && 0 != (tem = fold_truthop (code, type,
5883 TREE_OPERAND (arg0, 1), arg1)))
5884 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5886 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5887 return tem;
5889 return t;
5891 case TRUTH_ORIF_EXPR:
5892 /* Note that the operands of this must be ints
5893 and their values must be 0 or true.
5894 ("true" is a fixed value perhaps depending on the language.) */
5895 /* If first arg is constant true, return it. */
5896 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5897 return convert (type, arg0);
5898 case TRUTH_OR_EXPR:
5899 /* If either arg is constant zero, drop it. */
5900 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5901 return non_lvalue (convert (type, arg1));
5902 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
5903 /* Preserve sequence points. */
5904 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5905 return non_lvalue (convert (type, arg0));
5906 /* If second arg is constant true, result is true, but we must
5907 evaluate first arg. */
5908 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5909 return omit_one_operand (type, arg1, arg0);
5910 /* Likewise for first arg, but note this only occurs here for
5911 TRUTH_OR_EXPR. */
5912 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5913 return omit_one_operand (type, arg0, arg1);
5914 goto truth_andor;
5916 case TRUTH_XOR_EXPR:
5917 /* If either arg is constant zero, drop it. */
5918 if (integer_zerop (arg0))
5919 return non_lvalue (convert (type, arg1));
5920 if (integer_zerop (arg1))
5921 return non_lvalue (convert (type, arg0));
5922 /* If either arg is constant true, this is a logical inversion. */
5923 if (integer_onep (arg0))
5924 return non_lvalue (convert (type, invert_truthvalue (arg1)));
5925 if (integer_onep (arg1))
5926 return non_lvalue (convert (type, invert_truthvalue (arg0)));
5927 return t;
5929 case EQ_EXPR:
5930 case NE_EXPR:
5931 case LT_EXPR:
5932 case GT_EXPR:
5933 case LE_EXPR:
5934 case GE_EXPR:
5935 /* If one arg is a real or integer constant, put it last. */
5936 if ((TREE_CODE (arg0) == INTEGER_CST
5937 && TREE_CODE (arg1) != INTEGER_CST)
5938 || (TREE_CODE (arg0) == REAL_CST
5939 && TREE_CODE (arg0) != REAL_CST))
5941 TREE_OPERAND (t, 0) = arg1;
5942 TREE_OPERAND (t, 1) = arg0;
5943 arg0 = TREE_OPERAND (t, 0);
5944 arg1 = TREE_OPERAND (t, 1);
5945 code = swap_tree_comparison (code);
5946 TREE_SET_CODE (t, code);
5949 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
5951 /* (-a) CMP (-b) -> b CMP a */
5952 if (TREE_CODE (arg0) == NEGATE_EXPR
5953 && TREE_CODE (arg1) == NEGATE_EXPR)
5954 return fold (build (code, type, TREE_OPERAND (arg1, 0),
5955 TREE_OPERAND (arg0, 0)));
5956 /* (-a) CMP CST -> a swap(CMP) (-CST) */
5957 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5958 return
5959 fold (build
5960 (swap_tree_comparison (code), type,
5961 TREE_OPERAND (arg0, 0),
5962 build_real (TREE_TYPE (arg1),
5963 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
5964 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
5965 /* a CMP (-0) -> a CMP 0 */
5966 if (TREE_CODE (arg1) == REAL_CST
5967 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5968 return fold (build (code, type, arg0,
5969 build_real (TREE_TYPE (arg1), dconst0)));
5971 /* If this is a comparison of a real constant with a PLUS_EXPR
5972 or a MINUS_EXPR of a real constant, we can convert it into a
5973 comparison with a revised real constant as long as no overflow
5974 occurs when unsafe_math_optimizations are enabled. */
5975 if (flag_unsafe_math_optimizations
5976 && TREE_CODE (arg1) == REAL_CST
5977 && (TREE_CODE (arg0) == PLUS_EXPR
5978 || TREE_CODE (arg0) == MINUS_EXPR)
5979 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5980 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
5981 ? MINUS_EXPR : PLUS_EXPR,
5982 arg1, TREE_OPERAND (arg0, 1), 0))
5983 && ! TREE_CONSTANT_OVERFLOW (tem))
5984 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5987 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5988 First, see if one arg is constant; find the constant arg
5989 and the other one. */
5991 tree constop = 0, varop = NULL_TREE;
5992 int constopnum = -1;
5994 if (TREE_CONSTANT (arg1))
5995 constopnum = 1, constop = arg1, varop = arg0;
5996 if (TREE_CONSTANT (arg0))
5997 constopnum = 0, constop = arg0, varop = arg1;
5999 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6001 /* This optimization is invalid for ordered comparisons
6002 if CONST+INCR overflows or if foo+incr might overflow.
6003 This optimization is invalid for floating point due to rounding.
6004 For pointer types we assume overflow doesn't happen. */
6005 if (POINTER_TYPE_P (TREE_TYPE (varop))
6006 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6007 && (code == EQ_EXPR || code == NE_EXPR)))
6009 tree newconst
6010 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6011 constop, TREE_OPERAND (varop, 1)));
6013 /* Do not overwrite the current varop to be a preincrement,
6014 create a new node so that we won't confuse our caller who
6015 might create trees and throw them away, reusing the
6016 arguments that they passed to build. This shows up in
6017 the THEN or ELSE parts of ?: being postincrements. */
6018 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6019 TREE_OPERAND (varop, 0),
6020 TREE_OPERAND (varop, 1));
6022 /* If VAROP is a reference to a bitfield, we must mask
6023 the constant by the width of the field. */
6024 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6025 && DECL_BIT_FIELD(TREE_OPERAND
6026 (TREE_OPERAND (varop, 0), 1)))
6028 int size
6029 = TREE_INT_CST_LOW (DECL_SIZE
6030 (TREE_OPERAND
6031 (TREE_OPERAND (varop, 0), 1)));
6032 tree mask, unsigned_type;
6033 unsigned int precision;
6034 tree folded_compare;
6036 /* First check whether the comparison would come out
6037 always the same. If we don't do that we would
6038 change the meaning with the masking. */
6039 if (constopnum == 0)
6040 folded_compare = fold (build (code, type, constop,
6041 TREE_OPERAND (varop, 0)));
6042 else
6043 folded_compare = fold (build (code, type,
6044 TREE_OPERAND (varop, 0),
6045 constop));
6046 if (integer_zerop (folded_compare)
6047 || integer_onep (folded_compare))
6048 return omit_one_operand (type, folded_compare, varop);
6050 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6051 precision = TYPE_PRECISION (unsigned_type);
6052 mask = build_int_2 (~0, ~0);
6053 TREE_TYPE (mask) = unsigned_type;
6054 force_fit_type (mask, 0);
6055 mask = const_binop (RSHIFT_EXPR, mask,
6056 size_int (precision - size), 0);
6057 newconst = fold (build (BIT_AND_EXPR,
6058 TREE_TYPE (varop), newconst,
6059 convert (TREE_TYPE (varop),
6060 mask)));
6063 t = build (code, type,
6064 (constopnum == 0) ? newconst : varop,
6065 (constopnum == 1) ? newconst : varop);
6066 return t;
6069 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6071 if (POINTER_TYPE_P (TREE_TYPE (varop))
6072 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6073 && (code == EQ_EXPR || code == NE_EXPR)))
6075 tree newconst
6076 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6077 constop, TREE_OPERAND (varop, 1)));
6079 /* Do not overwrite the current varop to be a predecrement,
6080 create a new node so that we won't confuse our caller who
6081 might create trees and throw them away, reusing the
6082 arguments that they passed to build. This shows up in
6083 the THEN or ELSE parts of ?: being postdecrements. */
6084 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
6085 TREE_OPERAND (varop, 0),
6086 TREE_OPERAND (varop, 1));
6088 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6089 && DECL_BIT_FIELD(TREE_OPERAND
6090 (TREE_OPERAND (varop, 0), 1)))
6092 int size
6093 = TREE_INT_CST_LOW (DECL_SIZE
6094 (TREE_OPERAND
6095 (TREE_OPERAND (varop, 0), 1)));
6096 tree mask, unsigned_type;
6097 unsigned int precision;
6098 tree folded_compare;
6100 if (constopnum == 0)
6101 folded_compare = fold (build (code, type, constop,
6102 TREE_OPERAND (varop, 0)));
6103 else
6104 folded_compare = fold (build (code, type,
6105 TREE_OPERAND (varop, 0),
6106 constop));
6107 if (integer_zerop (folded_compare)
6108 || integer_onep (folded_compare))
6109 return omit_one_operand (type, folded_compare, varop);
6111 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6112 precision = TYPE_PRECISION (unsigned_type);
6113 mask = build_int_2 (~0, ~0);
6114 TREE_TYPE (mask) = TREE_TYPE (varop);
6115 force_fit_type (mask, 0);
6116 mask = const_binop (RSHIFT_EXPR, mask,
6117 size_int (precision - size), 0);
6118 newconst = fold (build (BIT_AND_EXPR,
6119 TREE_TYPE (varop), newconst,
6120 convert (TREE_TYPE (varop),
6121 mask)));
6124 t = build (code, type,
6125 (constopnum == 0) ? newconst : varop,
6126 (constopnum == 1) ? newconst : varop);
6127 return t;
6132 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6133 This transformation affects the cases which are handled in later
6134 optimizations involving comparisons with non-negative constants. */
6135 if (TREE_CODE (arg1) == INTEGER_CST
6136 && TREE_CODE (arg0) != INTEGER_CST
6137 && tree_int_cst_sgn (arg1) > 0)
6139 switch (code)
6141 case GE_EXPR:
6142 code = GT_EXPR;
6143 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6144 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6145 break;
6147 case LT_EXPR:
6148 code = LE_EXPR;
6149 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6150 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6151 break;
6153 default:
6154 break;
6158 /* Comparisons with the highest or lowest possible integer of
6159 the specified size will have known values. */
6161 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6163 if (TREE_CODE (arg1) == INTEGER_CST
6164 && ! TREE_CONSTANT_OVERFLOW (arg1)
6165 && width <= HOST_BITS_PER_WIDE_INT
6166 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6167 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6169 unsigned HOST_WIDE_INT signed_max;
6170 unsigned HOST_WIDE_INT max, min;
6172 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
6174 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6176 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
6177 min = 0;
6179 else
6181 max = signed_max;
6182 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
6185 if (TREE_INT_CST_HIGH (arg1) == 0
6186 && TREE_INT_CST_LOW (arg1) == max)
6187 switch (code)
6189 case GT_EXPR:
6190 return omit_one_operand (type,
6191 convert (type, integer_zero_node),
6192 arg0);
6193 case GE_EXPR:
6194 code = EQ_EXPR;
6195 TREE_SET_CODE (t, EQ_EXPR);
6196 break;
6197 case LE_EXPR:
6198 return omit_one_operand (type,
6199 convert (type, integer_one_node),
6200 arg0);
6201 case LT_EXPR:
6202 code = NE_EXPR;
6203 TREE_SET_CODE (t, NE_EXPR);
6204 break;
6206 /* The GE_EXPR and LT_EXPR cases above are not normally
6207 reached because of previous transformations. */
6209 default:
6210 break;
6212 else if (TREE_INT_CST_HIGH (arg1) == 0
6213 && TREE_INT_CST_LOW (arg1) == max - 1)
6214 switch (code)
6216 case GT_EXPR:
6217 code = EQ_EXPR;
6218 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6219 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6220 break;
6221 case LE_EXPR:
6222 code = NE_EXPR;
6223 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6224 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6225 break;
6226 default:
6227 break;
6229 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6230 && TREE_INT_CST_LOW (arg1) == min)
6231 switch (code)
6233 case LT_EXPR:
6234 return omit_one_operand (type,
6235 convert (type, integer_zero_node),
6236 arg0);
6237 case LE_EXPR:
6238 code = EQ_EXPR;
6239 TREE_SET_CODE (t, EQ_EXPR);
6240 break;
6242 case GE_EXPR:
6243 return omit_one_operand (type,
6244 convert (type, integer_one_node),
6245 arg0);
6246 case GT_EXPR:
6247 code = NE_EXPR;
6248 TREE_SET_CODE (t, NE_EXPR);
6249 break;
6251 default:
6252 break;
6254 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6255 && TREE_INT_CST_LOW (arg1) == min + 1)
6256 switch (code)
6258 case GE_EXPR:
6259 code = NE_EXPR;
6260 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6261 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6262 break;
6263 case LT_EXPR:
6264 code = EQ_EXPR;
6265 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6266 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6267 break;
6268 default:
6269 break;
6272 else if (TREE_INT_CST_HIGH (arg1) == 0
6273 && TREE_INT_CST_LOW (arg1) == signed_max
6274 && TREE_UNSIGNED (TREE_TYPE (arg1))
6275 /* signed_type does not work on pointer types. */
6276 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6278 /* The following case also applies to X < signed_max+1
6279 and X >= signed_max+1 because previous transformations. */
6280 if (code == LE_EXPR || code == GT_EXPR)
6282 tree st0, st1;
6283 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6284 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6285 return fold
6286 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
6287 type, convert (st0, arg0),
6288 convert (st1, integer_zero_node)));
6294 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6295 a MINUS_EXPR of a constant, we can convert it into a comparison with
6296 a revised constant as long as no overflow occurs. */
6297 if ((code == EQ_EXPR || code == NE_EXPR)
6298 && TREE_CODE (arg1) == INTEGER_CST
6299 && (TREE_CODE (arg0) == PLUS_EXPR
6300 || TREE_CODE (arg0) == MINUS_EXPR)
6301 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6302 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6303 ? MINUS_EXPR : PLUS_EXPR,
6304 arg1, TREE_OPERAND (arg0, 1), 0))
6305 && ! TREE_CONSTANT_OVERFLOW (tem))
6306 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6308 /* Similarly for a NEGATE_EXPR. */
6309 else if ((code == EQ_EXPR || code == NE_EXPR)
6310 && TREE_CODE (arg0) == NEGATE_EXPR
6311 && TREE_CODE (arg1) == INTEGER_CST
6312 && 0 != (tem = negate_expr (arg1))
6313 && TREE_CODE (tem) == INTEGER_CST
6314 && ! TREE_CONSTANT_OVERFLOW (tem))
6315 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6317 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6318 for !=. Don't do this for ordered comparisons due to overflow. */
6319 else if ((code == NE_EXPR || code == EQ_EXPR)
6320 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6321 return fold (build (code, type,
6322 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6324 /* If we are widening one operand of an integer comparison,
6325 see if the other operand is similarly being widened. Perhaps we
6326 can do the comparison in the narrower type. */
6327 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6328 && TREE_CODE (arg0) == NOP_EXPR
6329 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6330 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6331 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6332 || (TREE_CODE (t1) == INTEGER_CST
6333 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6334 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6336 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6337 constant, we can simplify it. */
6338 else if (TREE_CODE (arg1) == INTEGER_CST
6339 && (TREE_CODE (arg0) == MIN_EXPR
6340 || TREE_CODE (arg0) == MAX_EXPR)
6341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6342 return optimize_minmax_comparison (t);
6344 /* If we are comparing an ABS_EXPR with a constant, we can
6345 convert all the cases into explicit comparisons, but they may
6346 well not be faster than doing the ABS and one comparison.
6347 But ABS (X) <= C is a range comparison, which becomes a subtraction
6348 and a comparison, and is probably faster. */
6349 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6350 && TREE_CODE (arg0) == ABS_EXPR
6351 && ! TREE_SIDE_EFFECTS (arg0)
6352 && (0 != (tem = negate_expr (arg1)))
6353 && TREE_CODE (tem) == INTEGER_CST
6354 && ! TREE_CONSTANT_OVERFLOW (tem))
6355 return fold (build (TRUTH_ANDIF_EXPR, type,
6356 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6357 build (LE_EXPR, type,
6358 TREE_OPERAND (arg0, 0), arg1)));
6360 /* If this is an EQ or NE comparison with zero and ARG0 is
6361 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6362 two operations, but the latter can be done in one less insn
6363 on machines that have only two-operand insns or on which a
6364 constant cannot be the first operand. */
6365 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6366 && TREE_CODE (arg0) == BIT_AND_EXPR)
6368 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6369 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6370 return
6371 fold (build (code, type,
6372 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6373 build (RSHIFT_EXPR,
6374 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6375 TREE_OPERAND (arg0, 1),
6376 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6377 convert (TREE_TYPE (arg0),
6378 integer_one_node)),
6379 arg1));
6380 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6381 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6382 return
6383 fold (build (code, type,
6384 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6385 build (RSHIFT_EXPR,
6386 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6387 TREE_OPERAND (arg0, 0),
6388 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6389 convert (TREE_TYPE (arg0),
6390 integer_one_node)),
6391 arg1));
6394 /* If this is an NE or EQ comparison of zero against the result of a
6395 signed MOD operation whose second operand is a power of 2, make
6396 the MOD operation unsigned since it is simpler and equivalent. */
6397 if ((code == NE_EXPR || code == EQ_EXPR)
6398 && integer_zerop (arg1)
6399 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6400 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6401 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6402 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6403 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6404 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6406 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
6407 tree newmod = build (TREE_CODE (arg0), newtype,
6408 convert (newtype, TREE_OPERAND (arg0, 0)),
6409 convert (newtype, TREE_OPERAND (arg0, 1)));
6411 return build (code, type, newmod, convert (newtype, arg1));
6414 /* If this is an NE comparison of zero with an AND of one, remove the
6415 comparison since the AND will give the correct value. */
6416 if (code == NE_EXPR && integer_zerop (arg1)
6417 && TREE_CODE (arg0) == BIT_AND_EXPR
6418 && integer_onep (TREE_OPERAND (arg0, 1)))
6419 return convert (type, arg0);
6421 /* If we have (A & C) == C where C is a power of 2, convert this into
6422 (A & C) != 0. Similarly for NE_EXPR. */
6423 if ((code == EQ_EXPR || code == NE_EXPR)
6424 && TREE_CODE (arg0) == BIT_AND_EXPR
6425 && integer_pow2p (TREE_OPERAND (arg0, 1))
6426 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6427 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6428 arg0, integer_zero_node));
6430 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6431 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6432 if ((code == EQ_EXPR || code == NE_EXPR)
6433 && TREE_CODE (arg0) == BIT_AND_EXPR
6434 && integer_zerop (arg1))
6436 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
6437 TREE_OPERAND (arg0, 1));
6438 if (arg00 != NULL_TREE)
6440 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
6441 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
6442 convert (stype, arg00),
6443 convert (stype, integer_zero_node)));
6447 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6448 and similarly for >= into !=. */
6449 if ((code == LT_EXPR || code == GE_EXPR)
6450 && TREE_UNSIGNED (TREE_TYPE (arg0))
6451 && TREE_CODE (arg1) == LSHIFT_EXPR
6452 && integer_onep (TREE_OPERAND (arg1, 0)))
6453 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6454 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6455 TREE_OPERAND (arg1, 1)),
6456 convert (TREE_TYPE (arg0), integer_zero_node));
6458 else if ((code == LT_EXPR || code == GE_EXPR)
6459 && TREE_UNSIGNED (TREE_TYPE (arg0))
6460 && (TREE_CODE (arg1) == NOP_EXPR
6461 || TREE_CODE (arg1) == CONVERT_EXPR)
6462 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6463 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6464 return
6465 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6466 convert (TREE_TYPE (arg0),
6467 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6468 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6469 convert (TREE_TYPE (arg0), integer_zero_node));
6471 /* Simplify comparison of something with itself. (For IEEE
6472 floating-point, we can only do some of these simplifications.) */
6473 if (operand_equal_p (arg0, arg1, 0))
6475 switch (code)
6477 case EQ_EXPR:
6478 case GE_EXPR:
6479 case LE_EXPR:
6480 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
6481 return constant_boolean_node (1, type);
6482 code = EQ_EXPR;
6483 TREE_SET_CODE (t, code);
6484 break;
6486 case NE_EXPR:
6487 /* For NE, we can only do this simplification if integer. */
6488 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6489 break;
6490 /* ... fall through ... */
6491 case GT_EXPR:
6492 case LT_EXPR:
6493 return constant_boolean_node (0, type);
6494 default:
6495 abort ();
6499 /* If we are comparing an expression that just has comparisons
6500 of two integer values, arithmetic expressions of those comparisons,
6501 and constants, we can simplify it. There are only three cases
6502 to check: the two values can either be equal, the first can be
6503 greater, or the second can be greater. Fold the expression for
6504 those three values. Since each value must be 0 or 1, we have
6505 eight possibilities, each of which corresponds to the constant 0
6506 or 1 or one of the six possible comparisons.
6508 This handles common cases like (a > b) == 0 but also handles
6509 expressions like ((x > y) - (y > x)) > 0, which supposedly
6510 occur in macroized code. */
6512 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6514 tree cval1 = 0, cval2 = 0;
6515 int save_p = 0;
6517 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6518 /* Don't handle degenerate cases here; they should already
6519 have been handled anyway. */
6520 && cval1 != 0 && cval2 != 0
6521 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6522 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6523 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6524 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6525 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6526 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6527 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6529 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6530 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6532 /* We can't just pass T to eval_subst in case cval1 or cval2
6533 was the same as ARG1. */
6535 tree high_result
6536 = fold (build (code, type,
6537 eval_subst (arg0, cval1, maxval, cval2, minval),
6538 arg1));
6539 tree equal_result
6540 = fold (build (code, type,
6541 eval_subst (arg0, cval1, maxval, cval2, maxval),
6542 arg1));
6543 tree low_result
6544 = fold (build (code, type,
6545 eval_subst (arg0, cval1, minval, cval2, maxval),
6546 arg1));
6548 /* All three of these results should be 0 or 1. Confirm they
6549 are. Then use those values to select the proper code
6550 to use. */
6552 if ((integer_zerop (high_result)
6553 || integer_onep (high_result))
6554 && (integer_zerop (equal_result)
6555 || integer_onep (equal_result))
6556 && (integer_zerop (low_result)
6557 || integer_onep (low_result)))
6559 /* Make a 3-bit mask with the high-order bit being the
6560 value for `>', the next for '=', and the low for '<'. */
6561 switch ((integer_onep (high_result) * 4)
6562 + (integer_onep (equal_result) * 2)
6563 + integer_onep (low_result))
6565 case 0:
6566 /* Always false. */
6567 return omit_one_operand (type, integer_zero_node, arg0);
6568 case 1:
6569 code = LT_EXPR;
6570 break;
6571 case 2:
6572 code = EQ_EXPR;
6573 break;
6574 case 3:
6575 code = LE_EXPR;
6576 break;
6577 case 4:
6578 code = GT_EXPR;
6579 break;
6580 case 5:
6581 code = NE_EXPR;
6582 break;
6583 case 6:
6584 code = GE_EXPR;
6585 break;
6586 case 7:
6587 /* Always true. */
6588 return omit_one_operand (type, integer_one_node, arg0);
6591 t = build (code, type, cval1, cval2);
6592 if (save_p)
6593 return save_expr (t);
6594 else
6595 return fold (t);
6600 /* If this is a comparison of a field, we may be able to simplify it. */
6601 if ((TREE_CODE (arg0) == COMPONENT_REF
6602 || TREE_CODE (arg0) == BIT_FIELD_REF)
6603 && (code == EQ_EXPR || code == NE_EXPR)
6604 /* Handle the constant case even without -O
6605 to make sure the warnings are given. */
6606 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6608 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6609 return t1 ? t1 : t;
6612 /* If this is a comparison of complex values and either or both sides
6613 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6614 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6615 This may prevent needless evaluations. */
6616 if ((code == EQ_EXPR || code == NE_EXPR)
6617 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6618 && (TREE_CODE (arg0) == COMPLEX_EXPR
6619 || TREE_CODE (arg1) == COMPLEX_EXPR
6620 || TREE_CODE (arg0) == COMPLEX_CST
6621 || TREE_CODE (arg1) == COMPLEX_CST))
6623 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6624 tree real0, imag0, real1, imag1;
6626 arg0 = save_expr (arg0);
6627 arg1 = save_expr (arg1);
6628 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6629 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6630 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6631 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6633 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6634 : TRUTH_ORIF_EXPR),
6635 type,
6636 fold (build (code, type, real0, real1)),
6637 fold (build (code, type, imag0, imag1))));
6640 /* Optimize comparisons of strlen vs zero to a compare of the
6641 first character of the string vs zero. To wit,
6642 strlen(ptr) == 0 => *ptr == 0
6643 strlen(ptr) != 0 => *ptr != 0
6644 Other cases should reduce to one of these two (or a constant)
6645 due to the return value of strlen being unsigned. */
6646 if ((code == EQ_EXPR || code == NE_EXPR)
6647 && integer_zerop (arg1)
6648 && TREE_CODE (arg0) == CALL_EXPR
6649 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
6651 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6652 tree arglist;
6654 if (TREE_CODE (fndecl) == FUNCTION_DECL
6655 && DECL_BUILT_IN (fndecl)
6656 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
6657 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
6658 && (arglist = TREE_OPERAND (arg0, 1))
6659 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
6660 && ! TREE_CHAIN (arglist))
6661 return fold (build (code, type,
6662 build1 (INDIRECT_REF, char_type_node,
6663 TREE_VALUE(arglist)),
6664 integer_zero_node));
6667 /* From here on, the only cases we handle are when the result is
6668 known to be a constant.
6670 To compute GT, swap the arguments and do LT.
6671 To compute GE, do LT and invert the result.
6672 To compute LE, swap the arguments, do LT and invert the result.
6673 To compute NE, do EQ and invert the result.
6675 Therefore, the code below must handle only EQ and LT. */
6677 if (code == LE_EXPR || code == GT_EXPR)
6679 tem = arg0, arg0 = arg1, arg1 = tem;
6680 code = swap_tree_comparison (code);
6683 /* Note that it is safe to invert for real values here because we
6684 will check below in the one case that it matters. */
6686 t1 = NULL_TREE;
6687 invert = 0;
6688 if (code == NE_EXPR || code == GE_EXPR)
6690 invert = 1;
6691 code = invert_tree_comparison (code);
6694 /* Compute a result for LT or EQ if args permit;
6695 otherwise return T. */
6696 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6698 if (code == EQ_EXPR)
6699 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6700 else
6701 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6702 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6703 : INT_CST_LT (arg0, arg1)),
6707 #if 0 /* This is no longer useful, but breaks some real code. */
6708 /* Assume a nonexplicit constant cannot equal an explicit one,
6709 since such code would be undefined anyway.
6710 Exception: on sysvr4, using #pragma weak,
6711 a label can come out as 0. */
6712 else if (TREE_CODE (arg1) == INTEGER_CST
6713 && !integer_zerop (arg1)
6714 && TREE_CONSTANT (arg0)
6715 && TREE_CODE (arg0) == ADDR_EXPR
6716 && code == EQ_EXPR)
6717 t1 = build_int_2 (0, 0);
6718 #endif
6719 /* Two real constants can be compared explicitly. */
6720 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6722 /* If either operand is a NaN, the result is false with two
6723 exceptions: First, an NE_EXPR is true on NaNs, but that case
6724 is already handled correctly since we will be inverting the
6725 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6726 or a GE_EXPR into a LT_EXPR, we must return true so that it
6727 will be inverted into false. */
6729 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6730 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6731 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6733 else if (code == EQ_EXPR)
6734 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6735 TREE_REAL_CST (arg1)),
6737 else
6738 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6739 TREE_REAL_CST (arg1)),
6743 if (t1 == NULL_TREE)
6744 return t;
6746 if (invert)
6747 TREE_INT_CST_LOW (t1) ^= 1;
6749 TREE_TYPE (t1) = type;
6750 if (TREE_CODE (type) == BOOLEAN_TYPE)
6751 return (*lang_hooks.truthvalue_conversion) (t1);
6752 return t1;
6754 case COND_EXPR:
6755 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6756 so all simple results must be passed through pedantic_non_lvalue. */
6757 if (TREE_CODE (arg0) == INTEGER_CST)
6758 return pedantic_non_lvalue
6759 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6760 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6761 return pedantic_omit_one_operand (type, arg1, arg0);
6763 /* If the second operand is zero, invert the comparison and swap
6764 the second and third operands. Likewise if the second operand
6765 is constant and the third is not or if the third operand is
6766 equivalent to the first operand of the comparison. */
6768 if (integer_zerop (arg1)
6769 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6770 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6771 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6772 TREE_OPERAND (t, 2),
6773 TREE_OPERAND (arg0, 1))))
6775 /* See if this can be inverted. If it can't, possibly because
6776 it was a floating-point inequality comparison, don't do
6777 anything. */
6778 tem = invert_truthvalue (arg0);
6780 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6782 t = build (code, type, tem,
6783 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6784 arg0 = tem;
6785 /* arg1 should be the first argument of the new T. */
6786 arg1 = TREE_OPERAND (t, 1);
6787 STRIP_NOPS (arg1);
6791 /* If we have A op B ? A : C, we may be able to convert this to a
6792 simpler expression, depending on the operation and the values
6793 of B and C. Signed zeros prevent all of these transformations,
6794 for reasons given above each one. */
6796 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6797 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6798 arg1, TREE_OPERAND (arg0, 1))
6799 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
6801 tree arg2 = TREE_OPERAND (t, 2);
6802 enum tree_code comp_code = TREE_CODE (arg0);
6804 STRIP_NOPS (arg2);
6806 /* If we have A op 0 ? A : -A, consider applying the following
6807 transformations:
6809 A == 0? A : -A same as -A
6810 A != 0? A : -A same as A
6811 A >= 0? A : -A same as abs (A)
6812 A > 0? A : -A same as abs (A)
6813 A <= 0? A : -A same as -abs (A)
6814 A < 0? A : -A same as -abs (A)
6816 None of these transformations work for modes with signed
6817 zeros. If A is +/-0, the first two transformations will
6818 change the sign of the result (from +0 to -0, or vice
6819 versa). The last four will fix the sign of the result,
6820 even though the original expressions could be positive or
6821 negative, depending on the sign of A.
6823 Note that all these transformations are correct if A is
6824 NaN, since the two alternatives (A and -A) are also NaNs. */
6825 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6826 ? real_zerop (TREE_OPERAND (arg0, 1))
6827 : integer_zerop (TREE_OPERAND (arg0, 1)))
6828 && TREE_CODE (arg2) == NEGATE_EXPR
6829 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6830 switch (comp_code)
6832 case EQ_EXPR:
6833 return
6834 pedantic_non_lvalue
6835 (convert (type,
6836 negate_expr
6837 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
6838 arg1))));
6839 case NE_EXPR:
6840 return pedantic_non_lvalue (convert (type, arg1));
6841 case GE_EXPR:
6842 case GT_EXPR:
6843 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6844 arg1 = convert ((*lang_hooks.types.signed_type)
6845 (TREE_TYPE (arg1)), arg1);
6846 return pedantic_non_lvalue
6847 (convert (type, fold (build1 (ABS_EXPR,
6848 TREE_TYPE (arg1), arg1))));
6849 case LE_EXPR:
6850 case LT_EXPR:
6851 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6852 arg1 = convert ((lang_hooks.types.signed_type)
6853 (TREE_TYPE (arg1)), arg1);
6854 return pedantic_non_lvalue
6855 (negate_expr (convert (type,
6856 fold (build1 (ABS_EXPR,
6857 TREE_TYPE (arg1),
6858 arg1)))));
6859 default:
6860 abort ();
6863 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6864 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6865 both transformations are correct when A is NaN: A != 0
6866 is then true, and A == 0 is false. */
6868 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6870 if (comp_code == NE_EXPR)
6871 return pedantic_non_lvalue (convert (type, arg1));
6872 else if (comp_code == EQ_EXPR)
6873 return pedantic_non_lvalue (convert (type, integer_zero_node));
6876 /* Try some transformations of A op B ? A : B.
6878 A == B? A : B same as B
6879 A != B? A : B same as A
6880 A >= B? A : B same as max (A, B)
6881 A > B? A : B same as max (B, A)
6882 A <= B? A : B same as min (A, B)
6883 A < B? A : B same as min (B, A)
6885 As above, these transformations don't work in the presence
6886 of signed zeros. For example, if A and B are zeros of
6887 opposite sign, the first two transformations will change
6888 the sign of the result. In the last four, the original
6889 expressions give different results for (A=+0, B=-0) and
6890 (A=-0, B=+0), but the transformed expressions do not.
6892 The first two transformations are correct if either A or B
6893 is a NaN. In the first transformation, the condition will
6894 be false, and B will indeed be chosen. In the case of the
6895 second transformation, the condition A != B will be true,
6896 and A will be chosen.
6898 The conversions to max() and min() are not correct if B is
6899 a number and A is not. The conditions in the original
6900 expressions will be false, so all four give B. The min()
6901 and max() versions would give a NaN instead. */
6902 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6903 arg2, TREE_OPERAND (arg0, 0)))
6905 tree comp_op0 = TREE_OPERAND (arg0, 0);
6906 tree comp_op1 = TREE_OPERAND (arg0, 1);
6907 tree comp_type = TREE_TYPE (comp_op0);
6909 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
6910 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
6911 comp_type = type;
6913 switch (comp_code)
6915 case EQ_EXPR:
6916 return pedantic_non_lvalue (convert (type, arg2));
6917 case NE_EXPR:
6918 return pedantic_non_lvalue (convert (type, arg1));
6919 case LE_EXPR:
6920 case LT_EXPR:
6921 /* In C++ a ?: expression can be an lvalue, so put the
6922 operand which will be used if they are equal first
6923 so that we can convert this back to the
6924 corresponding COND_EXPR. */
6925 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6926 return pedantic_non_lvalue
6927 (convert (type, fold (build (MIN_EXPR, comp_type,
6928 (comp_code == LE_EXPR
6929 ? comp_op0 : comp_op1),
6930 (comp_code == LE_EXPR
6931 ? comp_op1 : comp_op0)))));
6932 break;
6933 case GE_EXPR:
6934 case GT_EXPR:
6935 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6936 return pedantic_non_lvalue
6937 (convert (type, fold (build (MAX_EXPR, comp_type,
6938 (comp_code == GE_EXPR
6939 ? comp_op0 : comp_op1),
6940 (comp_code == GE_EXPR
6941 ? comp_op1 : comp_op0)))));
6942 break;
6943 default:
6944 abort ();
6948 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6949 we might still be able to simplify this. For example,
6950 if C1 is one less or one more than C2, this might have started
6951 out as a MIN or MAX and been transformed by this function.
6952 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6954 if (INTEGRAL_TYPE_P (type)
6955 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6956 && TREE_CODE (arg2) == INTEGER_CST)
6957 switch (comp_code)
6959 case EQ_EXPR:
6960 /* We can replace A with C1 in this case. */
6961 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6962 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6963 TREE_OPERAND (t, 2));
6964 break;
6966 case LT_EXPR:
6967 /* If C1 is C2 + 1, this is min(A, C2). */
6968 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6969 && operand_equal_p (TREE_OPERAND (arg0, 1),
6970 const_binop (PLUS_EXPR, arg2,
6971 integer_one_node, 0), 1))
6972 return pedantic_non_lvalue
6973 (fold (build (MIN_EXPR, type, arg1, arg2)));
6974 break;
6976 case LE_EXPR:
6977 /* If C1 is C2 - 1, this is min(A, C2). */
6978 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6979 && operand_equal_p (TREE_OPERAND (arg0, 1),
6980 const_binop (MINUS_EXPR, arg2,
6981 integer_one_node, 0), 1))
6982 return pedantic_non_lvalue
6983 (fold (build (MIN_EXPR, type, arg1, arg2)));
6984 break;
6986 case GT_EXPR:
6987 /* If C1 is C2 - 1, this is max(A, C2). */
6988 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6989 && operand_equal_p (TREE_OPERAND (arg0, 1),
6990 const_binop (MINUS_EXPR, arg2,
6991 integer_one_node, 0), 1))
6992 return pedantic_non_lvalue
6993 (fold (build (MAX_EXPR, type, arg1, arg2)));
6994 break;
6996 case GE_EXPR:
6997 /* If C1 is C2 + 1, this is max(A, C2). */
6998 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6999 && operand_equal_p (TREE_OPERAND (arg0, 1),
7000 const_binop (PLUS_EXPR, arg2,
7001 integer_one_node, 0), 1))
7002 return pedantic_non_lvalue
7003 (fold (build (MAX_EXPR, type, arg1, arg2)));
7004 break;
7005 case NE_EXPR:
7006 break;
7007 default:
7008 abort ();
7012 /* If the second operand is simpler than the third, swap them
7013 since that produces better jump optimization results. */
7014 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7015 || TREE_CODE (arg1) == SAVE_EXPR)
7016 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7017 || DECL_P (TREE_OPERAND (t, 2))
7018 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7020 /* See if this can be inverted. If it can't, possibly because
7021 it was a floating-point inequality comparison, don't do
7022 anything. */
7023 tem = invert_truthvalue (arg0);
7025 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7027 t = build (code, type, tem,
7028 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7029 arg0 = tem;
7030 /* arg1 should be the first argument of the new T. */
7031 arg1 = TREE_OPERAND (t, 1);
7032 STRIP_NOPS (arg1);
7036 /* Convert A ? 1 : 0 to simply A. */
7037 if (integer_onep (TREE_OPERAND (t, 1))
7038 && integer_zerop (TREE_OPERAND (t, 2))
7039 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7040 call to fold will try to move the conversion inside
7041 a COND, which will recurse. In that case, the COND_EXPR
7042 is probably the best choice, so leave it alone. */
7043 && type == TREE_TYPE (arg0))
7044 return pedantic_non_lvalue (arg0);
7046 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7047 operation is simply A & 2. */
7049 if (integer_zerop (TREE_OPERAND (t, 2))
7050 && TREE_CODE (arg0) == NE_EXPR
7051 && integer_zerop (TREE_OPERAND (arg0, 1))
7052 && integer_pow2p (arg1)
7053 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7054 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7055 arg1, 1))
7056 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7058 return t;
7060 case COMPOUND_EXPR:
7061 /* When pedantic, a compound expression can be neither an lvalue
7062 nor an integer constant expression. */
7063 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7064 return t;
7065 /* Don't let (0, 0) be null pointer constant. */
7066 if (integer_zerop (arg1))
7067 return build1 (NOP_EXPR, type, arg1);
7068 return convert (type, arg1);
7070 case COMPLEX_EXPR:
7071 if (wins)
7072 return build_complex (type, arg0, arg1);
7073 return t;
7075 case REALPART_EXPR:
7076 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7077 return t;
7078 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7079 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7080 TREE_OPERAND (arg0, 1));
7081 else if (TREE_CODE (arg0) == COMPLEX_CST)
7082 return TREE_REALPART (arg0);
7083 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7084 return fold (build (TREE_CODE (arg0), type,
7085 fold (build1 (REALPART_EXPR, type,
7086 TREE_OPERAND (arg0, 0))),
7087 fold (build1 (REALPART_EXPR,
7088 type, TREE_OPERAND (arg0, 1)))));
7089 return t;
7091 case IMAGPART_EXPR:
7092 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7093 return convert (type, integer_zero_node);
7094 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7095 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7096 TREE_OPERAND (arg0, 0));
7097 else if (TREE_CODE (arg0) == COMPLEX_CST)
7098 return TREE_IMAGPART (arg0);
7099 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7100 return fold (build (TREE_CODE (arg0), type,
7101 fold (build1 (IMAGPART_EXPR, type,
7102 TREE_OPERAND (arg0, 0))),
7103 fold (build1 (IMAGPART_EXPR, type,
7104 TREE_OPERAND (arg0, 1)))));
7105 return t;
7107 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7108 appropriate. */
7109 case CLEANUP_POINT_EXPR:
7110 if (! has_cleanups (arg0))
7111 return TREE_OPERAND (t, 0);
7114 enum tree_code code0 = TREE_CODE (arg0);
7115 int kind0 = TREE_CODE_CLASS (code0);
7116 tree arg00 = TREE_OPERAND (arg0, 0);
7117 tree arg01;
7119 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7120 return fold (build1 (code0, type,
7121 fold (build1 (CLEANUP_POINT_EXPR,
7122 TREE_TYPE (arg00), arg00))));
7124 if (kind0 == '<' || kind0 == '2'
7125 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7126 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7127 || code0 == TRUTH_XOR_EXPR)
7129 arg01 = TREE_OPERAND (arg0, 1);
7131 if (TREE_CONSTANT (arg00)
7132 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7133 && ! has_cleanups (arg00)))
7134 return fold (build (code0, type, arg00,
7135 fold (build1 (CLEANUP_POINT_EXPR,
7136 TREE_TYPE (arg01), arg01))));
7138 if (TREE_CONSTANT (arg01))
7139 return fold (build (code0, type,
7140 fold (build1 (CLEANUP_POINT_EXPR,
7141 TREE_TYPE (arg00), arg00)),
7142 arg01));
7145 return t;
7148 case CALL_EXPR:
7149 /* Check for a built-in function. */
7150 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
7151 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
7152 == FUNCTION_DECL)
7153 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
7155 tree tmp = fold_builtin (expr);
7156 if (tmp)
7157 return tmp;
7159 return t;
7161 default:
7162 return t;
7163 } /* switch (code) */
7166 /* Determine if first argument is a multiple of second argument. Return 0 if
7167 it is not, or we cannot easily determine it to be.
7169 An example of the sort of thing we care about (at this point; this routine
7170 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7171 fold cases do now) is discovering that
7173 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7175 is a multiple of
7177 SAVE_EXPR (J * 8)
7179 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7181 This code also handles discovering that
7183 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7185 is a multiple of 8 so we don't have to worry about dealing with a
7186 possible remainder.
7188 Note that we *look* inside a SAVE_EXPR only to determine how it was
7189 calculated; it is not safe for fold to do much of anything else with the
7190 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7191 at run time. For example, the latter example above *cannot* be implemented
7192 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7193 evaluation time of the original SAVE_EXPR is not necessarily the same at
7194 the time the new expression is evaluated. The only optimization of this
7195 sort that would be valid is changing
7197 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7199 divided by 8 to
7201 SAVE_EXPR (I) * SAVE_EXPR (J)
7203 (where the same SAVE_EXPR (J) is used in the original and the
7204 transformed version). */
7206 static int
7207 multiple_of_p (type, top, bottom)
7208 tree type;
7209 tree top;
7210 tree bottom;
7212 if (operand_equal_p (top, bottom, 0))
7213 return 1;
7215 if (TREE_CODE (type) != INTEGER_TYPE)
7216 return 0;
7218 switch (TREE_CODE (top))
7220 case MULT_EXPR:
7221 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7222 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7224 case PLUS_EXPR:
7225 case MINUS_EXPR:
7226 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7227 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7229 case LSHIFT_EXPR:
7230 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7232 tree op1, t1;
7234 op1 = TREE_OPERAND (top, 1);
7235 /* const_binop may not detect overflow correctly,
7236 so check for it explicitly here. */
7237 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
7238 > TREE_INT_CST_LOW (op1)
7239 && TREE_INT_CST_HIGH (op1) == 0
7240 && 0 != (t1 = convert (type,
7241 const_binop (LSHIFT_EXPR, size_one_node,
7242 op1, 0)))
7243 && ! TREE_OVERFLOW (t1))
7244 return multiple_of_p (type, t1, bottom);
7246 return 0;
7248 case NOP_EXPR:
7249 /* Can't handle conversions from non-integral or wider integral type. */
7250 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7251 || (TYPE_PRECISION (type)
7252 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7253 return 0;
7255 /* .. fall through ... */
7257 case SAVE_EXPR:
7258 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7260 case INTEGER_CST:
7261 if (TREE_CODE (bottom) != INTEGER_CST
7262 || (TREE_UNSIGNED (type)
7263 && (tree_int_cst_sgn (top) < 0
7264 || tree_int_cst_sgn (bottom) < 0)))
7265 return 0;
7266 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7267 top, bottom, 0));
7269 default:
7270 return 0;
7274 /* Return true if `t' is known to be non-negative. */
7277 tree_expr_nonnegative_p (t)
7278 tree t;
7280 switch (TREE_CODE (t))
7282 case ABS_EXPR:
7283 case FFS_EXPR:
7284 return 1;
7285 case INTEGER_CST:
7286 return tree_int_cst_sgn (t) >= 0;
7287 case TRUNC_DIV_EXPR:
7288 case CEIL_DIV_EXPR:
7289 case FLOOR_DIV_EXPR:
7290 case ROUND_DIV_EXPR:
7291 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7292 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7293 case TRUNC_MOD_EXPR:
7294 case CEIL_MOD_EXPR:
7295 case FLOOR_MOD_EXPR:
7296 case ROUND_MOD_EXPR:
7297 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7298 case COND_EXPR:
7299 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
7300 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
7301 case COMPOUND_EXPR:
7302 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7303 case MIN_EXPR:
7304 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7305 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7306 case MAX_EXPR:
7307 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7308 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7309 case MODIFY_EXPR:
7310 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7311 case BIND_EXPR:
7312 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7313 case SAVE_EXPR:
7314 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7315 case NON_LVALUE_EXPR:
7316 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7317 case RTL_EXPR:
7318 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
7320 default:
7321 if (truth_value_p (TREE_CODE (t)))
7322 /* Truth values evaluate to 0 or 1, which is nonnegative. */
7323 return 1;
7324 else
7325 /* We don't know sign of `t', so be conservative and return false. */
7326 return 0;
7330 /* Return true if `r' is known to be non-negative.
7331 Only handles constants at the moment. */
7334 rtl_expr_nonnegative_p (r)
7335 rtx r;
7337 switch (GET_CODE (r))
7339 case CONST_INT:
7340 return INTVAL (r) >= 0;
7342 case CONST_DOUBLE:
7343 if (GET_MODE (r) == VOIDmode)
7344 return CONST_DOUBLE_HIGH (r) >= 0;
7345 return 0;
7347 case CONST_VECTOR:
7349 int units, i;
7350 rtx elt;
7352 units = CONST_VECTOR_NUNITS (r);
7354 for (i = 0; i < units; ++i)
7356 elt = CONST_VECTOR_ELT (r, i);
7357 if (!rtl_expr_nonnegative_p (elt))
7358 return 0;
7361 return 1;
7364 case SYMBOL_REF:
7365 case LABEL_REF:
7366 /* These are always nonnegative. */
7367 return 1;
7369 default:
7370 return 0;
7374 #include "gt-fold-const.h"