2003-06-19  Aldy Hernandez  <aldyh@redhat.com>
[official-gcc.git] / gcc / fold-const.c
blob 5ead9fe86f42df52f67918a981806ee10b317402
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the arbitrary-precision rep
26 @@ should warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
60 static void encode PARAMS ((HOST_WIDE_INT *,
61 unsigned HOST_WIDE_INT,
62 HOST_WIDE_INT));
63 static void decode PARAMS ((HOST_WIDE_INT *,
64 unsigned HOST_WIDE_INT *,
65 HOST_WIDE_INT *));
66 static bool negate_expr_p PARAMS ((tree));
67 static tree negate_expr PARAMS ((tree));
68 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
69 tree *, int));
70 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
71 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
72 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
73 static hashval_t size_htab_hash PARAMS ((const void *));
74 static int size_htab_eq PARAMS ((const void *, const void *));
75 static tree fold_convert PARAMS ((tree, tree));
76 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
77 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
78 static int comparison_to_compcode PARAMS ((enum tree_code));
79 static enum tree_code compcode_to_comparison PARAMS ((int));
80 static int truth_value_p PARAMS ((enum tree_code));
81 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
82 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
83 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
84 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
85 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
86 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
87 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
88 tree, tree));
89 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
90 HOST_WIDE_INT *,
91 enum machine_mode *, int *,
92 int *, tree *, tree *));
93 static int all_ones_mask_p PARAMS ((tree, int));
94 static tree sign_bit_p PARAMS ((tree, tree));
95 static int simple_operand_p PARAMS ((tree));
96 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
97 tree, int));
98 static tree make_range PARAMS ((tree, int *, tree *, tree *));
99 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
100 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
101 int, tree, tree));
102 static tree fold_range_test PARAMS ((tree));
103 static tree unextend PARAMS ((tree, int, int, tree));
104 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
105 static tree optimize_minmax_comparison PARAMS ((tree));
106 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
107 static tree extract_muldiv_1 PARAMS ((tree, tree, enum tree_code, tree));
108 static tree strip_compound_expr PARAMS ((tree, tree));
109 static int multiple_of_p PARAMS ((tree, tree, tree));
110 static tree constant_boolean_node PARAMS ((int, tree));
111 static int count_cond PARAMS ((tree, int));
112 static tree fold_binary_op_with_conditional_arg
113 PARAMS ((enum tree_code, tree, tree, tree, int));
114 static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));
115 static tree fold_mathfn_compare PARAMS ((enum built_in_function,
116 enum tree_code, tree, tree, tree));
117 static tree fold_inf_compare PARAMS ((enum tree_code, tree, tree, tree));
119 /* The following constants represent a bit-based encoding of GCC's
120 comparison operators. This encoding simplifies transformations
121 on relational comparison operators, such as AND and OR. */
122 #define COMPCODE_FALSE 0
123 #define COMPCODE_LT 1
124 #define COMPCODE_EQ 2
125 #define COMPCODE_LE 3
126 #define COMPCODE_GT 4
127 #define COMPCODE_NE 5
128 #define COMPCODE_GE 6
129 #define COMPCODE_TRUE 7
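
/* Editorial sketch, not part of the original file: the three low bits
   encode "less", "equal" and "greater", so COMPCODE_LE == COMPCODE_LT
   | COMPCODE_EQ, COMPCODE_NE == COMPCODE_LT | COMPCODE_GT, and so on.
   Combining two comparisons of the same operands therefore reduces to
   bitwise arithmetic on their compcodes; the example_* name below is
   hypothetical.  */
#if 0
static enum tree_code
example_and_compcodes (code1, code2)
     enum tree_code code1, code2;
{
  /* (a <= b) && (a >= b): COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2,
     which is COMPCODE_EQ, i.e. a == b.  Note the result can also be
     COMPCODE_FALSE or COMPCODE_TRUE, for which compcode_to_comparison
     has no tree code.  */
  return compcode_to_comparison (comparison_to_compcode (code1)
				 & comparison_to_compcode (code2));
}
#endif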
131 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
132 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
133 and SUM1. Then this yields nonzero if overflow occurred during the
134 addition.
136 Overflow occurs if A and B have the same sign, but A and SUM differ in
137 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
138 sign. */
139 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
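
/* Editorial sketch, not part of the original file: the most positive
   HOST_WIDE_INT plus 1 wraps to the most negative value; both addends
   are nonnegative but the sum is negative, so the macro yields 1.  */
#if 0
static void
example_overflow_sum_sign ()
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) (~(unsigned HOST_WIDE_INT) 0 >> 1);
  HOST_WIDE_INT b = 1;
  /* Compute the wrapped sum via unsigned arithmetic.  */
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a + b);
  int overflowed = OVERFLOW_SUM_SIGN (a, b, sum);	/* == 1 */
}
#endif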
141 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
142 We do that by representing the two-word integer in 4 words, with only
143 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
144 number. The value of the word is LOWPART + HIGHPART * BASE. */
146 #define LOWPART(x) \
147 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
148 #define HIGHPART(x) \
149 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
150 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
152 /* Unpack a two-word integer into 4 words.
153 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
154 WORDS points to the array of HOST_WIDE_INTs. */
156 static void
157 encode (words, low, hi)
158 HOST_WIDE_INT *words;
159 unsigned HOST_WIDE_INT low;
160 HOST_WIDE_INT hi;
162 words[0] = LOWPART (low);
163 words[1] = HIGHPART (low);
164 words[2] = LOWPART (hi);
165 words[3] = HIGHPART (hi);
168 /* Pack an array of 4 words into a two-word integer.
169 WORDS points to the array of words.
170 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
172 static void
173 decode (words, low, hi)
174 HOST_WIDE_INT *words;
175 unsigned HOST_WIDE_INT *low;
176 HOST_WIDE_INT *hi;
178 *low = words[0] + words[1] * BASE;
179 *hi = words[2] + words[3] * BASE;
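
/* Editorial sketch, not part of the original file: a doubleword value
   survives an encode/decode round trip.  Assuming a 32-bit
   HOST_WIDE_INT, the low word 0x12345678 is stored as the two
   base-2^16 digits 0x5678 and 0x1234.  */
#if 0
static void
example_encode_decode ()
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  encode (words, 0x12345678, 0);	/* words = {0x5678, 0x1234, 0, 0} */
  decode (words, &lo, &hi);		/* lo == 0x12345678, hi == 0 */
}
#endif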
182 /* Make the integer constant T valid for its type by setting to 0 or 1 all
183 the bits in the constant that don't belong in the type.
185 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
186 nonzero, a signed overflow has already occurred in calculating T, so
187 propagate it. */
189 int
190 force_fit_type (t, overflow)
191 tree t;
192 int overflow;
194 unsigned HOST_WIDE_INT low;
195 HOST_WIDE_INT high;
196 unsigned int prec;
198 if (TREE_CODE (t) == REAL_CST)
200 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
201 Consider doing it via real_convert now. */
202 return overflow;
205 else if (TREE_CODE (t) != INTEGER_CST)
206 return overflow;
208 low = TREE_INT_CST_LOW (t);
209 high = TREE_INT_CST_HIGH (t);
211 if (POINTER_TYPE_P (TREE_TYPE (t)))
212 prec = POINTER_SIZE;
213 else
214 prec = TYPE_PRECISION (TREE_TYPE (t));
216 /* First clear all bits that are beyond the type's precision. */
218 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
220 else if (prec > HOST_BITS_PER_WIDE_INT)
221 TREE_INT_CST_HIGH (t)
222 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
223 else
225 TREE_INT_CST_HIGH (t) = 0;
226 if (prec < HOST_BITS_PER_WIDE_INT)
227 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
230 /* Unsigned types do not suffer sign extension or overflow unless they
231 are a sizetype. */
232 if (TREE_UNSIGNED (TREE_TYPE (t))
233 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
234 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
235 return overflow;
237 /* If the value's sign bit is set, extend the sign. */
238 if (prec != 2 * HOST_BITS_PER_WIDE_INT
239 && (prec > HOST_BITS_PER_WIDE_INT
240 ? 0 != (TREE_INT_CST_HIGH (t)
241 & ((HOST_WIDE_INT) 1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 : 0 != (TREE_INT_CST_LOW (t)
244 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
246 /* Value is negative:
247 set to 1 all the bits that are outside this type's precision. */
248 if (prec > HOST_BITS_PER_WIDE_INT)
249 TREE_INT_CST_HIGH (t)
250 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
251 else
253 TREE_INT_CST_HIGH (t) = -1;
254 if (prec < HOST_BITS_PER_WIDE_INT)
255 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
259 /* Return nonzero if signed overflow occurred. */
260 return
261 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
262 != 0);
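
/* Editorial sketch, not part of the original file: given T with value
   0xff and a signed 8-bit type, the bits above bit 7 are first
   cleared; then, because bit 7 is set, the value is sign-extended to
   -1.  The value changed, so signed overflow is reported.  QI_TYPE
   below stands for any 8-bit signed integer type.  */
#if 0
static void
example_force_fit_type (qi_type)
     tree qi_type;
{
  tree t = build_int_2 (0xff, 0);
  TREE_TYPE (t) = qi_type;
  if (force_fit_type (t, 0))
    ;	/* reached: TREE_INT_CST_LOW (t) is now all ones, i.e. -1 */
}
#endif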
265 /* Add two doubleword integers with doubleword result.
266 Each argument is given as two `HOST_WIDE_INT' pieces.
267 One argument is L1 and H1; the other, L2 and H2.
268 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
270 int
271 add_double (l1, h1, l2, h2, lv, hv)
272 unsigned HOST_WIDE_INT l1, l2;
273 HOST_WIDE_INT h1, h2;
274 unsigned HOST_WIDE_INT *lv;
275 HOST_WIDE_INT *hv;
277 unsigned HOST_WIDE_INT l;
278 HOST_WIDE_INT h;
280 l = l1 + l2;
281 h = h1 + h2 + (l < l1);
283 *lv = l;
284 *hv = h;
285 return OVERFLOW_SUM_SIGN (h1, h2, h);
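
/* Editorial sketch, not part of the original file: adding 1 to the
   most positive doubleword value carries out of the low word and
   wraps the high word negative, which the return value reports.  */
#if 0
static void
example_add_double ()
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = add_double (~(unsigned HOST_WIDE_INT) 0,	/* low all ones */
			(HOST_WIDE_INT) (~(unsigned HOST_WIDE_INT) 0 >> 1),
			1, 0, &lv, &hv);
  /* ovf == 1, lv == 0, hv == most negative HOST_WIDE_INT.  */
}
#endif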
288 /* Negate a doubleword integer with doubleword result.
289 Return nonzero if the operation overflows, assuming it's signed.
290 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
291 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
293 int
294 neg_double (l1, h1, lv, hv)
295 unsigned HOST_WIDE_INT l1;
296 HOST_WIDE_INT h1;
297 unsigned HOST_WIDE_INT *lv;
298 HOST_WIDE_INT *hv;
300 if (l1 == 0)
302 *lv = 0;
303 *hv = - h1;
304 return (*hv & h1) < 0;
306 else
308 *lv = -l1;
309 *hv = ~h1;
310 return 0;
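
/* Editorial sketch, not part of the original file: the one value whose
   negation overflows is the most negative doubleword, since its
   negation is not representable; neg_double hands it back unchanged
   and the (*hv & h1) < 0 test reports the overflow.  */
#if 0
static void
example_neg_double ()
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  HOST_WIDE_INT most_neg
    = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1);
  int ovf = neg_double (0, most_neg, &lv, &hv);
  /* ovf == 1, lv == 0, hv == most_neg.  */
}
#endif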
314 /* Multiply two doubleword integers with doubleword result.
315 Return nonzero if the operation overflows, assuming it's signed.
316 Each argument is given as two `HOST_WIDE_INT' pieces.
317 One argument is L1 and H1; the other, L2 and H2.
318 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
320 int
321 mul_double (l1, h1, l2, h2, lv, hv)
322 unsigned HOST_WIDE_INT l1, l2;
323 HOST_WIDE_INT h1, h2;
324 unsigned HOST_WIDE_INT *lv;
325 HOST_WIDE_INT *hv;
327 HOST_WIDE_INT arg1[4];
328 HOST_WIDE_INT arg2[4];
329 HOST_WIDE_INT prod[4 * 2];
330 unsigned HOST_WIDE_INT carry;
331 int i, j, k;
332 unsigned HOST_WIDE_INT toplow, neglow;
333 HOST_WIDE_INT tophigh, neghigh;
335 encode (arg1, l1, h1);
336 encode (arg2, l2, h2);
338 memset ((char *) prod, 0, sizeof prod);
340 for (i = 0; i < 4; i++)
342 carry = 0;
343 for (j = 0; j < 4; j++)
345 k = i + j;
346 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
347 carry += arg1[i] * arg2[j];
348 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
349 carry += prod[k];
350 prod[k] = LOWPART (carry);
351 carry = HIGHPART (carry);
353 prod[i + 4] = carry;
356 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
358 /* Check for overflow by calculating the top half of the answer in full;
359 it should agree with the low half's sign bit. */
360 decode (prod + 4, &toplow, &tophigh);
361 if (h1 < 0)
363 neg_double (l2, h2, &neglow, &neghigh);
364 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
366 if (h2 < 0)
368 neg_double (l1, h1, &neglow, &neghigh);
369 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
371 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
374 /* Shift the doubleword integer in L1, H1 left by COUNT places
375 keeping only PREC bits of result.
376 Shift right if COUNT is negative.
377 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
378 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
380 void
381 lshift_double (l1, h1, count, prec, lv, hv, arith)
382 unsigned HOST_WIDE_INT l1;
383 HOST_WIDE_INT h1, count;
384 unsigned int prec;
385 unsigned HOST_WIDE_INT *lv;
386 HOST_WIDE_INT *hv;
387 int arith;
389 unsigned HOST_WIDE_INT signmask;
391 if (count < 0)
393 rshift_double (l1, h1, -count, prec, lv, hv, arith);
394 return;
397 #ifdef SHIFT_COUNT_TRUNCATED
398 if (SHIFT_COUNT_TRUNCATED)
399 count %= prec;
400 #endif
402 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
404 /* Shifting by the host word size is undefined according to the
405 ANSI standard, so we must handle this as a special case. */
406 *hv = 0;
407 *lv = 0;
409 else if (count >= HOST_BITS_PER_WIDE_INT)
411 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
412 *lv = 0;
414 else
416 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
417 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
418 *lv = l1 << count;
421 /* Sign extend all bits that are beyond the precision. */
423 signmask = -((prec > HOST_BITS_PER_WIDE_INT
424 ? ((unsigned HOST_WIDE_INT) *hv
425 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
426 : (*lv >> (prec - 1))) & 1);
428 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
430 else if (prec >= HOST_BITS_PER_WIDE_INT)
432 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
433 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
435 else
437 *hv = signmask;
438 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
439 *lv |= signmask << prec;
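
/* Editorial sketch, not part of the original file: shifting left by a
   full word moves the low word into the high word.  Assuming a 32-bit
   HOST_WIDE_INT and 64-bit precision, 0x89abcdef << 32 gives a high
   word with bit pattern 0x89abcdef and a zero low word.  */
#if 0
static void
example_lshift_double ()
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  lshift_double (0x89abcdef, 0, HOST_BITS_PER_WIDE_INT,
		 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 1);
  /* lv == 0; hv holds the bit pattern 0x89abcdef.  The final
     sign-extension step is a no-op since PREC covers both words.  */
}
#endif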
443 /* Shift the doubleword integer in L1, H1 right by COUNT places
444 keeping only PREC bits of result. COUNT must be positive.
445 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
446 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
448 void
449 rshift_double (l1, h1, count, prec, lv, hv, arith)
450 unsigned HOST_WIDE_INT l1;
451 HOST_WIDE_INT h1, count;
452 unsigned int prec;
453 unsigned HOST_WIDE_INT *lv;
454 HOST_WIDE_INT *hv;
455 int arith;
457 unsigned HOST_WIDE_INT signmask;
459 signmask = (arith
460 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
461 : 0);
463 #ifdef SHIFT_COUNT_TRUNCATED
464 if (SHIFT_COUNT_TRUNCATED)
465 count %= prec;
466 #endif
468 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
470 /* Shifting by the host word size is undefined according to the
471 ANSI standard, so we must handle this as a special case. */
472 *hv = 0;
473 *lv = 0;
475 else if (count >= HOST_BITS_PER_WIDE_INT)
477 *hv = 0;
478 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
480 else
482 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
483 *lv = ((l1 >> count)
484 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
487 /* Zero / sign extend all bits that are beyond the precision. */
489 if (count >= (HOST_WIDE_INT)prec)
491 *hv = signmask;
492 *lv = signmask;
494 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
496 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
498 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
499 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
501 else
503 *hv = signmask;
504 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
505 *lv |= signmask << (prec - count);
509 /* Rotate the doubleword integer in L1, H1 left by COUNT places
510 keeping only PREC bits of result.
511 Rotate right if COUNT is negative.
512 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
514 void
515 lrotate_double (l1, h1, count, prec, lv, hv)
516 unsigned HOST_WIDE_INT l1;
517 HOST_WIDE_INT h1, count;
518 unsigned int prec;
519 unsigned HOST_WIDE_INT *lv;
520 HOST_WIDE_INT *hv;
522 unsigned HOST_WIDE_INT s1l, s2l;
523 HOST_WIDE_INT s1h, s2h;
525 count %= prec;
526 if (count < 0)
527 count += prec;
529 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
530 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
531 *lv = s1l | s2l;
532 *hv = s1h | s2h;
535 /* Rotate the doubleword integer in L1, H1 left by COUNT places
536 keeping only PREC bits of result. COUNT must be positive.
537 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
539 void
540 rrotate_double (l1, h1, count, prec, lv, hv)
541 unsigned HOST_WIDE_INT l1;
542 HOST_WIDE_INT h1, count;
543 unsigned int prec;
544 unsigned HOST_WIDE_INT *lv;
545 HOST_WIDE_INT *hv;
547 unsigned HOST_WIDE_INT s1l, s2l;
548 HOST_WIDE_INT s1h, s2h;
550 count %= prec;
551 if (count < 0)
552 count += prec;
554 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
555 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
556 *lv = s1l | s2l;
557 *hv = s1h | s2h;
560 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
561 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
562 CODE is a tree code for a kind of division, one of
563 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
564 or EXACT_DIV_EXPR
565 It controls how the quotient is rounded to an integer.
566 Return nonzero if the operation overflows.
567 UNS nonzero says do unsigned division. */
569 int
570 div_and_round_double (code, uns,
571 lnum_orig, hnum_orig, lden_orig, hden_orig,
572 lquo, hquo, lrem, hrem)
573 enum tree_code code;
574 int uns;
575 unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
576 HOST_WIDE_INT hnum_orig;
577 unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
578 HOST_WIDE_INT hden_orig;
579 unsigned HOST_WIDE_INT *lquo, *lrem;
580 HOST_WIDE_INT *hquo, *hrem;
582 int quo_neg = 0;
583 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
584 HOST_WIDE_INT den[4], quo[4];
585 int i, j;
586 unsigned HOST_WIDE_INT work;
587 unsigned HOST_WIDE_INT carry = 0;
588 unsigned HOST_WIDE_INT lnum = lnum_orig;
589 HOST_WIDE_INT hnum = hnum_orig;
590 unsigned HOST_WIDE_INT lden = lden_orig;
591 HOST_WIDE_INT hden = hden_orig;
592 int overflow = 0;
594 if (hden == 0 && lden == 0)
595 overflow = 1, lden = 1;
597 /* calculate quotient sign and convert operands to unsigned. */
598 if (!uns)
600 if (hnum < 0)
602 quo_neg = ~ quo_neg;
603 /* (minimum integer) / (-1) is the only overflow case. */
604 if (neg_double (lnum, hnum, &lnum, &hnum)
605 && ((HOST_WIDE_INT) lden & hden) == -1)
606 overflow = 1;
608 if (hden < 0)
610 quo_neg = ~ quo_neg;
611 neg_double (lden, hden, &lden, &hden);
615 if (hnum == 0 && hden == 0)
616 { /* single precision */
617 *hquo = *hrem = 0;
618 /* This unsigned division rounds toward zero. */
619 *lquo = lnum / lden;
620 goto finish_up;
623 if (hnum == 0)
624 { /* trivial case: dividend < divisor */
625 /* hden != 0 already checked. */
626 *hquo = *lquo = 0;
627 *hrem = hnum;
628 *lrem = lnum;
629 goto finish_up;
632 memset ((char *) quo, 0, sizeof quo);
634 memset ((char *) num, 0, sizeof num); /* to zero the extra (5th) element */
635 memset ((char *) den, 0, sizeof den);
637 encode (num, lnum, hnum);
638 encode (den, lden, hden);
640 /* Special code for when the divisor < BASE. */
641 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
643 /* hnum != 0 already checked. */
644 for (i = 4 - 1; i >= 0; i--)
646 work = num[i] + carry * BASE;
647 quo[i] = work / lden;
648 carry = work % lden;
651 else
653 /* Full double precision division,
654 with thanks to Don Knuth's "Seminumerical Algorithms". */
655 int num_hi_sig, den_hi_sig;
656 unsigned HOST_WIDE_INT quo_est, scale;
658 /* Find the highest nonzero divisor digit. */
659 for (i = 4 - 1;; i--)
660 if (den[i] != 0)
662 den_hi_sig = i;
663 break;
666 /* Ensure that the first digit of the divisor is at least BASE/2.
667 This is required by the quotient digit estimation algorithm. */
669 scale = BASE / (den[den_hi_sig] + 1);
670 if (scale > 1)
671 { /* scale divisor and dividend */
672 carry = 0;
673 for (i = 0; i <= 4 - 1; i++)
675 work = (num[i] * scale) + carry;
676 num[i] = LOWPART (work);
677 carry = HIGHPART (work);
680 num[4] = carry;
681 carry = 0;
682 for (i = 0; i <= 4 - 1; i++)
684 work = (den[i] * scale) + carry;
685 den[i] = LOWPART (work);
686 carry = HIGHPART (work);
687 if (den[i] != 0) den_hi_sig = i;
691 num_hi_sig = 4;
693 /* Main loop */
694 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
696 /* Guess the next quotient digit, quo_est, by dividing the first
697 two remaining dividend digits by the high order quotient digit.
698 quo_est is never low and is at most 2 high. */
699 unsigned HOST_WIDE_INT tmp;
701 num_hi_sig = i + den_hi_sig + 1;
702 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
703 if (num[num_hi_sig] != den[den_hi_sig])
704 quo_est = work / den[den_hi_sig];
705 else
706 quo_est = BASE - 1;
708 /* Refine quo_est so it's usually correct, and at most one high. */
709 tmp = work - quo_est * den[den_hi_sig];
710 if (tmp < BASE
711 && (den[den_hi_sig - 1] * quo_est
712 > (tmp * BASE + num[num_hi_sig - 2])))
713 quo_est--;
715 /* Try QUO_EST as the quotient digit, by multiplying the
716 divisor by QUO_EST and subtracting from the remaining dividend.
717 Keep in mind that QUO_EST is the I - 1st digit. */
719 carry = 0;
720 for (j = 0; j <= den_hi_sig; j++)
722 work = quo_est * den[j] + carry;
723 carry = HIGHPART (work);
724 work = num[i + j] - LOWPART (work);
725 num[i + j] = LOWPART (work);
726 carry += HIGHPART (work) != 0;
729 /* If quo_est was high by one, then num[i] went negative and
730 we need to correct things. */
731 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
733 quo_est--;
734 carry = 0; /* add divisor back in */
735 for (j = 0; j <= den_hi_sig; j++)
737 work = num[i + j] + den[j] + carry;
738 carry = HIGHPART (work);
739 num[i + j] = LOWPART (work);
742 num [num_hi_sig] += carry;
745 /* Store the quotient digit. */
746 quo[i] = quo_est;
750 decode (quo, lquo, hquo);
752 finish_up:
753 /* if result is negative, make it so. */
754 if (quo_neg)
755 neg_double (*lquo, *hquo, lquo, hquo);
757 /* compute trial remainder: rem = num - (quo * den) */
758 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
759 neg_double (*lrem, *hrem, lrem, hrem);
760 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
762 switch (code)
764 case TRUNC_DIV_EXPR:
765 case TRUNC_MOD_EXPR: /* round toward zero */
766 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
767 return overflow;
769 case FLOOR_DIV_EXPR:
770 case FLOOR_MOD_EXPR: /* round toward negative infinity */
771 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
773 /* quo = quo - 1; */
774 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
775 lquo, hquo);
777 else
778 return overflow;
779 break;
781 case CEIL_DIV_EXPR:
782 case CEIL_MOD_EXPR: /* round toward positive infinity */
783 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
785 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
786 lquo, hquo);
788 else
789 return overflow;
790 break;
792 case ROUND_DIV_EXPR:
793 case ROUND_MOD_EXPR: /* round to closest integer */
795 unsigned HOST_WIDE_INT labs_rem = *lrem;
796 HOST_WIDE_INT habs_rem = *hrem;
797 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
798 HOST_WIDE_INT habs_den = hden, htwice;
800 /* Get absolute values */
801 if (*hrem < 0)
802 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
803 if (hden < 0)
804 neg_double (lden, hden, &labs_den, &habs_den);
806 /* If (2 * abs (lrem) >= abs (lden)) */
807 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
808 labs_rem, habs_rem, &ltwice, &htwice);
810 if (((unsigned HOST_WIDE_INT) habs_den
811 < (unsigned HOST_WIDE_INT) htwice)
812 || (((unsigned HOST_WIDE_INT) habs_den
813 == (unsigned HOST_WIDE_INT) htwice)
814 && (labs_den < ltwice)))
816 if (*hquo < 0)
817 /* quo = quo - 1; */
818 add_double (*lquo, *hquo,
819 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
820 else
821 /* quo = quo + 1; */
822 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
823 lquo, hquo);
825 else
826 return overflow;
828 break;
830 default:
831 abort ();
834 /* compute true remainder: rem = num - (quo * den) */
835 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
836 neg_double (*lrem, *hrem, lrem, hrem);
837 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
838 return overflow;
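
/* Editorial sketch, not part of the original file: the rounding codes
   differ only when the remainder is nonzero.  For -7 / 2 signed,
   TRUNC_DIV_EXPR yields quo -3 rem -1, FLOOR_DIV_EXPR quo -4 rem 1,
   CEIL_DIV_EXPR quo -3 rem -1, and ROUND_DIV_EXPR quo -4 rem 1
   (2 * |rem| >= |den| and the quotient is negative).  */
#if 0
static void
example_div_and_round_double ()
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;
  div_and_round_double (FLOOR_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, -1,	/* num = -7 */
			2, 0,					/* den = 2 */
			&lquo, &hquo, &lrem, &hrem);
  /* lquo == (unsigned HOST_WIDE_INT) -4, hquo == -1,
     lrem == 1, hrem == 0.  */
}
#endif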
841 /* Determine whether an expression T can be cheaply negated using
842 the function negate_expr. */
844 static bool
845 negate_expr_p (t)
846 tree t;
848 unsigned HOST_WIDE_INT val;
849 unsigned int prec;
850 tree type;
852 if (t == 0)
853 return false;
855 type = TREE_TYPE (t);
857 STRIP_SIGN_NOPS (t);
858 switch (TREE_CODE (t))
860 case INTEGER_CST:
861 if (TREE_UNSIGNED (type))
862 return false;
864 /* Check that -CST will not overflow type. */
865 prec = TYPE_PRECISION (type);
866 if (prec > HOST_BITS_PER_WIDE_INT)
868 if (TREE_INT_CST_LOW (t) != 0)
869 return true;
870 prec -= HOST_BITS_PER_WIDE_INT;
871 val = TREE_INT_CST_HIGH (t);
873 else
874 val = TREE_INT_CST_LOW (t);
875 if (prec < HOST_BITS_PER_WIDE_INT)
876 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
877 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
879 case REAL_CST:
880 case NEGATE_EXPR:
881 case MINUS_EXPR:
882 return true;
884 default:
885 break;
887 return false;
890 /* Given T, an expression, return the negation of T. Allow for T to be
891 null, in which case return null. */
893 static tree
894 negate_expr (t)
895 tree t;
897 tree type;
898 tree tem;
900 if (t == 0)
901 return 0;
903 type = TREE_TYPE (t);
904 STRIP_SIGN_NOPS (t);
906 switch (TREE_CODE (t))
908 case INTEGER_CST:
909 case REAL_CST:
910 if (! TREE_UNSIGNED (type)
911 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
912 && ! TREE_OVERFLOW (tem))
913 return tem;
914 break;
916 case NEGATE_EXPR:
917 return convert (type, TREE_OPERAND (t, 0));
919 case MINUS_EXPR:
920 /* - (A - B) -> B - A */
921 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
922 return convert (type,
923 fold (build (MINUS_EXPR, TREE_TYPE (t),
924 TREE_OPERAND (t, 1),
925 TREE_OPERAND (t, 0))));
926 break;
928 default:
929 break;
932 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
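
/* Editorial sketch, not part of the original file: for integral types
   the MINUS_EXPR case turns -(a - b) into b - a without building a
   NEGATE_EXPR; A, B and TYPE below are arbitrary integer-typed trees.  */
#if 0
static tree
example_negate_minus (a, b, type)
     tree a, b, type;
{
  tree t = build (MINUS_EXPR, type, a, b);
  return negate_expr (t);	/* folds to b - a for integral TYPE */
}
#endif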
935 /* Split a tree IN into constant, literal and variable parts that could be
936 combined with CODE to make IN. "constant" means an expression with
937 TREE_CONSTANT but that isn't an actual constant. CODE must be a
938 commutative arithmetic operation. Store the constant part into *CONP,
939 the literal in *LITP and return the variable part. If a part isn't
940 present, set it to null. If the tree does not decompose in this way,
941 return the entire tree as the variable part and the other parts as null.
943 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
944 case, we negate an operand that was subtracted. Except if it is a
945 literal for which we use *MINUS_LITP instead.
947 If NEGATE_P is true, we are negating all of IN, again except a literal
948 for which we use *MINUS_LITP instead.
950 If IN is itself a literal or constant, return it as appropriate.
952 Note that we do not guarantee that any of the three values will be the
953 same type as IN, but they will have the same signedness and mode. */
955 static tree
956 split_tree (in, code, conp, litp, minus_litp, negate_p)
957 tree in;
958 enum tree_code code;
959 tree *conp, *litp, *minus_litp;
960 int negate_p;
962 tree var = 0;
964 *conp = 0;
965 *litp = 0;
966 *minus_litp = 0;
968 /* Strip any conversions that don't change the machine mode or signedness. */
969 STRIP_SIGN_NOPS (in);
971 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
972 *litp = in;
973 else if (TREE_CODE (in) == code
974 || (! FLOAT_TYPE_P (TREE_TYPE (in))
975 /* We can associate addition and subtraction together (even
976 though the C standard doesn't say so) for integers because
977 the value is not affected. For reals, the value might be
978 affected, so we can't. */
979 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
980 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
982 tree op0 = TREE_OPERAND (in, 0);
983 tree op1 = TREE_OPERAND (in, 1);
984 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
985 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
987 /* First see if either of the operands is a literal, then a constant. */
988 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
989 *litp = op0, op0 = 0;
990 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
991 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
993 if (op0 != 0 && TREE_CONSTANT (op0))
994 *conp = op0, op0 = 0;
995 else if (op1 != 0 && TREE_CONSTANT (op1))
996 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
998 /* If we haven't dealt with either operand, this is not a case we can
999 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1000 if (op0 != 0 && op1 != 0)
1001 var = in;
1002 else if (op0 != 0)
1003 var = op0;
1004 else
1005 var = op1, neg_var_p = neg1_p;
1007 /* Now do any needed negations. */
1008 if (neg_litp_p)
1009 *minus_litp = *litp, *litp = 0;
1010 if (neg_conp_p)
1011 *conp = negate_expr (*conp);
1012 if (neg_var_p)
1013 var = negate_expr (var);
1015 else if (TREE_CONSTANT (in))
1016 *conp = in;
1017 else
1018 var = in;
1020 if (negate_p)
1022 if (*litp)
1023 *minus_litp = *litp, *litp = 0;
1024 else if (*minus_litp)
1025 *litp = *minus_litp, *minus_litp = 0;
1026 *conp = negate_expr (*conp);
1027 var = negate_expr (var);
1030 return var;
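
/* Editorial sketch, not part of the original file: for IN == x + 5 and
   CODE == PLUS_EXPR the literal 5 lands in *LITP, no TREE_CONSTANT
   part is present, and the variable x is returned.  */
#if 0
static void
example_split_tree (sum)
     tree sum;			/* assume SUM is the PLUS_EXPR x + 5 */
{
  tree con, lit, minus_lit;
  tree var = split_tree (sum, PLUS_EXPR, &con, &lit, &minus_lit, 0);
  /* var == x, lit == 5, con == 0, minus_lit == 0.  */
}
#endif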
1033 /* Re-associate trees split by the above function. T1 and T2 are either
1034 expressions to associate or null. Return the new expression, if any. If
1035 we build an operation, do it in TYPE and with CODE. */
1037 static tree
1038 associate_trees (t1, t2, code, type)
1039 tree t1, t2;
1040 enum tree_code code;
1041 tree type;
1043 if (t1 == 0)
1044 return t2;
1045 else if (t2 == 0)
1046 return t1;
1048 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1049 try to fold this since we will have infinite recursion. But do
1050 deal with any NEGATE_EXPRs. */
1051 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1052 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1054 if (code == PLUS_EXPR)
1056 if (TREE_CODE (t1) == NEGATE_EXPR)
1057 return build (MINUS_EXPR, type, convert (type, t2),
1058 convert (type, TREE_OPERAND (t1, 0)));
1059 else if (TREE_CODE (t2) == NEGATE_EXPR)
1060 return build (MINUS_EXPR, type, convert (type, t1),
1061 convert (type, TREE_OPERAND (t2, 0)));
1063 return build (code, type, convert (type, t1), convert (type, t2));
1066 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1069 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1070 to produce a new constant.
1072 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1074 static tree
1075 int_const_binop (code, arg1, arg2, notrunc)
1076 enum tree_code code;
1077 tree arg1, arg2;
1078 int notrunc;
1080 unsigned HOST_WIDE_INT int1l, int2l;
1081 HOST_WIDE_INT int1h, int2h;
1082 unsigned HOST_WIDE_INT low;
1083 HOST_WIDE_INT hi;
1084 unsigned HOST_WIDE_INT garbagel;
1085 HOST_WIDE_INT garbageh;
1086 tree t;
1087 tree type = TREE_TYPE (arg1);
1088 int uns = TREE_UNSIGNED (type);
1089 int is_sizetype
1090 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1091 int overflow = 0;
1092 int no_overflow = 0;
1094 int1l = TREE_INT_CST_LOW (arg1);
1095 int1h = TREE_INT_CST_HIGH (arg1);
1096 int2l = TREE_INT_CST_LOW (arg2);
1097 int2h = TREE_INT_CST_HIGH (arg2);
1099 switch (code)
1101 case BIT_IOR_EXPR:
1102 low = int1l | int2l, hi = int1h | int2h;
1103 break;
1105 case BIT_XOR_EXPR:
1106 low = int1l ^ int2l, hi = int1h ^ int2h;
1107 break;
1109 case BIT_AND_EXPR:
1110 low = int1l & int2l, hi = int1h & int2h;
1111 break;
1113 case BIT_ANDTC_EXPR:
1114 low = int1l & ~int2l, hi = int1h & ~int2h;
1115 break;
1117 case RSHIFT_EXPR:
1118 int2l = -int2l;
1119 case LSHIFT_EXPR:
1120 /* It's unclear from the C standard whether shifts can overflow.
1121 The following code ignores overflow; perhaps a C standard
1122 interpretation ruling is needed. */
1123 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1124 &low, &hi, !uns);
1125 no_overflow = 1;
1126 break;
1128 case RROTATE_EXPR:
1129 int2l = - int2l;
1130 case LROTATE_EXPR:
1131 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1132 &low, &hi);
1133 break;
1135 case PLUS_EXPR:
1136 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1137 break;
1139 case MINUS_EXPR:
1140 neg_double (int2l, int2h, &low, &hi);
1141 add_double (int1l, int1h, low, hi, &low, &hi);
1142 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1143 break;
1145 case MULT_EXPR:
1146 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1147 break;
1149 case TRUNC_DIV_EXPR:
1150 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1151 case EXACT_DIV_EXPR:
1152 /* This is a shortcut for a common special case. */
1153 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1154 && ! TREE_CONSTANT_OVERFLOW (arg1)
1155 && ! TREE_CONSTANT_OVERFLOW (arg2)
1156 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1158 if (code == CEIL_DIV_EXPR)
1159 int1l += int2l - 1;
1161 low = int1l / int2l, hi = 0;
1162 break;
1165 /* ... fall through ... */
1167 case ROUND_DIV_EXPR:
1168 if (int2h == 0 && int2l == 1)
1170 low = int1l, hi = int1h;
1171 break;
1173 if (int1l == int2l && int1h == int2h
1174 && ! (int1l == 0 && int1h == 0))
1176 low = 1, hi = 0;
1177 break;
1179 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1180 &low, &hi, &garbagel, &garbageh);
1181 break;
1183 case TRUNC_MOD_EXPR:
1184 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1185 /* This is a shortcut for a common special case. */
1186 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1187 && ! TREE_CONSTANT_OVERFLOW (arg1)
1188 && ! TREE_CONSTANT_OVERFLOW (arg2)
1189 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1191 if (code == CEIL_MOD_EXPR)
1192 int1l += int2l - 1;
1193 low = int1l % int2l, hi = 0;
1194 break;
1197 /* ... fall through ... */
1199 case ROUND_MOD_EXPR:
1200 overflow = div_and_round_double (code, uns,
1201 int1l, int1h, int2l, int2h,
1202 &garbagel, &garbageh, &low, &hi);
1203 break;
1205 case MIN_EXPR:
1206 case MAX_EXPR:
1207 if (uns)
1208 low = (((unsigned HOST_WIDE_INT) int1h
1209 < (unsigned HOST_WIDE_INT) int2h)
1210 || (((unsigned HOST_WIDE_INT) int1h
1211 == (unsigned HOST_WIDE_INT) int2h)
1212 && int1l < int2l));
1213 else
1214 low = (int1h < int2h
1215 || (int1h == int2h && int1l < int2l));
1217 if (low == (code == MIN_EXPR))
1218 low = int1l, hi = int1h;
1219 else
1220 low = int2l, hi = int2h;
1221 break;
1223 default:
1224 abort ();
1227 /* If this is for a sizetype, can be represented as one (signed)
1228 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1229 constants. */
1230 if (is_sizetype
1231 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1232 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1233 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1234 return size_int_type_wide (low, type);
1235 else
1237 t = build_int_2 (low, hi);
1238 TREE_TYPE (t) = TREE_TYPE (arg1);
1241 TREE_OVERFLOW (t)
1242 = ((notrunc
1243 ? (!uns || is_sizetype) && overflow
1244 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1245 && ! no_overflow))
1246 | TREE_OVERFLOW (arg1)
1247 | TREE_OVERFLOW (arg2));
1249 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1250 So check if force_fit_type truncated the value. */
1251 if (is_sizetype
1252 && ! TREE_OVERFLOW (t)
1253 && (TREE_INT_CST_HIGH (t) != hi
1254 || TREE_INT_CST_LOW (t) != low))
1255 TREE_OVERFLOW (t) = 1;
1257 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1258 | TREE_CONSTANT_OVERFLOW (arg1)
1259 | TREE_CONSTANT_OVERFLOW (arg2));
1260 return t;
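
/* Editorial sketch, not part of the original file: combining two
   INTEGER_CSTs of the same type; the result carries the overflow
   flags when the value wrapped.  INT_TYPE below is any integer type.  */
#if 0
static tree
example_int_const_binop (int_type)
     tree int_type;
{
  tree a = build_int_2 (40, 0);
  tree b = build_int_2 (2, 0);
  TREE_TYPE (a) = TREE_TYPE (b) = int_type;
  return int_const_binop (MULT_EXPR, a, b, 0);	/* the constant 80 */
}
#endif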
1263 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1264 constant. We assume ARG1 and ARG2 have the same data type, or at least
1265 are the same kind of constant and the same machine mode.
1267 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1269 static tree
1270 const_binop (code, arg1, arg2, notrunc)
1271 enum tree_code code;
1272 tree arg1, arg2;
1273 int notrunc;
1275 STRIP_NOPS (arg1);
1276 STRIP_NOPS (arg2);
1278 if (TREE_CODE (arg1) == INTEGER_CST)
1279 return int_const_binop (code, arg1, arg2, notrunc);
1281 if (TREE_CODE (arg1) == REAL_CST)
1283 REAL_VALUE_TYPE d1;
1284 REAL_VALUE_TYPE d2;
1285 REAL_VALUE_TYPE value;
1286 tree t;
1288 d1 = TREE_REAL_CST (arg1);
1289 d2 = TREE_REAL_CST (arg2);
1291 /* If either operand is a NaN, just return it. Otherwise, set up
1292 for floating-point trap; we return an overflow. */
1293 if (REAL_VALUE_ISNAN (d1))
1294 return arg1;
1295 else if (REAL_VALUE_ISNAN (d2))
1296 return arg2;
1298 REAL_ARITHMETIC (value, code, d1, d2);
1300 t = build_real (TREE_TYPE (arg1),
1301 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
1302 value));
1304 TREE_OVERFLOW (t)
1305 = (force_fit_type (t, 0)
1306 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1307 TREE_CONSTANT_OVERFLOW (t)
1308 = TREE_OVERFLOW (t)
1309 | TREE_CONSTANT_OVERFLOW (arg1)
1310 | TREE_CONSTANT_OVERFLOW (arg2);
1311 return t;
1313 if (TREE_CODE (arg1) == COMPLEX_CST)
1315 tree type = TREE_TYPE (arg1);
1316 tree r1 = TREE_REALPART (arg1);
1317 tree i1 = TREE_IMAGPART (arg1);
1318 tree r2 = TREE_REALPART (arg2);
1319 tree i2 = TREE_IMAGPART (arg2);
1320 tree t;
1322 switch (code)
1324 case PLUS_EXPR:
1325 t = build_complex (type,
1326 const_binop (PLUS_EXPR, r1, r2, notrunc),
1327 const_binop (PLUS_EXPR, i1, i2, notrunc));
1328 break;
1330 case MINUS_EXPR:
1331 t = build_complex (type,
1332 const_binop (MINUS_EXPR, r1, r2, notrunc),
1333 const_binop (MINUS_EXPR, i1, i2, notrunc));
1334 break;
1336 case MULT_EXPR:
1337 t = build_complex (type,
1338 const_binop (MINUS_EXPR,
1339 const_binop (MULT_EXPR,
1340 r1, r2, notrunc),
1341 const_binop (MULT_EXPR,
1342 i1, i2, notrunc),
1343 notrunc),
1344 const_binop (PLUS_EXPR,
1345 const_binop (MULT_EXPR,
1346 r1, i2, notrunc),
1347 const_binop (MULT_EXPR,
1348 i1, r2, notrunc),
1349 notrunc));
1350 break;
1352 case RDIV_EXPR:
1354 tree magsquared
1355 = const_binop (PLUS_EXPR,
1356 const_binop (MULT_EXPR, r2, r2, notrunc),
1357 const_binop (MULT_EXPR, i2, i2, notrunc),
1358 notrunc);
1360 t = build_complex (type,
1361 const_binop
1362 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1363 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1364 const_binop (PLUS_EXPR,
1365 const_binop (MULT_EXPR, r1, r2,
1366 notrunc),
1367 const_binop (MULT_EXPR, i1, i2,
1368 notrunc),
1369 notrunc),
1370 magsquared, notrunc),
1371 const_binop
1372 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1373 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1374 const_binop (MINUS_EXPR,
1375 const_binop (MULT_EXPR, i1, r2,
1376 notrunc),
1377 const_binop (MULT_EXPR, r1, i2,
1378 notrunc),
1379 notrunc),
1380 magsquared, notrunc));
1382 break;
1384 default:
1385 abort ();
1387 return t;
1389 return 0;
1392 /* These are the hash table functions for the hash table of INTEGER_CST
1393 nodes of a sizetype. */
1395 /* Return the hash code for X, an INTEGER_CST. */
1397 static hashval_t
1398 size_htab_hash (x)
1399 const void *x;
1401 tree t = (tree) x;
1403 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1404 ^ htab_hash_pointer (TREE_TYPE (t))
1405 ^ (TREE_OVERFLOW (t) << 20));
1408 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1409 is the same as that represented by *Y (also an INTEGER_CST). */
1411 static int
1412 size_htab_eq (x, y)
1413 const void *x;
1414 const void *y;
1416 tree xt = (tree) x;
1417 tree yt = (tree) y;
1419 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1420 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1421 && TREE_TYPE (xt) == TREE_TYPE (yt)
1422 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1425 /* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits are
1426 given by NUMBER, with the sizetype represented by KIND. */
1428 tree
1429 size_int_wide (number, kind)
1430 HOST_WIDE_INT number;
1431 enum size_type_kind kind;
1433 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1436 /* Likewise, but the desired type is specified explicitly. */
1438 static GTY (()) tree new_const;
1439 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1440 htab_t size_htab;
1442 tree
1443 size_int_type_wide (number, type)
1444 HOST_WIDE_INT number;
1445 tree type;
1447 void **slot;
1449 if (size_htab == 0)
1451 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1452 new_const = make_node (INTEGER_CST);
1455 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1456 hash table, we return the value from the hash table. Otherwise, we
1457 place that in the hash table and make a new node for the next time. */
1458 TREE_INT_CST_LOW (new_const) = number;
1459 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1460 TREE_TYPE (new_const) = type;
1461 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1462 = force_fit_type (new_const, 0);
1464 slot = htab_find_slot (size_htab, new_const, INSERT);
1465 if (*slot == 0)
1467 tree t = new_const;
1469 *slot = new_const;
1470 new_const = make_node (INTEGER_CST);
1471 return t;
1473 else
1474 return (tree) *slot;
1477 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1478 is a tree code. The type of the result is taken from the operands.
1479 Both must be the same integer type, and it must be a sizetype.
1480 If the operands are constant, so is the result. */
1482 tree
1483 size_binop (code, arg0, arg1)
1484 enum tree_code code;
1485 tree arg0, arg1;
1487 tree type = TREE_TYPE (arg0);
1489 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1490 || type != TREE_TYPE (arg1))
1491 abort ();
1493 /* Handle the special case of two integer constants faster. */
1494 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1496 /* And some specific cases even faster than that. */
1497 if (code == PLUS_EXPR && integer_zerop (arg0))
1498 return arg1;
1499 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1500 && integer_zerop (arg1))
1501 return arg0;
1502 else if (code == MULT_EXPR && integer_onep (arg0))
1503 return arg1;
1505 /* Handle general case of two integer constants. */
1506 return int_const_binop (code, arg0, arg1, 0);
1509 if (arg0 == error_mark_node || arg1 == error_mark_node)
1510 return error_mark_node;
1512 return fold (build (code, type, arg0, arg1));
1515 /* Given two values, either both of sizetype or both of bitsizetype,
1516 compute the difference between the two values. Return the value
1517 in signed type corresponding to the type of the operands. */
1519 tree
1520 size_diffop (arg0, arg1)
1521 tree arg0, arg1;
1523 tree type = TREE_TYPE (arg0);
1524 tree ctype;
1526 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1527 || type != TREE_TYPE (arg1))
1528 abort ();
1530 /* If the type is already signed, just do the simple thing. */
1531 if (! TREE_UNSIGNED (type))
1532 return size_binop (MINUS_EXPR, arg0, arg1);
1534 ctype = (type == bitsizetype || type == ubitsizetype
1535 ? sbitsizetype : ssizetype);
1537 /* If either operand is not a constant, do the conversions to the signed
1538 type and subtract. The hardware will do the right thing with any
1539 overflow in the subtraction. */
1540 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1541 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1542 convert (ctype, arg1));
1544 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1545 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1546 overflow) and negate (which can't either). Special-case a result
1547 of zero while we're here. */
1548 if (tree_int_cst_equal (arg0, arg1))
1549 return convert (ctype, integer_zero_node);
1550 else if (tree_int_cst_lt (arg1, arg0))
1551 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1552 else
1553 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1554 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
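
/* Editorial sketch, not part of the original file: sizetype is
   unsigned, so subtracting a larger size from a smaller one directly
   would wrap; size_diffop instead produces the difference in the
   corresponding signed type.  */
#if 0
static tree
example_size_diffop ()
{
  /* 4 - 8 as sizes: the result is ssizetype -4, not a huge
     unsigned sizetype value.  */
  return size_diffop (size_int (4), size_int (8));
}
#endif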
1558 /* Given T, a tree representing type conversion of ARG1, a constant,
1559 return a constant tree representing the result of conversion. */
1561 static tree
1562 fold_convert (t, arg1)
1563 tree t;
1564 tree arg1;
1566 tree type = TREE_TYPE (t);
1567 int overflow = 0;
1569 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1571 if (TREE_CODE (arg1) == INTEGER_CST)
1573 /* If we would build a constant wider than GCC supports,
1574 leave the conversion unfolded. */
1575 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1576 return t;
1578 /* If we are trying to make a sizetype for a small integer, use
1579 size_int to pick up cached types to reduce duplicate nodes. */
1580 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1581 && !TREE_CONSTANT_OVERFLOW (arg1)
1582 && compare_tree_int (arg1, 10000) < 0)
1583 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1585 /* Given an integer constant, make new constant with new type,
1586 appropriately sign-extended or truncated. */
1587 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1588 TREE_INT_CST_HIGH (arg1));
1589 TREE_TYPE (t) = type;
1590 /* Indicate an overflow if (1) ARG1 already overflowed,
1591 or (2) force_fit_type indicates an overflow.
1592 Tell force_fit_type that an overflow has already occurred
1593 if ARG1 is a too-large unsigned value and T is signed.
1594 But don't indicate an overflow if converting a pointer. */
1595 TREE_OVERFLOW (t)
1596 = ((force_fit_type (t,
1597 (TREE_INT_CST_HIGH (arg1) < 0
1598 && (TREE_UNSIGNED (type)
1599 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1600 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1601 || TREE_OVERFLOW (arg1));
1602 TREE_CONSTANT_OVERFLOW (t)
1603 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1605 else if (TREE_CODE (arg1) == REAL_CST)
1607 /* Don't initialize these, use assignments.
1608 Initialized local aggregates don't work on old compilers. */
1609 REAL_VALUE_TYPE x;
1610 REAL_VALUE_TYPE l;
1611 REAL_VALUE_TYPE u;
1612 tree type1 = TREE_TYPE (arg1);
1613 int no_upper_bound;
1615 x = TREE_REAL_CST (arg1);
1616 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1618 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1619 if (!no_upper_bound)
1620 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1622 /* See if X will be in range after truncation towards 0.
1623 To compensate for truncation, move the bounds away from 0,
1624 but reject if X exactly equals the adjusted bounds. */
1625 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1626 if (!no_upper_bound)
1627 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1628 /* If X is a NaN, use zero instead and show we have an overflow.
1629 Otherwise, range check. */
1630 if (REAL_VALUE_ISNAN (x))
1631 overflow = 1, x = dconst0;
1632 else if (! (REAL_VALUES_LESS (l, x)
1633 && !no_upper_bound
1634 && REAL_VALUES_LESS (x, u)))
1635 overflow = 1;
1638 HOST_WIDE_INT low, high;
1639 REAL_VALUE_TO_INT (&low, &high, x);
1640 t = build_int_2 (low, high);
1642 TREE_TYPE (t) = type;
1643 TREE_OVERFLOW (t)
1644 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1645 TREE_CONSTANT_OVERFLOW (t)
1646 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1648 TREE_TYPE (t) = type;
1650 else if (TREE_CODE (type) == REAL_TYPE)
1652 if (TREE_CODE (arg1) == INTEGER_CST)
1653 return build_real_from_int_cst (type, arg1);
1654 if (TREE_CODE (arg1) == REAL_CST)
1656 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1658 /* We make a copy of ARG1 so that we don't modify an
1659 existing constant tree. */
1660 t = copy_node (arg1);
1661 TREE_TYPE (t) = type;
1662 return t;
1665 t = build_real (type,
1666 real_value_truncate (TYPE_MODE (type),
1667 TREE_REAL_CST (arg1)));
1669 TREE_OVERFLOW (t)
1670 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1671 TREE_CONSTANT_OVERFLOW (t)
1672 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1673 return t;
1676 TREE_CONSTANT (t) = 1;
1677 return t;
1680 /* Return an expr equal to X but certainly not valid as an lvalue. */
1682 tree
1683 non_lvalue (x)
1684 tree x;
1686 tree result;
1688 /* These things are certainly not lvalues. */
1689 if (TREE_CODE (x) == NON_LVALUE_EXPR
1690 || TREE_CODE (x) == INTEGER_CST
1691 || TREE_CODE (x) == REAL_CST
1692 || TREE_CODE (x) == STRING_CST
1693 || TREE_CODE (x) == ADDR_EXPR)
1694 return x;
1696 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1697 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1698 return result;
1701 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1702 Zero means allow extended lvalues. */
1704 int pedantic_lvalues;
1706 /* When pedantic, return an expr equal to X but certainly not valid as a
1707 pedantic lvalue. Otherwise, return X. */
1709 tree
1710 pedantic_non_lvalue (x)
1711 tree x;
1713 if (pedantic_lvalues)
1714 return non_lvalue (x);
1715 else
1716 return x;
1719 /* Given a tree comparison code, return the code that is the logical inverse
1720 of the given code. It is not safe to do this for floating-point
1721 comparisons, except for NE_EXPR and EQ_EXPR. */
1723 static enum tree_code
1724 invert_tree_comparison (code)
1725 enum tree_code code;
1727 switch (code)
1729 case EQ_EXPR:
1730 return NE_EXPR;
1731 case NE_EXPR:
1732 return EQ_EXPR;
1733 case GT_EXPR:
1734 return LE_EXPR;
1735 case GE_EXPR:
1736 return LT_EXPR;
1737 case LT_EXPR:
1738 return GE_EXPR;
1739 case LE_EXPR:
1740 return GT_EXPR;
1741 default:
1742 abort ();
1746 /* Similar, but return the comparison that results if the operands are
1747 swapped. This is safe for floating-point. */
1749 static enum tree_code
1750 swap_tree_comparison (code)
1751 enum tree_code code;
1753 switch (code)
1755 case EQ_EXPR:
1756 case NE_EXPR:
1757 return code;
1758 case GT_EXPR:
1759 return LT_EXPR;
1760 case GE_EXPR:
1761 return LE_EXPR;
1762 case LT_EXPR:
1763 return GT_EXPR;
1764 case LE_EXPR:
1765 return GE_EXPR;
1766 default:
1767 abort ();
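
/* Editorial sketch, not part of the original file: the distinction
   between inverting and swapping matters for IEEE floats.  Inverting
   a < b to a >= b is wrong when either operand is a NaN (both compare
   false), but swapping a < b to b > a is always safe.  CMP and TYPE
   below are hypothetical.  */
#if 0
static tree
example_swap_operands (cmp, type)
     tree cmp, type;		/* assume CMP is the comparison a < b */
{
  return build (swap_tree_comparison (TREE_CODE (cmp)), type,
		TREE_OPERAND (cmp, 1), TREE_OPERAND (cmp, 0));	/* b > a */
}
#endif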
1772 /* Convert a comparison tree code from an enum tree_code representation
1773 into a compcode bit-based encoding. This function is the inverse of
1774 compcode_to_comparison. */
1776 static int
1777 comparison_to_compcode (code)
1778 enum tree_code code;
1780 switch (code)
1782 case LT_EXPR:
1783 return COMPCODE_LT;
1784 case EQ_EXPR:
1785 return COMPCODE_EQ;
1786 case LE_EXPR:
1787 return COMPCODE_LE;
1788 case GT_EXPR:
1789 return COMPCODE_GT;
1790 case NE_EXPR:
1791 return COMPCODE_NE;
1792 case GE_EXPR:
1793 return COMPCODE_GE;
1794 default:
1795 abort ();
1799 /* Convert a compcode bit-based encoding of a comparison operator back
1800 to GCC's enum tree_code representation. This function is the
1801 inverse of comparison_to_compcode. */
1803 static enum tree_code
1804 compcode_to_comparison (code)
1805 int code;
1807 switch (code)
1809 case COMPCODE_LT:
1810 return LT_EXPR;
1811 case COMPCODE_EQ:
1812 return EQ_EXPR;
1813 case COMPCODE_LE:
1814 return LE_EXPR;
1815 case COMPCODE_GT:
1816 return GT_EXPR;
1817 case COMPCODE_NE:
1818 return NE_EXPR;
1819 case COMPCODE_GE:
1820 return GE_EXPR;
1821 default:
1822 abort ();
1826 /* Return nonzero if CODE is a tree code that represents a truth value. */
1828 static int
1829 truth_value_p (code)
1830 enum tree_code code;
1832 return (TREE_CODE_CLASS (code) == '<'
1833 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1834 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1835 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1838 /* Return nonzero if two operands are necessarily equal.
1839 If ONLY_CONST is nonzero, only return nonzero for constants.
1840 This function tests whether the operands are indistinguishable;
1841 it does not test whether they are equal using C's == operation.
1842 The distinction is important for IEEE floating point, because
1843 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1844 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1846 int
1847 operand_equal_p (arg0, arg1, only_const)
1848 tree arg0, arg1;
1849 int only_const;
1851 /* If both types don't have the same signedness, then we can't consider
1852 them equal. We must check this before the STRIP_NOPS calls
1853 because they may change the signedness of the arguments. */
1854 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1855 return 0;
1857 STRIP_NOPS (arg0);
1858 STRIP_NOPS (arg1);
1860 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1861 /* This is needed for conversions and for COMPONENT_REF.
1862 Might as well play it safe and always test this. */
1863 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1864 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1865 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1866 return 0;
1868 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1869 We don't care about side effects in that case because the SAVE_EXPR
1870 takes care of that for us. In all other cases, two expressions are
1871 equal if they have no side effects. If we have two identical
1872 expressions with side effects that should be treated the same due
1873 to the only side effects being identical SAVE_EXPR's, that will
1874 be detected in the recursive calls below. */
1875 if (arg0 == arg1 && ! only_const
1876 && (TREE_CODE (arg0) == SAVE_EXPR
1877 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1878 return 1;
1880 /* Next handle constant cases, those for which we can return 1 even
1881 if ONLY_CONST is set. */
1882 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1883 switch (TREE_CODE (arg0))
1885 case INTEGER_CST:
1886 return (! TREE_CONSTANT_OVERFLOW (arg0)
1887 && ! TREE_CONSTANT_OVERFLOW (arg1)
1888 && tree_int_cst_equal (arg0, arg1));
1890 case REAL_CST:
1891 return (! TREE_CONSTANT_OVERFLOW (arg0)
1892 && ! TREE_CONSTANT_OVERFLOW (arg1)
1893 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1894 TREE_REAL_CST (arg1)));
1896 case VECTOR_CST:
1898 tree v1, v2;
1900 if (TREE_CONSTANT_OVERFLOW (arg0)
1901 || TREE_CONSTANT_OVERFLOW (arg1))
1902 return 0;
1904 v1 = TREE_VECTOR_CST_ELTS (arg0);
1905 v2 = TREE_VECTOR_CST_ELTS (arg1);
1906 while (v1 && v2)
1908 if (!operand_equal_p (v1, v2, only_const))
1909 return 0;
1910 v1 = TREE_CHAIN (v1);
1911 v2 = TREE_CHAIN (v2);
1914 return 1;
1917 case COMPLEX_CST:
1918 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1919 only_const)
1920 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1921 only_const));
1923 case STRING_CST:
1924 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1925 && ! memcmp (TREE_STRING_POINTER (arg0),
1926 TREE_STRING_POINTER (arg1),
1927 TREE_STRING_LENGTH (arg0)));
1929 case ADDR_EXPR:
1930 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1931 0);
1932 default:
1933 break;
1936 if (only_const)
1937 return 0;
1939 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1941 case '1':
1942 /* Two conversions are equal only if signedness and modes match. */
1943 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1944 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1945 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1946 return 0;
1948 return operand_equal_p (TREE_OPERAND (arg0, 0),
1949 TREE_OPERAND (arg1, 0), 0);
1951 case '<':
1952 case '2':
1953 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1954 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1955 0))
1956 return 1;
1958 /* For commutative ops, allow the other order. */
1959 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1960 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1961 || TREE_CODE (arg0) == BIT_IOR_EXPR
1962 || TREE_CODE (arg0) == BIT_XOR_EXPR
1963 || TREE_CODE (arg0) == BIT_AND_EXPR
1964 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1965 && operand_equal_p (TREE_OPERAND (arg0, 0),
1966 TREE_OPERAND (arg1, 1), 0)
1967 && operand_equal_p (TREE_OPERAND (arg0, 1),
1968 TREE_OPERAND (arg1, 0), 0));
1970 case 'r':
1971 /* If either of the pointer (or reference) expressions we are
1972 dereferencing contains a side effect, these cannot be equal. */
1973 if (TREE_SIDE_EFFECTS (arg0)
1974 || TREE_SIDE_EFFECTS (arg1))
1975 return 0;
1977 switch (TREE_CODE (arg0))
1979 case INDIRECT_REF:
1980 return operand_equal_p (TREE_OPERAND (arg0, 0),
1981 TREE_OPERAND (arg1, 0), 0);
1983 case COMPONENT_REF:
1984 case ARRAY_REF:
1985 case ARRAY_RANGE_REF:
1986 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1987 TREE_OPERAND (arg1, 0), 0)
1988 && operand_equal_p (TREE_OPERAND (arg0, 1),
1989 TREE_OPERAND (arg1, 1), 0));
1991 case BIT_FIELD_REF:
1992 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1993 TREE_OPERAND (arg1, 0), 0)
1994 && operand_equal_p (TREE_OPERAND (arg0, 1),
1995 TREE_OPERAND (arg1, 1), 0)
1996 && operand_equal_p (TREE_OPERAND (arg0, 2),
1997 TREE_OPERAND (arg1, 2), 0));
1998 default:
1999 return 0;
2002 case 'e':
2003 switch (TREE_CODE (arg0))
2005 case ADDR_EXPR:
2006 case TRUTH_NOT_EXPR:
2007 return operand_equal_p (TREE_OPERAND (arg0, 0),
2008 TREE_OPERAND (arg1, 0), 0);
2010 case RTL_EXPR:
2011 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2013 case CALL_EXPR:
2014 /* If the CALL_EXPRs call different functions, then they
2015 clearly cannot be equal. */
2016 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2017 TREE_OPERAND (arg1, 0), 0))
2018 return 0;
2020 /* Only consider const functions equivalent. */
2021 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
2023 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
2024 if (! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2025 return 0;
2027 else
2028 return 0;
2030 /* Now see if all the arguments are the same. operand_equal_p
2031 does not handle TREE_LIST, so we walk the operands here
2032 feeding them to operand_equal_p. */
2033 arg0 = TREE_OPERAND (arg0, 1);
2034 arg1 = TREE_OPERAND (arg1, 1);
2035 while (arg0 && arg1)
2037 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2038 return 0;
2040 arg0 = TREE_CHAIN (arg0);
2041 arg1 = TREE_CHAIN (arg1);
2044 /* If we get here and both argument lists are exhausted
2045 then the CALL_EXPRs are equal. */
2046 return ! (arg0 || arg1);
2048 default:
2049 return 0;
2052 case 'd':
2053 /* Consider __builtin_sqrt equal to sqrt. */
2054 return TREE_CODE (arg0) == FUNCTION_DECL
2055 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2056 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2057 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2059 default:
2060 return 0;
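/* [Editor's sketch -- not part of fold-const.c.  A minimal standalone
   analogue of operand_equal_p's commutative case above: structural
   equality of expression trees that also accepts the operands of a
   commutative operator in either order.  The node/expr_equal names
   are invented for illustration.] */
#include <assert.h>
#include <stddef.h>

struct node { char op; int val; struct node *lhs, *rhs; };

static int
expr_equal (const struct node *a, const struct node *b)
{
  if (a == NULL || b == NULL)
    return a == b;
  if (a->op != b->op)
    return 0;
  if (a->op == 'c')                     /* constant leaf */
    return a->val == b->val;
  if (expr_equal (a->lhs, b->lhs) && expr_equal (a->rhs, b->rhs))
    return 1;
  /* For commutative ops, allow the other order.  */
  return (a->op == '+'
          && expr_equal (a->lhs, b->rhs)
          && expr_equal (a->rhs, b->lhs));
}

int
main (void)
{
  struct node c1 = { 'c', 1, NULL, NULL }, c2 = { 'c', 2, NULL, NULL };
  struct node s1 = { '+', 0, &c1, &c2 }, s2 = { '+', 0, &c2, &c1 };
  assert (expr_equal (&s1, &s2));       /* 1 + 2 matches 2 + 1 */
  return 0;
}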
2064 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2065 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2067 When in doubt, return 0. */
2069 static int
2070 operand_equal_for_comparison_p (arg0, arg1, other)
2071 tree arg0, arg1;
2072 tree other;
2074 int unsignedp1, unsignedpo;
2075 tree primarg0, primarg1, primother;
2076 unsigned int correct_width;
2078 if (operand_equal_p (arg0, arg1, 0))
2079 return 1;
2081 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2082 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2083 return 0;
2085 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2086 and see if the inner values are the same. This removes any
2087 signedness comparison, which doesn't matter here. */
2088 primarg0 = arg0, primarg1 = arg1;
2089 STRIP_NOPS (primarg0);
2090 STRIP_NOPS (primarg1);
2091 if (operand_equal_p (primarg0, primarg1, 0))
2092 return 1;
2094 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2095 actual comparison operand, ARG0.
2097 First throw away any conversions to wider types
2098 already present in the operands. */
2100 primarg1 = get_narrower (arg1, &unsignedp1);
2101 primother = get_narrower (other, &unsignedpo);
2103 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2104 if (unsignedp1 == unsignedpo
2105 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2106 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2108 tree type = TREE_TYPE (arg0);
2110 /* Make sure shorter operand is extended the right way
2111 to match the longer operand. */
2112 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2113 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2115 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2116 return 1;
2119 return 0;
2122 /* See if ARG is an expression that is either a comparison or is performing
2123 arithmetic on comparisons. The comparisons must only be comparing
2124 two different values, which will be stored in *CVAL1 and *CVAL2; if
2125 they are nonzero it means that some operands have already been found.
2126 No variables may be used anywhere else in the expression except in the
2127 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2128 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2130 If this is true, return 1. Otherwise, return zero. */
2132 static int
2133 twoval_comparison_p (arg, cval1, cval2, save_p)
2134 tree arg;
2135 tree *cval1, *cval2;
2136 int *save_p;
2138 enum tree_code code = TREE_CODE (arg);
2139 char class = TREE_CODE_CLASS (code);
2141 /* We can handle some of the 'e' cases here. */
2142 if (class == 'e' && code == TRUTH_NOT_EXPR)
2143 class = '1';
2144 else if (class == 'e'
2145 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2146 || code == COMPOUND_EXPR))
2147 class = '2';
2149 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2150 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2152 /* If we've already found a CVAL1 or CVAL2, this expression is
2153 too complex to handle. */
2154 if (*cval1 || *cval2)
2155 return 0;
2157 class = '1';
2158 *save_p = 1;
2161 switch (class)
2163 case '1':
2164 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2166 case '2':
2167 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2168 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2169 cval1, cval2, save_p));
2171 case 'c':
2172 return 1;
2174 case 'e':
2175 if (code == COND_EXPR)
2176 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2177 cval1, cval2, save_p)
2178 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2179 cval1, cval2, save_p)
2180 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2181 cval1, cval2, save_p));
2182 return 0;
2184 case '<':
2185 /* First see if we can handle the first operand, then the second. For
2186 the second operand, we know *CVAL1 can't be zero. It must be that
2187 one side of the comparison is each of the values; test for the
2188 case where this isn't true by failing if the two operands
2189 are the same. */
2191 if (operand_equal_p (TREE_OPERAND (arg, 0),
2192 TREE_OPERAND (arg, 1), 0))
2193 return 0;
2195 if (*cval1 == 0)
2196 *cval1 = TREE_OPERAND (arg, 0);
2197 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2199 else if (*cval2 == 0)
2200 *cval2 = TREE_OPERAND (arg, 0);
2201 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2203 else
2204 return 0;
2206 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2208 else if (*cval2 == 0)
2209 *cval2 = TREE_OPERAND (arg, 1);
2210 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2212 else
2213 return 0;
2215 return 1;
2217 default:
2218 return 0;
2222 /* ARG is a tree that is known to contain just arithmetic operations and
2223 comparisons. Evaluate the operations in the tree substituting NEW0 for
2224 any occurrence of OLD0 as an operand of a comparison and likewise for
2225 NEW1 and OLD1. */
2227 static tree
2228 eval_subst (arg, old0, new0, old1, new1)
2229 tree arg;
2230 tree old0, new0, old1, new1;
2232 tree type = TREE_TYPE (arg);
2233 enum tree_code code = TREE_CODE (arg);
2234 char class = TREE_CODE_CLASS (code);
2236 /* We can handle some of the 'e' cases here. */
2237 if (class == 'e' && code == TRUTH_NOT_EXPR)
2238 class = '1';
2239 else if (class == 'e'
2240 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2241 class = '2';
2243 switch (class)
2245 case '1':
2246 return fold (build1 (code, type,
2247 eval_subst (TREE_OPERAND (arg, 0),
2248 old0, new0, old1, new1)));
2250 case '2':
2251 return fold (build (code, type,
2252 eval_subst (TREE_OPERAND (arg, 0),
2253 old0, new0, old1, new1),
2254 eval_subst (TREE_OPERAND (arg, 1),
2255 old0, new0, old1, new1)));
2257 case 'e':
2258 switch (code)
2260 case SAVE_EXPR:
2261 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2263 case COMPOUND_EXPR:
2264 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2266 case COND_EXPR:
2267 return fold (build (code, type,
2268 eval_subst (TREE_OPERAND (arg, 0),
2269 old0, new0, old1, new1),
2270 eval_subst (TREE_OPERAND (arg, 1),
2271 old0, new0, old1, new1),
2272 eval_subst (TREE_OPERAND (arg, 2),
2273 old0, new0, old1, new1)));
2274 default:
2275 break;
2277 /* fall through - ??? */
2279 case '<':
2281 tree arg0 = TREE_OPERAND (arg, 0);
2282 tree arg1 = TREE_OPERAND (arg, 1);
2284 /* We need to check both for exact equality and tree equality. The
2285 former will be true if the operand has a side-effect. In that
2286 case, we know the operand occurred exactly once. */
2288 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2289 arg0 = new0;
2290 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2291 arg0 = new1;
2293 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2294 arg1 = new0;
2295 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2296 arg1 = new1;
2298 return fold (build (code, type, arg0, arg1));
2301 default:
2302 return arg;
2306 /* Return a tree for the case when the result of an expression is RESULT
2307 converted to TYPE and OMITTED was previously an operand of the expression
2308 but is now not needed (e.g., we folded OMITTED * 0).
2310 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2311 the conversion of RESULT to TYPE. */
2313 tree
2314 omit_one_operand (type, result, omitted)
2315 tree type, result, omitted;
2317 tree t = convert (type, result);
2319 if (TREE_SIDE_EFFECTS (omitted))
2320 return build (COMPOUND_EXPR, type, omitted, t);
2322 return non_lvalue (t);
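/* [Editor's sketch -- not part of fold-const.c.  In plain C, the
   COMPOUND_EXPR that omit_one_operand builds is the comma operator:
   folding "f () * 0" must still evaluate f () for its side effect,
   yielding "(f (), 0)".] */
#include <assert.h>

static int calls;
static int f (void) { return ++calls; }

int
main (void)
{
  int r = (f (), 0);            /* evaluate f (), then yield 0 */
  assert (r == 0 && calls == 1);
  return 0;
}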
2325 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2327 static tree
2328 pedantic_omit_one_operand (type, result, omitted)
2329 tree type, result, omitted;
2331 tree t = convert (type, result);
2333 if (TREE_SIDE_EFFECTS (omitted))
2334 return build (COMPOUND_EXPR, type, omitted, t);
2336 return pedantic_non_lvalue (t);
2339 /* Return a simplified tree node for the truth-negation of ARG. This
2340 never alters ARG itself. We assume that ARG is an operation that
2341 returns a truth value (0 or 1). */
2343 tree
2344 invert_truthvalue (arg)
2345 tree arg;
2347 tree type = TREE_TYPE (arg);
2348 enum tree_code code = TREE_CODE (arg);
2350 if (code == ERROR_MARK)
2351 return arg;
2353 /* If this is a comparison, we can simply invert it, except for
2354 floating-point non-equality comparisons, in which case we just
2355 enclose a TRUTH_NOT_EXPR around what we have. */
2357 if (TREE_CODE_CLASS (code) == '<')
2359 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2360 && !flag_unsafe_math_optimizations
2361 && code != NE_EXPR
2362 && code != EQ_EXPR)
2363 return build1 (TRUTH_NOT_EXPR, type, arg);
2364 else
2365 return build (invert_tree_comparison (code), type,
2366 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2369 switch (code)
2371 case INTEGER_CST:
2372 return convert (type, build_int_2 (integer_zerop (arg), 0));
2374 case TRUTH_AND_EXPR:
2375 return build (TRUTH_OR_EXPR, type,
2376 invert_truthvalue (TREE_OPERAND (arg, 0)),
2377 invert_truthvalue (TREE_OPERAND (arg, 1)));
2379 case TRUTH_OR_EXPR:
2380 return build (TRUTH_AND_EXPR, type,
2381 invert_truthvalue (TREE_OPERAND (arg, 0)),
2382 invert_truthvalue (TREE_OPERAND (arg, 1)));
2384 case TRUTH_XOR_EXPR:
2385 /* Here we can invert either operand. We invert the first operand
2386 unless the second operand is a TRUTH_NOT_EXPR in which case our
2387 result is the XOR of the first operand with the inside of the
2388 negation of the second operand. */
2390 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2391 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2392 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2393 else
2394 return build (TRUTH_XOR_EXPR, type,
2395 invert_truthvalue (TREE_OPERAND (arg, 0)),
2396 TREE_OPERAND (arg, 1));
2398 case TRUTH_ANDIF_EXPR:
2399 return build (TRUTH_ORIF_EXPR, type,
2400 invert_truthvalue (TREE_OPERAND (arg, 0)),
2401 invert_truthvalue (TREE_OPERAND (arg, 1)));
2403 case TRUTH_ORIF_EXPR:
2404 return build (TRUTH_ANDIF_EXPR, type,
2405 invert_truthvalue (TREE_OPERAND (arg, 0)),
2406 invert_truthvalue (TREE_OPERAND (arg, 1)));
2408 case TRUTH_NOT_EXPR:
2409 return TREE_OPERAND (arg, 0);
2411 case COND_EXPR:
2412 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2413 invert_truthvalue (TREE_OPERAND (arg, 1)),
2414 invert_truthvalue (TREE_OPERAND (arg, 2)));
2416 case COMPOUND_EXPR:
2417 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2418 invert_truthvalue (TREE_OPERAND (arg, 1)));
2420 case WITH_RECORD_EXPR:
2421 return build (WITH_RECORD_EXPR, type,
2422 invert_truthvalue (TREE_OPERAND (arg, 0)),
2423 TREE_OPERAND (arg, 1));
2425 case NON_LVALUE_EXPR:
2426 return invert_truthvalue (TREE_OPERAND (arg, 0));
2428 case NOP_EXPR:
2429 case CONVERT_EXPR:
2430 case FLOAT_EXPR:
2431 return build1 (TREE_CODE (arg), type,
2432 invert_truthvalue (TREE_OPERAND (arg, 0)));
2434 case BIT_AND_EXPR:
2435 if (!integer_onep (TREE_OPERAND (arg, 1)))
2436 break;
2437 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2439 case SAVE_EXPR:
2440 return build1 (TRUTH_NOT_EXPR, type, arg);
2442 case CLEANUP_POINT_EXPR:
2443 return build1 (CLEANUP_POINT_EXPR, type,
2444 invert_truthvalue (TREE_OPERAND (arg, 0)));
2446 default:
2447 break;
2449 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2450 abort ();
2451 return build1 (TRUTH_NOT_EXPR, type, arg);
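/* [Editor's sketch -- not part of fold-const.c.  The De Morgan and XOR
   rewrites performed above, checked exhaustively over truth values.] */
#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
        assert (!(a && b) == (!a || !b)); /* TRUTH_ANDIF -> TRUTH_ORIF */
        assert (!(a || b) == (!a && !b)); /* TRUTH_ORIF -> TRUTH_ANDIF */
        assert (!(a ^ b) == (!a ^ b));    /* TRUTH_XOR: invert one operand */
      }
  return 0;
}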
2454 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2455 operands are another bit-wise operation with a common input. If so,
2456 distribute the bit operations to save an operation and possibly two if
2457 constants are involved. For example, convert
2458 (A | B) & (A | C) into A | (B & C)
2459 Further simplification will occur if B and C are constants.
2461 If this optimization cannot be done, 0 will be returned. */
2463 static tree
2464 distribute_bit_expr (code, type, arg0, arg1)
2465 enum tree_code code;
2466 tree type;
2467 tree arg0, arg1;
2469 tree common;
2470 tree left, right;
2472 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2473 || TREE_CODE (arg0) == code
2474 || (TREE_CODE (arg0) != BIT_AND_EXPR
2475 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2476 return 0;
2478 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2480 common = TREE_OPERAND (arg0, 0);
2481 left = TREE_OPERAND (arg0, 1);
2482 right = TREE_OPERAND (arg1, 1);
2484 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2486 common = TREE_OPERAND (arg0, 0);
2487 left = TREE_OPERAND (arg0, 1);
2488 right = TREE_OPERAND (arg1, 0);
2490 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2492 common = TREE_OPERAND (arg0, 1);
2493 left = TREE_OPERAND (arg0, 0);
2494 right = TREE_OPERAND (arg1, 1);
2496 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2498 common = TREE_OPERAND (arg0, 1);
2499 left = TREE_OPERAND (arg0, 0);
2500 right = TREE_OPERAND (arg1, 0);
2502 else
2503 return 0;
2505 return fold (build (TREE_CODE (arg0), type, common,
2506 fold (build (code, type, left, right))));
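/* [Editor's sketch -- not part of fold-const.c.  The distribution law
   used above and its dual, spot-checked on arbitrary bit patterns.] */
#include <assert.h>

int
main (void)
{
  unsigned a = 0xf0f0, b = 0x0ff0, c = 0x00ff;
  assert (((a | b) & (a | c)) == (a | (b & c)));
  assert (((a & b) | (a & c)) == (a & (b | c)));
  return 0;
}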
2509 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2510 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2512 static tree
2513 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2514 tree inner;
2515 tree type;
2516 int bitsize, bitpos;
2517 int unsignedp;
2519 tree result = build (BIT_FIELD_REF, type, inner,
2520 size_int (bitsize), bitsize_int (bitpos));
2522 TREE_UNSIGNED (result) = unsignedp;
2524 return result;
2527 /* Optimize a bit-field compare.
2529 There are two cases: First is a compare against a constant and the
2530 second is a comparison of two items where the fields are at the same
2531 bit position relative to the start of a chunk (byte, halfword, word)
2532 large enough to contain it. In these cases we can avoid the shift
2533 implicit in bitfield extractions.
2535 For constants, we emit a compare of the shifted constant with the
2536 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2537 compared. For two fields at the same position, we do the ANDs with the
2538 similar mask and compare the result of the ANDs.
2540 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2541 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2542 are the left and right operands of the comparison, respectively.
2544 If the optimization described above can be done, we return the resulting
2545 tree. Otherwise we return zero. */
2547 static tree
2548 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2549 enum tree_code code;
2550 tree compare_type;
2551 tree lhs, rhs;
2553 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2554 tree type = TREE_TYPE (lhs);
2555 tree signed_type, unsigned_type;
2556 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2557 enum machine_mode lmode, rmode, nmode;
2558 int lunsignedp, runsignedp;
2559 int lvolatilep = 0, rvolatilep = 0;
2560 tree linner, rinner = NULL_TREE;
2561 tree mask;
2562 tree offset;
2564 /* Get all the information about the extractions being done. If the bit size
2565 is the same as the size of the underlying object, we aren't doing an
2566 extraction at all and so can do nothing. We also don't want to
2567 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2568 then will no longer be able to replace it. */
2569 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2570 &lunsignedp, &lvolatilep);
2571 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2572 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2573 return 0;
2575 if (!const_p)
2577 /* If this is not a constant, we can only do something if bit positions,
2578 sizes, and signedness are the same. */
2579 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2580 &runsignedp, &rvolatilep);
2582 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2583 || lunsignedp != runsignedp || offset != 0
2584 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2585 return 0;
2588 /* See if we can find a mode to refer to this field. We should be able to,
2589 but fail if we can't. */
2590 nmode = get_best_mode (lbitsize, lbitpos,
2591 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2592 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2593 TYPE_ALIGN (TREE_TYPE (rinner))),
2594 word_mode, lvolatilep || rvolatilep);
2595 if (nmode == VOIDmode)
2596 return 0;
2598 /* Set signed and unsigned types of the precision of this mode for the
2599 shifts below. */
2600 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2601 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2603 /* Compute the bit position and size for the new reference and our offset
2604 within it. If the new reference is the same size as the original, we
2605 won't optimize anything, so return zero. */
2606 nbitsize = GET_MODE_BITSIZE (nmode);
2607 nbitpos = lbitpos & ~ (nbitsize - 1);
2608 lbitpos -= nbitpos;
2609 if (nbitsize == lbitsize)
2610 return 0;
2612 if (BYTES_BIG_ENDIAN)
2613 lbitpos = nbitsize - lbitsize - lbitpos;
2615 /* Make the mask to be used against the extracted field. */
2616 mask = build_int_2 (~0, ~0);
2617 TREE_TYPE (mask) = unsigned_type;
2618 force_fit_type (mask, 0);
2619 mask = convert (unsigned_type, mask);
2620 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2621 mask = const_binop (RSHIFT_EXPR, mask,
2622 size_int (nbitsize - lbitsize - lbitpos), 0);
2624 if (! const_p)
2625 /* If not comparing with constant, just rework the comparison
2626 and return. */
2627 return build (code, compare_type,
2628 build (BIT_AND_EXPR, unsigned_type,
2629 make_bit_field_ref (linner, unsigned_type,
2630 nbitsize, nbitpos, 1),
2631 mask),
2632 build (BIT_AND_EXPR, unsigned_type,
2633 make_bit_field_ref (rinner, unsigned_type,
2634 nbitsize, nbitpos, 1),
2635 mask));
2637 /* Otherwise, we are handling the constant case. See if the constant is too
2638 big for the field. Warn and return a tree for 0 (false) if so. We do
2639 this not only for its own sake, but to avoid having to test for this
2640 error case below. If we didn't, we might generate wrong code.
2642 For unsigned fields, the constant shifted right by the field length should
2643 be all zero. For signed fields, the high-order bits should agree with
2644 the sign bit. */
2646 if (lunsignedp)
2648 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2649 convert (unsigned_type, rhs),
2650 size_int (lbitsize), 0)))
2652 warning ("comparison is always %d due to width of bit-field",
2653 code == NE_EXPR);
2654 return convert (compare_type,
2655 (code == NE_EXPR
2656 ? integer_one_node : integer_zero_node));
2659 else
2661 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2662 size_int (lbitsize - 1), 0);
2663 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2665 warning ("comparison is always %d due to width of bit-field",
2666 code == NE_EXPR);
2667 return convert (compare_type,
2668 (code == NE_EXPR
2669 ? integer_one_node : integer_zero_node));
2673 /* Single-bit compares should always be against zero. */
2674 if (lbitsize == 1 && ! integer_zerop (rhs))
2676 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2677 rhs = convert (type, integer_zero_node);
2680 /* Make a new bitfield reference, shift the constant over the
2681 appropriate number of bits and mask it with the computed mask
2682 (in case this was a signed field). If we changed it, make a new one. */
2683 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2684 if (lvolatilep)
2686 TREE_SIDE_EFFECTS (lhs) = 1;
2687 TREE_THIS_VOLATILE (lhs) = 1;
2690 rhs = fold (const_binop (BIT_AND_EXPR,
2691 const_binop (LSHIFT_EXPR,
2692 convert (unsigned_type, rhs),
2693 size_int (lbitpos), 0),
2694 mask, 0));
2696 return build (code, compare_type,
2697 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2698 rhs);
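/* [Editor's sketch -- not part of fold-const.c.  The shape of the
   constant-case rewrite above: instead of extracting a 3-bit field at
   bit position 4 (a shift plus a mask) and comparing it with 5, AND
   the containing word with the field mask in place and compare against
   the constant shifted into position.  Field layout chosen for
   illustration.] */
#include <assert.h>

int
main (void)
{
  unsigned word = 0x57u;                   /* bits 4..6 hold the value 5 */
  unsigned mask = 0x7u << 4;               /* field mask, in place */
  unsigned extracted = (word >> 4) & 0x7u; /* the shift we avoid */
  assert ((extracted == 5) == ((word & mask) == (5u << 4)));
  return 0;
}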
2701 /* Subroutine for fold_truthop: decode a field reference.
2703 If EXP is a comparison reference, we return the innermost reference.
2705 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2706 set to the starting bit number.
2708 If the innermost field can be completely contained in a mode-sized
2709 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2711 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2712 otherwise it is not changed.
2714 *PUNSIGNEDP is set to the signedness of the field.
2716 *PMASK is set to the mask used. This is either contained in a
2717 BIT_AND_EXPR or derived from the width of the field.
2719 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2721 Return 0 if this is not a component reference or is one that we can't
2722 do anything with. */
2724 static tree
2725 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2726 pvolatilep, pmask, pand_mask)
2727 tree exp;
2728 HOST_WIDE_INT *pbitsize, *pbitpos;
2729 enum machine_mode *pmode;
2730 int *punsignedp, *pvolatilep;
2731 tree *pmask;
2732 tree *pand_mask;
2734 tree and_mask = 0;
2735 tree mask, inner, offset;
2736 tree unsigned_type;
2737 unsigned int precision;
2739 /* All the optimizations using this function assume integer fields.
2740 There are problems with FP fields since the type_for_size call
2741 below can fail for, e.g., XFmode. */
2742 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2743 return 0;
2745 STRIP_NOPS (exp);
2747 if (TREE_CODE (exp) == BIT_AND_EXPR)
2749 and_mask = TREE_OPERAND (exp, 1);
2750 exp = TREE_OPERAND (exp, 0);
2751 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2752 if (TREE_CODE (and_mask) != INTEGER_CST)
2753 return 0;
2756 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2757 punsignedp, pvolatilep);
2758 if ((inner == exp && and_mask == 0)
2759 || *pbitsize < 0 || offset != 0
2760 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2761 return 0;
2763 /* Compute the mask to access the bitfield. */
2764 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2765 precision = TYPE_PRECISION (unsigned_type);
2767 mask = build_int_2 (~0, ~0);
2768 TREE_TYPE (mask) = unsigned_type;
2769 force_fit_type (mask, 0);
2770 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2771 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2773 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2774 if (and_mask != 0)
2775 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2776 convert (unsigned_type, and_mask), mask));
2778 *pmask = mask;
2779 *pand_mask = and_mask;
2780 return inner;
2783 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2784 bit positions. */
2786 static int
2787 all_ones_mask_p (mask, size)
2788 tree mask;
2789 int size;
2791 tree type = TREE_TYPE (mask);
2792 unsigned int precision = TYPE_PRECISION (type);
2793 tree tmask;
2795 tmask = build_int_2 (~0, ~0);
2796 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2797 force_fit_type (tmask, 0);
2798 return
2799 tree_int_cst_equal (mask,
2800 const_binop (RSHIFT_EXPR,
2801 const_binop (LSHIFT_EXPR, tmask,
2802 size_int (precision - size),
2803 0),
2804 size_int (precision - size), 0));
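/* [Editor's sketch -- not part of fold-const.c.  The shift pair used
   above builds a mask of SIZE low-order ones within PRECISION bits.] */
#include <assert.h>

int
main (void)
{
  unsigned precision = 32, size = 12;
  unsigned mask = (~0u << (precision - size)) >> (precision - size);
  assert (mask == 0xfffu);      /* twelve low-order one bits */
  return 0;
}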
2807 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
2808 represents the sign bit of EXP's type. If EXP represents a sign
2809 or zero extension, also test VAL against the unextended type.
2810 The return value is the (sub)expression whose sign bit is VAL,
2811 or NULL_TREE otherwise. */
2813 static tree
2814 sign_bit_p (exp, val)
2815 tree exp;
2816 tree val;
2818 unsigned HOST_WIDE_INT lo;
2819 HOST_WIDE_INT hi;
2820 int width;
2821 tree t;
2823 /* Tree EXP must have an integral type. */
2824 t = TREE_TYPE (exp);
2825 if (! INTEGRAL_TYPE_P (t))
2826 return NULL_TREE;
2828 /* Tree VAL must be an integer constant. */
2829 if (TREE_CODE (val) != INTEGER_CST
2830 || TREE_CONSTANT_OVERFLOW (val))
2831 return NULL_TREE;
2833 width = TYPE_PRECISION (t);
2834 if (width > HOST_BITS_PER_WIDE_INT)
2836 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2837 lo = 0;
2839 else
2841 hi = 0;
2842 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2845 if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
2846 return exp;
2848 /* Handle extension from a narrower type. */
2849 if (TREE_CODE (exp) == NOP_EXPR
2850 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2851 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2853 return NULL_TREE;
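/* [Editor's sketch -- not part of fold-const.c.  The sign-bit constant
   the routine above looks for, here for a 32-bit type; the INT_MIN
   check assumes a 32-bit two's complement int.] */
#include <assert.h>
#include <limits.h>

int
main (void)
{
  unsigned width = 32;
  unsigned sign_bit = 1u << (width - 1);
  assert (sign_bit == 0x80000000u);
  assert ((unsigned) INT_MIN == sign_bit);
  return 0;
}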
2856 /* Subroutine for fold_truthop: determine if an operand is simple enough
2857 to be evaluated unconditionally. */
2859 static int
2860 simple_operand_p (exp)
2861 tree exp;
2863 /* Strip any conversions that don't change the machine mode. */
2864 while ((TREE_CODE (exp) == NOP_EXPR
2865 || TREE_CODE (exp) == CONVERT_EXPR)
2866 && (TYPE_MODE (TREE_TYPE (exp))
2867 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2868 exp = TREE_OPERAND (exp, 0);
2870 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2871 || (DECL_P (exp)
2872 && ! TREE_ADDRESSABLE (exp)
2873 && ! TREE_THIS_VOLATILE (exp)
2874 && ! DECL_NONLOCAL (exp)
2875 /* Don't regard global variables as simple. They may be
2876 allocated in ways unknown to the compiler (shared memory,
2877 #pragma weak, etc). */
2878 && ! TREE_PUBLIC (exp)
2879 && ! DECL_EXTERNAL (exp)
2880 /* Loading a static variable is unduly expensive, but global
2881 registers aren't expensive. */
2882 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2885 /* The following functions are subroutines to fold_range_test and allow it to
2886 try to change a logical combination of comparisons into a range test.
2888 For example, both
2889 X == 2 || X == 3 || X == 4 || X == 5
2890 and
2891 X >= 2 && X <= 5
2892 are converted to
2893 (unsigned) (X - 2) <= 3
2895 We describe each set of comparisons as being either inside or outside
2896 a range, using a variable named like IN_P, and then describe the
2897 range with a lower and upper bound. If one of the bounds is omitted,
2898 it represents either the highest or lowest value of the type.
2900 In the comments below, we represent a range by two numbers in brackets
2901 preceded by a "+" to designate being inside that range, or a "-" to
2902 designate being outside that range, so the condition can be inverted by
2903 flipping the prefix. An omitted bound is represented by a "-". For
2904 example, "- [-, 10]" means being outside the range starting at the lowest
2905 possible value and ending at 10, in other words, being greater than 10.
2906 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2907 always false.
2909 We set up things so that the missing bounds are handled in a consistent
2910 manner so neither a missing bound nor "true" and "false" need to be
2911 handled using a special case. */
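/* [Editor's sketch -- not part of fold-const.c.  The example rewrite
   from the comment above, checked over a window of values: both the
   equality chain and the bounded test agree with the single unsigned
   comparison.] */
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      int chain = (x == 2 || x == 3 || x == 4 || x == 5);
      int band  = (x >= 2 && x <= 5);
      int range = ((unsigned) (x - 2) <= 3);
      assert (chain == range && band == range);
    }
  return 0;
}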
2913 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2914 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2915 and UPPER1_P are nonzero if the respective argument is an upper bound
2916 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2917 must be specified for a comparison. ARG1 will be converted to ARG0's
2918 type if both are specified. */
2920 static tree
2921 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2922 enum tree_code code;
2923 tree type;
2924 tree arg0, arg1;
2925 int upper0_p, upper1_p;
2927 tree tem;
2928 int result;
2929 int sgn0, sgn1;
2931 /* If neither arg represents infinity, do the normal operation.
2932 Else, if not a comparison, return infinity. Else handle the special
2933 comparison rules. Note that most of the cases below won't occur, but
2934 are handled for consistency. */
2936 if (arg0 != 0 && arg1 != 0)
2938 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2939 arg0, convert (TREE_TYPE (arg0), arg1)));
2940 STRIP_NOPS (tem);
2941 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2944 if (TREE_CODE_CLASS (code) != '<')
2945 return 0;
2947 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2948 for neither. In real maths, we cannot assume open ended ranges are
2949 the same. But, this is computer arithmetic, where numbers are finite.
2950 We can therefore make the transformation of any unbounded range with
2951 the value Z, Z being greater than any representable number. This permits
2952 us to treat unbounded ranges as equal. */
2953 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2954 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2955 switch (code)
2957 case EQ_EXPR:
2958 result = sgn0 == sgn1;
2959 break;
2960 case NE_EXPR:
2961 result = sgn0 != sgn1;
2962 break;
2963 case LT_EXPR:
2964 result = sgn0 < sgn1;
2965 break;
2966 case LE_EXPR:
2967 result = sgn0 <= sgn1;
2968 break;
2969 case GT_EXPR:
2970 result = sgn0 > sgn1;
2971 break;
2972 case GE_EXPR:
2973 result = sgn0 >= sgn1;
2974 break;
2975 default:
2976 abort ();
2979 return convert (type, result ? integer_one_node : integer_zero_node);
2982 /* Given EXP, a logical expression, set the range it is testing into
2983 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2984 actually being tested. *PLOW and *PHIGH will be made of the same type
2985 as the returned expression. If EXP is not a comparison, we will most
2986 likely not be returning a useful value and range. */
2988 static tree
2989 make_range (exp, pin_p, plow, phigh)
2990 tree exp;
2991 int *pin_p;
2992 tree *plow, *phigh;
2994 enum tree_code code;
2995 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2996 tree orig_type = NULL_TREE;
2997 int in_p, n_in_p;
2998 tree low, high, n_low, n_high;
3000 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3001 and see if we can refine the range. Some of the cases below may not
3002 happen, but it doesn't seem worth worrying about this. We "continue"
3003 the outer loop when we've changed something; otherwise we "break"
3004 the switch, which will "break" the while. */
3006 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
3008 while (1)
3010 code = TREE_CODE (exp);
3012 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3014 arg0 = TREE_OPERAND (exp, 0);
3015 if (TREE_CODE_CLASS (code) == '<'
3016 || TREE_CODE_CLASS (code) == '1'
3017 || TREE_CODE_CLASS (code) == '2')
3018 type = TREE_TYPE (arg0);
3019 if (TREE_CODE_CLASS (code) == '2'
3020 || TREE_CODE_CLASS (code) == '<'
3021 || (TREE_CODE_CLASS (code) == 'e'
3022 && TREE_CODE_LENGTH (code) > 1))
3023 arg1 = TREE_OPERAND (exp, 1);
3026 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3027 lose a cast by accident. */
3028 if (type != NULL_TREE && orig_type == NULL_TREE)
3029 orig_type = type;
3031 switch (code)
3033 case TRUTH_NOT_EXPR:
3034 in_p = ! in_p, exp = arg0;
3035 continue;
3037 case EQ_EXPR: case NE_EXPR:
3038 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3039 /* We can only do something if the range is testing for zero
3040 and if the second operand is an integer constant. Note that
3041 saying something is "in" the range we make is done by
3042 complementing IN_P since it will be set in the initial case of
3043 being not equal to zero; "out" is leaving it alone. */
3044 if (low == 0 || high == 0
3045 || ! integer_zerop (low) || ! integer_zerop (high)
3046 || TREE_CODE (arg1) != INTEGER_CST)
3047 break;
3049 switch (code)
3051 case NE_EXPR: /* - [c, c] */
3052 low = high = arg1;
3053 break;
3054 case EQ_EXPR: /* + [c, c] */
3055 in_p = ! in_p, low = high = arg1;
3056 break;
3057 case GT_EXPR: /* - [-, c] */
3058 low = 0, high = arg1;
3059 break;
3060 case GE_EXPR: /* + [c, -] */
3061 in_p = ! in_p, low = arg1, high = 0;
3062 break;
3063 case LT_EXPR: /* - [c, -] */
3064 low = arg1, high = 0;
3065 break;
3066 case LE_EXPR: /* + [-, c] */
3067 in_p = ! in_p, low = 0, high = arg1;
3068 break;
3069 default:
3070 abort ();
3073 exp = arg0;
3075 /* If this is an unsigned comparison, we also know that EXP is
3076 greater than or equal to zero. We base the range tests we make
3077 on that fact, so we record it here so we can parse existing
3078 range tests. */
3079 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3081 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3082 1, convert (type, integer_zero_node),
3083 NULL_TREE))
3084 break;
3086 in_p = n_in_p, low = n_low, high = n_high;
3088 /* If the high bound is missing, but we
3089 have a low bound, reverse the range so
3090 it goes from zero to the low bound minus 1. */
3091 if (high == 0 && low)
3093 in_p = ! in_p;
3094 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3095 integer_one_node, 0);
3096 low = convert (type, integer_zero_node);
3099 continue;
3101 case NEGATE_EXPR:
3102 /* (-x) IN [a,b] -> x in [-b, -a] */
3103 n_low = range_binop (MINUS_EXPR, type,
3104 convert (type, integer_zero_node), 0, high, 1);
3105 n_high = range_binop (MINUS_EXPR, type,
3106 convert (type, integer_zero_node), 0, low, 0);
3107 low = n_low, high = n_high;
3108 exp = arg0;
3109 continue;
3111 case BIT_NOT_EXPR:
3112 /* ~ X -> -X - 1 */
3113 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3114 convert (type, integer_one_node));
3115 continue;
3117 case PLUS_EXPR: case MINUS_EXPR:
3118 if (TREE_CODE (arg1) != INTEGER_CST)
3119 break;
3121 /* If EXP is signed, any overflow in the computation is undefined,
3122 so we don't worry about it so long as our computations on
3123 the bounds don't overflow. For unsigned, overflow is defined
3124 and this is exactly the right thing. */
3125 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3126 type, low, 0, arg1, 0);
3127 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3128 type, high, 1, arg1, 0);
3129 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3130 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3131 break;
3133 /* Check for an unsigned range which has wrapped around the maximum
3134 value thus making n_high < n_low, and normalize it. */
3135 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3137 low = range_binop (PLUS_EXPR, type, n_high, 0,
3138 integer_one_node, 0);
3139 high = range_binop (MINUS_EXPR, type, n_low, 0,
3140 integer_one_node, 0);
3142 /* If the range is of the form +/- [ x+1, x ], we won't
3143 be able to normalize it. But then, it represents the
3144 whole range or the empty set, so make it
3145 +/- [ -, - ]. */
3146 if (tree_int_cst_equal (n_low, low)
3147 && tree_int_cst_equal (n_high, high))
3148 low = high = 0;
3149 else
3150 in_p = ! in_p;
3152 else
3153 low = n_low, high = n_high;
3155 exp = arg0;
3156 continue;
3158 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3159 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3160 break;
3162 if (! INTEGRAL_TYPE_P (type)
3163 || (low != 0 && ! int_fits_type_p (low, type))
3164 || (high != 0 && ! int_fits_type_p (high, type)))
3165 break;
3167 n_low = low, n_high = high;
3169 if (n_low != 0)
3170 n_low = convert (type, n_low);
3172 if (n_high != 0)
3173 n_high = convert (type, n_high);
3175 /* If we're converting from an unsigned to a signed type,
3176 we will be doing the comparison as unsigned. The tests above
3177 have already verified that LOW and HIGH are both positive.
3179 So we have to make sure that the original unsigned value will
3180 be interpreted as positive. */
3181 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3183 tree equiv_type = (*lang_hooks.types.type_for_mode)
3184 (TYPE_MODE (type), 1);
3185 tree high_positive;
3187 /* A range without an upper bound is, naturally, unbounded.
3188 Since convert would have cropped a very large value, use
3189 the max value for the destination type. */
3190 high_positive
3191 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3192 : TYPE_MAX_VALUE (type);
3194 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3195 high_positive = fold (build (RSHIFT_EXPR, type,
3196 convert (type, high_positive),
3197 convert (type, integer_one_node)));
3199 /* If the low bound is specified, "and" the range with the
3200 range for which the original unsigned value will be
3201 positive. */
3202 if (low != 0)
3204 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3205 1, n_low, n_high,
3206 1, convert (type, integer_zero_node),
3207 high_positive))
3208 break;
3210 in_p = (n_in_p == in_p);
3212 else
3214 /* Otherwise, "or" the range with the range of the input
3215 that will be interpreted as negative. */
3216 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3217 0, n_low, n_high,
3218 1, convert (type, integer_zero_node),
3219 high_positive))
3220 break;
3222 in_p = (in_p != n_in_p);
3226 exp = arg0;
3227 low = n_low, high = n_high;
3228 continue;
3230 default:
3231 break;
3234 break;
3237 /* If EXP is a constant, we can evaluate whether this is true or false. */
3238 if (TREE_CODE (exp) == INTEGER_CST)
3240 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3241 exp, 0, low, 0))
3242 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3243 exp, 1, high, 1)));
3244 low = high = 0;
3245 exp = 0;
3248 *pin_p = in_p, *plow = low, *phigh = high;
3249 return exp;
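/* [Editor's sketch -- not part of fold-const.c.  Two identities
   make_range relies on, checked over a window of values: -X lies in
   [a, b] exactly when X lies in [-b, -a], and ~X equals -X - 1 on
   two's complement targets.] */
#include <assert.h>

int
main (void)
{
  int x, a = -3, b = 7;
  for (x = -20; x <= 20; x++)
    {
      assert (((-x >= a) && (-x <= b)) == ((x >= -b) && (x <= -a)));
      assert (~x == -x - 1);
    }
  return 0;
}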
3252 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3253 type, TYPE, return an expression to test if EXP is in (or out of, depending
3254 on IN_P) the range. */
3256 static tree
3257 build_range_check (type, exp, in_p, low, high)
3258 tree type;
3259 tree exp;
3260 int in_p;
3261 tree low, high;
3263 tree etype = TREE_TYPE (exp);
3264 tree value;
3266 if (! in_p
3267 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3268 return invert_truthvalue (value);
3270 if (low == 0 && high == 0)
3271 return convert (type, integer_one_node);
3273 if (low == 0)
3274 return fold (build (LE_EXPR, type, exp, high));
3276 if (high == 0)
3277 return fold (build (GE_EXPR, type, exp, low));
3279 if (operand_equal_p (low, high, 0))
3280 return fold (build (EQ_EXPR, type, exp, low));
3282 if (integer_zerop (low))
3284 if (! TREE_UNSIGNED (etype))
3286 etype = (*lang_hooks.types.unsigned_type) (etype);
3287 high = convert (etype, high);
3288 exp = convert (etype, exp);
3290 return build_range_check (type, exp, 1, 0, high);
3293 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3294 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3296 unsigned HOST_WIDE_INT lo;
3297 HOST_WIDE_INT hi;
3298 int prec;
3300 prec = TYPE_PRECISION (etype);
3301 if (prec <= HOST_BITS_PER_WIDE_INT)
3303 hi = 0;
3304 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3306 else
3308 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3309 lo = (unsigned HOST_WIDE_INT) -1;
3312 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3314 if (TREE_UNSIGNED (etype))
3316 etype = (*lang_hooks.types.signed_type) (etype);
3317 exp = convert (etype, exp);
3319 return fold (build (GT_EXPR, type, exp,
3320 convert (etype, integer_zero_node)));
3324 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3325 && ! TREE_OVERFLOW (value))
3326 return build_range_check (type,
3327 fold (build (MINUS_EXPR, etype, exp, low)),
3328 1, convert (etype, integer_zero_node), value);
3330 return 0;
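/* [Editor's sketch -- not part of fold-const.c.  The two rewrites
   above: a bounded test becomes a single unsigned comparison after
   subtracting the low bound, and (c >= 1 && c <= 127) becomes a signed
   compare against zero.  The second assert assumes the usual two's
   complement wraparound when converting an out-of-range value to
   signed char.] */
#include <assert.h>

int
main (void)
{
  int c;
  for (c = 0; c <= 255; c++)
    {
      assert (((c >= 100) && (c <= 200)) == ((unsigned) (c - 100) <= 100));
      assert (((c >= 1) && (c <= 127)) == ((signed char) c > 0));
    }
  return 0;
}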
3333 /* Given two ranges, see if we can merge them into one. Return 1 if we
3334 can, 0 if we can't. Set the output range into the specified parameters. */
3336 static int
3337 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3338 int *pin_p;
3339 tree *plow, *phigh;
3340 int in0_p, in1_p;
3341 tree low0, high0, low1, high1;
3343 int no_overlap;
3344 int subset;
3345 int temp;
3346 tree tem;
3347 int in_p;
3348 tree low, high;
3349 int lowequal = ((low0 == 0 && low1 == 0)
3350 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3351 low0, 0, low1, 0)));
3352 int highequal = ((high0 == 0 && high1 == 0)
3353 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3354 high0, 1, high1, 1)));
3356 /* Make range 0 be the range that starts first, or ends last if they
3357 start at the same value. Swap them if it isn't. */
3358 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3359 low0, 0, low1, 0))
3360 || (lowequal
3361 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3362 high1, 1, high0, 1))))
3364 temp = in0_p, in0_p = in1_p, in1_p = temp;
3365 tem = low0, low0 = low1, low1 = tem;
3366 tem = high0, high0 = high1, high1 = tem;
3369 /* Now flag two cases, whether the ranges are disjoint or whether the
3370 second range is totally subsumed in the first. Note that the tests
3371 below are simplified by the ones above. */
3372 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3373 high0, 1, low1, 0));
3374 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3375 high1, 1, high0, 1));
3377 /* We now have four cases, depending on whether we are including or
3378 excluding the two ranges. */
3379 if (in0_p && in1_p)
3381 /* If they don't overlap, the result is false. If the second range
3382 is a subset it is the result. Otherwise, the range is from the start
3383 of the second to the end of the first. */
3384 if (no_overlap)
3385 in_p = 0, low = high = 0;
3386 else if (subset)
3387 in_p = 1, low = low1, high = high1;
3388 else
3389 in_p = 1, low = low1, high = high0;
3392 else if (in0_p && ! in1_p)
3394 /* If they don't overlap, the result is the first range. If they are
3395 equal, the result is false. If the second range is a subset of the
3396 first, and the ranges begin at the same place, we go from just after
3397 the end of the first range to the end of the second. If the second
3398 range is not a subset of the first, or if it is a subset and both
3399 ranges end at the same place, the range starts at the start of the
3400 first range and ends just before the second range.
3401 Otherwise, we can't describe this as a single range. */
3402 if (no_overlap)
3403 in_p = 1, low = low0, high = high0;
3404 else if (lowequal && highequal)
3405 in_p = 0, low = high = 0;
3406 else if (subset && lowequal)
3408 in_p = 1, high = high0;
3409 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3410 integer_one_node, 0);
3412 else if (! subset || highequal)
3414 in_p = 1, low = low0;
3415 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3416 integer_one_node, 0);
3418 else
3419 return 0;
3422 else if (! in0_p && in1_p)
3424 /* If they don't overlap, the result is the second range. If the second
3425 is a subset of the first, the result is false. Otherwise,
3426 the range starts just after the first range and ends at the
3427 end of the second. */
3428 if (no_overlap)
3429 in_p = 1, low = low1, high = high1;
3430 else if (subset || highequal)
3431 in_p = 0, low = high = 0;
3432 else
3434 in_p = 1, high = high1;
3435 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3436 integer_one_node, 0);
3440 else
3442 /* The case where we are excluding both ranges. Here the complex case
3443 is if they don't overlap. In that case, the only time we have a
3444 range is if they are adjacent. If the second is a subset of the
3445 first, the result is the first. Otherwise, the range to exclude
3446 starts at the beginning of the first range and ends at the end of the
3447 second. */
3448 if (no_overlap)
3450 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3451 range_binop (PLUS_EXPR, NULL_TREE,
3452 high0, 1,
3453 integer_one_node, 1),
3454 1, low1, 0)))
3455 in_p = 0, low = low0, high = high1;
3456 else
3457 return 0;
3459 else if (subset)
3460 in_p = 0, low = low0, high = high0;
3461 else
3462 in_p = 0, low = low0, high = high1;
3465 *pin_p = in_p, *plow = low, *phigh = high;
3466 return 1;
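/* [Editor's sketch -- not part of fold-const.c.  One of the merge
   cases above: excluding [2, 5] and also excluding the adjacent
   [6, 9] is the same as excluding the single range [2, 9].] */
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -5; x <= 15; x++)
    {
      int two_tests = !(x >= 2 && x <= 5) && !(x >= 6 && x <= 9);
      int one_test = !(x >= 2 && x <= 9);
      assert (two_tests == one_test);
    }
  return 0;
}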
3469 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3470 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3471 #endif
3473 /* EXP is some logical combination of boolean tests. See if we can
3474 merge it into some range test. Return the new tree if so. */
3476 static tree
3477 fold_range_test (exp)
3478 tree exp;
3480 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3481 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3482 int in0_p, in1_p, in_p;
3483 tree low0, low1, low, high0, high1, high;
3484 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3485 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3486 tree tem;
3488 /* If this is an OR operation, invert both sides; we will invert
3489 again at the end. */
3490 if (or_op)
3491 in0_p = ! in0_p, in1_p = ! in1_p;
3493 /* If both expressions are the same, if we can merge the ranges, and we
3494 can build the range test, return it or it inverted. If one of the
3495 ranges is always true or always false, consider it to be the same
3496 expression as the other. */
3497 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3498 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3499 in1_p, low1, high1)
3500 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3501 lhs != 0 ? lhs
3502 : rhs != 0 ? rhs : integer_zero_node,
3503 in_p, low, high))))
3504 return or_op ? invert_truthvalue (tem) : tem;
3506 /* On machines where the branch cost is expensive, if this is a
3507 short-circuited branch and the underlying object on both sides
3508 is the same, make a non-short-circuit operation. */
3509 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3510 && lhs != 0 && rhs != 0
3511 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3512 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3513 && operand_equal_p (lhs, rhs, 0))
3515 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3516 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3517 which cases we can't do this. */
3518 if (simple_operand_p (lhs))
3519 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3520 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3521 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3522 TREE_OPERAND (exp, 1));
3524 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3525 && ! CONTAINS_PLACEHOLDER_P (lhs))
3527 tree common = save_expr (lhs);
3529 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3530 or_op ? ! in0_p : in0_p,
3531 low0, high0))
3532 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3533 or_op ? ! in1_p : in1_p,
3534 low1, high1))))
3535 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3536 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3537 TREE_TYPE (exp), lhs, rhs);
3541 return 0;
3544 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3545 bit value. Arrange things so the extra bits will be set to zero if and
3546 only if C is sign-extended to its full width. If MASK is nonzero,
3547 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3549 static tree
3550 unextend (c, p, unsignedp, mask)
3551 tree c;
3552 int p;
3553 int unsignedp;
3554 tree mask;
3556 tree type = TREE_TYPE (c);
3557 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3558 tree temp;
3560 if (p == modesize || unsignedp)
3561 return c;
3563 /* We work by getting just the sign bit into the low-order bit, then
3564 into the high-order bit, then sign-extend. We then XOR that value
3565 with C. */
3566 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3567 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3569 /* We must use a signed type in order to get an arithmetic right shift.
3570 However, we must also avoid introducing accidental overflows, so that
3571 a subsequent call to integer_zerop will work. Hence we must
3572 do the type conversion here. At this point, the constant is either
3573 zero or one, and the conversion to a signed type can never overflow.
3574 We could get an overflow if this conversion is done anywhere else. */
3575 if (TREE_UNSIGNED (type))
3576 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3578 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3579 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3580 if (mask != 0)
3581 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3582 /* If necessary, convert the type back to match the type of C. */
3583 if (TREE_UNSIGNED (type))
3584 temp = convert (type, temp);
3586 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
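/* [Editor's sketch -- not part of fold-const.c.  The unextend trick
   for an 8-bit field inside a 32-bit word: XORing C with the
   sign-extension pattern of bit 7 clears the upper bits exactly when C
   was the sign-extension of its low 8 bits.  Assumes the usual two's
   complement representation and arithmetic right shift of a negative
   int32_t.] */
#include <assert.h>
#include <stdint.h>

static uint32_t
unextend8 (uint32_t c)
{
  uint32_t temp = (c >> 7) & 1;                       /* field's sign bit */
  temp = (uint32_t) ((int32_t) (temp << 31) >> 23);   /* ones in bits 8..31 */
  return c ^ temp;
}

int
main (void)
{
  assert (unextend8 (0xffffff80u) == 0x80u);  /* sign-extended -128 */
  assert (unextend8 (0x0000007fu) == 0x7fu);  /* non-negative: unchanged */
  return 0;
}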
3589 /* Find ways of folding logical expressions of LHS and RHS:
3590 Try to merge two comparisons to the same innermost item.
3591 Look for range tests like "ch >= '0' && ch <= '9'".
3592 Look for combinations of simple terms on machines with expensive branches
3593 and evaluate the RHS unconditionally.
3595 For example, if we have p->a == 2 && p->b == 4 and we can make an
3596 object large enough to span both A and B, we can do this with a comparison
3597 against the object ANDed with a mask.
3599 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3600 operations to do this with one comparison.
3602 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3603 function and the one above.
3605 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3606 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3608 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3609 two operands.
3611 We return the simplified tree or 0 if no optimization is possible. */
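/* [Editor's sketch -- not part of fold-const.c.  Two of the rewrites
   the function below performs when branches are expensive, checked
   over small operand values: a bitwise OR is zero exactly when both
   operands are zero.] */
#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = -2; a <= 2; a++)
    for (b = -2; b <= 2; b++)
      {
        assert (((a != 0) || (b != 0)) == ((a | b) != 0));
        assert (((a == 0) && (b == 0)) == ((a | b) == 0));
      }
  return 0;
}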
3613 static tree
3614 fold_truthop (code, truth_type, lhs, rhs)
3615 enum tree_code code;
3616 tree truth_type, lhs, rhs;
3618 /* If this is the "or" of two comparisons, we can do something if
3619 the comparisons are NE_EXPR. If this is the "and", we can do something
3620 if the comparisons are EQ_EXPR. I.e.,
3621 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3623 WANTED_CODE is this operation code. For single bit fields, we can
3624 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3625 comparison for one-bit fields. */
3627 enum tree_code wanted_code;
3628 enum tree_code lcode, rcode;
3629 tree ll_arg, lr_arg, rl_arg, rr_arg;
3630 tree ll_inner, lr_inner, rl_inner, rr_inner;
3631 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3632 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3633 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3634 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3635 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3636 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3637 enum machine_mode lnmode, rnmode;
3638 tree ll_mask, lr_mask, rl_mask, rr_mask;
3639 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3640 tree l_const, r_const;
3641 tree lntype, rntype, result;
3642 int first_bit, end_bit;
3643 int volatilep;
3645 /* Start by getting the comparison codes. Fail if anything is volatile.
3646 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3647 it were surrounded with a NE_EXPR. */
3649 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3650 return 0;
3652 lcode = TREE_CODE (lhs);
3653 rcode = TREE_CODE (rhs);
3655 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3656 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3658 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3659 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3661 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3662 return 0;
3664 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3665 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3667 ll_arg = TREE_OPERAND (lhs, 0);
3668 lr_arg = TREE_OPERAND (lhs, 1);
3669 rl_arg = TREE_OPERAND (rhs, 0);
3670 rr_arg = TREE_OPERAND (rhs, 1);
3672 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3673 if (simple_operand_p (ll_arg)
3674 && simple_operand_p (lr_arg)
3675 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3677 int compcode;
3679 if (operand_equal_p (ll_arg, rl_arg, 0)
3680 && operand_equal_p (lr_arg, rr_arg, 0))
3682 int lcompcode, rcompcode;
3684 lcompcode = comparison_to_compcode (lcode);
3685 rcompcode = comparison_to_compcode (rcode);
3686 compcode = (code == TRUTH_AND_EXPR)
3687 ? lcompcode & rcompcode
3688 : lcompcode | rcompcode;
3690 else if (operand_equal_p (ll_arg, rr_arg, 0)
3691 && operand_equal_p (lr_arg, rl_arg, 0))
3693 int lcompcode, rcompcode;
3695 rcode = swap_tree_comparison (rcode);
3696 lcompcode = comparison_to_compcode (lcode);
3697 rcompcode = comparison_to_compcode (rcode);
3698 compcode = (code == TRUTH_AND_EXPR)
3699 ? lcompcode & rcompcode
3700 : lcompcode | rcompcode;
3702 else
3703 compcode = -1;
3705 if (compcode == COMPCODE_TRUE)
3706 return convert (truth_type, integer_one_node);
3707 else if (compcode == COMPCODE_FALSE)
3708 return convert (truth_type, integer_zero_node);
3709 else if (compcode != -1)
3710 return build (compcode_to_comparison (compcode),
3711 truth_type, ll_arg, lr_arg);
3714 /* If the RHS can be evaluated unconditionally and its operands are
3715 simple, it wins to evaluate the RHS unconditionally on machines
3716 with expensive branches. In this case, this isn't a comparison
3717 that can be merged. Avoid doing this if the RHS is a floating-point
3718 comparison since those can trap. */
3720 if (BRANCH_COST >= 2
3721 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3722 && simple_operand_p (rl_arg)
3723 && simple_operand_p (rr_arg))
3725 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3726 if (code == TRUTH_OR_EXPR
3727 && lcode == NE_EXPR && integer_zerop (lr_arg)
3728 && rcode == NE_EXPR && integer_zerop (rr_arg)
3729 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3730 return build (NE_EXPR, truth_type,
3731 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3732 ll_arg, rl_arg),
3733 integer_zero_node);
3735 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3736 if (code == TRUTH_AND_EXPR
3737 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3738 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3739 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3740 return build (EQ_EXPR, truth_type,
3741 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3742 ll_arg, rl_arg),
3743 integer_zero_node);
3745 return build (code, truth_type, lhs, rhs);
3748 /* See if the comparisons can be merged. Then get all the parameters for
3749 each side. */
3751 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3752 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3753 return 0;
3755 volatilep = 0;
3756 ll_inner = decode_field_reference (ll_arg,
3757 &ll_bitsize, &ll_bitpos, &ll_mode,
3758 &ll_unsignedp, &volatilep, &ll_mask,
3759 &ll_and_mask);
3760 lr_inner = decode_field_reference (lr_arg,
3761 &lr_bitsize, &lr_bitpos, &lr_mode,
3762 &lr_unsignedp, &volatilep, &lr_mask,
3763 &lr_and_mask);
3764 rl_inner = decode_field_reference (rl_arg,
3765 &rl_bitsize, &rl_bitpos, &rl_mode,
3766 &rl_unsignedp, &volatilep, &rl_mask,
3767 &rl_and_mask);
3768 rr_inner = decode_field_reference (rr_arg,
3769 &rr_bitsize, &rr_bitpos, &rr_mode,
3770 &rr_unsignedp, &volatilep, &rr_mask,
3771 &rr_and_mask);
3773 /* The inner operation on the lhs of each comparison must be the
3774 same if we are to be able to do anything.
3775 Then see if we have constants. If not, the same must be true for
3776 the rhs's. */
3777 if (volatilep || ll_inner == 0 || rl_inner == 0
3778 || ! operand_equal_p (ll_inner, rl_inner, 0))
3779 return 0;
3781 if (TREE_CODE (lr_arg) == INTEGER_CST
3782 && TREE_CODE (rr_arg) == INTEGER_CST)
3783 l_const = lr_arg, r_const = rr_arg;
3784 else if (lr_inner == 0 || rr_inner == 0
3785 || ! operand_equal_p (lr_inner, rr_inner, 0))
3786 return 0;
3787 else
3788 l_const = r_const = 0;
3790 /* If either comparison code is not correct for our logical operation,
3791 fail. However, we can convert a one-bit comparison against zero into
3792 the opposite comparison against that bit being set in the field. */
3794 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3795 if (lcode != wanted_code)
3797 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3799 /* Make the left operand unsigned, since we are only interested
3800 in the value of one bit. Otherwise we are doing the wrong
3801 thing below. */
3802 ll_unsignedp = 1;
3803 l_const = ll_mask;
3805 else
3806 return 0;
3809 /* This is analogous to the code for l_const above. */
3810 if (rcode != wanted_code)
3812 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3814 rl_unsignedp = 1;
3815 r_const = rl_mask;
3817 else
3818 return 0;
3821 /* After this point all optimizations will generate bit-field
3822 references, which we might not want. */
3823 if (! (*lang_hooks.can_use_bit_fields_p) ())
3824 return 0;
3826 /* See if we can find a mode that contains both fields being compared on
3827 the left. If we can't, fail. Otherwise, update all constants and masks
3828 to be relative to a field of that size. */
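/* For example, two one-bit flags that live in the same byte of a
structure can both be tested with a single byte-sized load. */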
3829 first_bit = MIN (ll_bitpos, rl_bitpos);
3830 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3831 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3832 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3833 volatilep);
3834 if (lnmode == VOIDmode)
3835 return 0;
3837 lnbitsize = GET_MODE_BITSIZE (lnmode);
3838 lnbitpos = first_bit & ~ (lnbitsize - 1);
3839 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3840 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3842 if (BYTES_BIG_ENDIAN)
3844 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3845 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3848 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3849 size_int (xll_bitpos), 0);
3850 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3851 size_int (xrl_bitpos), 0);
3853 if (l_const)
3855 l_const = convert (lntype, l_const);
3856 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3857 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
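/* If L_CONST now has bits set outside LL_MASK, the field can never
compare equal to it; e.g. a three-bit field tested against 9. */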
3858 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3859 fold (build1 (BIT_NOT_EXPR,
3860 lntype, ll_mask)),
3861 0)))
3863 warning ("comparison is always %d", wanted_code == NE_EXPR);
3865 return convert (truth_type,
3866 wanted_code == NE_EXPR
3867 ? integer_one_node : integer_zero_node);
3870 if (r_const)
3872 r_const = convert (lntype, r_const);
3873 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3874 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3875 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3876 fold (build1 (BIT_NOT_EXPR,
3877 lntype, rl_mask)),
3878 0)))
3880 warning ("comparison is always %d", wanted_code == NE_EXPR);
3882 return convert (truth_type,
3883 wanted_code == NE_EXPR
3884 ? integer_one_node : integer_zero_node);
3888 /* If the right sides are not constant, do the same for them. Also,
3889 disallow this optimization if a size or signedness mismatch occurs
3890 between the left and right sides. */
3891 if (l_const == 0)
3893 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3894 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3895 /* Make sure the two fields on the right
3896 correspond to the left without being swapped. */
3897 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3898 return 0;
3900 first_bit = MIN (lr_bitpos, rr_bitpos);
3901 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3902 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3903 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3904 volatilep);
3905 if (rnmode == VOIDmode)
3906 return 0;
3908 rnbitsize = GET_MODE_BITSIZE (rnmode);
3909 rnbitpos = first_bit & ~ (rnbitsize - 1);
3910 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3911 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3913 if (BYTES_BIG_ENDIAN)
3915 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3916 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3919 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3920 size_int (xlr_bitpos), 0);
3921 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3922 size_int (xrr_bitpos), 0);
3924 /* Make a mask that corresponds to both fields being compared.
3925 Do this for both items being compared. If the operands are the
3926 same size and the bits being compared are in the same position
3927 then we can do this by masking both and comparing the masked
3928 results. */
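/* For example, s1.f == s2.f && s1.g == s2.g, where F and G occupy the
same positions in both structures, can become a single comparison
(word1 & mask) == (word2 & mask). */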
3929 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3930 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3931 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3933 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3934 ll_unsignedp || rl_unsignedp);
3935 if (! all_ones_mask_p (ll_mask, lnbitsize))
3936 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3938 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3939 lr_unsignedp || rr_unsignedp);
3940 if (! all_ones_mask_p (lr_mask, rnbitsize))
3941 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3943 return build (wanted_code, truth_type, lhs, rhs);
3946 /* There is still another way we can do something: If both pairs of
3947 fields being compared are adjacent, we may be able to make a wider
3948 field containing them both.
3950 Note that we still must mask the lhs/rhs expressions. Furthermore,
3951 the mask must be shifted to account for the shift done by
3952 make_bit_field_ref. */
3953 if ((ll_bitsize + ll_bitpos == rl_bitpos
3954 && lr_bitsize + lr_bitpos == rr_bitpos)
3955 || (ll_bitpos == rl_bitpos + rl_bitsize
3956 && lr_bitpos == rr_bitpos + rr_bitsize))
3958 tree type;
3960 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3961 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3962 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3963 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3965 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3966 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3967 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3968 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3970 /* Convert to the smaller type before masking out unwanted bits. */
3971 type = lntype;
3972 if (lntype != rntype)
3974 if (lnbitsize > rnbitsize)
3976 lhs = convert (rntype, lhs);
3977 ll_mask = convert (rntype, ll_mask);
3978 type = rntype;
3980 else if (lnbitsize < rnbitsize)
3982 rhs = convert (lntype, rhs);
3983 lr_mask = convert (lntype, lr_mask);
3984 type = lntype;
3988 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3989 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3991 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3992 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3994 return build (wanted_code, truth_type, lhs, rhs);
3997 return 0;
4000 /* Handle the case of comparisons with constants. If there is something in
4001 common between the masks, those bits of the constants must be the same.
4002 If not, the condition is always false. Test for this to avoid generating
4003 incorrect code below. */
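/* For example, x.f == 1 && x.f == 2 reaches this point with fully
overlapping masks but different constant bits, so the whole test
folds to 0 and the `and' warning below is given. */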
4004 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4005 if (! integer_zerop (result)
4006 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4007 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4009 if (wanted_code == NE_EXPR)
4011 warning ("`or' of unmatched not-equal tests is always 1");
4012 return convert (truth_type, integer_one_node);
4014 else
4016 warning ("`and' of mutually exclusive equal-tests is always 0");
4017 return convert (truth_type, integer_zero_node);
4021 /* Construct the expression we will return. First get the component
4022 reference we will make. Unless the mask is all ones the width of
4023 that field, perform the mask operation. Then compare with the
4024 merged constant. */
4025 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4026 ll_unsignedp || rl_unsignedp);
4028 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4029 if (! all_ones_mask_p (ll_mask, lnbitsize))
4030 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4032 return build (wanted_code, truth_type, result,
4033 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4036 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4037 constant. */
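/* For example, MIN (X, 4) > 4 folds to 0 and MAX (X, 4) > 3 folds
to 1, independently of X. */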
4039 static tree
4040 optimize_minmax_comparison (t)
4041 tree t;
4043 tree type = TREE_TYPE (t);
4044 tree arg0 = TREE_OPERAND (t, 0);
4045 enum tree_code op_code;
4046 tree comp_const = TREE_OPERAND (t, 1);
4047 tree minmax_const;
4048 int consts_equal, consts_lt;
4049 tree inner;
4051 STRIP_SIGN_NOPS (arg0);
4053 op_code = TREE_CODE (arg0);
4054 minmax_const = TREE_OPERAND (arg0, 1);
4055 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4056 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4057 inner = TREE_OPERAND (arg0, 0);
4059 /* If something does not permit us to optimize, return the original tree. */
4060 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4061 || TREE_CODE (comp_const) != INTEGER_CST
4062 || TREE_CONSTANT_OVERFLOW (comp_const)
4063 || TREE_CODE (minmax_const) != INTEGER_CST
4064 || TREE_CONSTANT_OVERFLOW (minmax_const))
4065 return t;
4067 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4068 and GT_EXPR, doing the rest with recursive calls using logical
4069 simplifications. */
4070 switch (TREE_CODE (t))
4072 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4073 return
4074 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4076 case GE_EXPR:
4077 return
4078 fold (build (TRUTH_ORIF_EXPR, type,
4079 optimize_minmax_comparison
4080 (build (EQ_EXPR, type, arg0, comp_const)),
4081 optimize_minmax_comparison
4082 (build (GT_EXPR, type, arg0, comp_const))));
4084 case EQ_EXPR:
4085 if (op_code == MAX_EXPR && consts_equal)
4086 /* MAX (X, 0) == 0 -> X <= 0 */
4087 return fold (build (LE_EXPR, type, inner, comp_const));
4089 else if (op_code == MAX_EXPR && consts_lt)
4090 /* MAX (X, 0) == 5 -> X == 5 */
4091 return fold (build (EQ_EXPR, type, inner, comp_const));
4093 else if (op_code == MAX_EXPR)
4094 /* MAX (X, 0) == -1 -> false */
4095 return omit_one_operand (type, integer_zero_node, inner);
4097 else if (consts_equal)
4098 /* MIN (X, 0) == 0 -> X >= 0 */
4099 return fold (build (GE_EXPR, type, inner, comp_const));
4101 else if (consts_lt)
4102 /* MIN (X, 0) == 5 -> false */
4103 return omit_one_operand (type, integer_zero_node, inner);
4105 else
4106 /* MIN (X, 0) == -1 -> X == -1 */
4107 return fold (build (EQ_EXPR, type, inner, comp_const));
4109 case GT_EXPR:
4110 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4111 /* MAX (X, 0) > 0 -> X > 0
4112 MAX (X, 0) > 5 -> X > 5 */
4113 return fold (build (GT_EXPR, type, inner, comp_const));
4115 else if (op_code == MAX_EXPR)
4116 /* MAX (X, 0) > -1 -> true */
4117 return omit_one_operand (type, integer_one_node, inner);
4119 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4120 /* MIN (X, 0) > 0 -> false
4121 MIN (X, 0) > 5 -> false */
4122 return omit_one_operand (type, integer_zero_node, inner);
4124 else
4125 /* MIN (X, 0) > -1 -> X > -1 */
4126 return fold (build (GT_EXPR, type, inner, comp_const));
4128 default:
4129 return t;
4133 /* T is an integer expression that is being multiplied or divided by a
4134 constant C, or reduced modulo C (CODE says which operation and what kind
4135 of divide or modulus). See if we can eliminate that operation by folding it with
4136 other operations already in T. WIDE_TYPE, if non-null, is a type that
4137 should be used for the computation if wider than our type.
4139 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4140 (X * 2) + (Y * 4). We must, however, be assured that either the original
4141 expression would not overflow or that overflow is undefined for the type
4142 in the language in question.
4144 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4145 the machine has a multiply-accumulate insn or that this is part of an
4146 addressing calculation.
4148 If we return a non-null expression, it is an equivalent form of the
4149 original computation, but need not be in the original type. */
4151 static tree
4152 extract_muldiv (t, c, code, wide_type)
4153 tree t;
4154 tree c;
4155 enum tree_code code;
4156 tree wide_type;
4158 /* To avoid exponential search depth, refuse to allow recursion past
4159 three levels. Beyond that (1) it's highly unlikely that we'll find
4160 something interesting and (2) we've probably processed it before
4161 when we built the inner expression. */
4163 static int depth;
4164 tree ret;
4166 if (depth > 3)
4167 return NULL;
4169 depth++;
4170 ret = extract_muldiv_1 (t, c, code, wide_type);
4171 depth--;
4173 return ret;
4176 static tree
4177 extract_muldiv_1 (t, c, code, wide_type)
4178 tree t;
4179 tree c;
4180 enum tree_code code;
4181 tree wide_type;
4183 tree type = TREE_TYPE (t);
4184 enum tree_code tcode = TREE_CODE (t);
4185 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4186 > GET_MODE_SIZE (TYPE_MODE (type)))
4187 ? wide_type : type);
4188 tree t1, t2;
4189 int same_p = tcode == code;
4190 tree op0 = NULL_TREE, op1 = NULL_TREE;
4192 /* Don't deal with constants of zero here; they confuse the code below. */
4193 if (integer_zerop (c))
4194 return NULL_TREE;
4196 if (TREE_CODE_CLASS (tcode) == '1')
4197 op0 = TREE_OPERAND (t, 0);
4199 if (TREE_CODE_CLASS (tcode) == '2')
4200 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4202 /* Note that we need not handle conditional operations here since fold
4203 already handles those cases. So just do arithmetic here. */
4204 switch (tcode)
4206 case INTEGER_CST:
4207 /* For a constant, we can always simplify if we are a multiply
4208 or (for divide and modulus) if it is a multiple of our constant. */
4209 if (code == MULT_EXPR
4210 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4211 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4212 break;
4214 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4215 /* If op0 is an expression ... */
4216 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4217 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4218 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4219 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4220 /* ... and is unsigned, and its type is smaller than ctype,
4221 then we cannot pass through this widening. */
4222 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4223 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4224 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4225 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4226 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4227 /* ... or its type is larger than ctype,
4228 then we cannot pass through this truncation. */
4229 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4230 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4231 /* ... or signedness changes for division or modulus,
4232 then we cannot pass through this conversion. */
4233 || (code != MULT_EXPR
4234 && (TREE_UNSIGNED (ctype)
4235 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4236 break;
4238 /* Pass the constant down and see if we can make a simplification. If
4239 we can, replace this expression with the inner simplification for
4240 possible later conversion to our or some other type. */
4241 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4242 && TREE_CODE (t2) == INTEGER_CST
4243 && ! TREE_CONSTANT_OVERFLOW (t2)
4244 && (0 != (t1 = extract_muldiv (op0, t2, code,
4245 code == MULT_EXPR
4246 ? ctype : NULL_TREE))))
4247 return t1;
4248 break;
4250 case NEGATE_EXPR: case ABS_EXPR:
4251 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4252 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4253 break;
4255 case MIN_EXPR: case MAX_EXPR:
4256 /* If widening the type changes the signedness, then we can't perform
4257 this optimization as that changes the result. */
4258 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4259 break;
4261 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4262 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4263 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4265 if (tree_int_cst_sgn (c) < 0)
4266 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4268 return fold (build (tcode, ctype, convert (ctype, t1),
4269 convert (ctype, t2)));
4271 break;
4273 case WITH_RECORD_EXPR:
4274 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4275 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4276 TREE_OPERAND (t, 1));
4277 break;
4279 case SAVE_EXPR:
4280 /* If this has not been evaluated and the operand has no side effects,
4281 we can see if we can do something inside it and make a new one.
4282 Note that this test is overly conservative since we can do this
4283 if the only reason it had side effects is that it was another
4284 similar SAVE_EXPR, but that isn't worth bothering with. */
4285 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4286 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4287 wide_type)))
4289 t1 = save_expr (t1);
4290 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
4291 SAVE_EXPR_PERSISTENT_P (t1) = 1;
4292 if (is_pending_size (t))
4293 put_pending_size (t1);
4294 return t1;
4296 break;
4298 case LSHIFT_EXPR: case RSHIFT_EXPR:
4299 /* If the second operand is constant, this is a multiplication
4300 or floor division by a power of two, so we can treat it that
4301 way unless the multiplier or divisor overflows. */
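/* For example, (X << 3) is handled as X * 8, and (X >> 3) as the
floor division X / 8. */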
4302 if (TREE_CODE (op1) == INTEGER_CST
4303 /* const_binop may not detect overflow correctly,
4304 so check for it explicitly here. */
4305 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4306 && TREE_INT_CST_HIGH (op1) == 0
4307 && 0 != (t1 = convert (ctype,
4308 const_binop (LSHIFT_EXPR, size_one_node,
4309 op1, 0)))
4310 && ! TREE_OVERFLOW (t1))
4311 return extract_muldiv (build (tcode == LSHIFT_EXPR
4312 ? MULT_EXPR : FLOOR_DIV_EXPR,
4313 ctype, convert (ctype, op0), t1),
4314 c, code, wide_type);
4315 break;
4317 case PLUS_EXPR: case MINUS_EXPR:
4318 /* See if we can eliminate the operation on both sides. If we can, we
4319 can return a new PLUS or MINUS. If we can't, the only remaining
4320 cases where we can do anything are if the second operand is a
4321 constant. */
4322 t1 = extract_muldiv (op0, c, code, wide_type);
4323 t2 = extract_muldiv (op1, c, code, wide_type);
4324 if (t1 != 0 && t2 != 0
4325 && (code == MULT_EXPR
4326 /* If not multiplication, we can only do this if both operands
4327 are divisible by c. */
4328 || (multiple_of_p (ctype, op0, c)
4329 && multiple_of_p (ctype, op1, c))))
4330 return fold (build (tcode, ctype, convert (ctype, t1),
4331 convert (ctype, t2)));
4333 /* If this was a subtraction, negate OP1 and set it to be an addition.
4334 This simplifies the logic below. */
4335 if (tcode == MINUS_EXPR)
4336 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4338 if (TREE_CODE (op1) != INTEGER_CST)
4339 break;
4341 /* If either OP1 or C are negative, this optimization is not safe for
4342 some of the division and remainder types while for others we need
4343 to change the code. */
4344 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4346 if (code == CEIL_DIV_EXPR)
4347 code = FLOOR_DIV_EXPR;
4348 else if (code == FLOOR_DIV_EXPR)
4349 code = CEIL_DIV_EXPR;
4350 else if (code != MULT_EXPR
4351 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4352 break;
4355 /* If it's a multiply or a division/modulus operation of a multiple
4356 of our constant, do the operation and verify it doesn't overflow. */
4357 if (code == MULT_EXPR
4358 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4360 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4361 if (op1 == 0 || TREE_OVERFLOW (op1))
4362 break;
4364 else
4365 break;
4367 /* If we have an unsigned type that is not a sizetype, we cannot widen
4368 the operation since it will change the result if the original
4369 computation overflowed. */
4370 if (TREE_UNSIGNED (ctype)
4371 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4372 && ctype != type)
4373 break;
4375 /* If we were able to eliminate our operation from the first side,
4376 apply our operation to the second side and reform the PLUS. */
4377 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4378 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4380 /* The last case is if we are a multiply. In that case, we can
4381 apply the distributive law to commute the multiply and addition
4382 if the multiplication of the constants doesn't overflow. */
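/* E.g. for (X + 7) * 4, OP1 has already been scaled to 28 above, so
distributing here yields X * 4 + 28. */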
4383 if (code == MULT_EXPR)
4384 return fold (build (tcode, ctype, fold (build (code, ctype,
4385 convert (ctype, op0),
4386 convert (ctype, c))),
4387 op1));
4389 break;
4391 case MULT_EXPR:
4392 /* We have a special case here if we are doing something like
4393 (C * 8) % 4 since we know that's zero. */
4394 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4395 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4396 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4397 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4398 return omit_one_operand (type, integer_zero_node, op0);
4400 /* ... fall through ... */
4402 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4403 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4404 /* If we can extract our operation from the LHS, do so and return a
4405 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4406 do something only if the second operand is a constant. */
4407 if (same_p
4408 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4409 return fold (build (tcode, ctype, convert (ctype, t1),
4410 convert (ctype, op1)));
4411 else if (tcode == MULT_EXPR && code == MULT_EXPR
4412 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4413 return fold (build (tcode, ctype, convert (ctype, op0),
4414 convert (ctype, t1)));
4415 else if (TREE_CODE (op1) != INTEGER_CST)
4416 return 0;
4418 /* If these are the same operation types, we can associate them
4419 assuming no overflow. */
4420 if (tcode == code
4421 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4422 convert (ctype, c), 0))
4423 && ! TREE_OVERFLOW (t1))
4424 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4426 /* If these operations "cancel" each other, we have the main
4427 optimizations of this pass, which occur when either constant is a
4428 multiple of the other, in which case we replace this with either an
4429 operation of either CODE or TCODE.
4431 If we have an unsigned type that is not a sizetype, we cannot do
4432 this since it will change the result if the original computation
4433 overflowed. */
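/* For example, when the guards above hold, (X * 12) / 4 becomes X * 3
and (X * 4) / 12 becomes X / 3. */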
4434 if ((! TREE_UNSIGNED (ctype)
4435 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4436 && ! flag_wrapv
4437 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4438 || (tcode == MULT_EXPR
4439 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4440 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4442 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4443 return fold (build (tcode, ctype, convert (ctype, op0),
4444 convert (ctype,
4445 const_binop (TRUNC_DIV_EXPR,
4446 op1, c, 0))));
4447 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4448 return fold (build (code, ctype, convert (ctype, op0),
4449 convert (ctype,
4450 const_binop (TRUNC_DIV_EXPR,
4451 c, op1, 0))));
4453 break;
4455 default:
4456 break;
4459 return 0;
4462 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4463 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4464 that we may sometimes modify the tree. */
4466 static tree
4467 strip_compound_expr (t, s)
4468 tree t;
4469 tree s;
4471 enum tree_code code = TREE_CODE (t);
4473 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4474 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4475 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4476 return TREE_OPERAND (t, 1);
4478 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4479 don't bother handling any other types. */
4480 else if (code == COND_EXPR)
4482 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4483 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4484 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4486 else if (TREE_CODE_CLASS (code) == '1')
4487 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4488 else if (TREE_CODE_CLASS (code) == '<'
4489 || TREE_CODE_CLASS (code) == '2')
4491 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4492 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4495 return t;
4498 /* Return a node which has the indicated constant VALUE (either 0 or
4499 1), and is of the indicated TYPE. */
4501 static tree
4502 constant_boolean_node (value, type)
4503 int value;
4504 tree type;
4506 if (type == integer_type_node)
4507 return value ? integer_one_node : integer_zero_node;
4508 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4509 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4510 integer_zero_node);
4511 else
4513 tree t = build_int_2 (value, 0);
4515 TREE_TYPE (t) = type;
4516 return t;
4520 /* Utility function for the following routine, to see how complex a nesting of
4521 COND_EXPRs can be. EXPR is the expression and LIM is a count beyond which
4522 we don't care (to avoid spending too much time on complex expressions). */
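/* For example, count_cond (a ? b : (c ? d : e), 10) is 2: the outer
COND_EXPR plus the one in its false arm. */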
4524 static int
4525 count_cond (expr, lim)
4526 tree expr;
4527 int lim;
4529 int ctrue, cfalse;
4531 if (TREE_CODE (expr) != COND_EXPR)
4532 return 0;
4533 else if (lim <= 0)
4534 return 0;
4536 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4537 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4538 return MIN (lim, 1 + ctrue + cfalse);
4541 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4542 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4543 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4544 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4545 COND is the first argument to CODE; otherwise (as in the example
4546 given here), it is the second argument. TYPE is the type of the
4547 original expression. */
4549 static tree
4550 fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4551 enum tree_code code;
4552 tree type;
4553 tree cond;
4554 tree arg;
4555 int cond_first_p;
4557 tree test, true_value, false_value;
4558 tree lhs = NULL_TREE;
4559 tree rhs = NULL_TREE;
4560 /* In the end, we'll produce a COND_EXPR. Both arms of the
4561 conditional expression will be binary operations. The left-hand
4562 side of the expression to be executed if the condition is true
4563 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4564 of the expression to be executed if the condition is true will be
4565 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4566 but apply to the expression to be executed if the conditional is
4567 false. */
4568 tree *true_lhs;
4569 tree *true_rhs;
4570 tree *false_lhs;
4571 tree *false_rhs;
4572 /* These are the codes to use for the left-hand side and right-hand
4573 side of the COND_EXPR. Normally, they are the same as CODE. */
4574 enum tree_code lhs_code = code;
4575 enum tree_code rhs_code = code;
4576 /* And these are the types of the expressions. */
4577 tree lhs_type = type;
4578 tree rhs_type = type;
4579 int save = 0;
4581 if (cond_first_p)
4583 true_rhs = false_rhs = &arg;
4584 true_lhs = &true_value;
4585 false_lhs = &false_value;
4587 else
4589 true_lhs = false_lhs = &arg;
4590 true_rhs = &true_value;
4591 false_rhs = &false_value;
4594 if (TREE_CODE (cond) == COND_EXPR)
4596 test = TREE_OPERAND (cond, 0);
4597 true_value = TREE_OPERAND (cond, 1);
4598 false_value = TREE_OPERAND (cond, 2);
4599 /* If this operand throws an exception, then it does not make
4600 sense to try to perform a logical or arithmetic operation
4601 involving it. Instead of building `a + throw 3' for example,
4602 we simply build `a, throw 3'. */
4603 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4605 if (! cond_first_p)
4607 lhs_code = COMPOUND_EXPR;
4608 lhs_type = void_type_node;
4610 else
4611 lhs = true_value;
4613 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4615 if (! cond_first_p)
4617 rhs_code = COMPOUND_EXPR;
4618 rhs_type = void_type_node;
4620 else
4621 rhs = false_value;
4624 else
4626 tree testtype = TREE_TYPE (cond);
4627 test = cond;
4628 true_value = convert (testtype, integer_one_node);
4629 false_value = convert (testtype, integer_zero_node);
4632 /* If ARG is complex we want to make sure we only evaluate it once. Though
4633 this is only required if it is volatile, it might be more efficient even
4634 if it is not. However, if we succeed in folding one part to a constant,
4635 we do not need to make this SAVE_EXPR. Since we do this optimization
4636 primarily to see if we do end up with a constant and this SAVE_EXPR
4637 interferes with later optimizations, suppressing it when we can is
4638 important.
4640 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4641 do so. Don't try to see if the result is a constant if an arm is a
4642 COND_EXPR since we get exponential behavior in that case. */
4644 if (saved_expr_p (arg))
4645 save = 1;
4646 else if (lhs == 0 && rhs == 0
4647 && !TREE_CONSTANT (arg)
4648 && (*lang_hooks.decls.global_bindings_p) () == 0
4649 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4650 || TREE_SIDE_EFFECTS (arg)))
4652 if (TREE_CODE (true_value) != COND_EXPR)
4653 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4655 if (TREE_CODE (false_value) != COND_EXPR)
4656 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4658 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4659 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4661 arg = save_expr (arg);
4662 lhs = rhs = 0;
4663 save = 1;
4667 if (lhs == 0)
4668 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4669 if (rhs == 0)
4670 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4672 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4674 if (save)
4675 return build (COMPOUND_EXPR, type,
4676 convert (void_type_node, arg),
4677 strip_compound_expr (test, arg));
4678 else
4679 return convert (type, test);
4683 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4685 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4686 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4687 ADDEND is the same as X.
4689 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4690 and finite. The problematic cases are when X is zero, and its mode
4691 has signed zeros. In the case of rounding towards -infinity,
4692 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4693 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4695 static bool
4696 fold_real_zero_addition_p (type, addend, negate)
4697 tree type, addend;
4698 int negate;
4700 if (!real_zerop (addend))
4701 return false;
4703 /* Don't allow the fold with -fsignaling-nans. */
4704 if (HONOR_SNANS (TYPE_MODE (type)))
4705 return false;
4707 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4708 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4709 return true;
4711 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4712 if (TREE_CODE (addend) == REAL_CST
4713 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4714 negate = !negate;
4716 /* The mode has signed zeros, and we have to honor their sign.
4717 In this situation, there is only one case we can return true for.
4718 X - 0 is the same as X unless rounding towards -infinity is
4719 supported. */
4720 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4723 /* Subroutine of fold() that checks comparisons of built-in math
4724 functions against real constants.
4726 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4727 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4728 is the type of the result and ARG0 and ARG1 are the operands of the
4729 comparison. ARG1 must be a TREE_REAL_CST.
4731 The function returns the constant folded tree if a simplification
4732 can be made, and NULL_TREE otherwise. */
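/* For example, sqrt(x) > 3.0 becomes x > 9.0, while sqrt(x) < -1.0 is
always false since sqrt never yields a negative value. */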
4734 static tree
4735 fold_mathfn_compare (fcode, code, type, arg0, arg1)
4736 enum built_in_function fcode;
4737 enum tree_code code;
4738 tree type, arg0, arg1;
4740 REAL_VALUE_TYPE c;
4742 if (fcode == BUILT_IN_SQRT
4743 || fcode == BUILT_IN_SQRTF
4744 || fcode == BUILT_IN_SQRTL)
4746 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4747 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4749 c = TREE_REAL_CST (arg1);
4750 if (REAL_VALUE_NEGATIVE (c))
4752 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always false, if y is negative. */
4753 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4754 return omit_one_operand (type,
4755 convert (type, integer_zero_node),
4756 arg);
4758 /* sqrt(x) > y is always true, if y is negative and we
4759 don't care about NaNs, i.e. negative values of x. */
4760 if (code == NE_EXPR || !HONOR_NANS (mode))
4761 return omit_one_operand (type,
4762 convert (type, integer_one_node),
4763 arg);
4765 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4766 return fold (build (GE_EXPR, type, arg,
4767 build_real (TREE_TYPE (arg), dconst0)));
4769 else if (code == GT_EXPR || code == GE_EXPR)
4771 REAL_VALUE_TYPE c2;
4773 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4774 real_convert (&c2, mode, &c2);
4776 if (REAL_VALUE_ISINF (c2))
4778 /* sqrt(x) > y is x == +Inf, when y is very large. */
4779 if (HONOR_INFINITIES (mode))
4780 return fold (build (EQ_EXPR, type, arg,
4781 build_real (TREE_TYPE (arg), c2)));
4783 /* sqrt(x) > y is always false, when y is very large
4784 and we don't care about infinities. */
4785 return omit_one_operand (type,
4786 convert (type, integer_zero_node),
4787 arg);
4790 /* sqrt(x) > c is the same as x > c*c. */
4791 return fold (build (code, type, arg,
4792 build_real (TREE_TYPE (arg), c2)));
4794 else if (code == LT_EXPR || code == LE_EXPR)
4796 REAL_VALUE_TYPE c2;
4798 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4799 real_convert (&c2, mode, &c2);
4801 if (REAL_VALUE_ISINF (c2))
4803 /* sqrt(x) < y is always true, when y is a very large
4804 value and we don't care about NaNs or Infinities. */
4805 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4806 return omit_one_operand (type,
4807 convert (type, integer_one_node),
4808 arg);
4810 /* sqrt(x) < y is x != +Inf when y is very large and we
4811 don't care about NaNs. */
4812 if (! HONOR_NANS (mode))
4813 return fold (build (NE_EXPR, type, arg,
4814 build_real (TREE_TYPE (arg), c2)));
4816 /* sqrt(x) < y is x >= 0 when y is very large and we
4817 don't care about Infinities. */
4818 if (! HONOR_INFINITIES (mode))
4819 return fold (build (GE_EXPR, type, arg,
4820 build_real (TREE_TYPE (arg), dconst0)));
4822 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
4823 if ((*lang_hooks.decls.global_bindings_p) () != 0
4824 || CONTAINS_PLACEHOLDER_P (arg))
4825 return NULL_TREE;
4827 arg = save_expr (arg);
4828 return fold (build (TRUTH_ANDIF_EXPR, type,
4829 fold (build (GE_EXPR, type, arg,
4830 build_real (TREE_TYPE (arg),
4831 dconst0))),
4832 fold (build (NE_EXPR, type, arg,
4833 build_real (TREE_TYPE (arg),
4834 c2)))));
4837 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4838 if (! HONOR_NANS (mode))
4839 return fold (build (code, type, arg,
4840 build_real (TREE_TYPE (arg), c2)));
4842 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4843 if ((*lang_hooks.decls.global_bindings_p) () == 0
4844 && ! CONTAINS_PLACEHOLDER_P (arg))
4846 arg = save_expr (arg);
4847 return fold (build (TRUTH_ANDIF_EXPR, type,
4848 fold (build (GE_EXPR, type, arg,
4849 build_real (TREE_TYPE (arg),
4850 dconst0))),
4851 fold (build (code, type, arg,
4852 build_real (TREE_TYPE (arg),
4853 c2)))));
4858 return NULL_TREE;
4861 /* Subroutine of fold() that optimizes comparisons against Infinities,
4862 either +Inf or -Inf.
4864 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4865 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4866 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4868 The function returns the constant folded tree if a simplification
4869 can be made, and NULL_TREE otherwise. */
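/* For example, for IEEE double, x < +Inf becomes x <= DBL_MAX, and
x > +Inf folds to 0 whenever sNaNs need not be honored. */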
4871 static tree
4872 fold_inf_compare (code, type, arg0, arg1)
4873 enum tree_code code;
4874 tree type, arg0, arg1;
4876 enum machine_mode mode;
4877 REAL_VALUE_TYPE max;
4878 tree temp;
4879 bool neg;
4881 mode = TYPE_MODE (TREE_TYPE (arg0));
4883 /* For negative infinity swap the sense of the comparison. */
4884 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
4885 if (neg)
4886 code = swap_tree_comparison (code);
4888 switch (code)
4890 case GT_EXPR:
4891 /* x > +Inf is always false, if we can ignore sNaNs. */
4892 if (HONOR_SNANS (mode))
4893 return NULL_TREE;
4894 return omit_one_operand (type,
4895 convert (type, integer_zero_node),
4896 arg0);
4898 case LE_EXPR:
4899 /* x <= +Inf is always true, if we don't care about NaNs. */
4900 if (! HONOR_NANS (mode))
4901 return omit_one_operand (type,
4902 convert (type, integer_one_node),
4903 arg0);
4905 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
4906 if ((*lang_hooks.decls.global_bindings_p) () == 0
4907 && ! CONTAINS_PLACEHOLDER_P (arg0))
4909 arg0 = save_expr (arg0);
4910 return fold (build (EQ_EXPR, type, arg0, arg0));
4912 break;
4914 case EQ_EXPR:
4915 case GE_EXPR:
4916 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
4917 real_maxval (&max, neg, mode);
4918 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
4919 arg0, build_real (TREE_TYPE (arg0), max)));
4921 case LT_EXPR:
4922 /* x < +Inf is always equal to x <= DBL_MAX. */
4923 real_maxval (&max, neg, mode);
4924 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4925 arg0, build_real (TREE_TYPE (arg0), max)));
4927 case NE_EXPR:
4928 /* x != +Inf is always equal to !(x > DBL_MAX). */
4929 real_maxval (&max, neg, mode);
4930 if (! HONOR_NANS (mode))
4931 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4932 arg0, build_real (TREE_TYPE (arg0), max)));
4933 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
4934 arg0, build_real (TREE_TYPE (arg0), max)));
4935 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
4937 default:
4938 break;
4941 return NULL_TREE;
4944 /* Perform constant folding and related simplification of EXPR.
4945 The related simplifications include x*1 => x, x*0 => 0, etc.,
4946 and application of the associative law.
4947 NOP_EXPR conversions may be removed freely (as long as we
4948 are careful not to change the C type of the overall expression)
4949 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4950 but we can constant-fold them if they have constant operands. */
4952 tree
4953 fold (expr)
4954 tree expr;
4956 tree t = expr;
4957 tree t1 = NULL_TREE;
4958 tree tem;
4959 tree type = TREE_TYPE (expr);
4960 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4961 enum tree_code code = TREE_CODE (t);
4962 int kind = TREE_CODE_CLASS (code);
4963 int invert;
4964 /* WINS will be nonzero when the switch is done
4965 if all operands are constant. */
4966 int wins = 1;
4968 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4969 Likewise for a SAVE_EXPR that's already been evaluated. */
4970 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4971 return t;
4973 /* Return right away if a constant. */
4974 if (kind == 'c')
4975 return t;
4977 #ifdef MAX_INTEGER_COMPUTATION_MODE
4978 check_max_integer_computation_mode (expr);
4979 #endif
4981 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4983 tree subop;
4985 /* Special case for conversion ops that can have fixed point args. */
4986 arg0 = TREE_OPERAND (t, 0);
4988 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4989 if (arg0 != 0)
4990 STRIP_SIGN_NOPS (arg0);
4992 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4993 subop = TREE_REALPART (arg0);
4994 else
4995 subop = arg0;
4997 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4998 && TREE_CODE (subop) != REAL_CST
5000 /* Note that TREE_CONSTANT isn't enough:
5001 static var addresses are constant but we can't
5002 do arithmetic on them. */
5003 wins = 0;
5005 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
5007 int len = first_rtl_op (code);
5008 int i;
5009 for (i = 0; i < len; i++)
5011 tree op = TREE_OPERAND (t, i);
5012 tree subop;
5014 if (op == 0)
5015 continue; /* Valid for CALL_EXPR, at least. */
5017 if (kind == '<' || code == RSHIFT_EXPR)
5019 /* Signedness matters here. Perhaps we can refine this
5020 later. */
5021 STRIP_SIGN_NOPS (op);
5023 else
5024 /* Strip any conversions that don't change the mode. */
5025 STRIP_NOPS (op);
5027 if (TREE_CODE (op) == COMPLEX_CST)
5028 subop = TREE_REALPART (op);
5029 else
5030 subop = op;
5032 if (TREE_CODE (subop) != INTEGER_CST
5033 && TREE_CODE (subop) != REAL_CST)
5034 /* Note that TREE_CONSTANT isn't enough:
5035 static var addresses are constant but we can't
5036 do arithmetic on them. */
5037 wins = 0;
5039 if (i == 0)
5040 arg0 = op;
5041 else if (i == 1)
5042 arg1 = op;
5046 /* If this is a commutative operation, and ARG0 is a constant, move it
5047 to ARG1 to reduce the number of tests below. */
5048 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5049 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5050 || code == BIT_AND_EXPR)
5051 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
5053 tem = arg0; arg0 = arg1; arg1 = tem;
5055 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
5056 TREE_OPERAND (t, 1) = tem;
5059 /* Now WINS is set as described above,
5060 ARG0 is the first operand of EXPR,
5061 and ARG1 is the second operand (if it has more than one operand).
5063 First check for cases where an arithmetic operation is applied to a
5064 compound, conditional, or comparison operation. Push the arithmetic
5065 operation inside the compound or conditional to see if any folding
5066 can then be done. Convert comparison to conditional for this purpose.
5067 The also optimizes non-constant cases that used to be done in
5068 expand_expr.
5070 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5071 where one operand is a truth value and the other is a truth value
5072 or a BIT_AND_EXPR with the constant 1. In that case, the
5073 code below would make the expression more complex. Change it to a
5074 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5075 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
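/* For example, (a < b) & (c < d) becomes the TRUTH_AND_EXPR of the two
comparisons, and (a < b) == (c < d) becomes the inversion of their
TRUTH_XOR_EXPR. */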
5077 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5078 || code == EQ_EXPR || code == NE_EXPR)
5079 && ((truth_value_p (TREE_CODE (arg0))
5080 && (truth_value_p (TREE_CODE (arg1))
5081 || (TREE_CODE (arg1) == BIT_AND_EXPR
5082 && integer_onep (TREE_OPERAND (arg1, 1)))))
5083 || (truth_value_p (TREE_CODE (arg1))
5084 && (truth_value_p (TREE_CODE (arg0))
5085 || (TREE_CODE (arg0) == BIT_AND_EXPR
5086 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5088 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5089 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5090 : TRUTH_XOR_EXPR,
5091 type, arg0, arg1));
5093 if (code == EQ_EXPR)
5094 t = invert_truthvalue (t);
5096 return t;
5099 if (TREE_CODE_CLASS (code) == '1')
5101 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5102 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5103 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5104 else if (TREE_CODE (arg0) == COND_EXPR)
5106 tree arg01 = TREE_OPERAND (arg0, 1);
5107 tree arg02 = TREE_OPERAND (arg0, 2);
5108 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5109 arg01 = fold (build1 (code, type, arg01));
5110 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5111 arg02 = fold (build1 (code, type, arg02));
5112 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5113 arg01, arg02));
5115 /* If this was a conversion, and all we did was to move it
5116 inside the COND_EXPR, bring it back out. But leave it if
5117 it is a conversion from integer to integer and the
5118 result precision is no wider than a word since such a
5119 conversion is cheap and may be optimized away by combine,
5120 while it couldn't if it were outside the COND_EXPR. Then return
5121 so we don't get into an infinite recursion loop taking the
5122 conversion out and then back in. */
5124 if ((code == NOP_EXPR || code == CONVERT_EXPR
5125 || code == NON_LVALUE_EXPR)
5126 && TREE_CODE (t) == COND_EXPR
5127 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5128 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5129 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5130 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5131 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5132 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5133 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5134 && (INTEGRAL_TYPE_P
5135 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5136 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5137 t = build1 (code, type,
5138 build (COND_EXPR,
5139 TREE_TYPE (TREE_OPERAND
5140 (TREE_OPERAND (t, 1), 0)),
5141 TREE_OPERAND (t, 0),
5142 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5143 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5144 return t;
5146 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5147 return fold (build (COND_EXPR, type, arg0,
5148 fold (build1 (code, type, integer_one_node)),
5149 fold (build1 (code, type, integer_zero_node))));
5151 else if (TREE_CODE_CLASS (code) == '<'
5152 && TREE_CODE (arg0) == COMPOUND_EXPR)
5153 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5154 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5155 else if (TREE_CODE_CLASS (code) == '<'
5156 && TREE_CODE (arg1) == COMPOUND_EXPR)
5157 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5158 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5159 else if (TREE_CODE_CLASS (code) == '2'
5160 || TREE_CODE_CLASS (code) == '<')
5162 if (TREE_CODE (arg1) == COMPOUND_EXPR
5163 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5164 && ! TREE_SIDE_EFFECTS (arg0))
5165 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5166 fold (build (code, type,
5167 arg0, TREE_OPERAND (arg1, 1))));
5168 else if ((TREE_CODE (arg1) == COND_EXPR
5169 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5170 && TREE_CODE_CLASS (code) != '<'))
5171 && (TREE_CODE (arg0) != COND_EXPR
5172 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5173 && (! TREE_SIDE_EFFECTS (arg0)
5174 || ((*lang_hooks.decls.global_bindings_p) () == 0
5175 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5176 return
5177 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5178 /*cond_first_p=*/0);
5179 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5180 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5181 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5182 else if ((TREE_CODE (arg0) == COND_EXPR
5183 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5184 && TREE_CODE_CLASS (code) != '<'))
5185 && (TREE_CODE (arg1) != COND_EXPR
5186 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5187 && (! TREE_SIDE_EFFECTS (arg1)
5188 || ((*lang_hooks.decls.global_bindings_p) () == 0
5189 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5190 return
5191 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5192 /*cond_first_p=*/1);
5195 switch (code)
5197 case INTEGER_CST:
5198 case REAL_CST:
5199 case VECTOR_CST:
5200 case STRING_CST:
5201 case COMPLEX_CST:
5202 case CONSTRUCTOR:
5203 return t;
5205 case CONST_DECL:
5206 return fold (DECL_INITIAL (t));
5208 case NOP_EXPR:
5209 case FLOAT_EXPR:
5210 case CONVERT_EXPR:
5211 case FIX_TRUNC_EXPR:
5212 /* Other kinds of FIX are not handled properly by fold_convert. */
5214 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5215 return TREE_OPERAND (t, 0);
5217 /* Handle cases of two conversions in a row. */
5218 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5219 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5221 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5222 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5223 tree final_type = TREE_TYPE (t);
5224 int inside_int = INTEGRAL_TYPE_P (inside_type);
5225 int inside_ptr = POINTER_TYPE_P (inside_type);
5226 int inside_float = FLOAT_TYPE_P (inside_type);
5227 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5228 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5229 int inter_int = INTEGRAL_TYPE_P (inter_type);
5230 int inter_ptr = POINTER_TYPE_P (inter_type);
5231 int inter_float = FLOAT_TYPE_P (inter_type);
5232 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5233 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5234 int final_int = INTEGRAL_TYPE_P (final_type);
5235 int final_ptr = POINTER_TYPE_P (final_type);
5236 int final_float = FLOAT_TYPE_P (final_type);
5237 unsigned int final_prec = TYPE_PRECISION (final_type);
5238 int final_unsignedp = TREE_UNSIGNED (final_type);
5240 /* In addition to the cases of two conversions in a row
5241 handled below, if we are converting something to its own
5242 type via an object of identical or wider precision, neither
5243 conversion is needed. */
5244 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5245 && ((inter_int && final_int) || (inter_float && final_float))
5246 && inter_prec >= final_prec)
5247 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5249 /* Likewise, if the intermediate and final types are either both
5250 float or both integer, we don't need the middle conversion if
5251 it is at least as wide as the initial type and doesn't change the signedness
5252 (for integers). Avoid this if the final type is a pointer
5253 since then we sometimes need the inner conversion. Likewise if
5254 the outer has a precision not equal to the size of its mode. */
5255 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5256 || (inter_float && inside_float))
5257 && inter_prec >= inside_prec
5258 && (inter_float || inter_unsignedp == inside_unsignedp)
5259 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5260 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5261 && ! final_ptr)
5262 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5264 /* If we have a sign-extension of a zero-extended value, we can
5265 replace that by a single zero-extension. */
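/* E.g. (int) (short) (unsigned char) X needs only the one
zero-extension from unsigned char to int. */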
5266 if (inside_int && inter_int && final_int
5267 && inside_prec < inter_prec && inter_prec < final_prec
5268 && inside_unsignedp && !inter_unsignedp)
5269 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5271 /* Two conversions in a row are not needed unless:
5272 - some conversion is floating-point (overstrict for now), or
5273 - the intermediate type is narrower than both initial and
5274 final, or
5275 - the intermediate type and innermost type differ in signedness,
5276 and the outermost type is wider than the intermediate, or
5277 - the initial type is a pointer type and the precisions of the
5278 intermediate and final types differ, or
5279 - the final type is a pointer type and the precisions of the
5280 initial and intermediate types differ. */
5281 if (! inside_float && ! inter_float && ! final_float
5282 && (inter_prec > inside_prec || inter_prec > final_prec)
5283 && ! (inside_int && inter_int
5284 && inter_unsignedp != inside_unsignedp
5285 && inter_prec < final_prec)
5286 && ((inter_unsignedp && inter_prec > inside_prec)
5287 == (final_unsignedp && final_prec > inter_prec))
5288 && ! (inside_ptr && inter_prec != final_prec)
5289 && ! (final_ptr && inside_prec != inter_prec)
5290 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5291 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5292 && ! final_ptr)
5293 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5296 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5297 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5298 /* Detect assigning a bitfield. */
5299 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5300 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5302 /* Don't leave an assignment inside a conversion
5303 unless assigning a bitfield. */
5304 tree prev = TREE_OPERAND (t, 0);
5305 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5306 /* First do the assignment, then return converted constant. */
5307 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5308 TREE_USED (t) = 1;
5309 return t;
5312 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5313 constant (if x has a signed type, the sign bit cannot be set
5314 in c). This folds extension into the BIT_AND_EXPR. */
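/* E.g. (long) (x & 0xff) can become (long) x & 255L when the tests
below show the extension is safe. */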
5315 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5316 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5317 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5318 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5320 tree and = TREE_OPERAND (t, 0);
5321 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5322 int change = 0;
5324 if (TREE_UNSIGNED (TREE_TYPE (and))
5325 || (TYPE_PRECISION (TREE_TYPE (t))
5326 <= TYPE_PRECISION (TREE_TYPE (and))))
5327 change = 1;
5328 else if (TYPE_PRECISION (TREE_TYPE (and1))
5329 <= HOST_BITS_PER_WIDE_INT
5330 && host_integerp (and1, 1))
5332 unsigned HOST_WIDE_INT cst;
5334 cst = tree_low_cst (and1, 1);
5335 cst &= (HOST_WIDE_INT) -1
5336 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5337 change = (cst == 0);
5338 #ifdef LOAD_EXTEND_OP
5339 if (change
5340 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5341 == ZERO_EXTEND))
5343 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5344 and0 = convert (uns, and0);
5345 and1 = convert (uns, and1);
5347 #endif
5349 if (change)
5350 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5351 convert (TREE_TYPE (t), and0),
5352 convert (TREE_TYPE (t), and1)));
5355 if (!wins)
5357 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5358 return t;
5360 return fold_convert (t, arg0);
5362 case VIEW_CONVERT_EXPR:
5363 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5364 return build1 (VIEW_CONVERT_EXPR, type,
5365 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5366 return t;
5368 case COMPONENT_REF:
5369 if (TREE_CODE (arg0) == CONSTRUCTOR
5370 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5372 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5373 if (m)
5374 t = TREE_VALUE (m);
5376 return t;
5378 case RANGE_EXPR:
5379 TREE_CONSTANT (t) = wins;
5380 return t;
5382 case NEGATE_EXPR:
5383 if (wins)
5385 if (TREE_CODE (arg0) == INTEGER_CST)
5387 unsigned HOST_WIDE_INT low;
5388 HOST_WIDE_INT high;
5389 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5390 TREE_INT_CST_HIGH (arg0),
5391 &low, &high);
5392 t = build_int_2 (low, high);
5393 TREE_TYPE (t) = type;
5394 TREE_OVERFLOW (t)
5395 = (TREE_OVERFLOW (arg0)
5396 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5397 TREE_CONSTANT_OVERFLOW (t)
5398 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5400 else if (TREE_CODE (arg0) == REAL_CST)
5401 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5403 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5404 return TREE_OPERAND (arg0, 0);
5405 /* Convert -((double)float) into (double)(-float). */
5406 else if (TREE_CODE (arg0) == NOP_EXPR
5407 && TREE_CODE (type) == REAL_TYPE)
5409 tree targ0 = strip_float_extensions (arg0);
5410 if (targ0 != arg0)
5411 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5415 /* Convert - (a - b) to (b - a) for non-floating-point. */
5416 else if (TREE_CODE (arg0) == MINUS_EXPR
5417 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5418 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5419 TREE_OPERAND (arg0, 0));
5421 /* Convert -f(x) into f(-x) where f is sin, tan or atan. */
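 /* These are odd functions, f(-x) == -f(x), so the negation can
 move inside the call. */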
5422 switch (builtin_mathfn_code (arg0))
5424 case BUILT_IN_SIN:
5425 case BUILT_IN_SINF:
5426 case BUILT_IN_SINL:
5427 case BUILT_IN_TAN:
5428 case BUILT_IN_TANF:
5429 case BUILT_IN_TANL:
5430 case BUILT_IN_ATAN:
5431 case BUILT_IN_ATANF:
5432 case BUILT_IN_ATANL:
5433 if (negate_expr_p (TREE_VALUE (TREE_OPERAND (arg0, 1))))
5435 tree fndecl, arg, arglist;
5437 fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5438 arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5439 arg = fold (build1 (NEGATE_EXPR, type, arg));
5440 arglist = build_tree_list (NULL_TREE, arg);
5441 return build_function_call_expr (fndecl, arglist);
5443 break;
5445 default:
5446 break;
5448 return t;
5450 case ABS_EXPR:
5451 if (wins)
5453 if (TREE_CODE (arg0) == INTEGER_CST)
5455 /* If the value is unsigned, then the absolute value is
5456 the same as the ordinary value. */
5457 if (TREE_UNSIGNED (type))
5458 return arg0;
5459 /* Similarly, if the value is non-negative. */
5460 else if (INT_CST_LT (integer_minus_one_node, arg0))
5461 return arg0;
5462 /* If the value is negative, then the absolute value is
5463 its negation. */
5464 else
5466 unsigned HOST_WIDE_INT low;
5467 HOST_WIDE_INT high;
5468 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5469 TREE_INT_CST_HIGH (arg0),
5470 &low, &high);
5471 t = build_int_2 (low, high);
5472 TREE_TYPE (t) = type;
5473 TREE_OVERFLOW (t)
5474 = (TREE_OVERFLOW (arg0)
5475 | force_fit_type (t, overflow));
5476 TREE_CONSTANT_OVERFLOW (t)
5477 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5480 else if (TREE_CODE (arg0) == REAL_CST)
5482 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5483 t = build_real (type,
5484 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5487 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5488 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5489 /* Convert fabs((double)float) into (double)fabsf(float). */
5490 else if (TREE_CODE (arg0) == NOP_EXPR
5491 && TREE_CODE (type) == REAL_TYPE)
5493 tree targ0 = strip_float_extensions (arg0);
5494 if (targ0 != arg0)
5495 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5496 targ0)));
5498 else if (tree_expr_nonnegative_p (arg0))
5499 return arg0;
5500 return t;
5502 case CONJ_EXPR:
5503 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5504 return convert (type, arg0);
5505 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5506 return build (COMPLEX_EXPR, type,
5507 TREE_OPERAND (arg0, 0),
5508 negate_expr (TREE_OPERAND (arg0, 1)));
5509 else if (TREE_CODE (arg0) == COMPLEX_CST)
5510 return build_complex (type, TREE_REALPART (arg0),
5511 negate_expr (TREE_IMAGPART (arg0)));
5512 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5513 return fold (build (TREE_CODE (arg0), type,
5514 fold (build1 (CONJ_EXPR, type,
5515 TREE_OPERAND (arg0, 0))),
5516 fold (build1 (CONJ_EXPR,
5517 type, TREE_OPERAND (arg0, 1)))));
5518 else if (TREE_CODE (arg0) == CONJ_EXPR)
5519 return TREE_OPERAND (arg0, 0);
5520 return t;
5522 case BIT_NOT_EXPR:
5523 if (wins)
5525 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5526 ~ TREE_INT_CST_HIGH (arg0));
5527 TREE_TYPE (t) = type;
5528 force_fit_type (t, 0);
5529 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5530 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5532 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5533 return TREE_OPERAND (arg0, 0);
5534 return t;
5536 case PLUS_EXPR:
5537 /* A + (-B) -> A - B */
5538 if (TREE_CODE (arg1) == NEGATE_EXPR)
5539 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5540 /* (-A) + B -> B - A */
5541 if (TREE_CODE (arg0) == NEGATE_EXPR)
5542 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5543 else if (! FLOAT_TYPE_P (type))
5545 if (integer_zerop (arg1))
5546 return non_lvalue (convert (type, arg0));
5548 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5549 with a constant, and the two constants have no bits in common,
5550 we should treat this as a BIT_IOR_EXPR since this may produce more
5551 simplifications. */
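 /* For example, (a & 0x0f) + (b & 0xf0) can never carry between
 the two halves, so it equals (a & 0x0f) | (b & 0xf0). */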
5552 if (TREE_CODE (arg0) == BIT_AND_EXPR
5553 && TREE_CODE (arg1) == BIT_AND_EXPR
5554 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5555 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5556 && integer_zerop (const_binop (BIT_AND_EXPR,
5557 TREE_OPERAND (arg0, 1),
5558 TREE_OPERAND (arg1, 1), 0)))
5560 code = BIT_IOR_EXPR;
5561 goto bit_ior;
5564 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5565 (plus (plus (mult) (mult)) (foo)) so that we can
5566 take advantage of the factoring cases below. */
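 /* For example, (a*c + b) + d*c is reassociated as
 (a*c + d*c) + b, which the code below can factor
 into (a + d)*c + b. */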
5567 if ((TREE_CODE (arg0) == PLUS_EXPR
5568 && TREE_CODE (arg1) == MULT_EXPR)
5569 || (TREE_CODE (arg1) == PLUS_EXPR
5570 && TREE_CODE (arg0) == MULT_EXPR))
5572 tree parg0, parg1, parg, marg;
5574 if (TREE_CODE (arg0) == PLUS_EXPR)
5575 parg = arg0, marg = arg1;
5576 else
5577 parg = arg1, marg = arg0;
5578 parg0 = TREE_OPERAND (parg, 0);
5579 parg1 = TREE_OPERAND (parg, 1);
5580 STRIP_NOPS (parg0);
5581 STRIP_NOPS (parg1);
5583 if (TREE_CODE (parg0) == MULT_EXPR
5584 && TREE_CODE (parg1) != MULT_EXPR)
5585 return fold (build (PLUS_EXPR, type,
5586 fold (build (PLUS_EXPR, type,
5587 convert (type, parg0),
5588 convert (type, marg))),
5589 convert (type, parg1)));
5590 if (TREE_CODE (parg0) != MULT_EXPR
5591 && TREE_CODE (parg1) == MULT_EXPR)
5592 return fold (build (PLUS_EXPR, type,
5593 fold (build (PLUS_EXPR, type,
5594 convert (type, parg1),
5595 convert (type, marg))),
5596 convert (type, parg0)));
5599 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5601 tree arg00, arg01, arg10, arg11;
5602 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5604 /* (A * C) + (B * C) -> (A+B) * C.
5605 We are most concerned about the case where C is a constant,
5606 but other combinations show up during loop reduction. Since
5607 it is not difficult, try all four possibilities. */
5609 arg00 = TREE_OPERAND (arg0, 0);
5610 arg01 = TREE_OPERAND (arg0, 1);
5611 arg10 = TREE_OPERAND (arg1, 0);
5612 arg11 = TREE_OPERAND (arg1, 1);
5613 same = NULL_TREE;
5615 if (operand_equal_p (arg01, arg11, 0))
5616 same = arg01, alt0 = arg00, alt1 = arg10;
5617 else if (operand_equal_p (arg00, arg10, 0))
5618 same = arg00, alt0 = arg01, alt1 = arg11;
5619 else if (operand_equal_p (arg00, arg11, 0))
5620 same = arg00, alt0 = arg01, alt1 = arg10;
5621 else if (operand_equal_p (arg01, arg10, 0))
5622 same = arg01, alt0 = arg00, alt1 = arg11;
5624 /* No identical multiplicands; see if we can find a common
5625 power-of-two factor in non-power-of-two multiplies. This
5626 can help in multi-dimensional array access. */
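 /* For example, a*12 + b*4: 4 is a power of two dividing 12,
 so this becomes (a*3 + b) * 4, matching the shape of a
 two-dimensional array index. */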
5627 else if (TREE_CODE (arg01) == INTEGER_CST
5628 && TREE_CODE (arg11) == INTEGER_CST
5629 && TREE_INT_CST_HIGH (arg01) == 0
5630 && TREE_INT_CST_HIGH (arg11) == 0)
5632 HOST_WIDE_INT int01, int11, tmp;
5633 int01 = TREE_INT_CST_LOW (arg01);
5634 int11 = TREE_INT_CST_LOW (arg11);
5636 /* Move min of absolute values to int11. */
5637 if ((int01 >= 0 ? int01 : -int01)
5638 < (int11 >= 0 ? int11 : -int11))
5640 tmp = int01, int01 = int11, int11 = tmp;
5641 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5642 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5645 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5647 alt0 = fold (build (MULT_EXPR, type, arg00,
5648 build_int_2 (int01 / int11, 0)));
5649 alt1 = arg10;
5650 same = arg11;
5654 if (same)
5655 return fold (build (MULT_EXPR, type,
5656 fold (build (PLUS_EXPR, type, alt0, alt1)),
5657 same));
5661 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5662 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5663 return non_lvalue (convert (type, arg0));
5665 /* Likewise if the operands are reversed. */
5666 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5667 return non_lvalue (convert (type, arg1));
5669 bit_rotate:
5670 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5671 is a rotate of A by C1 bits. */
5672 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5673 is a rotate of A by B bits. */
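 /* For example, with 32-bit unsigned A, (A << 3) + (A >> 29)
 is recognized as a left rotation of A by 3 bits. */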
5675 enum tree_code code0, code1;
5676 code0 = TREE_CODE (arg0);
5677 code1 = TREE_CODE (arg1);
5678 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5679 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5680 && operand_equal_p (TREE_OPERAND (arg0, 0),
5681 TREE_OPERAND (arg1, 0), 0)
5682 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5684 tree tree01, tree11;
5685 enum tree_code code01, code11;
5687 tree01 = TREE_OPERAND (arg0, 1);
5688 tree11 = TREE_OPERAND (arg1, 1);
5689 STRIP_NOPS (tree01);
5690 STRIP_NOPS (tree11);
5691 code01 = TREE_CODE (tree01);
5692 code11 = TREE_CODE (tree11);
5693 if (code01 == INTEGER_CST
5694 && code11 == INTEGER_CST
5695 && TREE_INT_CST_HIGH (tree01) == 0
5696 && TREE_INT_CST_HIGH (tree11) == 0
5697 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5698 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5699 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5700 code0 == LSHIFT_EXPR ? tree01 : tree11);
5701 else if (code11 == MINUS_EXPR)
5703 tree tree110, tree111;
5704 tree110 = TREE_OPERAND (tree11, 0);
5705 tree111 = TREE_OPERAND (tree11, 1);
5706 STRIP_NOPS (tree110);
5707 STRIP_NOPS (tree111);
5708 if (TREE_CODE (tree110) == INTEGER_CST
5709 && 0 == compare_tree_int (tree110,
5710 TYPE_PRECISION
5711 (TREE_TYPE (TREE_OPERAND
5712 (arg0, 0))))
5713 && operand_equal_p (tree01, tree111, 0))
5714 return build ((code0 == LSHIFT_EXPR
5715 ? LROTATE_EXPR
5716 : RROTATE_EXPR),
5717 type, TREE_OPERAND (arg0, 0), tree01);
5719 else if (code01 == MINUS_EXPR)
5721 tree tree010, tree011;
5722 tree010 = TREE_OPERAND (tree01, 0);
5723 tree011 = TREE_OPERAND (tree01, 1);
5724 STRIP_NOPS (tree010);
5725 STRIP_NOPS (tree011);
5726 if (TREE_CODE (tree010) == INTEGER_CST
5727 && 0 == compare_tree_int (tree010,
5728 TYPE_PRECISION
5729 (TREE_TYPE (TREE_OPERAND
5730 (arg0, 0))))
5731 && operand_equal_p (tree11, tree011, 0))
5732 return build ((code0 != LSHIFT_EXPR
5733 ? LROTATE_EXPR
5734 : RROTATE_EXPR),
5735 type, TREE_OPERAND (arg0, 0), tree11);
5740 associate:
5741 /* In most languages, we can't associate operations on floats through
5742 parentheses. Rather than remember where the parentheses were, we
5743 don't associate floats at all. It shouldn't matter much. However,
5744 associating multiplications is only very slightly inaccurate, so do
5745 that if -funsafe-math-optimizations is specified. */
5747 if (! wins
5748 && (! FLOAT_TYPE_P (type)
5749 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5751 tree var0, con0, lit0, minus_lit0;
5752 tree var1, con1, lit1, minus_lit1;
5754 /* Split both trees into variables, constants, and literals. Then
5755 associate each group together, the constants with literals,
5756 then the result with variables. This increases the chances of
5757 literals being recombined later and of generating relocatable
5758 expressions for the sum of a constant and literal. */
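 /* Roughly, (x + 3) - (y - 5) splits into variables x and y
 and literals 3 and 5; the literals combine to 8, giving
 (x - y) + 8. */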
5759 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5760 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5761 code == MINUS_EXPR);
5763 /* Only do something if we found more than two objects. Otherwise,
5764 nothing has changed and we risk infinite recursion. */
5765 if (2 < ((var0 != 0) + (var1 != 0)
5766 + (con0 != 0) + (con1 != 0)
5767 + (lit0 != 0) + (lit1 != 0)
5768 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5770 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5771 if (code == MINUS_EXPR)
5772 code = PLUS_EXPR;
5774 var0 = associate_trees (var0, var1, code, type);
5775 con0 = associate_trees (con0, con1, code, type);
5776 lit0 = associate_trees (lit0, lit1, code, type);
5777 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5779 /* Preserve the MINUS_EXPR if the negative part of the literal is
5780 greater than the positive part. Otherwise, the multiplicative
5781 folding code (i.e. extract_muldiv) may be fooled when
5782 unsigned constants are subtracted, as in the following
5783 example: ((X*2 + 4) - 8U)/2. */
5784 if (minus_lit0 && lit0)
5786 if (tree_int_cst_lt (lit0, minus_lit0))
5788 minus_lit0 = associate_trees (minus_lit0, lit0,
5789 MINUS_EXPR, type);
5790 lit0 = 0;
5792 else
5794 lit0 = associate_trees (lit0, minus_lit0,
5795 MINUS_EXPR, type);
5796 minus_lit0 = 0;
5799 if (minus_lit0)
5801 if (con0 == 0)
5802 return convert (type, associate_trees (var0, minus_lit0,
5803 MINUS_EXPR, type));
5804 else
5806 con0 = associate_trees (con0, minus_lit0,
5807 MINUS_EXPR, type);
5808 return convert (type, associate_trees (var0, con0,
5809 PLUS_EXPR, type));
5813 con0 = associate_trees (con0, lit0, code, type);
5814 return convert (type, associate_trees (var0, con0, code, type));
5818 binary:
5819 if (wins)
5820 t1 = const_binop (code, arg0, arg1, 0);
5821 if (t1 != NULL_TREE)
5823 /* The return value should always have
5824 the same type as the original expression. */
5825 if (TREE_TYPE (t1) != TREE_TYPE (t))
5826 t1 = convert (TREE_TYPE (t), t1);
5828 return t1;
5830 return t;
5832 case MINUS_EXPR:
5833 /* A - (-B) -> A + B */
5834 if (TREE_CODE (arg1) == NEGATE_EXPR)
5835 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5836 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
5837 if (TREE_CODE (arg0) == NEGATE_EXPR
5838 && (FLOAT_TYPE_P (type)
5839 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
5840 && negate_expr_p (arg1)
5841 && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
5842 && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
5843 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
5844 TREE_OPERAND (arg0, 0)));
5846 if (! FLOAT_TYPE_P (type))
5848 if (! wins && integer_zerop (arg0))
5849 return negate_expr (convert (type, arg1));
5850 if (integer_zerop (arg1))
5851 return non_lvalue (convert (type, arg0));
5853 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5854 about the case where C is a constant, just try one of the
5855 four possibilities. */
5857 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5858 && operand_equal_p (TREE_OPERAND (arg0, 1),
5859 TREE_OPERAND (arg1, 1), 0))
5860 return fold (build (MULT_EXPR, type,
5861 fold (build (MINUS_EXPR, type,
5862 TREE_OPERAND (arg0, 0),
5863 TREE_OPERAND (arg1, 0))),
5864 TREE_OPERAND (arg0, 1)));
5866 /* Fold A - (A & B) into ~B & A. */
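 /* (A & B) selects a subset of A's bits, so A - (A & B) leaves
 exactly the bits of A that B clears, i.e. A & ~B. */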
5867 if (!TREE_SIDE_EFFECTS (arg0)
5868 && TREE_CODE (arg1) == BIT_AND_EXPR)
5870 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
5871 return fold (build (BIT_AND_EXPR, type,
5872 fold (build1 (BIT_NOT_EXPR, type,
5873 TREE_OPERAND (arg1, 0))),
5874 arg0));
5875 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
5876 return fold (build (BIT_AND_EXPR, type,
5877 fold (build1 (BIT_NOT_EXPR, type,
5878 TREE_OPERAND (arg1, 1))),
5879 arg0));
5883 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5884 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5885 return non_lvalue (convert (type, arg0));
5887 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5888 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5889 (-ARG1 + ARG0) reduces to -ARG1. */
5890 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5891 return negate_expr (convert (type, arg1));
5893 /* Fold &x - &x. This can happen from &x.foo - &x.
5894 This is unsafe for certain floats even in non-IEEE formats.
5895 In IEEE, it is unsafe because it gives the wrong result for NaNs.
5896 Also note that operand_equal_p is always false if an operand
5897 is volatile. */
5899 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5900 && operand_equal_p (arg0, arg1, 0))
5901 return convert (type, integer_zero_node);
5903 goto associate;
5905 case MULT_EXPR:
5906 /* (-A) * (-B) -> A * B */
5907 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5908 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5909 TREE_OPERAND (arg1, 0)));
5911 if (! FLOAT_TYPE_P (type))
5913 if (integer_zerop (arg1))
5914 return omit_one_operand (type, arg1, arg0);
5915 if (integer_onep (arg1))
5916 return non_lvalue (convert (type, arg0));
5918 /* (a * (1 << b)) is (a << b) */
5919 if (TREE_CODE (arg1) == LSHIFT_EXPR
5920 && integer_onep (TREE_OPERAND (arg1, 0)))
5921 return fold (build (LSHIFT_EXPR, type, arg0,
5922 TREE_OPERAND (arg1, 1)));
5923 if (TREE_CODE (arg0) == LSHIFT_EXPR
5924 && integer_onep (TREE_OPERAND (arg0, 0)))
5925 return fold (build (LSHIFT_EXPR, type, arg1,
5926 TREE_OPERAND (arg0, 1)));
5928 if (TREE_CODE (arg1) == INTEGER_CST
5929 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
5930 convert (type, arg1),
5931 code, NULL_TREE)))
5932 return convert (type, tem);
5935 else
5937 /* Maybe fold x * 0 to 0. The expressions aren't the same
5938 when x is NaN, since x * 0 is also NaN. Nor are they the
5939 same in modes with signed zeros, since multiplying a
5940 negative value by 0 gives -0, not +0. */
5941 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5942 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5943 && real_zerop (arg1))
5944 return omit_one_operand (type, arg1, arg0);
5945 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
5946 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5947 && real_onep (arg1))
5948 return non_lvalue (convert (type, arg0));
5950 /* Transform x * -1.0 into -x. */
5951 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5952 && real_minus_onep (arg1))
5953 return fold (build1 (NEGATE_EXPR, type, arg0));
5955 /* x*2 is x+x */
5956 if (! wins && real_twop (arg1)
5957 && (*lang_hooks.decls.global_bindings_p) () == 0
5958 && ! CONTAINS_PLACEHOLDER_P (arg0))
5960 tree arg = save_expr (arg0);
5961 return fold (build (PLUS_EXPR, type, arg, arg));
5964 if (flag_unsafe_math_optimizations)
5966 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
5967 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
5969 /* Optimizations of sqrt(...)*sqrt(...). */
5970 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
5971 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
5972 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
5974 tree sqrtfn, arg, arglist;
5975 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5976 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
5978 /* Optimize sqrt(x)*sqrt(x) as x. */
5979 if (operand_equal_p (arg00, arg10, 0)
5980 && ! HONOR_SNANS (TYPE_MODE (type)))
5981 return arg00;
5983 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
5984 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5985 arg = fold (build (MULT_EXPR, type, arg00, arg10));
5986 arglist = build_tree_list (NULL_TREE, arg);
5987 return build_function_call_expr (sqrtfn, arglist);
5990 /* Optimize exp(x)*exp(y) as exp(x+y). */
5991 if ((fcode0 == BUILT_IN_EXP && fcode1 == BUILT_IN_EXP)
5992 || (fcode0 == BUILT_IN_EXPF && fcode1 == BUILT_IN_EXPF)
5993 || (fcode0 == BUILT_IN_EXPL && fcode1 == BUILT_IN_EXPL))
5995 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5996 tree arg = build (PLUS_EXPR, type,
5997 TREE_VALUE (TREE_OPERAND (arg0, 1)),
5998 TREE_VALUE (TREE_OPERAND (arg1, 1)));
5999 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6000 return build_function_call_expr (expfn, arglist);
6003 /* Optimizations of pow(...)*pow(...). */
6004 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6005 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6006 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6008 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6009 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6010 1)));
6011 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6012 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6013 1)));
6015 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6016 if (operand_equal_p (arg01, arg11, 0))
6018 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6019 tree arg = build (MULT_EXPR, type, arg00, arg10);
6020 tree arglist = tree_cons (NULL_TREE, fold (arg),
6021 build_tree_list (NULL_TREE,
6022 arg01));
6023 return build_function_call_expr (powfn, arglist);
6026 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6027 if (operand_equal_p (arg00, arg10, 0))
6029 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6030 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6031 tree arglist = tree_cons (NULL_TREE, arg00,
6032 build_tree_list (NULL_TREE,
6033 arg));
6034 return build_function_call_expr (powfn, arglist);
6038 /* Optimize tan(x)*cos(x) as sin(x). */
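 /* tan(x) == sin(x)/cos(x), so the product collapses to sin(x);
 this is only done under -funsafe-math-optimizations. */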
6039 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6040 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6041 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6042 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6043 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6044 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6045 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6046 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6048 tree sinfn;
6050 switch (fcode0)
6052 case BUILT_IN_TAN:
6053 case BUILT_IN_COS:
6054 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6055 break;
6056 case BUILT_IN_TANF:
6057 case BUILT_IN_COSF:
6058 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6059 break;
6060 case BUILT_IN_TANL:
6061 case BUILT_IN_COSL:
6062 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6063 break;
6064 default:
6065 sinfn = NULL_TREE;
6068 if (sinfn != NULL_TREE)
6069 return build_function_call_expr (sinfn,
6070 TREE_OPERAND (arg0, 1));
6074 goto associate;
6076 case BIT_IOR_EXPR:
6077 bit_ior:
6078 if (integer_all_onesp (arg1))
6079 return omit_one_operand (type, arg1, arg0);
6080 if (integer_zerop (arg1))
6081 return non_lvalue (convert (type, arg0));
6082 t1 = distribute_bit_expr (code, type, arg0, arg1);
6083 if (t1 != NULL_TREE)
6084 return t1;
6086 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6088 This results in more efficient code for machines without a NAND
6089 instruction. Combine will canonicalize to the first form
6090 which will allow use of NAND instructions provided by the
6091 backend if they exist. */
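 /* This is De Morgan's law: ~a | ~b == ~(a & b). */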
6092 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6093 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6095 return fold (build1 (BIT_NOT_EXPR, type,
6096 build (BIT_AND_EXPR, type,
6097 TREE_OPERAND (arg0, 0),
6098 TREE_OPERAND (arg1, 0))));
6101 /* See if this can be simplified into a rotate first. If that
6102 is unsuccessful continue in the association code. */
6103 goto bit_rotate;
6105 case BIT_XOR_EXPR:
6106 if (integer_zerop (arg1))
6107 return non_lvalue (convert (type, arg0));
6108 if (integer_all_onesp (arg1))
6109 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6111 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6112 with a constant, and the two constants have no bits in common,
6113 we should treat this as a BIT_IOR_EXPR since this may produce more
6114 simplifications. */
6115 if (TREE_CODE (arg0) == BIT_AND_EXPR
6116 && TREE_CODE (arg1) == BIT_AND_EXPR
6117 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6118 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6119 && integer_zerop (const_binop (BIT_AND_EXPR,
6120 TREE_OPERAND (arg0, 1),
6121 TREE_OPERAND (arg1, 1), 0)))
6123 code = BIT_IOR_EXPR;
6124 goto bit_ior;
6127 /* See if this can be simplified into a rotate first. If that
6128 is unsuccessful continue in the association code. */
6129 goto bit_rotate;
6131 case BIT_AND_EXPR:
6132 bit_and:
6133 if (integer_all_onesp (arg1))
6134 return non_lvalue (convert (type, arg0));
6135 if (integer_zerop (arg1))
6136 return omit_one_operand (type, arg1, arg0);
6137 t1 = distribute_bit_expr (code, type, arg0, arg1);
6138 if (t1 != NULL_TREE)
6139 return t1;
6140 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6141 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6142 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6144 unsigned int prec
6145 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6147 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6148 && (~TREE_INT_CST_LOW (arg1)
6149 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6150 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6153 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6155 This results in more efficient code for machines without a NOR
6156 instruction. Combine will canonicalize to the first form
6157 which will allow use of NOR instructions provided by the
6158 backend if they exist. */
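 /* This is De Morgan's law: ~a & ~b == ~(a | b). */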
6159 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6160 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6162 return fold (build1 (BIT_NOT_EXPR, type,
6163 build (BIT_IOR_EXPR, type,
6164 TREE_OPERAND (arg0, 0),
6165 TREE_OPERAND (arg1, 0))));
6168 goto associate;
6170 case BIT_ANDTC_EXPR:
6171 if (integer_all_onesp (arg0))
6172 return non_lvalue (convert (type, arg1));
6173 if (integer_zerop (arg0))
6174 return omit_one_operand (type, arg0, arg1);
6175 if (TREE_CODE (arg1) == INTEGER_CST)
6177 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
6178 code = BIT_AND_EXPR;
6179 goto bit_and;
6181 goto binary;
6183 case RDIV_EXPR:
6184 /* Don't touch a floating-point divide by zero unless the mode
6185 of the constant can represent infinity. */
6186 if (TREE_CODE (arg1) == REAL_CST
6187 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6188 && real_zerop (arg1))
6189 return t;
6191 /* (-A) / (-B) -> A / B */
6192 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
6193 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6194 TREE_OPERAND (arg1, 0)));
6196 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6197 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6198 && real_onep (arg1))
6199 return non_lvalue (convert (type, arg0));
6201 /* If ARG1 is a constant, we can convert this to a multiply by the
6202 reciprocal. This does not have the same rounding properties,
6203 so only do this if -funsafe-math-optimizations. We can actually
6204 always safely do it if ARG1 is a power of two, but it's hard to
6205 tell if it is or not in a portable manner. */
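 /* For example, x / 4.0 as x * 0.25 is exact, while x / 3.0 as
 x * (1.0/3.0) can round differently; hence we require either an
 exact reciprocal or -funsafe-math-optimizations. */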
6206 if (TREE_CODE (arg1) == REAL_CST)
6208 if (flag_unsafe_math_optimizations
6209 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6210 arg1, 0)))
6211 return fold (build (MULT_EXPR, type, arg0, tem));
6212 /* Find the reciprocal if optimizing and the result is exact. */
6213 else if (optimize)
6215 REAL_VALUE_TYPE r;
6216 r = TREE_REAL_CST (arg1);
6217 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6219 tem = build_real (type, r);
6220 return fold (build (MULT_EXPR, type, arg0, tem));
6224 /* Convert A/B/C to A/(B*C). */
6225 if (flag_unsafe_math_optimizations
6226 && TREE_CODE (arg0) == RDIV_EXPR)
6228 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6229 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
6230 arg1)));
6232 /* Convert A/(B/C) to (A/B)*C. */
6233 if (flag_unsafe_math_optimizations
6234 && TREE_CODE (arg1) == RDIV_EXPR)
6236 return fold (build (MULT_EXPR, type,
6237 build (RDIV_EXPR, type, arg0,
6238 TREE_OPERAND (arg1, 0)),
6239 TREE_OPERAND (arg1, 1)));
6242 if (flag_unsafe_math_optimizations)
6244 enum built_in_function fcode = builtin_mathfn_code (arg1);
6245 /* Optimize x/exp(y) into x*exp(-y). */
6246 if (fcode == BUILT_IN_EXP
6247 || fcode == BUILT_IN_EXPF
6248 || fcode == BUILT_IN_EXPL)
6250 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6251 tree arg = build1 (NEGATE_EXPR, type,
6252 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6253 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6254 arg1 = build_function_call_expr (expfn, arglist);
6255 return fold (build (MULT_EXPR, type, arg0, arg1));
6258 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6259 if (fcode == BUILT_IN_POW
6260 || fcode == BUILT_IN_POWF
6261 || fcode == BUILT_IN_POWL)
6263 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6264 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6265 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6266 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6267 tree arglist = tree_cons(NULL_TREE, arg10,
6268 build_tree_list (NULL_TREE, neg11));
6269 arg1 = build_function_call_expr (powfn, arglist);
6270 return fold (build (MULT_EXPR, type, arg0, arg1));
6274 if (flag_unsafe_math_optimizations)
6276 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6277 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6279 /* Optimize sin(x)/cos(x) as tan(x). */
6280 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6281 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6282 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6283 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6284 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6286 tree tanfn;
6288 if (fcode0 == BUILT_IN_SIN)
6289 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6290 else if (fcode0 == BUILT_IN_SINF)
6291 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6292 else if (fcode0 == BUILT_IN_SINL)
6293 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6294 else
6295 tanfn = NULL_TREE;
6297 if (tanfn != NULL_TREE)
6298 return build_function_call_expr (tanfn,
6299 TREE_OPERAND (arg0, 1));
6302 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6303 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6304 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6305 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6306 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6307 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6309 tree tanfn;
6311 if (fcode0 == BUILT_IN_COS)
6312 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6313 else if (fcode0 == BUILT_IN_COSF)
6314 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6315 else if (fcode0 == BUILT_IN_COSL)
6316 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6317 else
6318 tanfn = NULL_TREE;
6320 if (tanfn != NULL_TREE)
6322 tree tmp = TREE_OPERAND (arg0, 1);
6323 tmp = build_function_call_expr (tanfn, tmp);
6324 return fold (build (RDIV_EXPR, type,
6325 build_real (type, dconst1),
6326 tmp));
6330 goto binary;
6332 case TRUNC_DIV_EXPR:
6333 case ROUND_DIV_EXPR:
6334 case FLOOR_DIV_EXPR:
6335 case CEIL_DIV_EXPR:
6336 case EXACT_DIV_EXPR:
6337 if (integer_onep (arg1))
6338 return non_lvalue (convert (type, arg0));
6339 if (integer_zerop (arg1))
6340 return t;
6342 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6343 operation, EXACT_DIV_EXPR.
6345 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6346 At one time others generated faster code; it's not clear whether they do
6347 after the last round of changes to the DIV code in expmed.c. */
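 /* For example, (n * 4) / 4 as a CEIL_DIV_EXPR is known to be
 exact, so the cheaper EXACT_DIV_EXPR can be used instead. */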
6348 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6349 && multiple_of_p (type, arg0, arg1))
6350 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6352 if (TREE_CODE (arg1) == INTEGER_CST
6353 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6354 code, NULL_TREE)))
6355 return convert (type, tem);
6357 goto binary;
6359 case CEIL_MOD_EXPR:
6360 case FLOOR_MOD_EXPR:
6361 case ROUND_MOD_EXPR:
6362 case TRUNC_MOD_EXPR:
6363 if (integer_onep (arg1))
6364 return omit_one_operand (type, integer_zero_node, arg0);
6365 if (integer_zerop (arg1))
6366 return t;
6368 if (TREE_CODE (arg1) == INTEGER_CST
6369 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6370 code, NULL_TREE)))
6371 return convert (type, tem);
6373 goto binary;
6375 case LROTATE_EXPR:
6376 case RROTATE_EXPR:
6377 if (integer_all_onesp (arg0))
6378 return omit_one_operand (type, arg0, arg1);
6379 goto shift;
6381 case RSHIFT_EXPR:
6382 /* Optimize -1 >> x for arithmetic right shifts. */
6383 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6384 return omit_one_operand (type, arg0, arg1);
6385 /* ... fall through ... */
6387 case LSHIFT_EXPR:
6388 shift:
6389 if (integer_zerop (arg1))
6390 return non_lvalue (convert (type, arg0));
6391 if (integer_zerop (arg0))
6392 return omit_one_operand (type, arg0, arg1);
6394 /* Since a negative shift count is not well-defined,
6395 don't try to compute it in the compiler. */
6396 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6397 return t;
6398 /* Rewrite an LROTATE_EXPR by a constant into an
6399 RROTATE_EXPR by a new constant. */
6400 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6402 TREE_SET_CODE (t, RROTATE_EXPR);
6403 code = RROTATE_EXPR;
6404 TREE_OPERAND (t, 1) = arg1
6405 = const_binop
6406 (MINUS_EXPR,
6407 convert (TREE_TYPE (arg1),
6408 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
6409 arg1, 0);
6410 if (tree_int_cst_sgn (arg1) < 0)
6411 return t;
6414 /* If we have a rotate of a bit operation with the rotate count and
6415 the second operand of the bit operation both constant,
6416 permute the two operations. */
6417 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6418 && (TREE_CODE (arg0) == BIT_AND_EXPR
6419 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
6420 || TREE_CODE (arg0) == BIT_IOR_EXPR
6421 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6422 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6423 return fold (build (TREE_CODE (arg0), type,
6424 fold (build (code, type,
6425 TREE_OPERAND (arg0, 0), arg1)),
6426 fold (build (code, type,
6427 TREE_OPERAND (arg0, 1), arg1))));
6429 /* Two consecutive rotates adding up to the width of the mode can
6430 be ignored. */
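 /* For example, rotating a 32-bit value right by 10 and then
 right by 22 brings every bit back to its original position. */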
6431 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6432 && TREE_CODE (arg0) == RROTATE_EXPR
6433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6434 && TREE_INT_CST_HIGH (arg1) == 0
6435 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6436 && ((TREE_INT_CST_LOW (arg1)
6437 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6438 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6439 return TREE_OPERAND (arg0, 0);
6441 goto binary;
6443 case MIN_EXPR:
6444 if (operand_equal_p (arg0, arg1, 0))
6445 return omit_one_operand (type, arg0, arg1);
6446 if (INTEGRAL_TYPE_P (type)
6447 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6448 return omit_one_operand (type, arg1, arg0);
6449 goto associate;
6451 case MAX_EXPR:
6452 if (operand_equal_p (arg0, arg1, 0))
6453 return omit_one_operand (type, arg0, arg1);
6454 if (INTEGRAL_TYPE_P (type)
6455 && TYPE_MAX_VALUE (type)
6456 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6457 return omit_one_operand (type, arg1, arg0);
6458 goto associate;
6460 case TRUTH_NOT_EXPR:
6461 /* Note that the operand of this must be an int
6462 and its value must be 0 or 1.
6463 ("true" is a fixed value perhaps depending on the language,
6464 but we don't handle values other than 1 correctly yet.) */
6465 tem = invert_truthvalue (arg0);
6466 /* Avoid infinite recursion. */
6467 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6468 return t;
6469 return convert (type, tem);
6471 case TRUTH_ANDIF_EXPR:
6472 /* Note that the operands of this must be ints
6473 and their values must be 0 or 1.
6474 ("true" is a fixed value perhaps depending on the language.) */
6475 /* If first arg is constant zero, return it. */
6476 if (integer_zerop (arg0))
6477 return convert (type, arg0);
6478 case TRUTH_AND_EXPR:
6479 /* If either arg is constant true, drop it. */
6480 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6481 return non_lvalue (convert (type, arg1));
6482 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6483 /* Preserve sequence points. */
6484 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6485 return non_lvalue (convert (type, arg0));
6486 /* If second arg is constant zero, result is zero, but first arg
6487 must be evaluated. */
6488 if (integer_zerop (arg1))
6489 return omit_one_operand (type, arg1, arg0);
6490 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6491 case will be handled here. */
6492 if (integer_zerop (arg0))
6493 return omit_one_operand (type, arg0, arg1);
6495 truth_andor:
6496 /* We only do these simplifications if we are optimizing. */
6497 if (!optimize)
6498 return t;
6500 /* Check for things like (A || B) && (A || C). We can convert this
6501 to A || (B && C). Note that either operator can be any of the four
6502 truth and/or operations and the transformation will still be
6503 valid. Also note that we only care about order for the
6504 ANDIF and ORIF operators. If B contains side effects, this
6505 might change the truth-value of A. */
6506 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6507 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6508 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6509 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6510 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6511 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6513 tree a00 = TREE_OPERAND (arg0, 0);
6514 tree a01 = TREE_OPERAND (arg0, 1);
6515 tree a10 = TREE_OPERAND (arg1, 0);
6516 tree a11 = TREE_OPERAND (arg1, 1);
6517 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6518 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6519 && (code == TRUTH_AND_EXPR
6520 || code == TRUTH_OR_EXPR));
6522 if (operand_equal_p (a00, a10, 0))
6523 return fold (build (TREE_CODE (arg0), type, a00,
6524 fold (build (code, type, a01, a11))));
6525 else if (commutative && operand_equal_p (a00, a11, 0))
6526 return fold (build (TREE_CODE (arg0), type, a00,
6527 fold (build (code, type, a01, a10))));
6528 else if (commutative && operand_equal_p (a01, a10, 0))
6529 return fold (build (TREE_CODE (arg0), type, a01,
6530 fold (build (code, type, a00, a11))));
6532 /* This case is tricky because we must either have commutative
6533 operators or else A10 must not have side-effects. */
6535 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6536 && operand_equal_p (a01, a11, 0))
6537 return fold (build (TREE_CODE (arg0), type,
6538 fold (build (code, type, a00, a10)),
6539 a01));
6542 /* See if we can build a range comparison. */
6543 if (0 != (tem = fold_range_test (t)))
6544 return tem;
6546 /* Check for the possibility of merging component references. If our
6547 lhs is another similar operation, try to merge its rhs with our
6548 rhs. Then try to merge our lhs and rhs. */
6549 if (TREE_CODE (arg0) == code
6550 && 0 != (tem = fold_truthop (code, type,
6551 TREE_OPERAND (arg0, 1), arg1)))
6552 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6554 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6555 return tem;
6557 return t;
6559 case TRUTH_ORIF_EXPR:
6560 /* Note that the operands of this must be ints
6561 and their values must be 0 or true.
6562 ("true" is a fixed value perhaps depending on the language.) */
6563 /* If first arg is constant true, return it. */
6564 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6565 return convert (type, arg0);
6566 case TRUTH_OR_EXPR:
6567 /* If either arg is constant zero, drop it. */
6568 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6569 return non_lvalue (convert (type, arg1));
6570 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6571 /* Preserve sequence points. */
6572 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6573 return non_lvalue (convert (type, arg0));
6574 /* If second arg is constant true, result is true, but we must
6575 evaluate first arg. */
6576 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6577 return omit_one_operand (type, arg1, arg0);
6578 /* Likewise for first arg, but note this only occurs here for
6579 TRUTH_OR_EXPR. */
6580 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6581 return omit_one_operand (type, arg0, arg1);
6582 goto truth_andor;
6584 case TRUTH_XOR_EXPR:
6585 /* If either arg is constant zero, drop it. */
6586 if (integer_zerop (arg0))
6587 return non_lvalue (convert (type, arg1));
6588 if (integer_zerop (arg1))
6589 return non_lvalue (convert (type, arg0));
6590 /* If either arg is constant true, this is a logical inversion. */
6591 if (integer_onep (arg0))
6592 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6593 if (integer_onep (arg1))
6594 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6595 return t;
6597 case EQ_EXPR:
6598 case NE_EXPR:
6599 case LT_EXPR:
6600 case GT_EXPR:
6601 case LE_EXPR:
6602 case GE_EXPR:
6603 /* If one arg is a real or integer constant, put it last. */
6604 if ((TREE_CODE (arg0) == INTEGER_CST
6605 && TREE_CODE (arg1) != INTEGER_CST)
6606 || (TREE_CODE (arg0) == REAL_CST
6607 && TREE_CODE (arg1) != REAL_CST))
6609 TREE_OPERAND (t, 0) = arg1;
6610 TREE_OPERAND (t, 1) = arg0;
6611 arg0 = TREE_OPERAND (t, 0);
6612 arg1 = TREE_OPERAND (t, 1);
6613 code = swap_tree_comparison (code);
6614 TREE_SET_CODE (t, code);
6617 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6619 tree targ0 = strip_float_extensions (arg0);
6620 tree targ1 = strip_float_extensions (arg1);
6621 tree newtype = TREE_TYPE (targ0);
6623 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6624 newtype = TREE_TYPE (targ1);
6626 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6627 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6628 return fold (build (code, type, convert (newtype, targ0),
6629 convert (newtype, targ1)));
6631 /* (-a) CMP (-b) -> b CMP a */
6632 if (TREE_CODE (arg0) == NEGATE_EXPR
6633 && TREE_CODE (arg1) == NEGATE_EXPR)
6634 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6635 TREE_OPERAND (arg0, 0)));
6637 if (TREE_CODE (arg1) == REAL_CST)
6639 REAL_VALUE_TYPE cst;
6640 cst = TREE_REAL_CST (arg1);
6642 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6643 if (TREE_CODE (arg0) == NEGATE_EXPR)
6644 return
6645 fold (build (swap_tree_comparison (code), type,
6646 TREE_OPERAND (arg0, 0),
6647 build_real (TREE_TYPE (arg1),
6648 REAL_VALUE_NEGATE (cst))));
6650 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6651 /* a CMP (-0) -> a CMP 0 */
6652 if (REAL_VALUE_MINUS_ZERO (cst))
6653 return fold (build (code, type, arg0,
6654 build_real (TREE_TYPE (arg1), dconst0)));
6656 /* x != NaN is always true, other ops are always false. */
6657 if (REAL_VALUE_ISNAN (cst)
6658 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
6660 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
6661 return omit_one_operand (type, convert (type, t), arg0);
6664 /* Fold comparisons against infinity. */
6665 if (REAL_VALUE_ISINF (cst))
6667 tem = fold_inf_compare (code, type, arg0, arg1);
6668 if (tem != NULL_TREE)
6669 return tem;
6673 /* If this is a comparison of a real constant with a PLUS_EXPR
6674 or a MINUS_EXPR of a real constant, we can convert it into a
6675 comparison with a revised real constant, provided no overflow
6676 occurs; we do this only when -funsafe-math-optimizations is enabled. */
6677 if (flag_unsafe_math_optimizations
6678 && TREE_CODE (arg1) == REAL_CST
6679 && (TREE_CODE (arg0) == PLUS_EXPR
6680 || TREE_CODE (arg0) == MINUS_EXPR)
6681 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6682 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6683 ? MINUS_EXPR : PLUS_EXPR,
6684 arg1, TREE_OPERAND (arg0, 1), 0))
6685 && ! TREE_CONSTANT_OVERFLOW (tem))
6686 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6688 /* Likewise, we can simplify a comparison of a real constant with
6689 a MINUS_EXPR whose first operand is also a real constant, i.e.
6690 (c1 - x) < c2 becomes x > c1-c2. */
6691 if (flag_unsafe_math_optimizations
6692 && TREE_CODE (arg1) == REAL_CST
6693 && TREE_CODE (arg0) == MINUS_EXPR
6694 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
6695 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
6696 arg1, 0))
6697 && ! TREE_CONSTANT_OVERFLOW (tem))
6698 return fold (build (swap_tree_comparison (code), type,
6699 TREE_OPERAND (arg0, 1), tem));
6701 /* Fold comparisons against built-in math functions. */
6702 if (TREE_CODE (arg1) == REAL_CST
6703 && flag_unsafe_math_optimizations
6704 && ! flag_errno_math)
6706 enum built_in_function fcode = builtin_mathfn_code (arg0);
6708 if (fcode != END_BUILTINS)
6710 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
6711 if (tem != NULL_TREE)
6712 return tem;
6717 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6718 First, see if one arg is constant; find the constant arg
6719 and the other one. */
6721 tree constop = 0, varop = NULL_TREE;
6722 int constopnum = -1;
6724 if (TREE_CONSTANT (arg1))
6725 constopnum = 1, constop = arg1, varop = arg0;
6726 if (TREE_CONSTANT (arg0))
6727 constopnum = 0, constop = arg0, varop = arg1;
6729 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6731 /* This optimization is invalid for ordered comparisons
6732 if CONST+INCR overflows or if foo+incr might overflow.
6733 This optimization is invalid for floating point due to rounding.
6734 For pointer types we assume overflow doesn't happen. */
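 /* For example, foo++ == 5 can become ++foo == 6: the comparison
 sees the same values and the side effect is preserved. */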
6735 if (POINTER_TYPE_P (TREE_TYPE (varop))
6736 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6737 && (code == EQ_EXPR || code == NE_EXPR)))
6739 tree newconst
6740 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6741 constop, TREE_OPERAND (varop, 1)));
6743 /* Do not overwrite the current varop to be a preincrement;
6744 create a new node so that we won't confuse our caller who
6745 might create trees and throw them away, reusing the
6746 arguments that they passed to build. This shows up in
6747 the THEN or ELSE parts of ?: being postincrements. */
6748 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6749 TREE_OPERAND (varop, 0),
6750 TREE_OPERAND (varop, 1));
6752 /* If VAROP is a reference to a bitfield, we must mask
6753 the constant by the width of the field. */
6754 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6755 && DECL_BIT_FIELD(TREE_OPERAND
6756 (TREE_OPERAND (varop, 0), 1)))
6758 int size
6759 = TREE_INT_CST_LOW (DECL_SIZE
6760 (TREE_OPERAND
6761 (TREE_OPERAND (varop, 0), 1)));
6762 tree mask, unsigned_type;
6763 unsigned int precision;
6764 tree folded_compare;
6766 /* First check whether the comparison would come out
6767 always the same. If we don't do that, we would
6768 change the meaning with the masking. */
6769 if (constopnum == 0)
6770 folded_compare = fold (build (code, type, constop,
6771 TREE_OPERAND (varop, 0)));
6772 else
6773 folded_compare = fold (build (code, type,
6774 TREE_OPERAND (varop, 0),
6775 constop));
6776 if (integer_zerop (folded_compare)
6777 || integer_onep (folded_compare))
6778 return omit_one_operand (type, folded_compare, varop);
6780 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6781 precision = TYPE_PRECISION (unsigned_type);
6782 mask = build_int_2 (~0, ~0);
6783 TREE_TYPE (mask) = unsigned_type;
6784 force_fit_type (mask, 0);
6785 mask = const_binop (RSHIFT_EXPR, mask,
6786 size_int (precision - size), 0);
6787 newconst = fold (build (BIT_AND_EXPR,
6788 TREE_TYPE (varop), newconst,
6789 convert (TREE_TYPE (varop),
6790 mask)));
6793 t = build (code, type,
6794 (constopnum == 0) ? newconst : varop,
6795 (constopnum == 1) ? newconst : varop);
6796 return t;
6799 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6801 if (POINTER_TYPE_P (TREE_TYPE (varop))
6802 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6803 && (code == EQ_EXPR || code == NE_EXPR)))
6805 tree newconst
6806 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6807 constop, TREE_OPERAND (varop, 1)));
6809 /* Do not overwrite the current varop to be a predecrement;
6810 create a new node so that we won't confuse our caller who
6811 might create trees and throw them away, reusing the
6812 arguments that they passed to build. This shows up in
6813 the THEN or ELSE parts of ?: being postdecrements. */
6814 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
6815 TREE_OPERAND (varop, 0),
6816 TREE_OPERAND (varop, 1));
6818 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6819 && DECL_BIT_FIELD(TREE_OPERAND
6820 (TREE_OPERAND (varop, 0), 1)))
6822 int size
6823 = TREE_INT_CST_LOW (DECL_SIZE
6824 (TREE_OPERAND
6825 (TREE_OPERAND (varop, 0), 1)));
6826 tree mask, unsigned_type;
6827 unsigned int precision;
6828 tree folded_compare;
6830 if (constopnum == 0)
6831 folded_compare = fold (build (code, type, constop,
6832 TREE_OPERAND (varop, 0)));
6833 else
6834 folded_compare = fold (build (code, type,
6835 TREE_OPERAND (varop, 0),
6836 constop));
6837 if (integer_zerop (folded_compare)
6838 || integer_onep (folded_compare))
6839 return omit_one_operand (type, folded_compare, varop);
6841 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6842 precision = TYPE_PRECISION (unsigned_type);
6843 mask = build_int_2 (~0, ~0);
6844 TREE_TYPE (mask) = unsigned_type;
6845 force_fit_type (mask, 0);
6846 mask = const_binop (RSHIFT_EXPR, mask,
6847 size_int (precision - size), 0);
6848 newconst = fold (build (BIT_AND_EXPR,
6849 TREE_TYPE (varop), newconst,
6850 convert (TREE_TYPE (varop),
6851 mask)));
6854 t = build (code, type,
6855 (constopnum == 0) ? newconst : varop,
6856 (constopnum == 1) ? newconst : varop);
6857 return t;
6862 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6863 This transformation affects the cases which are handled in later
6864 optimizations involving comparisons with non-negative constants. */
6865 if (TREE_CODE (arg1) == INTEGER_CST
6866 && TREE_CODE (arg0) != INTEGER_CST
6867 && tree_int_cst_sgn (arg1) > 0)
6869 switch (code)
6871 case GE_EXPR:
6872 code = GT_EXPR;
6873 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6874 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6875 break;
6877 case LT_EXPR:
6878 code = LE_EXPR;
6879 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6880 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6881 break;
6883 default:
6884 break;
6888 /* Comparisons with the highest or lowest possible integer of
6889 the specified size will have known values. */
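 /* For example, if arg1 has unsigned char type, x > 255 is always
 false, x <= 255 is always true, and x >= 255 becomes x == 255. */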
6891 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6893 if (TREE_CODE (arg1) == INTEGER_CST
6894 && ! TREE_CONSTANT_OVERFLOW (arg1)
6895 && width <= HOST_BITS_PER_WIDE_INT
6896 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6897 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6899 unsigned HOST_WIDE_INT signed_max;
6900 unsigned HOST_WIDE_INT max, min;
6902 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
6904 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6906 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
6907 min = 0;
6909 else
6911 max = signed_max;
6912 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
6915 if (TREE_INT_CST_HIGH (arg1) == 0
6916 && TREE_INT_CST_LOW (arg1) == max)
6917 switch (code)
6919 case GT_EXPR:
6920 return omit_one_operand (type,
6921 convert (type, integer_zero_node),
6922 arg0);
6923 case GE_EXPR:
6924 code = EQ_EXPR;
6925 TREE_SET_CODE (t, EQ_EXPR);
6926 break;
6927 case LE_EXPR:
6928 return omit_one_operand (type,
6929 convert (type, integer_one_node),
6930 arg0);
6931 case LT_EXPR:
6932 code = NE_EXPR;
6933 TREE_SET_CODE (t, NE_EXPR);
6934 break;
6936 /* The GE_EXPR and LT_EXPR cases above are not normally
6937 reached because of previous transformations. */
6939 default:
6940 break;
6942 else if (TREE_INT_CST_HIGH (arg1) == 0
6943 && TREE_INT_CST_LOW (arg1) == max - 1)
6944 switch (code)
6946 case GT_EXPR:
6947 code = EQ_EXPR;
6948 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6949 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6950 break;
6951 case LE_EXPR:
6952 code = NE_EXPR;
6953 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6954 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6955 break;
6956 default:
6957 break;
6959 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6960 && TREE_INT_CST_LOW (arg1) == min)
6961 switch (code)
6963 case LT_EXPR:
6964 return omit_one_operand (type,
6965 convert (type, integer_zero_node),
6966 arg0);
6967 case LE_EXPR:
6968 code = EQ_EXPR;
6969 TREE_SET_CODE (t, EQ_EXPR);
6970 break;
6972 case GE_EXPR:
6973 return omit_one_operand (type,
6974 convert (type, integer_one_node),
6975 arg0);
6976 case GT_EXPR:
6977 code = NE_EXPR;
6978 TREE_SET_CODE (t, NE_EXPR);
6979 break;
6981 default:
6982 break;
6984 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6985 && TREE_INT_CST_LOW (arg1) == min + 1)
6986 switch (code)
6988 case GE_EXPR:
6989 code = NE_EXPR;
6990 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6991 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6992 break;
6993 case LT_EXPR:
6994 code = EQ_EXPR;
6995 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6996 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6997 break;
6998 default:
6999 break;
7002 else if (TREE_INT_CST_HIGH (arg1) == 0
7003 && TREE_INT_CST_LOW (arg1) == signed_max
7004 && TREE_UNSIGNED (TREE_TYPE (arg1))
7005 /* signed_type does not work on pointer types. */
7006 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7008 /* The following case also applies to X < signed_max+1
7009 and X >= signed_max+1 because of previous transformations. */
7010 if (code == LE_EXPR || code == GT_EXPR)
7012 tree st0, st1;
7013 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7014 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7015 return fold
7016 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7017 type, convert (st0, arg0),
7018 convert (st1, integer_zero_node)));
7024 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7025 a MINUS_EXPR of a constant, we can convert it into a comparison with
7026 a revised constant as long as no overflow occurs. */
7027 if ((code == EQ_EXPR || code == NE_EXPR)
7028 && TREE_CODE (arg1) == INTEGER_CST
7029 && (TREE_CODE (arg0) == PLUS_EXPR
7030 || TREE_CODE (arg0) == MINUS_EXPR)
7031 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7032 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7033 ? MINUS_EXPR : PLUS_EXPR,
7034 arg1, TREE_OPERAND (arg0, 1), 0))
7035 && ! TREE_CONSTANT_OVERFLOW (tem))
7036 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7038 /* Similarly for a NEGATE_EXPR. */
7039 else if ((code == EQ_EXPR || code == NE_EXPR)
7040 && TREE_CODE (arg0) == NEGATE_EXPR
7041 && TREE_CODE (arg1) == INTEGER_CST
7042 && 0 != (tem = negate_expr (arg1))
7043 && TREE_CODE (tem) == INTEGER_CST
7044 && ! TREE_CONSTANT_OVERFLOW (tem))
7045 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7047 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7048 for !=. Don't do this for ordered comparisons due to overflow. */
7049 else if ((code == NE_EXPR || code == EQ_EXPR)
7050 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7051 return fold (build (code, type,
7052 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7054 /* If we are widening one operand of an integer comparison,
7055 see if the other operand is similarly being widened. Perhaps we
7056 can do the comparison in the narrower type. */
7057 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7058 && TREE_CODE (arg0) == NOP_EXPR
7059 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7060 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7061 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7062 || (TREE_CODE (t1) == INTEGER_CST
7063 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7064 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
7066 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7067 constant, we can simplify it. */
7068 else if (TREE_CODE (arg1) == INTEGER_CST
7069 && (TREE_CODE (arg0) == MIN_EXPR
7070 || TREE_CODE (arg0) == MAX_EXPR)
7071 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7072 return optimize_minmax_comparison (t);
7074 /* If we are comparing an ABS_EXPR with a constant, we can
7075 convert all the cases into explicit comparisons, but they may
7076 well not be faster than doing the ABS and one comparison.
7077 But ABS (X) <= C is a range comparison, which becomes a subtraction
7078 and a comparison, and is probably faster. */
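 /* For example, abs(x) <= 5 becomes x >= -5 && x <= 5. */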
7079 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7080 && TREE_CODE (arg0) == ABS_EXPR
7081 && ! TREE_SIDE_EFFECTS (arg0)
7082 && (0 != (tem = negate_expr (arg1)))
7083 && TREE_CODE (tem) == INTEGER_CST
7084 && ! TREE_CONSTANT_OVERFLOW (tem))
7085 return fold (build (TRUTH_ANDIF_EXPR, type,
7086 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7087 build (LE_EXPR, type,
7088 TREE_OPERAND (arg0, 0), arg1)));
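/* Illustrative example, for an int X without side effects:

       ABS (X) <= 5   folds to   X >= -5 && X <= 5

   a range comparison that later folding can turn into a single
   subtraction and unsigned comparison.  */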
7090 /* If this is an EQ or NE comparison with zero and ARG0 is
7091 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7092 two operations, but the latter can be done in one less insn
7093 on machines that have only two-operand insns or on which a
7094 constant cannot be the first operand. */
7095 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7096 && TREE_CODE (arg0) == BIT_AND_EXPR)
7098 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7099 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7100 return
7101 fold (build (code, type,
7102 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7103 build (RSHIFT_EXPR,
7104 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7105 TREE_OPERAND (arg0, 1),
7106 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7107 convert (TREE_TYPE (arg0),
7108 integer_one_node)),
7109 arg1));
7110 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7111 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7112 return
7113 fold (build (code, type,
7114 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7115 build (RSHIFT_EXPR,
7116 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7117 TREE_OPERAND (arg0, 0),
7118 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7119 convert (TREE_TYPE (arg0),
7120 integer_one_node)),
7121 arg1));
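/* Illustrative example of the transformation described above:

       ((1 << N) & FLAGS) != 0   folds to   ((FLAGS >> N) & 1) != 0

   Both forms use a shift and an AND, but the second avoids
   materializing the constant 1 as a shifted first operand.  */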
7124 /* If this is an NE or EQ comparison of zero against the result of a
7125 signed MOD operation whose second operand is a power of 2, make
7126 the MOD operation unsigned since it is simpler and equivalent. */
7127 if ((code == NE_EXPR || code == EQ_EXPR)
7128 && integer_zerop (arg1)
7129 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7130 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7131 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7132 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7133 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7134 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7136 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7137 tree newmod = build (TREE_CODE (arg0), newtype,
7138 convert (newtype, TREE_OPERAND (arg0, 0)),
7139 convert (newtype, TREE_OPERAND (arg0, 1)));
7141 return build (code, type, newmod, convert (newtype, arg1));
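/* Illustrative example, for a signed int X:

       X % 4 == 0   folds to   (unsigned) X % 4U == 0

   The unsigned MOD by a power of two is just a mask of the low
   bits, and equality with zero is unaffected by the change of
   sign.  */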
7144 /* If this is an NE comparison of zero with an AND of one, remove the
7145 comparison since the AND will give the correct value. */
7146 if (code == NE_EXPR && integer_zerop (arg1)
7147 && TREE_CODE (arg0) == BIT_AND_EXPR
7148 && integer_onep (TREE_OPERAND (arg0, 1)))
7149 return convert (type, arg0);
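/* Illustrative example:

       (X & 1) != 0   folds to   X & 1

   since the AND already evaluates to exactly 0 or 1.  */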
7151 /* If we have (A & C) == C where C is a power of 2, convert this into
7152 (A & C) != 0. Similarly for NE_EXPR. */
7153 if ((code == EQ_EXPR || code == NE_EXPR)
7154 && TREE_CODE (arg0) == BIT_AND_EXPR
7155 && integer_pow2p (TREE_OPERAND (arg0, 1))
7156 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7157 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7158 arg0, integer_zero_node));
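/* Illustrative example, with the power-of-two constant 8:

       (X & 8) == 8   folds to   (X & 8) != 0
       (X & 8) != 8   folds to   (X & 8) == 0

   The masked value can only be 0 or 8, so the two tests agree.  */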
7160 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7161 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7162 if ((code == EQ_EXPR || code == NE_EXPR)
7163 && TREE_CODE (arg0) == BIT_AND_EXPR
7164 && integer_zerop (arg1))
7166 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
7167 TREE_OPERAND (arg0, 1));
7168 if (arg00 != NULL_TREE)
7170 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
7171 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
7172 convert (stype, arg00),
7173 convert (stype, integer_zero_node)));
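/* Illustrative example, assuming a 32-bit int X whose sign bit is
   0x80000000:

       (X & 0x80000000) != 0   folds to   X < 0
       (X & 0x80000000) == 0   folds to   X >= 0

   sign_bit_p checks that the mask is exactly the sign bit.  */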
7177 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7178 and similarly for >= into !=. */
7179 if ((code == LT_EXPR || code == GE_EXPR)
7180 && TREE_UNSIGNED (TREE_TYPE (arg0))
7181 && TREE_CODE (arg1) == LSHIFT_EXPR
7182 && integer_onep (TREE_OPERAND (arg1, 0)))
7183 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7184 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7185 TREE_OPERAND (arg1, 1)),
7186 convert (TREE_TYPE (arg0), integer_zero_node));
7188 else if ((code == LT_EXPR || code == GE_EXPR)
7189 && TREE_UNSIGNED (TREE_TYPE (arg0))
7190 && (TREE_CODE (arg1) == NOP_EXPR
7191 || TREE_CODE (arg1) == CONVERT_EXPR)
7192 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7193 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7194 return
7195 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7196 convert (TREE_TYPE (arg0),
7197 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7198 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7199 convert (TREE_TYPE (arg0), integer_zero_node));
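/* Illustrative example, for an unsigned int X:

       X <  (1 << Y)   folds to   (X >> Y) == 0
       X >= (1 << Y)   folds to   (X >> Y) != 0

   and likewise when the shift is wrapped in a NOP or CONVERT.  */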
7201 /* Simplify comparison of something with itself. (For IEEE
7202 floating-point, we can only do some of these simplifications.) */
7203 if (operand_equal_p (arg0, arg1, 0))
7205 switch (code)
7207 case EQ_EXPR:
7208 case GE_EXPR:
7209 case LE_EXPR:
7210 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7211 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7212 return constant_boolean_node (1, type);
7213 code = EQ_EXPR;
7214 TREE_SET_CODE (t, code);
7215 break;
7217 case NE_EXPR:
7218 /* For NE, we can only do this simplification if the operands
7219 are integral or we don't honor IEEE floating point NaNs. */
7220 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7221 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7222 break;
7223 /* ... fall through ... */
7224 case GT_EXPR:
7225 case LT_EXPR:
7226 return constant_boolean_node (0, type);
7227 default:
7228 abort ();
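/* Illustrative examples of the self-comparison folds:

       X == X   folds to   1   (unless X is floating point and NaNs
                                are honored)
       X <  X   folds to   0   (always safe: a NaN compares false).  */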
7232 /* If we are comparing an expression that just has comparisons
7233 of two integer values, arithmetic expressions of those comparisons,
7234 and constants, we can simplify it. There are only three cases
7235 to check: the two values can either be equal, the first can be
7236 greater, or the second can be greater. Fold the expression for
7237 those three values. Since each value must be 0 or 1, we have
7238 eight possibilities, each of which corresponds to the constant 0
7239 or 1 or one of the six possible comparisons.
7241 This handles common cases like (a > b) == 0 but also handles
7242 expressions like ((x > y) - (y > x)) > 0, which supposedly
7243 occur in macroized code. */
7245 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7247 tree cval1 = 0, cval2 = 0;
7248 int save_p = 0;
7250 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7251 /* Don't handle degenerate cases here; they should already
7252 have been handled anyway. */
7253 && cval1 != 0 && cval2 != 0
7254 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7255 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7256 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7257 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7258 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7259 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7260 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7262 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7263 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7265 /* We can't just pass T to eval_subst in case cval1 or cval2
7266 was the same as ARG1. */
7268 tree high_result
7269 = fold (build (code, type,
7270 eval_subst (arg0, cval1, maxval, cval2, minval),
7271 arg1));
7272 tree equal_result
7273 = fold (build (code, type,
7274 eval_subst (arg0, cval1, maxval, cval2, maxval),
7275 arg1));
7276 tree low_result
7277 = fold (build (code, type,
7278 eval_subst (arg0, cval1, minval, cval2, maxval),
7279 arg1));
7281 /* All three of these results should be 0 or 1. Confirm they
7282 are. Then use those values to select the proper code
7283 to use. */
7285 if ((integer_zerop (high_result)
7286 || integer_onep (high_result))
7287 && (integer_zerop (equal_result)
7288 || integer_onep (equal_result))
7289 && (integer_zerop (low_result)
7290 || integer_onep (low_result)))
7292 /* Make a 3-bit mask with the high-order bit being the
7293 value for `>', the next for `=', and the low for `<'. */
7294 switch ((integer_onep (high_result) * 4)
7295 + (integer_onep (equal_result) * 2)
7296 + integer_onep (low_result))
7298 case 0:
7299 /* Always false. */
7300 return omit_one_operand (type, integer_zero_node, arg0);
7301 case 1:
7302 code = LT_EXPR;
7303 break;
7304 case 2:
7305 code = EQ_EXPR;
7306 break;
7307 case 3:
7308 code = LE_EXPR;
7309 break;
7310 case 4:
7311 code = GT_EXPR;
7312 break;
7313 case 5:
7314 code = NE_EXPR;
7315 break;
7316 case 6:
7317 code = GE_EXPR;
7318 break;
7319 case 7:
7320 /* Always true. */
7321 return omit_one_operand (type, integer_one_node, arg0);
7324 t = build (code, type, cval1, cval2);
7325 if (save_p)
7326 return save_expr (t);
7327 else
7328 return fold (t);
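/* Illustrative walk-through of the scheme above for (A > B) == 0:
   substituting the three orderings gives high_result = (1 == 0) = 0,
   equal_result = (0 == 0) = 1 and low_result = (0 == 0) = 1, i.e.
   mask 011 = 3, so the expression folds to A <= B.  */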
7333 /* If this is a comparison of a field, we may be able to simplify it. */
7334 if (((TREE_CODE (arg0) == COMPONENT_REF
7335 && (*lang_hooks.can_use_bit_fields_p) ())
7336 || TREE_CODE (arg0) == BIT_FIELD_REF)
7337 && (code == EQ_EXPR || code == NE_EXPR)
7338 /* Handle the constant case even without -O
7339 to make sure the warnings are given. */
7340 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7342 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7343 return t1 ? t1 : t;
7346 /* If this is a comparison of complex values and either or both sides
7347 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7348 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7349 This may prevent needless evaluations. */
7350 if ((code == EQ_EXPR || code == NE_EXPR)
7351 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7352 && (TREE_CODE (arg0) == COMPLEX_EXPR
7353 || TREE_CODE (arg1) == COMPLEX_EXPR
7354 || TREE_CODE (arg0) == COMPLEX_CST
7355 || TREE_CODE (arg1) == COMPLEX_CST))
7357 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7358 tree real0, imag0, real1, imag1;
7360 arg0 = save_expr (arg0);
7361 arg1 = save_expr (arg1);
7362 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7363 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7364 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7365 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7367 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7368 : TRUTH_ORIF_EXPR),
7369 type,
7370 fold (build (code, type, real0, real1)),
7371 fold (build (code, type, imag0, imag1))));
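/* Illustrative example, for complex A = a + b*i and C = c + d*i:

       A == C   folds to   a == c && b == d
       A != C   folds to   a != c || b != d

   Using ANDIF/ORIF means the imaginary parts are not evaluated
   when the real parts already decide the result.  */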
7374 /* Optimize comparisons of strlen vs zero to a compare of the
7375 first character of the string vs zero. To wit,
7376 strlen(ptr) == 0 => *ptr == 0
7377 strlen(ptr) != 0 => *ptr != 0
7378 Other cases should reduce to one of these two (or a constant)
7379 due to the return value of strlen being unsigned. */
7380 if ((code == EQ_EXPR || code == NE_EXPR)
7381 && integer_zerop (arg1)
7382 && TREE_CODE (arg0) == CALL_EXPR
7383 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
7385 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7386 tree arglist;
7388 if (TREE_CODE (fndecl) == FUNCTION_DECL
7389 && DECL_BUILT_IN (fndecl)
7390 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7391 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7392 && (arglist = TREE_OPERAND (arg0, 1))
7393 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7394 && ! TREE_CHAIN (arglist))
7395 return fold (build (code, type,
7396 build1 (INDIRECT_REF, char_type_node,
7397 TREE_VALUE (arglist)),
7398 integer_zero_node));
7401 /* From here on, the only cases we handle are when the result is
7402 known to be a constant.
7404 To compute GT, swap the arguments and do LT.
7405 To compute GE, do LT and invert the result.
7406 To compute LE, swap the arguments, do LT and invert the result.
7407 To compute NE, do EQ and invert the result.
7409 Therefore, the code below must handle only EQ and LT. */
7411 if (code == LE_EXPR || code == GT_EXPR)
7413 tem = arg0, arg0 = arg1, arg1 = tem;
7414 code = swap_tree_comparison (code);
7417 /* Note that it is safe to invert for real values here because we
7418 will check below in the one case that it matters. */
7420 t1 = NULL_TREE;
7421 invert = 0;
7422 if (code == NE_EXPR || code == GE_EXPR)
7424 invert = 1;
7425 code = invert_tree_comparison (code);
7428 /* Compute a result for LT or EQ if args permit;
7429 otherwise return T. */
7430 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7432 if (code == EQ_EXPR)
7433 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7434 else
7435 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7436 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7437 : INT_CST_LT (arg0, arg1)),
7438 0);
7441 #if 0 /* This is no longer useful, but breaks some real code. */
7442 /* Assume a nonexplicit constant cannot equal an explicit one,
7443 since such code would be undefined anyway.
7444 Exception: on sysvr4, using #pragma weak,
7445 a label can come out as 0. */
7446 else if (TREE_CODE (arg1) == INTEGER_CST
7447 && !integer_zerop (arg1)
7448 && TREE_CONSTANT (arg0)
7449 && TREE_CODE (arg0) == ADDR_EXPR
7450 && code == EQ_EXPR)
7451 t1 = build_int_2 (0, 0);
7452 #endif
7453 /* Two real constants can be compared explicitly. */
7454 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7456 /* If either operand is a NaN, the result is false with two
7457 exceptions: First, an NE_EXPR is true on NaNs, but that case
7458 is already handled correctly since we will be inverting the
7459 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7460 or a GE_EXPR into a LT_EXPR, we must return true so that it
7461 will be inverted into false. */
7463 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7464 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7465 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7467 else if (code == EQ_EXPR)
7468 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7469 TREE_REAL_CST (arg1)),
7470 0);
7471 else
7472 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7473 TREE_REAL_CST (arg1)),
7474 0);
7477 if (t1 == NULL_TREE)
7478 return t;
7480 if (invert)
7481 TREE_INT_CST_LOW (t1) ^= 1;
7483 TREE_TYPE (t1) = type;
7484 if (TREE_CODE (type) == BOOLEAN_TYPE)
7485 return (*lang_hooks.truthvalue_conversion) (t1);
7486 return t1;
7488 case COND_EXPR:
7489 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7490 so all simple results must be passed through pedantic_non_lvalue. */
7491 if (TREE_CODE (arg0) == INTEGER_CST)
7492 return pedantic_non_lvalue
7493 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7494 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7495 return pedantic_omit_one_operand (type, arg1, arg0);
7497 /* If the second operand is zero, invert the comparison and swap
7498 the second and third operands. Likewise if the second operand
7499 is constant and the third is not or if the third operand is
7500 equivalent to the first operand of the comparison. */
7502 if (integer_zerop (arg1)
7503 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
7504 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7505 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7506 TREE_OPERAND (t, 2),
7507 TREE_OPERAND (arg0, 1))))
7509 /* See if this can be inverted. If it can't, possibly because
7510 it was a floating-point inequality comparison, don't do
7511 anything. */
7512 tem = invert_truthvalue (arg0);
7514 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7516 t = build (code, type, tem,
7517 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7518 arg0 = tem;
7519 /* arg1 should be the first argument of the new T. */
7520 arg1 = TREE_OPERAND (t, 1);
7521 STRIP_NOPS (arg1);
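/* Illustrative example of the swap just performed:

       A ? 0 : B   becomes   !A ? B : 0

   where the inversion is folded into A itself when possible (for
   instance < becoming >=), so no TRUTH_NOT_EXPR remains.  */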
7525 /* If we have A op B ? A : C, we may be able to convert this to a
7526 simpler expression, depending on the operation and the values
7527 of B and C. Signed zeros prevent all of these transformations,
7528 for reasons given above each one. */
7530 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7531 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7532 arg1, TREE_OPERAND (arg0, 1))
7533 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7535 tree arg2 = TREE_OPERAND (t, 2);
7536 enum tree_code comp_code = TREE_CODE (arg0);
7538 STRIP_NOPS (arg2);
7540 /* If we have A op 0 ? A : -A, consider applying the following
7541 transformations:
7543 A == 0? A : -A same as -A
7544 A != 0? A : -A same as A
7545 A >= 0? A : -A same as abs (A)
7546 A > 0? A : -A same as abs (A)
7547 A <= 0? A : -A same as -abs (A)
7548 A < 0? A : -A same as -abs (A)
7550 None of these transformations work for modes with signed
7551 zeros. If A is +/-0, the first two transformations will
7552 change the sign of the result (from +0 to -0, or vice
7553 versa). The last four will fix the sign of the result,
7554 even though the original expressions could be positive or
7555 negative, depending on the sign of A.
7557 Note that all these transformations are correct if A is
7558 NaN, since the two alternatives (A and -A) are also NaNs. */
7559 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7560 ? real_zerop (TREE_OPERAND (arg0, 1))
7561 : integer_zerop (TREE_OPERAND (arg0, 1)))
7562 && TREE_CODE (arg2) == NEGATE_EXPR
7563 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7564 switch (comp_code)
7566 case EQ_EXPR:
7567 return
7568 pedantic_non_lvalue
7569 (convert (type,
7570 negate_expr
7571 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7572 arg1))));
7573 case NE_EXPR:
7574 return pedantic_non_lvalue (convert (type, arg1));
7575 case GE_EXPR:
7576 case GT_EXPR:
7577 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7578 arg1 = convert ((*lang_hooks.types.signed_type)
7579 (TREE_TYPE (arg1)), arg1);
7580 return pedantic_non_lvalue
7581 (convert (type, fold (build1 (ABS_EXPR,
7582 TREE_TYPE (arg1), arg1))));
7583 case LE_EXPR:
7584 case LT_EXPR:
7585 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7586 arg1 = convert ((*lang_hooks.types.signed_type)
7587 (TREE_TYPE (arg1)), arg1);
7588 return pedantic_non_lvalue
7589 (negate_expr (convert (type,
7590 fold (build1 (ABS_EXPR,
7591 TREE_TYPE (arg1),
7592 arg1)))));
7593 default:
7594 abort ();
7597 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7598 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7599 both transformations are correct when A is NaN: A != 0
7600 is then true, and A == 0 is false. */
7602 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7604 if (comp_code == NE_EXPR)
7605 return pedantic_non_lvalue (convert (type, arg1));
7606 else if (comp_code == EQ_EXPR)
7607 return pedantic_non_lvalue (convert (type, integer_zero_node));
7610 /* Try some transformations of A op B ? A : B.
7612 A == B? A : B same as B
7613 A != B? A : B same as A
7614 A >= B? A : B same as max (A, B)
7615 A > B? A : B same as max (B, A)
7616 A <= B? A : B same as min (A, B)
7617 A < B? A : B same as min (B, A)
7619 As above, these transformations don't work in the presence
7620 of signed zeros. For example, if A and B are zeros of
7621 opposite sign, the first two transformations will change
7622 the sign of the result. In the last four, the original
7623 expressions give different results for (A=+0, B=-0) and
7624 (A=-0, B=+0), but the transformed expressions do not.
7626 The first two transformations are correct if either A or B
7627 is a NaN. In the first transformation, the condition will
7628 be false, and B will indeed be chosen. In the case of the
7629 second transformation, the condition A != B will be true,
7630 and A will be chosen.
7632 The conversions to max() and min() are not correct if B is
7633 a number and A is not. The conditions in the original
7634 expressions will be false, so all four give B. The min()
7635 and max() versions would give a NaN instead. */
7636 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7637 arg2, TREE_OPERAND (arg0, 0)))
7639 tree comp_op0 = TREE_OPERAND (arg0, 0);
7640 tree comp_op1 = TREE_OPERAND (arg0, 1);
7641 tree comp_type = TREE_TYPE (comp_op0);
7643 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7644 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7646 comp_type = type;
7647 comp_op0 = arg1;
7648 comp_op1 = arg2;
7651 switch (comp_code)
7653 case EQ_EXPR:
7654 return pedantic_non_lvalue (convert (type, arg2));
7655 case NE_EXPR:
7656 return pedantic_non_lvalue (convert (type, arg1));
7657 case LE_EXPR:
7658 case LT_EXPR:
7659 /* In C++ a ?: expression can be an lvalue, so put the
7660 operand which will be used if they are equal first
7661 so that we can convert this back to the
7662 corresponding COND_EXPR. */
7663 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7664 return pedantic_non_lvalue
7665 (convert (type, fold (build (MIN_EXPR, comp_type,
7666 (comp_code == LE_EXPR
7667 ? comp_op0 : comp_op1),
7668 (comp_code == LE_EXPR
7669 ? comp_op1 : comp_op0)))));
7670 break;
7671 case GE_EXPR:
7672 case GT_EXPR:
7673 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7674 return pedantic_non_lvalue
7675 (convert (type, fold (build (MAX_EXPR, comp_type,
7676 (comp_code == GE_EXPR
7677 ? comp_op0 : comp_op1),
7678 (comp_code == GE_EXPR
7679 ? comp_op1 : comp_op0)))));
7680 break;
7681 default:
7682 abort ();
7686 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7687 we might still be able to simplify this. For example,
7688 if C1 is one less or one more than C2, this might have started
7689 out as a MIN or MAX and been transformed by this function.
7690 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
7692 if (INTEGRAL_TYPE_P (type)
7693 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7694 && TREE_CODE (arg2) == INTEGER_CST)
7695 switch (comp_code)
7697 case EQ_EXPR:
7698 /* We can replace A with C1 in this case. */
7699 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7700 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7701 TREE_OPERAND (t, 2));
7702 break;
7704 case LT_EXPR:
7705 /* If C1 is C2 + 1, this is min(A, C2). */
7706 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7707 && operand_equal_p (TREE_OPERAND (arg0, 1),
7708 const_binop (PLUS_EXPR, arg2,
7709 integer_one_node, 0), 1))
7710 return pedantic_non_lvalue
7711 (fold (build (MIN_EXPR, type, arg1, arg2)));
7712 break;
7714 case LE_EXPR:
7715 /* If C1 is C2 - 1, this is min(A, C2). */
7716 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7717 && operand_equal_p (TREE_OPERAND (arg0, 1),
7718 const_binop (MINUS_EXPR, arg2,
7719 integer_one_node, 0), 1))
7720 return pedantic_non_lvalue
7721 (fold (build (MIN_EXPR, type, arg1, arg2)));
7722 break;
7724 case GT_EXPR:
7725 /* If C1 is C2 - 1, this is max(A, C2). */
7726 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7727 && operand_equal_p (TREE_OPERAND (arg0, 1),
7728 const_binop (MINUS_EXPR, arg2,
7729 integer_one_node, 0), 1))
7730 return pedantic_non_lvalue
7731 (fold (build (MAX_EXPR, type, arg1, arg2)));
7732 break;
7734 case GE_EXPR:
7735 /* If C1 is C2 + 1, this is max(A, C2). */
7736 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7737 && operand_equal_p (TREE_OPERAND (arg0, 1),
7738 const_binop (PLUS_EXPR, arg2,
7739 integer_one_node, 0), 1))
7740 return pedantic_non_lvalue
7741 (fold (build (MAX_EXPR, type, arg1, arg2)));
7742 break;
7743 case NE_EXPR:
7744 break;
7745 default:
7746 abort ();
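/* Illustrative example of recovering a MIN from its expanded form:

       X < 4 ? X : 3   folds to   MIN (X, 3)

   Here C1 is 4 and C2 is 3, so C1 == C2 + 1 and the LT_EXPR case
   applies: if X < 4 the result is X, which is at most 3; otherwise
   it is 3.  */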
7750 /* If the second operand is simpler than the third, swap them
7751 since that produces better jump optimization results. */
7752 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7753 || TREE_CODE (arg1) == SAVE_EXPR)
7754 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7755 || DECL_P (TREE_OPERAND (t, 2))
7756 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7758 /* See if this can be inverted. If it can't, possibly because
7759 it was a floating-point inequality comparison, don't do
7760 anything. */
7761 tem = invert_truthvalue (arg0);
7763 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7765 t = build (code, type, tem,
7766 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7767 arg0 = tem;
7768 /* arg1 should be the first argument of the new T. */
7769 arg1 = TREE_OPERAND (t, 1);
7770 STRIP_NOPS (arg1);
7774 /* Convert A ? 1 : 0 to simply A. */
7775 if (integer_onep (TREE_OPERAND (t, 1))
7776 && integer_zerop (TREE_OPERAND (t, 2))
7777 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7778 call to fold will try to move the conversion inside
7779 a COND, which will recurse. In that case, the COND_EXPR
7780 is probably the best choice, so leave it alone. */
7781 && type == TREE_TYPE (arg0))
7782 return pedantic_non_lvalue (arg0);
7784 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
7785 over COND_EXPR in cases such as floating point comparisons. */
7786 if (integer_zerop (TREE_OPERAND (t, 1))
7787 && integer_onep (TREE_OPERAND (t, 2))
7788 && truth_value_p (TREE_CODE (arg0)))
7789 return pedantic_non_lvalue (convert (type,
7790 invert_truthvalue (arg0)));
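/* Illustrative examples:

       A ? 1 : 0   folds to   A    (when the types already agree)
       A ? 0 : 1   folds to   !A   (when A is a truth value).  */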
7792 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7793 operation is simply A & 2. */
7795 if (integer_zerop (TREE_OPERAND (t, 2))
7796 && TREE_CODE (arg0) == NE_EXPR
7797 && integer_zerop (TREE_OPERAND (arg0, 1))
7798 && integer_pow2p (arg1)
7799 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7800 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7801 arg1, 1))
7802 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7804 /* Convert A ? B : 0 into A && B if A and B are truth values. */
7805 if (integer_zerop (TREE_OPERAND (t, 2))
7806 && truth_value_p (TREE_CODE (arg0))
7807 && truth_value_p (TREE_CODE (arg1)))
7808 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
7809 arg0, arg1)));
7811 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
7812 if (integer_onep (TREE_OPERAND (t, 2))
7813 && truth_value_p (TREE_CODE (arg0))
7814 && truth_value_p (TREE_CODE (arg1)))
7816 /* Only perform transformation if ARG0 is easily inverted. */
7817 tem = invert_truthvalue (arg0);
7818 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7819 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
7820 tem, arg1)));
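/* Illustrative examples, for truth values A and B:

       A ? B : 0   folds to   A && B
       A ? B : 1   folds to   !A || B   (only if A inverts cleanly).  */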
7823 return t;
7825 case COMPOUND_EXPR:
7826 /* When pedantic, a compound expression can be neither an lvalue
7827 nor an integer constant expression. */
7828 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7829 return t;
7830 /* Don't let (0, 0) be a null pointer constant. */
7831 if (integer_zerop (arg1))
7832 return build1 (NOP_EXPR, type, arg1);
7833 return convert (type, arg1);
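/* Illustrative example: when X has no side effects and we are not
   being pedantic,

       (X, 5)   folds to   5

   while (0, 0) is wrapped in a NOP_EXPR so that the result cannot
   be mistaken for a null pointer constant.  */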
7835 case COMPLEX_EXPR:
7836 if (wins)
7837 return build_complex (type, arg0, arg1);
7838 return t;
7840 case REALPART_EXPR:
7841 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7842 return t;
7843 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7844 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7845 TREE_OPERAND (arg0, 1));
7846 else if (TREE_CODE (arg0) == COMPLEX_CST)
7847 return TREE_REALPART (arg0);
7848 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7849 return fold (build (TREE_CODE (arg0), type,
7850 fold (build1 (REALPART_EXPR, type,
7851 TREE_OPERAND (arg0, 0))),
7852 fold (build1 (REALPART_EXPR,
7853 type, TREE_OPERAND (arg0, 1)))));
7854 return t;
7856 case IMAGPART_EXPR:
7857 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7858 return convert (type, integer_zero_node);
7859 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7860 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7861 TREE_OPERAND (arg0, 0));
7862 else if (TREE_CODE (arg0) == COMPLEX_CST)
7863 return TREE_IMAGPART (arg0);
7864 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7865 return fold (build (TREE_CODE (arg0), type,
7866 fold (build1 (IMAGPART_EXPR, type,
7867 TREE_OPERAND (arg0, 0))),
7868 fold (build1 (IMAGPART_EXPR, type,
7869 TREE_OPERAND (arg0, 1)))));
7870 return t;
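/* Illustrative examples, for complex X = a + b*i:

       REALPART_EXPR (X)   folds to   a
       IMAGPART_EXPR (X)   folds to   b

   Both also distribute over complex addition and subtraction, e.g.
   the real part of X + Y folds to realpart (X) + realpart (Y).  */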
7872 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7873 appropriate. */
7874 case CLEANUP_POINT_EXPR:
7875 if (! has_cleanups (arg0))
7876 return TREE_OPERAND (t, 0);
7879 enum tree_code code0 = TREE_CODE (arg0);
7880 int kind0 = TREE_CODE_CLASS (code0);
7881 tree arg00 = TREE_OPERAND (arg0, 0);
7882 tree arg01;
7884 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7885 return fold (build1 (code0, type,
7886 fold (build1 (CLEANUP_POINT_EXPR,
7887 TREE_TYPE (arg00), arg00))));
7889 if (kind0 == '<' || kind0 == '2'
7890 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7891 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7892 || code0 == TRUTH_XOR_EXPR)
7894 arg01 = TREE_OPERAND (arg0, 1);
7896 if (TREE_CONSTANT (arg00)
7897 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7898 && ! has_cleanups (arg00)))
7899 return fold (build (code0, type, arg00,
7900 fold (build1 (CLEANUP_POINT_EXPR,
7901 TREE_TYPE (arg01), arg01))));
7903 if (TREE_CONSTANT (arg01))
7904 return fold (build (code0, type,
7905 fold (build1 (CLEANUP_POINT_EXPR,
7906 TREE_TYPE (arg00), arg00)),
7907 arg01));
7910 return t;
7913 case CALL_EXPR:
7914 /* Check for a built-in function. */
7915 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
7916 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
7917 == FUNCTION_DECL)
7918 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
7920 tree tmp = fold_builtin (expr);
7921 if (tmp)
7922 return tmp;
7924 return t;
7926 default:
7927 return t;
7928 } /* switch (code) */
7931 /* Determine if first argument is a multiple of second argument. Return 0 if
7932 it is not, or we cannot easily determine it to be.
7934 An example of the sort of thing we care about (at this point; this routine
7935 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7936 fold cases do now) is discovering that
7938 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7940 is a multiple of
7942 SAVE_EXPR (J * 8)
7944 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7946 This code also handles discovering that
7948 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7950 is a multiple of 8 so we don't have to worry about dealing with a
7951 possible remainder.
7953 Note that we *look* inside a SAVE_EXPR only to determine how it was
7954 calculated; it is not safe for fold to do much of anything else with the
7955 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7956 at run time. For example, the latter example above *cannot* be implemented
7957 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7958 evaluation time of the original SAVE_EXPR is not necessarily the same at
7959 the time the new expression is evaluated. The only optimization of this
7960 sort that would be valid is changing
7962 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7964 divided by 8 to
7966 SAVE_EXPR (I) * SAVE_EXPR (J)
7968 (where the same SAVE_EXPR (J) is used in the original and the
7969 transformed version). */
7971 static int
7972 multiple_of_p (type, top, bottom)
7973 tree type;
7974 tree top;
7975 tree bottom;
7977 if (operand_equal_p (top, bottom, 0))
7978 return 1;
7980 if (TREE_CODE (type) != INTEGER_TYPE)
7981 return 0;
7983 switch (TREE_CODE (top))
7985 case MULT_EXPR:
7986 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7987 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7989 case PLUS_EXPR:
7990 case MINUS_EXPR:
7991 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7992 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7994 case LSHIFT_EXPR:
7995 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7997 tree op1, t1;
7999 op1 = TREE_OPERAND (top, 1);
8000 /* const_binop may not detect overflow correctly,
8001 so check for it explicitly here. */
8002 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8003 > TREE_INT_CST_LOW (op1)
8004 && TREE_INT_CST_HIGH (op1) == 0
8005 && 0 != (t1 = convert (type,
8006 const_binop (LSHIFT_EXPR, size_one_node,
8007 op1, 0)))
8008 && ! TREE_OVERFLOW (t1))
8009 return multiple_of_p (type, t1, bottom);
8011 return 0;
8013 case NOP_EXPR:
8014 /* Can't handle conversions from non-integral or wider integral type. */
8015 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8016 || (TYPE_PRECISION (type)
8017 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8018 return 0;
8020 /* ... fall through ... */
8022 case SAVE_EXPR:
8023 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8025 case INTEGER_CST:
8026 if (TREE_CODE (bottom) != INTEGER_CST
8027 || (TREE_UNSIGNED (type)
8028 && (tree_int_cst_sgn (top) < 0
8029 || tree_int_cst_sgn (bottom) < 0)))
8030 return 0;
8031 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8032 top, bottom, 0));
8034 default:
8035 return 0;
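/* Illustrative examples:

       multiple_of_p (type, A * 4, 2)   is 1, since 4 % 2 == 0
       multiple_of_p (type, A + B, 4)   is 1 only when both A and B
                                        are known multiples of 4.  */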
8039 /* Return true if `t' is known to be non-negative. */
8041 int
8042 tree_expr_nonnegative_p (t)
8043 tree t;
8045 switch (TREE_CODE (t))
8047 case ABS_EXPR:
8048 case FFS_EXPR:
8049 case POPCOUNT_EXPR:
8050 case PARITY_EXPR:
8051 return 1;
8053 case CLZ_EXPR:
8054 case CTZ_EXPR:
8055 /* These are undefined at zero. This is true even if
8056 C[LT]Z_DEFINED_VALUE_AT_ZERO is set, since what we're
8057 computing here is a user-visible property. */
8058 return 0;
8060 case INTEGER_CST:
8061 return tree_int_cst_sgn (t) >= 0;
8063 case REAL_CST:
8064 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8066 case PLUS_EXPR:
8067 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8068 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8069 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8071 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8072 both unsigned and at least 2 bits shorter than the result. */
8073 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8074 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8075 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8077 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8078 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8079 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8080 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8082 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8083 TYPE_PRECISION (inner2)) + 1;
8084 return prec < TYPE_PRECISION (TREE_TYPE (t));
8087 break;
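/* Illustrative example, assuming 16-bit shorts and 32-bit ints:

       (int) (unsigned short) A + (int) (unsigned short) B

   is at most 2 * 65535, which needs only 17 bits, and 17 < 32, so
   the sum is known non-negative.  */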
8089 case MULT_EXPR:
8090 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8092 /* x * x for floating point x is always non-negative. */
8093 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8094 return 1;
8095 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8096 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8099 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8100 both unsigned and their combined width is less than the result's. */
8101 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8102 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8103 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8105 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8106 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8107 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8108 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8109 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8110 < TYPE_PRECISION (TREE_TYPE (t));
8112 return 0;
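/* Illustrative example, assuming 8-bit chars and 32-bit ints:

       (int) (unsigned char) A * (int) (unsigned char) B

   fits in 8 + 8 = 16 bits and 16 < 32, so the product is known
   non-negative.  Two zero-extended 16-bit shorts would not
   qualify, since 16 + 16 is not less than 32.  */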
8114 case TRUNC_DIV_EXPR:
8115 case CEIL_DIV_EXPR:
8116 case FLOOR_DIV_EXPR:
8117 case ROUND_DIV_EXPR:
8118 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8119 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8121 case TRUNC_MOD_EXPR:
8122 case CEIL_MOD_EXPR:
8123 case FLOOR_MOD_EXPR:
8124 case ROUND_MOD_EXPR:
8125 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8127 case RDIV_EXPR:
8128 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8129 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8131 case NOP_EXPR:
8133 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8134 tree outer_type = TREE_TYPE (t);
8136 if (TREE_CODE (outer_type) == REAL_TYPE)
8138 if (TREE_CODE (inner_type) == REAL_TYPE)
8139 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8140 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8142 if (TREE_UNSIGNED (inner_type))
8143 return 1;
8144 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8147 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8149 if (TREE_CODE (inner_type) == REAL_TYPE)
8150 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8151 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8152 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8153 && TREE_UNSIGNED (inner_type);
8156 break;
8158 case COND_EXPR:
8159 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8160 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8161 case COMPOUND_EXPR:
8162 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8163 case MIN_EXPR:
8164 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8165 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8166 case MAX_EXPR:
8167 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8168 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8169 case MODIFY_EXPR:
8170 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8171 case BIND_EXPR:
8172 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8173 case SAVE_EXPR:
8174 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8175 case NON_LVALUE_EXPR:
8176 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8177 case RTL_EXPR:
8178 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8180 case CALL_EXPR:
8181 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
8183 tree fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
8184 tree arglist = TREE_OPERAND (t, 1);
8185 if (TREE_CODE (fndecl) == FUNCTION_DECL
8186 && DECL_BUILT_IN (fndecl)
8187 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8188 switch (DECL_FUNCTION_CODE (fndecl))
8190 case BUILT_IN_CABS:
8191 case BUILT_IN_CABSL:
8192 case BUILT_IN_CABSF:
8193 case BUILT_IN_EXP:
8194 case BUILT_IN_EXPF:
8195 case BUILT_IN_EXPL:
8196 case BUILT_IN_FABS:
8197 case BUILT_IN_FABSF:
8198 case BUILT_IN_FABSL:
8199 case BUILT_IN_SQRT:
8200 case BUILT_IN_SQRTF:
8201 case BUILT_IN_SQRTL:
8202 return 1;
8204 case BUILT_IN_ATAN:
8205 case BUILT_IN_ATANF:
8206 case BUILT_IN_ATANL:
8207 case BUILT_IN_CEIL:
8208 case BUILT_IN_CEILF:
8209 case BUILT_IN_CEILL:
8210 case BUILT_IN_FLOOR:
8211 case BUILT_IN_FLOORF:
8212 case BUILT_IN_FLOORL:
8213 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8215 case BUILT_IN_POW:
8216 case BUILT_IN_POWF:
8217 case BUILT_IN_POWL:
8218 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8220 default:
8221 break;
8225 /* ... fall through ... */
8227 default:
8228 if (truth_value_p (TREE_CODE (t)))
8229 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8230 return 1;
8233 /* We don't know sign of `t', so be conservative and return false. */
8234 return 0;
8237 /* Return true if `r' is known to be non-negative.
8238 Only handles constants at the moment. */
8240 int
8241 rtl_expr_nonnegative_p (r)
8242 rtx r;
8244 switch (GET_CODE (r))
8246 case CONST_INT:
8247 return INTVAL (r) >= 0;
8249 case CONST_DOUBLE:
8250 if (GET_MODE (r) == VOIDmode)
8251 return CONST_DOUBLE_HIGH (r) >= 0;
8252 return 0;
8254 case CONST_VECTOR:
8256 int units, i;
8257 rtx elt;
8259 units = CONST_VECTOR_NUNITS (r);
8261 for (i = 0; i < units; ++i)
8263 elt = CONST_VECTOR_ELT (r, i);
8264 if (!rtl_expr_nonnegative_p (elt))
8265 return 0;
8268 return 1;
8271 case SYMBOL_REF:
8272 case LABEL_REF:
8273 /* These are always nonnegative. */
8274 return 1;
8276 default:
8277 return 0;
8281 #include "gt-fold-const.h"