/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and a prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
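/* Example: the encoding packs the three primitive outcomes "less",
   "equal" and "greater" into bits 0, 1 and 2, so combining two
   comparisons of the same operands reduces to bitwise arithmetic.
   A minimal sketch of the idea (illustrative only; this helper is
   hypothetical and not part of this file):  */
#if 0
static int
compcode_and (int code1, int code2)
{
  /* (a <= b) && (a >= b) folds to (a == b):
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ.
     Likewise (a < b) || (a == b) folds to (a <= b):
     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE.  */
  return code1 & code2;
}
#endif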
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
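/* Example: assuming a 64-bit HOST_WIDE_INT, adding 1 to the largest
   positive value wraps to the smallest negative one; the operands'
   sign bits agree (both 0) while the sum's differs, so the macro
   reports overflow.  Sketch only:  */
#if 0
static void
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = ~((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  HOST_WIDE_INT sum = a + 1;	/* wraps to the most negative value */

  /* OVERFLOW_SUM_SIGN (a, 1, sum) is nonzero here, while
     OVERFLOW_SUM_SIGN (a, -1, a - 1) is zero: operands of opposite
     sign can never overflow.  */
}
#endif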
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
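/* Example: assuming a 64-bit HOST_WIDE_INT, BASE is 2^32 and each
   half-word digit holds 32 bits.  For x = 0x123456789A:
     LOWPART (x)  == 0x3456789A
     HIGHPART (x) == 0x12
   and x == LOWPART (x) + HIGHPART (x) * BASE.  Keeping each digit to
   half a word guarantees that a digit-by-digit product plus a carry
   still fits in one unsigned HOST_WIDE_INT.  */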
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
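/* Sketch: encode and decode are inverses, so a doubleword value
   round-trips through its 4-word representation unchanged:  */
#if 0
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  encode (words, 0xdeadbeef, 42);
  decode (words, &lo, &hi);
  /* Here lo == 0xdeadbeef and hi == 42.  */
}
#endif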
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
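/* Example: fitting the constant 0x1ff into a signed 8-bit type masks
   it to 0xff, then sign-extends (the truncated value's sign bit is
   set), leaving -1; the return value reports overflow because bits
   changed.  A sketch, assuming the usual signed_char_type_node:  */
#if 0
static void
force_fit_type_example (void)
{
  tree t = build_int_2 (0x1ff, 0);

  TREE_TYPE (t) = signed_char_type_node;
  if (force_fit_type (t, 0))
    /* Overflow is reported, and TREE_INT_CST_LOW (t) now holds the
       sign-extended value -1.  */ ;
}
#endif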
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
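/* Example: the expression (l < l1) is the carry out of the low word.
   Adding the doubleword values (h1 = 0, l1 = ~0) and (h2 = 0, l2 = 1)
   gives l == 0, and since l < l1 the carry propagates: h == 1.  The
   result (1, 0) is 2^HOST_BITS_PER_WIDE_INT, as expected.  */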
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
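/* Example: assuming a 64-bit HOST_WIDE_INT and prec == 2 * 64, an
   arithmetic right shift of (h1 = -1, l1 = 0), i.e. -2^64, by one
   yields (*hv = -1, *lv = 2^63), i.e. -2^63: the signmask derived
   from H1's sign bit refills the vacated high-order bit.  A logical
   shift of the same value instead leaves that bit clear, giving
   *hv = 2^63 - 1 with the same *lv.  */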
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
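/* Both rotates use the classic identity

     rotl (x, n) == (x << n) | (x >> (prec - n))   (logical shifts)

   applied to the doubleword pair.  For instance, with prec == 8 (for
   readability), rotating 0xB1 left by 4 gives
   ((0xB1 << 4) | (0xB1 >> 4)) & 0xFF == 0x1B.  */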
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
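/* Example: the rounding adjustment at the end is what distinguishes
   the division variants.  Dividing -7 by 2 (signed):

     TRUNC_DIV_EXPR   quo = -3  rem = -1   (round toward zero)
     FLOOR_DIV_EXPR   quo = -4  rem =  1   (round toward -infinity)
     CEIL_DIV_EXPR    quo = -3  rem = -1   (round toward +infinity)
     ROUND_DIV_EXPR   quo = -4  rem =  1   (2 * |rem| >= |den|, so the
                                            tie rounds away from zero)

   In every case the final recomputation keeps num == quo * den + rem.  */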
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (t),
                                   TREE_INT_CST_HIGH (t),
                                   &low, &high);
        tem = build_int_2 (low, high);
        TREE_TYPE (tem) = type;
        TREE_OVERFLOW (tem)
          = (TREE_OVERFLOW (t)
             | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
        TREE_CONSTANT_OVERFLOW (tem)
          = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);

        if (! TREE_OVERFLOW (tem)
            || TREE_UNSIGNED (type)
            || ! flag_trapv)
          return tem;
        break;
      }
    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return convert (type,
                        fold (build (MINUS_EXPR, TREE_TYPE (t),
                                     TREE_OPERAND (t, 1),
                                     TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return convert (type,
                            fold (build (TREE_CODE (t), TREE_TYPE (t),
                                         TREE_OPERAND (t, 0),
                                         negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return convert (type,
                            fold (build (TREE_CODE (t), TREE_TYPE (t),
                                         negate_expr (tem),
                                         TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    default:
      break;
    }

  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}
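/* Examples of the transforms above (valid under the stated guards):

     -(-x)         ->  x
     -(a - b)      ->  b - a        (not for IEEE floats unless
                                     -funsafe-math-optimizations,
                                     because of signed zeros)
     -(x * y)      ->  (-x) * y  or  x * (-y)
     -(double) f   ->  (double) -f
     -sin (x)      ->  sin (-x)     (sin is odd; see negate_mathfn_p)

   When nothing matches, the fallback builds an explicit NEGATE_EXPR.  */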
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
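/* Worked example: for IN = x - 5 and CODE == PLUS_EXPR the MINUS_EXPR
   is decomposed as well, and because the literal appears on the
   subtracted side it comes back through *MINUS_LITP:

     *litp = 0,  *minus_litp = 5,  *conp = 0,  returned var = x

   which lets the caller treat the tree as x + (-5) when combining
   literals from several operands.  */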
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t2),
                          convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t1),
                          convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
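/* Example: a right shift is handled by negating the count and reusing
   the left-shift primitive, so folding a constant 0x40 >> 3 goes
   through lshift_double with count -3 and yields 8.  Similarly,
   MINUS_EXPR is implemented as addition of the negated second operand,
   with OVERFLOW_SUM_SIGN applied to the intermediate values.  */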
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
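/* The complex cases implement the textbook identities

     (a + bi) + (c + di) = (a + c) + (b + d)i
     (a + bi) * (c + di) = (ac - bd) + (ad + bc)i
     (a + bi) / (c + di) = ((ac + bd) + (bc - ad)i) / (c^2 + d^2)

   with each component combined by a recursive const_binop call.  */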
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}
/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}
/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
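/* Sketch of the caching effect: repeated requests for the same value
   and type return the identical node, so pointer equality holds:  */
#if 0
static void
size_int_cache_example (void)
{
  tree a = size_int_type_wide (8, sizetype);
  tree b = size_int_type_wide (8, sizetype);

  /* a == b: the second call found the first node in size_htab.  */
}
#endif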
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
                       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
                       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
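/* Example: with constant sizetype operands 4 and 8, the difference is
   computed as 0 - (8 - 4): the inner subtraction is done in the
   unsigned type, where it cannot wrap since 8 > 4, and the final
   negation in ssizetype gives -4.  */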
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (tree t, tree arg1)
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return t;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;

          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
          /* If x is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (x))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
              if (! REAL_VALUES_LESS (l, x))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
                  if (! REAL_VALUES_LESS (x, u))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, x);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
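/* Example of the saturating FP-to-integer rule above, for a 32-bit
   signed target type:

     (int) 1e10   ->  2147483647   (INT_MAX, overflow flagged)
     (int) -1e10  ->  -2147483648  (INT_MIN, overflow flagged)
     (int) NaN    ->  0            (overflow flagged)
     (int) 2.9    ->  2            (plain truncation toward zero)  */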
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}
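/* The distinction matters for IEEE floats: inverting a < b to a >= b
   is wrong when either operand is a NaN (both comparisons are false),
   while swapping a < b to b > a is always an identity, NaNs included.  */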
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    default:
      abort ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (int code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    default:
      abort ();
    }
}
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
1953 /* Return nonzero if two operands (typically of the same tree node)
1954 are necessarily equal. If either argument has side-effects this
1955 function returns zero.
1957 If ONLY_CONST is nonzero, only return nonzero for constants.
1958 This function tests whether the operands are indistinguishable;
1959 it does not test whether they are equal using C's == operation.
1960 The distinction is important for IEEE floating point, because
1961 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1962 (2) two NaNs may be indistinguishable, but NaN!=NaN.
1964 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
1965 even though it may hold multiple values during a function.
1966 This is because a GCC tree node guarantees that nothing else is
1967 executed between the evaluation of its "operands" (which may often
1968 be evaluated in arbitrary order). Hence if the operands themselves
1969 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
1970 same value in each operand/subexpression. Hence a zero value for
1971 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
1972 If comparing arbitrary expression trees, such as from different
1973 statements, ONLY_CONST must usually be nonzero. */
1975 int
1976 operand_equal_p (tree arg0, tree arg1, int only_const)
1978 tree fndecl;
1980 /* If both types don't have the same signedness, then we can't consider
1981 them equal. We must check this before the STRIP_NOPS calls
1982 because they may change the signedness of the arguments. */
1983 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1984 return 0;
1986 STRIP_NOPS (arg0);
1987 STRIP_NOPS (arg1);
1989 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1990 /* This is needed for conversions and for COMPONENT_REF.
1991 Might as well play it safe and always test this. */
1992 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1993 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1994 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1995 return 0;
1997 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1998 We don't care about side effects in that case because the SAVE_EXPR
1999 takes care of that for us. In all other cases, two expressions are
2000 equal if they have no side effects. If we have two identical
2001 expressions with side effects that should be treated the same due
2002 to the only side effects being identical SAVE_EXPR's, that will
2003 be detected in the recursive calls below. */
2004 if (arg0 == arg1 && ! only_const
2005 && (TREE_CODE (arg0) == SAVE_EXPR
2006 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2007 return 1;
2009 /* Next handle constant cases, those for which we can return 1 even
2010 if ONLY_CONST is set. */
2011 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2012 switch (TREE_CODE (arg0))
2014 case INTEGER_CST:
2015 return (! TREE_CONSTANT_OVERFLOW (arg0)
2016 && ! TREE_CONSTANT_OVERFLOW (arg1)
2017 && tree_int_cst_equal (arg0, arg1));
2019 case REAL_CST:
2020 return (! TREE_CONSTANT_OVERFLOW (arg0)
2021 && ! TREE_CONSTANT_OVERFLOW (arg1)
2022 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2023 TREE_REAL_CST (arg1)));
2025 case VECTOR_CST:
2027 tree v1, v2;
2029 if (TREE_CONSTANT_OVERFLOW (arg0)
2030 || TREE_CONSTANT_OVERFLOW (arg1))
2031 return 0;
2033 v1 = TREE_VECTOR_CST_ELTS (arg0);
2034 v2 = TREE_VECTOR_CST_ELTS (arg1);
2035 while (v1 && v2)
2037 if (!operand_equal_p (v1, v2, only_const))
2038 return 0;
2039 v1 = TREE_CHAIN (v1);
2040 v2 = TREE_CHAIN (v2);
2043 return 1;
2046 case COMPLEX_CST:
2047 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2048 only_const)
2049 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2050 only_const));
2052 case STRING_CST:
2053 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2054 && ! memcmp (TREE_STRING_POINTER (arg0),
2055 TREE_STRING_POINTER (arg1),
2056 TREE_STRING_LENGTH (arg0)));
2058 case ADDR_EXPR:
2059 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2060 0);
2061 default:
2062 break;
2065 if (only_const)
2066 return 0;
2068 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2070 case '1':
2071 /* Two conversions are equal only if signedness and modes match. */
2072 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2073 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2074 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2075 return 0;
2077 return operand_equal_p (TREE_OPERAND (arg0, 0),
2078 TREE_OPERAND (arg1, 0), 0);
2080 case '<':
2081 case '2':
2082 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2083 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2084 0))
2085 return 1;
2087 /* For commutative ops, allow the other order. */
2088 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2089 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2090 || TREE_CODE (arg0) == BIT_IOR_EXPR
2091 || TREE_CODE (arg0) == BIT_XOR_EXPR
2092 || TREE_CODE (arg0) == BIT_AND_EXPR
2093 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2094 && operand_equal_p (TREE_OPERAND (arg0, 0),
2095 TREE_OPERAND (arg1, 1), 0)
2096 && operand_equal_p (TREE_OPERAND (arg0, 1),
2097 TREE_OPERAND (arg1, 0), 0));
2099 case 'r':
2100 /* If either of the pointer (or reference) expressions we are
2101 dereferencing contain a side effect, these cannot be equal. */
2102 if (TREE_SIDE_EFFECTS (arg0)
2103 || TREE_SIDE_EFFECTS (arg1))
2104 return 0;
2106 switch (TREE_CODE (arg0))
2108 case INDIRECT_REF:
2109 return operand_equal_p (TREE_OPERAND (arg0, 0),
2110 TREE_OPERAND (arg1, 0), 0);
2112 case COMPONENT_REF:
2113 case ARRAY_REF:
2114 case ARRAY_RANGE_REF:
2115 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2116 TREE_OPERAND (arg1, 0), 0)
2117 && operand_equal_p (TREE_OPERAND (arg0, 1),
2118 TREE_OPERAND (arg1, 1), 0));
2120 case BIT_FIELD_REF:
2121 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2122 TREE_OPERAND (arg1, 0), 0)
2123 && operand_equal_p (TREE_OPERAND (arg0, 1),
2124 TREE_OPERAND (arg1, 1), 0)
2125 && operand_equal_p (TREE_OPERAND (arg0, 2),
2126 TREE_OPERAND (arg1, 2), 0));
2127 default:
2128 return 0;
2131 case 'e':
2132 switch (TREE_CODE (arg0))
2134 case ADDR_EXPR:
2135 case TRUTH_NOT_EXPR:
2136 return operand_equal_p (TREE_OPERAND (arg0, 0),
2137 TREE_OPERAND (arg1, 0), 0);
2139 case RTL_EXPR:
2140 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2142 case CALL_EXPR:
2143 /* If the CALL_EXPRs call different functions, then they
2144 clearly can not be equal. */
2145 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2146 TREE_OPERAND (arg1, 0), 0))
2147 return 0;
2149 /* Only consider const functions equivalent. */
2150 fndecl = get_callee_fndecl (arg0);
2151 if (fndecl == NULL_TREE
2152 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2153 return 0;
2155 /* Now see if all the arguments are the same. operand_equal_p
2156 does not handle TREE_LIST, so we walk the operands here
2157 feeding them to operand_equal_p. */
2158 arg0 = TREE_OPERAND (arg0, 1);
2159 arg1 = TREE_OPERAND (arg1, 1);
2160 while (arg0 && arg1)
2162 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2163 return 0;
2165 arg0 = TREE_CHAIN (arg0);
2166 arg1 = TREE_CHAIN (arg1);
2169 /* If we get here and both argument lists are exhausted
2170 then the CALL_EXPRs are equal. */
2171 return ! (arg0 || arg1);
2173 default:
2174 return 0;
2177 case 'd':
2178 /* Consider __builtin_sqrt equal to sqrt. */
2179 return TREE_CODE (arg0) == FUNCTION_DECL
2180 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2181 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2182 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2184 default:
2185 return 0;
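/* Editorial sketch of the IEEE distinction drawn in the comment before
   operand_equal_p: "indistinguishable" and "equal under ==" differ.
   Assumes signbit and NAN from <math.h>.  */
#include <math.h>
static int
identical_vs_equal_demo (void)
{
  double pz = 0.0, nz = -0.0, n = NAN;

  /* Always returns 1: == calls the two zeros equal although they are
     distinguishable, and NaN is unequal to itself.  */
  return pz == nz
	 && signbit (pz) != signbit (nz)
	 && n != n;
}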
2189 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2190 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2192 When in doubt, return 0. */
2194 static int
2195 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2197 int unsignedp1, unsignedpo;
2198 tree primarg0, primarg1, primother;
2199 unsigned int correct_width;
2201 if (operand_equal_p (arg0, arg1, 0))
2202 return 1;
2204 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2205 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2206 return 0;
2208 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2209 and see if the inner values are the same. This removes any
2210 signedness comparison, which doesn't matter here. */
2211 primarg0 = arg0, primarg1 = arg1;
2212 STRIP_NOPS (primarg0);
2213 STRIP_NOPS (primarg1);
2214 if (operand_equal_p (primarg0, primarg1, 0))
2215 return 1;
2217 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2218 actual comparison operand, ARG0.
2220 First throw away any conversions to wider types
2221 already present in the operands. */
2223 primarg1 = get_narrower (arg1, &unsignedp1);
2224 primother = get_narrower (other, &unsignedpo);
2226 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2227 if (unsignedp1 == unsignedpo
2228 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2229 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2231 tree type = TREE_TYPE (arg0);
2233 /* Make sure shorter operand is extended the right way
2234 to match the longer operand. */
2235 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2236 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2238 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2239 return 1;
2242 return 0;
2245 /* See if ARG is an expression that is either a comparison or is performing
2246 arithmetic on comparisons. The comparisons must only be comparing
2247 two different values, which will be stored in *CVAL1 and *CVAL2; if
2248 they are nonzero it means that some operands have already been found.
2249 No variables may be used anywhere else in the expression except in the
2250 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2251 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2253 If this is true, return 1. Otherwise, return zero. */
2255 static int
2256 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2258 enum tree_code code = TREE_CODE (arg);
2259 char class = TREE_CODE_CLASS (code);
2261 /* We can handle some of the 'e' cases here. */
2262 if (class == 'e' && code == TRUTH_NOT_EXPR)
2263 class = '1';
2264 else if (class == 'e'
2265 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2266 || code == COMPOUND_EXPR))
2267 class = '2';
2269 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2270 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2272 /* If we've already found a CVAL1 or CVAL2, this expression is
2273 too complex to handle. */
2274 if (*cval1 || *cval2)
2275 return 0;
2277 class = '1';
2278 *save_p = 1;
2281 switch (class)
2283 case '1':
2284 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2286 case '2':
2287 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2288 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2289 cval1, cval2, save_p));
2291 case 'c':
2292 return 1;
2294 case 'e':
2295 if (code == COND_EXPR)
2296 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2297 cval1, cval2, save_p)
2298 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2299 cval1, cval2, save_p)
2300 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2301 cval1, cval2, save_p));
2302 return 0;
2304 case '<':
2305 /* First see if we can handle the first operand, then the second. For
2306 the second operand, we know *CVAL1 can't be zero. It must be that
2307 one side of the comparison is each of the values; test for the
2308 case where this isn't true by failing if the two operands
2309 are the same. */
2311 if (operand_equal_p (TREE_OPERAND (arg, 0),
2312 TREE_OPERAND (arg, 1), 0))
2313 return 0;
2315 if (*cval1 == 0)
2316 *cval1 = TREE_OPERAND (arg, 0);
2317 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2318 ;
2319 else if (*cval2 == 0)
2320 *cval2 = TREE_OPERAND (arg, 0);
2321 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2322 ;
2323 else
2324 return 0;
2326 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2327 ;
2328 else if (*cval2 == 0)
2329 *cval2 = TREE_OPERAND (arg, 1);
2330 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2331 ;
2332 else
2333 return 0;
2335 return 1;
2337 default:
2338 return 0;
2342 /* ARG is a tree that is known to contain just arithmetic operations and
2343 comparisons. Evaluate the operations in the tree substituting NEW0 for
2344 any occurrence of OLD0 as an operand of a comparison and likewise for
2345 NEW1 and OLD1. */
2347 static tree
2348 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2350 tree type = TREE_TYPE (arg);
2351 enum tree_code code = TREE_CODE (arg);
2352 char class = TREE_CODE_CLASS (code);
2354 /* We can handle some of the 'e' cases here. */
2355 if (class == 'e' && code == TRUTH_NOT_EXPR)
2356 class = '1';
2357 else if (class == 'e'
2358 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2359 class = '2';
2361 switch (class)
2363 case '1':
2364 return fold (build1 (code, type,
2365 eval_subst (TREE_OPERAND (arg, 0),
2366 old0, new0, old1, new1)));
2368 case '2':
2369 return fold (build (code, type,
2370 eval_subst (TREE_OPERAND (arg, 0),
2371 old0, new0, old1, new1),
2372 eval_subst (TREE_OPERAND (arg, 1),
2373 old0, new0, old1, new1)));
2375 case 'e':
2376 switch (code)
2378 case SAVE_EXPR:
2379 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2381 case COMPOUND_EXPR:
2382 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2384 case COND_EXPR:
2385 return fold (build (code, type,
2386 eval_subst (TREE_OPERAND (arg, 0),
2387 old0, new0, old1, new1),
2388 eval_subst (TREE_OPERAND (arg, 1),
2389 old0, new0, old1, new1),
2390 eval_subst (TREE_OPERAND (arg, 2),
2391 old0, new0, old1, new1)));
2392 default:
2393 break;
2395 /* Fall through - ??? */
2397 case '<':
2399 tree arg0 = TREE_OPERAND (arg, 0);
2400 tree arg1 = TREE_OPERAND (arg, 1);
2402 /* We need to check both for exact equality and tree equality. The
2403 former will be true if the operand has a side-effect. In that
2404 case, we know the operand occurred exactly once. */
2406 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2407 arg0 = new0;
2408 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2409 arg0 = new1;
2411 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2412 arg1 = new0;
2413 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2414 arg1 = new1;
2416 return fold (build (code, type, arg0, arg1));
2419 default:
2420 return arg;
2424 /* Return a tree for the case when the result of an expression is RESULT
2425 converted to TYPE and OMITTED was previously an operand of the expression
2426 but is now not needed (e.g., we folded OMITTED * 0).
2428 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2429 the conversion of RESULT to TYPE. */
2431 tree
2432 omit_one_operand (tree type, tree result, tree omitted)
2434 tree t = convert (type, result);
2436 if (TREE_SIDE_EFFECTS (omitted))
2437 return build (COMPOUND_EXPR, type, omitted, t);
2439 return non_lvalue (t);
2442 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2444 static tree
2445 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2447 tree t = convert (type, result);
2449 if (TREE_SIDE_EFFECTS (omitted))
2450 return build (COMPOUND_EXPR, type, omitted, t);
2452 return pedantic_non_lvalue (t);
2455 /* Return a simplified tree node for the truth-negation of ARG. This
2456 never alters ARG itself. We assume that ARG is an operation that
2457 returns a truth value (0 or 1). */
2459 tree
2460 invert_truthvalue (tree arg)
2462 tree type = TREE_TYPE (arg);
2463 enum tree_code code = TREE_CODE (arg);
2465 if (code == ERROR_MARK)
2466 return arg;
2468 /* If this is a comparison, we can simply invert it, except for
2469 floating-point non-equality comparisons, in which case we just
2470 enclose a TRUTH_NOT_EXPR around what we have. */
2472 if (TREE_CODE_CLASS (code) == '<')
2474 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2475 && !flag_unsafe_math_optimizations
2476 && code != NE_EXPR
2477 && code != EQ_EXPR)
2478 return build1 (TRUTH_NOT_EXPR, type, arg);
2479 else
2480 return build (invert_tree_comparison (code), type,
2481 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2484 switch (code)
2486 case INTEGER_CST:
2487 return convert (type, build_int_2 (integer_zerop (arg), 0));
2489 case TRUTH_AND_EXPR:
2490 return build (TRUTH_OR_EXPR, type,
2491 invert_truthvalue (TREE_OPERAND (arg, 0)),
2492 invert_truthvalue (TREE_OPERAND (arg, 1)));
2494 case TRUTH_OR_EXPR:
2495 return build (TRUTH_AND_EXPR, type,
2496 invert_truthvalue (TREE_OPERAND (arg, 0)),
2497 invert_truthvalue (TREE_OPERAND (arg, 1)));
2499 case TRUTH_XOR_EXPR:
2500 /* Here we can invert either operand. We invert the first operand
2501 unless the second operand is a TRUTH_NOT_EXPR in which case our
2502 result is the XOR of the first operand with the inside of the
2503 negation of the second operand. */
2505 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2506 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2507 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2508 else
2509 return build (TRUTH_XOR_EXPR, type,
2510 invert_truthvalue (TREE_OPERAND (arg, 0)),
2511 TREE_OPERAND (arg, 1));
2513 case TRUTH_ANDIF_EXPR:
2514 return build (TRUTH_ORIF_EXPR, type,
2515 invert_truthvalue (TREE_OPERAND (arg, 0)),
2516 invert_truthvalue (TREE_OPERAND (arg, 1)));
2518 case TRUTH_ORIF_EXPR:
2519 return build (TRUTH_ANDIF_EXPR, type,
2520 invert_truthvalue (TREE_OPERAND (arg, 0)),
2521 invert_truthvalue (TREE_OPERAND (arg, 1)));
2523 case TRUTH_NOT_EXPR:
2524 return TREE_OPERAND (arg, 0);
2526 case COND_EXPR:
2527 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2528 invert_truthvalue (TREE_OPERAND (arg, 1)),
2529 invert_truthvalue (TREE_OPERAND (arg, 2)));
2531 case COMPOUND_EXPR:
2532 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2533 invert_truthvalue (TREE_OPERAND (arg, 1)));
2535 case WITH_RECORD_EXPR:
2536 return build (WITH_RECORD_EXPR, type,
2537 invert_truthvalue (TREE_OPERAND (arg, 0)),
2538 TREE_OPERAND (arg, 1));
2540 case NON_LVALUE_EXPR:
2541 return invert_truthvalue (TREE_OPERAND (arg, 0));
2543 case NOP_EXPR:
2544 case CONVERT_EXPR:
2545 case FLOAT_EXPR:
2546 return build1 (TREE_CODE (arg), type,
2547 invert_truthvalue (TREE_OPERAND (arg, 0)));
2549 case BIT_AND_EXPR:
2550 if (!integer_onep (TREE_OPERAND (arg, 1)))
2551 break;
2552 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2554 case SAVE_EXPR:
2555 return build1 (TRUTH_NOT_EXPR, type, arg);
2557 case CLEANUP_POINT_EXPR:
2558 return build1 (CLEANUP_POINT_EXPR, type,
2559 invert_truthvalue (TREE_OPERAND (arg, 0)));
2561 default:
2562 break;
2564 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2565 abort ();
2566 return build1 (TRUTH_NOT_EXPR, type, arg);
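/* Editorial sketch: the TRUTH_AND/TRUTH_OR cases above are De Morgan's
   laws applied recursively, e.g. !(a && b) becomes !a || !b.  */
static int
de_morgan_demo (int a, int b)
{
  /* Returns 1 for every pair of truth values A and B.  */
  return (!(a && b)) == (!a || !b)
	 && (!(a || b)) == (!a && !b);
}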
2569 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2570 operands are another bit-wise operation with a common input. If so,
2571 distribute the bit operations to save an operation and possibly two if
2572 constants are involved. For example, convert
2573 (A | B) & (A | C) into A | (B & C)
2574 Further simplification will occur if B and C are constants.
2576 If this optimization cannot be done, 0 will be returned. */
2578 static tree
2579 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2581 tree common;
2582 tree left, right;
2584 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2585 || TREE_CODE (arg0) == code
2586 || (TREE_CODE (arg0) != BIT_AND_EXPR
2587 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2588 return 0;
2590 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2592 common = TREE_OPERAND (arg0, 0);
2593 left = TREE_OPERAND (arg0, 1);
2594 right = TREE_OPERAND (arg1, 1);
2596 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2598 common = TREE_OPERAND (arg0, 0);
2599 left = TREE_OPERAND (arg0, 1);
2600 right = TREE_OPERAND (arg1, 0);
2602 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2604 common = TREE_OPERAND (arg0, 1);
2605 left = TREE_OPERAND (arg0, 0);
2606 right = TREE_OPERAND (arg1, 1);
2608 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2610 common = TREE_OPERAND (arg0, 1);
2611 left = TREE_OPERAND (arg0, 0);
2612 right = TREE_OPERAND (arg1, 0);
2614 else
2615 return 0;
2617 return fold (build (TREE_CODE (arg0), type, common,
2618 fold (build (code, type, left, right))));
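/* Editorial sketch of the distribution performed above: for any
   unsigned values the identities below hold bitwise, saving one
   operation (and possibly two when B and C are constants that then
   fold).  */
static int
distribute_bit_demo (unsigned a, unsigned b, unsigned c)
{
  /* Always returns 1.  */
  return ((a | b) & (a | c)) == (a | (b & c))
	 && ((a & b) | (a & c)) == (a & (b | c));
}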
2621 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2622 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2624 static tree
2625 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2626 int unsignedp)
2628 tree result = build (BIT_FIELD_REF, type, inner,
2629 size_int (bitsize), bitsize_int (bitpos));
2631 TREE_UNSIGNED (result) = unsignedp;
2633 return result;
2636 /* Optimize a bit-field compare.
2638 There are two cases: First is a compare against a constant and the
2639 second is a comparison of two items where the fields are at the same
2640 bit position relative to the start of a chunk (byte, halfword, word)
2641 large enough to contain it. In these cases we can avoid the shift
2642 implicit in bitfield extractions.
2644 For constants, we emit a compare of the shifted constant with the
2645 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2646 compared. For two fields at the same position, we do the ANDs with the
2647 similar mask and compare the result of the ANDs.
2649 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2650 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2651 are the left and right operands of the comparison, respectively.
2653 If the optimization described above can be done, we return the resulting
2654 tree. Otherwise we return zero. */
2656 static tree
2657 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2658 tree lhs, tree rhs)
2660 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2661 tree type = TREE_TYPE (lhs);
2662 tree signed_type, unsigned_type;
2663 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2664 enum machine_mode lmode, rmode, nmode;
2665 int lunsignedp, runsignedp;
2666 int lvolatilep = 0, rvolatilep = 0;
2667 tree linner, rinner = NULL_TREE;
2668 tree mask;
2669 tree offset;
2671 /* Get all the information about the extractions being done. If the bit size
2672 is the same as the size of the underlying object, we aren't doing an
2673 extraction at all and so can do nothing. We also don't want to
2674 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2675 then will no longer be able to replace it. */
2676 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2677 &lunsignedp, &lvolatilep);
2678 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2679 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2680 return 0;
2682 if (!const_p)
2684 /* If this is not a constant, we can only do something if bit positions,
2685 sizes, and signedness are the same. */
2686 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2687 &runsignedp, &rvolatilep);
2689 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2690 || lunsignedp != runsignedp || offset != 0
2691 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2692 return 0;
2695 /* See if we can find a mode to refer to this field. We should be able to,
2696 but fail if we can't. */
2697 nmode = get_best_mode (lbitsize, lbitpos,
2698 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2699 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2700 TYPE_ALIGN (TREE_TYPE (rinner))),
2701 word_mode, lvolatilep || rvolatilep);
2702 if (nmode == VOIDmode)
2703 return 0;
2705 /* Set signed and unsigned types of the precision of this mode for the
2706 shifts below. */
2707 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2708 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2710 /* Compute the bit position and size for the new reference and our offset
2711 within it. If the new reference is the same size as the original, we
2712 won't optimize anything, so return zero. */
2713 nbitsize = GET_MODE_BITSIZE (nmode);
2714 nbitpos = lbitpos & ~ (nbitsize - 1);
2715 lbitpos -= nbitpos;
2716 if (nbitsize == lbitsize)
2717 return 0;
2719 if (BYTES_BIG_ENDIAN)
2720 lbitpos = nbitsize - lbitsize - lbitpos;
2722 /* Make the mask to be used against the extracted field. */
2723 mask = build_int_2 (~0, ~0);
2724 TREE_TYPE (mask) = unsigned_type;
2725 force_fit_type (mask, 0);
2726 mask = convert (unsigned_type, mask);
2727 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2728 mask = const_binop (RSHIFT_EXPR, mask,
2729 size_int (nbitsize - lbitsize - lbitpos), 0);
2731 if (! const_p)
2732 /* If not comparing with constant, just rework the comparison
2733 and return. */
2734 return build (code, compare_type,
2735 build (BIT_AND_EXPR, unsigned_type,
2736 make_bit_field_ref (linner, unsigned_type,
2737 nbitsize, nbitpos, 1),
2738 mask),
2739 build (BIT_AND_EXPR, unsigned_type,
2740 make_bit_field_ref (rinner, unsigned_type,
2741 nbitsize, nbitpos, 1),
2742 mask));
2744 /* Otherwise, we are handling the constant case. See if the constant is too
2745 big for the field. Warn and return a tree for 0 (false) if so. We do
2746 this not only for its own sake, but to avoid having to test for this
2747 error case below. If we didn't, we might generate wrong code.
2749 For unsigned fields, the constant shifted right by the field length should
2750 be all zero. For signed fields, the high-order bits should agree with
2751 the sign bit. */
2753 if (lunsignedp)
2755 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2756 convert (unsigned_type, rhs),
2757 size_int (lbitsize), 0)))
2759 warning ("comparison is always %d due to width of bit-field",
2760 code == NE_EXPR);
2761 return convert (compare_type,
2762 (code == NE_EXPR
2763 ? integer_one_node : integer_zero_node));
2766 else
2768 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2769 size_int (lbitsize - 1), 0);
2770 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2772 warning ("comparison is always %d due to width of bit-field",
2773 code == NE_EXPR);
2774 return convert (compare_type,
2775 (code == NE_EXPR
2776 ? integer_one_node : integer_zero_node));
2780 /* Single-bit compares should always be against zero. */
2781 if (lbitsize == 1 && ! integer_zerop (rhs))
2783 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2784 rhs = convert (type, integer_zero_node);
2787 /* Make a new bitfield reference, shift the constant over the
2788 appropriate number of bits and mask it with the computed mask
2789 (in case this was a signed field). If we changed it, make a new one. */
2790 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2791 if (lvolatilep)
2793 TREE_SIDE_EFFECTS (lhs) = 1;
2794 TREE_THIS_VOLATILE (lhs) = 1;
2797 rhs = fold (const_binop (BIT_AND_EXPR,
2798 const_binop (LSHIFT_EXPR,
2799 convert (unsigned_type, rhs),
2800 size_int (lbitpos), 0),
2801 mask, 0));
2803 return build (code, compare_type,
2804 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2805 rhs);
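/* Editorial sketch of the constant case above, at the source level:
   instead of extracting the field (shift, then mask) and comparing,
   the constant is shifted once at compile time and a single
   mask-and-compare is emitted.  Bit position 4 and width 3 are
   illustrative choices only.  */
static int
bit_field_compare_demo (unsigned word)
{
  int extracted = ((word >> 4) & 7u) == 5u;       /* naive form  */
  int folded = (word & (7u << 4)) == (5u << 4);   /* folded form */

  return extracted == folded;  /* always 1 */
}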
2808 /* Subroutine for fold_truthop: decode a field reference.
2810 If EXP is a comparison reference, we return the innermost reference.
2812 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2813 set to the starting bit number.
2815 If the innermost field can be completely contained in a mode-sized
2816 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2818 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2819 otherwise it is not changed.
2821 *PUNSIGNEDP is set to the signedness of the field.
2823 *PMASK is set to the mask used. This is either contained in a
2824 BIT_AND_EXPR or derived from the width of the field.
2826 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2828 Return 0 if this is not a component reference or is one that we can't
2829 do anything with. */
2831 static tree
2832 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2833 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2834 int *punsignedp, int *pvolatilep,
2835 tree *pmask, tree *pand_mask)
2837 tree outer_type = 0;
2838 tree and_mask = 0;
2839 tree mask, inner, offset;
2840 tree unsigned_type;
2841 unsigned int precision;
2843 /* All the optimizations using this function assume integer fields.
2844 There are problems with FP fields since the type_for_size call
2845 below can fail for, e.g., XFmode. */
2846 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2847 return 0;
2849 /* We are interested in the bare arrangement of bits, so strip everything
2850 that doesn't affect the machine mode. However, record the type of the
2851 outermost expression if it may matter below. */
2852 if (TREE_CODE (exp) == NOP_EXPR
2853 || TREE_CODE (exp) == CONVERT_EXPR
2854 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2855 outer_type = TREE_TYPE (exp);
2856 STRIP_NOPS (exp);
2858 if (TREE_CODE (exp) == BIT_AND_EXPR)
2860 and_mask = TREE_OPERAND (exp, 1);
2861 exp = TREE_OPERAND (exp, 0);
2862 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2863 if (TREE_CODE (and_mask) != INTEGER_CST)
2864 return 0;
2867 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2868 punsignedp, pvolatilep);
2869 if ((inner == exp && and_mask == 0)
2870 || *pbitsize < 0 || offset != 0
2871 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2872 return 0;
2874 /* If the number of bits in the reference is the same as the bitsize of
2875 the outer type, then the outer type gives the signedness. Otherwise
2876 (in case of a small bitfield) the signedness is unchanged. */
2877 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2878 *punsignedp = TREE_UNSIGNED (outer_type);
2880 /* Compute the mask to access the bitfield. */
2881 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2882 precision = TYPE_PRECISION (unsigned_type);
2884 mask = build_int_2 (~0, ~0);
2885 TREE_TYPE (mask) = unsigned_type;
2886 force_fit_type (mask, 0);
2887 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2888 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2890 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2891 if (and_mask != 0)
2892 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2893 convert (unsigned_type, and_mask), mask));
2895 *pmask = mask;
2896 *pand_mask = and_mask;
2897 return inner;
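/* Editorial sketch of the mask computation above: shifting an all-ones
   value left and then right by (precision - bitsize) leaves exactly
   BITSIZE low-order one bits.  A 32-bit unsigned stands in for the
   unsigned_type built via type_for_size; BITSIZE must be 1..32.  */
static unsigned
low_order_mask_demo (int bitsize)
{
  unsigned mask = ~0u;

  mask <<= 32 - bitsize;
  mask >>= 32 - bitsize;
  return mask;  /* e.g. low_order_mask_demo (3) == 7 */
}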
2900 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2901 bit positions. */
2903 static int
2904 all_ones_mask_p (tree mask, int size)
2906 tree type = TREE_TYPE (mask);
2907 unsigned int precision = TYPE_PRECISION (type);
2908 tree tmask;
2910 tmask = build_int_2 (~0, ~0);
2911 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2912 force_fit_type (tmask, 0);
2913 return
2914 tree_int_cst_equal (mask,
2915 const_binop (RSHIFT_EXPR,
2916 const_binop (LSHIFT_EXPR, tmask,
2917 size_int (precision - size),
2918 0),
2919 size_int (precision - size), 0));
2922 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
2923 represents the sign bit of EXP's type. If EXP represents a sign
2924 or zero extension, also test VAL against the unextended type.
2925 The return value is the (sub)expression whose sign bit is VAL,
2926 or NULL_TREE otherwise. */
2928 static tree
2929 sign_bit_p (tree exp, tree val)
2931 unsigned HOST_WIDE_INT mask_lo, lo;
2932 HOST_WIDE_INT mask_hi, hi;
2933 int width;
2934 tree t;
2936 /* Tree EXP must have an integral type. */
2937 t = TREE_TYPE (exp);
2938 if (! INTEGRAL_TYPE_P (t))
2939 return NULL_TREE;
2941 /* Tree VAL must be an integer constant. */
2942 if (TREE_CODE (val) != INTEGER_CST
2943 || TREE_CONSTANT_OVERFLOW (val))
2944 return NULL_TREE;
2946 width = TYPE_PRECISION (t);
2947 if (width > HOST_BITS_PER_WIDE_INT)
2949 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2950 lo = 0;
2952 mask_hi = ((unsigned HOST_WIDE_INT) -1
2953 >> (2 * HOST_BITS_PER_WIDE_INT - width));
2954 mask_lo = -1;
2956 else
2958 hi = 0;
2959 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2961 mask_hi = 0;
2962 mask_lo = ((unsigned HOST_WIDE_INT) -1
2963 >> (HOST_BITS_PER_WIDE_INT - width));
2966 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
2967 treat VAL as if it were unsigned. */
2968 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
2969 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
2970 return exp;
2972 /* Handle extension from a narrower type. */
2973 if (TREE_CODE (exp) == NOP_EXPR
2974 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2975 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2977 return NULL_TREE;
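/* Editorial sketch: for a width in 1..32 the sign-bit constant tested
   above is 1 << (width - 1), with bits beyond WIDTH masked off so VAL
   can be treated as unsigned, mirroring the two-word masking done in
   sign_bit_p.  */
static int
sign_bit_demo (unsigned val, int width)
{
  unsigned sign = 1u << (width - 1);
  unsigned mask = width == 32 ? ~0u : (1u << width) - 1;

  return (val & mask) == sign;  /* e.g. sign_bit_demo (0x80, 8) == 1 */
}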
2980 /* Subroutine for fold_truthop: determine if an operand is simple enough
2981 to be evaluated unconditionally. */
2983 static int
2984 simple_operand_p (tree exp)
2986 /* Strip any conversions that don't change the machine mode. */
2987 while ((TREE_CODE (exp) == NOP_EXPR
2988 || TREE_CODE (exp) == CONVERT_EXPR)
2989 && (TYPE_MODE (TREE_TYPE (exp))
2990 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2991 exp = TREE_OPERAND (exp, 0);
2993 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2994 || (DECL_P (exp)
2995 && ! TREE_ADDRESSABLE (exp)
2996 && ! TREE_THIS_VOLATILE (exp)
2997 && ! DECL_NONLOCAL (exp)
2998 /* Don't regard global variables as simple. They may be
2999 allocated in ways unknown to the compiler (shared memory,
3000 #pragma weak, etc). */
3001 && ! TREE_PUBLIC (exp)
3002 && ! DECL_EXTERNAL (exp)
3003 /* Loading a static variable is unduly expensive, but global
3004 registers aren't expensive. */
3005 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3008 /* The following functions are subroutines to fold_range_test and allow it to
3009 try to change a logical combination of comparisons into a range test.
3011 For example, both
3012 X == 2 || X == 3 || X == 4 || X == 5
3013 and
3014 X >= 2 && X <= 5
3015 are converted to
3016 (unsigned) (X - 2) <= 3
3018 We describe each set of comparisons as being either inside or outside
3019 a range, using a variable named like IN_P, and then describe the
3020 range with a lower and upper bound. If one of the bounds is omitted,
3021 it represents either the highest or lowest value of the type.
3023 In the comments below, we represent a range by two numbers in brackets
3024 preceded by a "+" to designate being inside that range, or a "-" to
3025 designate being outside that range, so the condition can be inverted by
3026 flipping the prefix. An omitted bound is represented by a "-". For
3027 example, "- [-, 10]" means being outside the range starting at the lowest
3028 possible value and ending at 10, in other words, being greater than 10.
3029 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3030 always false.
3032 We set up things so that the missing bounds are handled in a consistent
3033 manner so neither a missing bound nor "true" and "false" need to be
3034 handled using a special case. */
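/* Editorial sketch of the canonicalization quoted above: both original
   forms reduce to one unsigned comparison against the range width.
   The subtraction is done in unsigned arithmetic so the demo avoids
   signed overflow.  */
static int
range_test_demo (int x)
{
  int by_or = x == 2 || x == 3 || x == 4 || x == 5;
  int by_and = x >= 2 && x <= 5;
  int folded = (unsigned) x - 2u <= 3u;

  return by_or == folded && by_and == folded;  /* always 1 */
}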
3036 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3037 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3038 and UPPER1_P are nonzero if the respective argument is an upper bound
3039 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3040 must be specified for a comparison. ARG1 will be converted to ARG0's
3041 type if both are specified. */
3043 static tree
3044 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3045 tree arg1, int upper1_p)
3047 tree tem;
3048 int result;
3049 int sgn0, sgn1;
3051 /* If neither arg represents infinity, do the normal operation.
3052 Else, if not a comparison, return infinity. Else handle the special
3053 comparison rules. Note that most of the cases below won't occur, but
3054 are handled for consistency. */
3056 if (arg0 != 0 && arg1 != 0)
3058 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3059 arg0, convert (TREE_TYPE (arg0), arg1)));
3060 STRIP_NOPS (tem);
3061 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3064 if (TREE_CODE_CLASS (code) != '<')
3065 return 0;
3067 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3068 for neither. In real maths, we cannot assume open ended ranges are
3069 the same. But, this is computer arithmetic, where numbers are finite.
3070 We can therefore make the transformation of any unbounded range with
3071 the value Z, Z being greater than any representable number. This permits
3072 us to treat unbounded ranges as equal. */
3073 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3074 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3075 switch (code)
3077 case EQ_EXPR:
3078 result = sgn0 == sgn1;
3079 break;
3080 case NE_EXPR:
3081 result = sgn0 != sgn1;
3082 break;
3083 case LT_EXPR:
3084 result = sgn0 < sgn1;
3085 break;
3086 case LE_EXPR:
3087 result = sgn0 <= sgn1;
3088 break;
3089 case GT_EXPR:
3090 result = sgn0 > sgn1;
3091 break;
3092 case GE_EXPR:
3093 result = sgn0 >= sgn1;
3094 break;
3095 default:
3096 abort ();
3099 return convert (type, result ? integer_one_node : integer_zero_node);
3102 /* Given EXP, a logical expression, set the range it is testing into
3103 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3104 actually being tested. *PLOW and *PHIGH will be made of the same type
3105 as the returned expression. If EXP is not a comparison, we will most
3106 likely not be returning a useful value and range. */
3108 static tree
3109 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3111 enum tree_code code;
3112 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3113 tree orig_type = NULL_TREE;
3114 int in_p, n_in_p;
3115 tree low, high, n_low, n_high;
3117 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3118 and see if we can refine the range. Some of the cases below may not
3119 happen, but it doesn't seem worth worrying about this. We "continue"
3120 the outer loop when we've changed something; otherwise we "break"
3121 the switch, which will "break" the while. */
3123 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
3125 while (1)
3127 code = TREE_CODE (exp);
3129 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3131 if (first_rtl_op (code) > 0)
3132 arg0 = TREE_OPERAND (exp, 0);
3133 if (TREE_CODE_CLASS (code) == '<'
3134 || TREE_CODE_CLASS (code) == '1'
3135 || TREE_CODE_CLASS (code) == '2')
3136 type = TREE_TYPE (arg0);
3137 if (TREE_CODE_CLASS (code) == '2'
3138 || TREE_CODE_CLASS (code) == '<'
3139 || (TREE_CODE_CLASS (code) == 'e'
3140 && TREE_CODE_LENGTH (code) > 1))
3141 arg1 = TREE_OPERAND (exp, 1);
3144 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3145 lose a cast by accident. */
3146 if (type != NULL_TREE && orig_type == NULL_TREE)
3147 orig_type = type;
3149 switch (code)
3151 case TRUTH_NOT_EXPR:
3152 in_p = ! in_p, exp = arg0;
3153 continue;
3155 case EQ_EXPR: case NE_EXPR:
3156 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3157 /* We can only do something if the range is testing for zero
3158 and if the second operand is an integer constant. Note that
3159 saying something is "in" the range we make is done by
3160 complementing IN_P since it will set in the initial case of
3161 being not equal to zero; "out" is leaving it alone. */
3162 if (low == 0 || high == 0
3163 || ! integer_zerop (low) || ! integer_zerop (high)
3164 || TREE_CODE (arg1) != INTEGER_CST)
3165 break;
3167 switch (code)
3169 case NE_EXPR: /* - [c, c] */
3170 low = high = arg1;
3171 break;
3172 case EQ_EXPR: /* + [c, c] */
3173 in_p = ! in_p, low = high = arg1;
3174 break;
3175 case GT_EXPR: /* - [-, c] */
3176 low = 0, high = arg1;
3177 break;
3178 case GE_EXPR: /* + [c, -] */
3179 in_p = ! in_p, low = arg1, high = 0;
3180 break;
3181 case LT_EXPR: /* - [c, -] */
3182 low = arg1, high = 0;
3183 break;
3184 case LE_EXPR: /* + [-, c] */
3185 in_p = ! in_p, low = 0, high = arg1;
3186 break;
3187 default:
3188 abort ();
3191 exp = arg0;
3193 /* If this is an unsigned comparison, we also know that EXP is
3194 greater than or equal to zero. We base the range tests we make
3195 on that fact, so we record it here so we can parse existing
3196 range tests. */
3197 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3199 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3200 1, convert (type, integer_zero_node),
3201 NULL_TREE))
3202 break;
3204 in_p = n_in_p, low = n_low, high = n_high;
3206 /* If the high bound is missing, but we have a nonzero low
3207 bound, reverse the range so it goes from zero to the low bound
3208 minus 1. */
3209 if (high == 0 && low && ! integer_zerop (low))
3211 in_p = ! in_p;
3212 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3213 integer_one_node, 0);
3214 low = convert (type, integer_zero_node);
3217 continue;
3219 case NEGATE_EXPR:
3220 /* (-x) IN [a,b] -> x in [-b, -a] */
3221 n_low = range_binop (MINUS_EXPR, type,
3222 convert (type, integer_zero_node), 0, high, 1);
3223 n_high = range_binop (MINUS_EXPR, type,
3224 convert (type, integer_zero_node), 0, low, 0);
3225 low = n_low, high = n_high;
3226 exp = arg0;
3227 continue;
3229 case BIT_NOT_EXPR:
3230 /* ~ X -> -X - 1 */
3231 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3232 convert (type, integer_one_node));
3233 continue;
3235 case PLUS_EXPR: case MINUS_EXPR:
3236 if (TREE_CODE (arg1) != INTEGER_CST)
3237 break;
3239 /* If EXP is signed, any overflow in the computation is undefined,
3240 so we don't worry about it so long as our computations on
3241 the bounds don't overflow. For unsigned, overflow is defined
3242 and this is exactly the right thing. */
3243 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3244 type, low, 0, arg1, 0);
3245 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3246 type, high, 1, arg1, 0);
3247 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3248 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3249 break;
3251 /* Check for an unsigned range which has wrapped around the maximum
3252 value thus making n_high < n_low, and normalize it. */
3253 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3255 low = range_binop (PLUS_EXPR, type, n_high, 0,
3256 integer_one_node, 0);
3257 high = range_binop (MINUS_EXPR, type, n_low, 0,
3258 integer_one_node, 0);
3260 /* If the range is of the form +/- [ x+1, x ], we won't
3261 be able to normalize it. But then, it represents the
3262 whole range or the empty set, so make it
3263 +/- [ -, - ]. */
3264 if (tree_int_cst_equal (n_low, low)
3265 && tree_int_cst_equal (n_high, high))
3266 low = high = 0;
3267 else
3268 in_p = ! in_p;
3270 else
3271 low = n_low, high = n_high;
3273 exp = arg0;
3274 continue;
3276 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3277 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3278 break;
3280 if (! INTEGRAL_TYPE_P (type)
3281 || (low != 0 && ! int_fits_type_p (low, type))
3282 || (high != 0 && ! int_fits_type_p (high, type)))
3283 break;
3285 n_low = low, n_high = high;
3287 if (n_low != 0)
3288 n_low = convert (type, n_low);
3290 if (n_high != 0)
3291 n_high = convert (type, n_high);
3293 /* If we're converting from an unsigned to a signed type,
3294 we will be doing the comparison as unsigned. The tests above
3295 have already verified that LOW and HIGH are both positive.
3297 So we have to make sure that the original unsigned value will
3298 be interpreted as positive. */
3299 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3301 tree equiv_type = (*lang_hooks.types.type_for_mode)
3302 (TYPE_MODE (type), 1);
3303 tree high_positive;
3305 /* A range without an upper bound is, naturally, unbounded.
3306 Since convert would have cropped a very large value, use
3307 the max value for the destination type. */
3308 high_positive
3309 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3310 : TYPE_MAX_VALUE (type);
3312 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3313 high_positive = fold (build (RSHIFT_EXPR, type,
3314 convert (type, high_positive),
3315 convert (type, integer_one_node)));
3317 /* If the low bound is specified, "and" the range with the
3318 range for which the original unsigned value will be
3319 positive. */
3320 if (low != 0)
3322 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3323 1, n_low, n_high,
3324 1, convert (type, integer_zero_node),
3325 high_positive))
3326 break;
3328 in_p = (n_in_p == in_p);
3330 else
3332 /* Otherwise, "or" the range with the range of the input
3333 that will be interpreted as negative. */
3334 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3335 0, n_low, n_high,
3336 1, convert (type, integer_zero_node),
3337 high_positive))
3338 break;
3340 in_p = (in_p != n_in_p);
3344 exp = arg0;
3345 low = n_low, high = n_high;
3346 continue;
3348 default:
3349 break;
3352 break;
3355 /* If EXP is a constant, we can evaluate whether this is true or false. */
3356 if (TREE_CODE (exp) == INTEGER_CST)
3358 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3359 exp, 0, low, 0))
3360 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3361 exp, 1, high, 1)));
3362 low = high = 0;
3363 exp = 0;
3366 *pin_p = in_p, *plow = low, *phigh = high;
3367 return exp;
3370 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3371 type, TYPE, return an expression to test if EXP is in (or out of, depending
3372 on IN_P) the range. */
3374 static tree
3375 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3377 tree etype = TREE_TYPE (exp);
3378 tree value;
3380 if (! in_p
3381 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3382 return invert_truthvalue (value);
3384 if (low == 0 && high == 0)
3385 return convert (type, integer_one_node);
3387 if (low == 0)
3388 return fold (build (LE_EXPR, type, exp, high));
3390 if (high == 0)
3391 return fold (build (GE_EXPR, type, exp, low));
3393 if (operand_equal_p (low, high, 0))
3394 return fold (build (EQ_EXPR, type, exp, low));
3396 if (integer_zerop (low))
3398 if (! TREE_UNSIGNED (etype))
3400 etype = (*lang_hooks.types.unsigned_type) (etype);
3401 high = convert (etype, high);
3402 exp = convert (etype, exp);
3404 return build_range_check (type, exp, 1, 0, high);
3407 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3408 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3410 unsigned HOST_WIDE_INT lo;
3411 HOST_WIDE_INT hi;
3412 int prec;
3414 prec = TYPE_PRECISION (etype);
3415 if (prec <= HOST_BITS_PER_WIDE_INT)
3417 hi = 0;
3418 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3420 else
3422 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3423 lo = (unsigned HOST_WIDE_INT) -1;
3426 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3428 if (TREE_UNSIGNED (etype))
3430 etype = (*lang_hooks.types.signed_type) (etype);
3431 exp = convert (etype, exp);
3433 return fold (build (GT_EXPR, type, exp,
3434 convert (etype, integer_zero_node)));
3438 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3439 && ! TREE_OVERFLOW (value))
3440 return build_range_check (type,
3441 fold (build (MINUS_EXPR, etype, exp, low)),
3442 1, convert (etype, integer_zero_node), value);
3444 return 0;
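/* Editorial sketch of the special case above: when the range is
   [1, signed-max-of-the-narrower-type], the check collapses to a sign
   test.  Assumes the usual wrap-around conversion of unsigned char
   values above 127 to negative signed char values.  */
static int
signed_range_demo (unsigned char c)
{
  return (c >= 1 && c <= 127) == ((signed char) c > 0);  /* always 1 */
}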
3447 /* Given two ranges, see if we can merge them into one. Return 1 if we
3448 can, 0 if we can't. Set the output range into the specified parameters. */
3450 static int
3451 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3452 tree high0, int in1_p, tree low1, tree high1)
3454 int no_overlap;
3455 int subset;
3456 int temp;
3457 tree tem;
3458 int in_p;
3459 tree low, high;
3460 int lowequal = ((low0 == 0 && low1 == 0)
3461 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3462 low0, 0, low1, 0)));
3463 int highequal = ((high0 == 0 && high1 == 0)
3464 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3465 high0, 1, high1, 1)));
3467 /* Make range 0 be the range that starts first, or ends last if they
3468 start at the same value. Swap them if it isn't. */
3469 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3470 low0, 0, low1, 0))
3471 || (lowequal
3472 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3473 high1, 1, high0, 1))))
3475 temp = in0_p, in0_p = in1_p, in1_p = temp;
3476 tem = low0, low0 = low1, low1 = tem;
3477 tem = high0, high0 = high1, high1 = tem;
3480 /* Now flag two cases, whether the ranges are disjoint or whether the
3481 second range is totally subsumed in the first. Note that the tests
3482 below are simplified by the ones above. */
3483 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3484 high0, 1, low1, 0));
3485 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3486 high1, 1, high0, 1));
3488 /* We now have four cases, depending on whether we are including or
3489 excluding the two ranges. */
3490 if (in0_p && in1_p)
3492 /* If they don't overlap, the result is false. If the second range
3493 is a subset it is the result. Otherwise, the range is from the start
3494 of the second to the end of the first. */
3495 if (no_overlap)
3496 in_p = 0, low = high = 0;
3497 else if (subset)
3498 in_p = 1, low = low1, high = high1;
3499 else
3500 in_p = 1, low = low1, high = high0;
3503 else if (in0_p && ! in1_p)
3505 /* If they don't overlap, the result is the first range. If they are
3506 equal, the result is false. If the second range is a subset of the
3507 first, and the ranges begin at the same place, we go from just after
3508 the end of the first range to the end of the second. If the second
3509 range is not a subset of the first, or if it is a subset and both
3510 ranges end at the same place, the range starts at the start of the
3511 first range and ends just before the second range.
3512 Otherwise, we can't describe this as a single range. */
3513 if (no_overlap)
3514 in_p = 1, low = low0, high = high0;
3515 else if (lowequal && highequal)
3516 in_p = 0, low = high = 0;
3517 else if (subset && lowequal)
3519 in_p = 1, high = high0;
3520 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3521 integer_one_node, 0);
3523 else if (! subset || highequal)
3525 in_p = 1, low = low0;
3526 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3527 integer_one_node, 0);
3529 else
3530 return 0;
3533 else if (! in0_p && in1_p)
3535 /* If they don't overlap, the result is the second range. If the second
3536 is a subset of the first, the result is false. Otherwise,
3537 the range starts just after the first range and ends at the
3538 end of the second. */
3539 if (no_overlap)
3540 in_p = 1, low = low1, high = high1;
3541 else if (subset || highequal)
3542 in_p = 0, low = high = 0;
3543 else
3545 in_p = 1, high = high1;
3546 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3547 integer_one_node, 0);
3551 else
3553 /* The case where we are excluding both ranges. Here the complex case
3554 is if they don't overlap. In that case, the only time we have a
3555 range is if they are adjacent. If the second is a subset of the
3556 first, the result is the first. Otherwise, the range to exclude
3557 starts at the beginning of the first range and ends at the end of the
3558 second. */
3559 if (no_overlap)
3561 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3562 range_binop (PLUS_EXPR, NULL_TREE,
3563 high0, 1,
3564 integer_one_node, 1),
3565 1, low1, 0)))
3566 in_p = 0, low = low0, high = high1;
3567 else
3568 return 0;
3570 else if (subset)
3571 in_p = 0, low = low0, high = high0;
3572 else
3573 in_p = 0, low = low0, high = high1;
3576 *pin_p = in_p, *plow = low, *phigh = high;
3577 return 1;
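/* Editorial sketch of the in0_p && in1_p, overlapping, non-subset case
   above: merging + [2, 7] with + [5, 9] yields + [5, 7], i.e. the
   start of the second range to the end of the first.  */
static int
merge_ranges_demo (int x)
{
  return ((x >= 2 && x <= 7) && (x >= 5 && x <= 9))
	 == (x >= 5 && x <= 7);  /* always 1 */
}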
3580 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3581 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3582 #endif
3584 /* EXP is some logical combination of boolean tests. See if we can
3585 merge it into some range test. Return the new tree if so. */
3587 static tree
3588 fold_range_test (tree exp)
3590 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3591 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3592 int in0_p, in1_p, in_p;
3593 tree low0, low1, low, high0, high1, high;
3594 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3595 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3596 tree tem;
3598 /* If this is an OR operation, invert both sides; we will invert
3599 again at the end. */
3600 if (or_op)
3601 in0_p = ! in0_p, in1_p = ! in1_p;
3603 /* If both expressions are the same, if we can merge the ranges, and we
3604 can build the range test, return it or it inverted. If one of the
3605 ranges is always true or always false, consider it to be the same
3606 expression as the other. */
3607 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3608 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3609 in1_p, low1, high1)
3610 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3611 lhs != 0 ? lhs
3612 : rhs != 0 ? rhs : integer_zero_node,
3613 in_p, low, high))))
3614 return or_op ? invert_truthvalue (tem) : tem;
3616 /* On machines where the branch cost is expensive, if this is a
3617 short-circuited branch and the underlying object on both sides
3618 is the same, make a non-short-circuit operation. */
3619 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3620 && lhs != 0 && rhs != 0
3621 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3622 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3623 && operand_equal_p (lhs, rhs, 0))
3625 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3626 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3627 which cases we can't do this. */
3628 if (simple_operand_p (lhs))
3629 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3630 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3631 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3632 TREE_OPERAND (exp, 1));
3634 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3635 && ! CONTAINS_PLACEHOLDER_P (lhs))
3637 tree common = save_expr (lhs);
3639 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3640 or_op ? ! in0_p : in0_p,
3641 low0, high0))
3642 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3643 or_op ? ! in1_p : in1_p,
3644 low1, high1))))
3645 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3646 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3647 TREE_TYPE (exp), lhs, rhs);
3651 return 0;
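/* Editorial sketch of the non-short-circuit rewrite above: when both
   operands are simple and free of side effects, TRUTH_ANDIF_EXPR can
   be evaluated unconditionally as TRUTH_AND_EXPR, trading a branch
   for a bitwise AND of the two 0/1 comparison results.  */
static int
non_short_circuit_demo (int x)
{
  return ((x > 2 && x < 10)) == ((x > 2) & (x < 10));  /* always 1 */
}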
3654 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3655 bit value. Arrange things so the extra bits will be set to zero if and
3656 only if C is sign-extended to its full width. If MASK is nonzero,
3657 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3659 static tree
3660 unextend (tree c, int p, int unsignedp, tree mask)
3662 tree type = TREE_TYPE (c);
3663 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3664 tree temp;
3666 if (p == modesize || unsignedp)
3667 return c;
3669 /* We work by getting just the sign bit into the low-order bit, then
3670 into the high-order bit, then sign-extend. We then XOR that value
3671 with C. */
3672 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3673 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3675 /* We must use a signed type in order to get an arithmetic right shift.
3676 However, we must also avoid introducing accidental overflows, so that
3677 a subsequent call to integer_zerop will work. Hence we must
3678 do the type conversion here. At this point, the constant is either
3679 zero or one, and the conversion to a signed type can never overflow.
3680 We could get an overflow if this conversion is done anywhere else. */
3681 if (TREE_UNSIGNED (type))
3682 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3684 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3685 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3686 if (mask != 0)
3687 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3688 /* If necessary, convert the type back to match the type of C. */
3689 if (TREE_UNSIGNED (type))
3690 temp = convert (type, temp);
3692 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
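/* Editorial sketch, related to the sign-bit shuffling above: the
   classic XOR idiom sign-extends a P-bit value, with M its sign bit;
   unextend performs a similar manipulation on constants.  Assumes the
   usual wrap-around unsigned-to-int conversion.  */
static int
sign_extend_demo (unsigned x, int p)
{
  unsigned m = 1u << (p - 1);

  /* Assumes x < (1u << p); e.g. sign_extend_demo (0x7, 3) == -1.  */
  return (int) ((x ^ m) - m);
}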
3695 /* Find ways of folding logical expressions of LHS and RHS:
3696 Try to merge two comparisons to the same innermost item.
3697 Look for range tests like "ch >= '0' && ch <= '9'".
3698 Look for combinations of simple terms on machines with expensive branches
3699 and evaluate the RHS unconditionally.
3701 For example, if we have p->a == 2 && p->b == 4 and we can make an
3702 object large enough to span both A and B, we can do this with a comparison
3703 against the object ANDed with a mask.
3705 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3706 operations to do this with one comparison.
3708 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3709 function and the one above.
3711 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3712 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3714 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3715 two operands.
3717 We return the simplified tree or 0 if no optimization is possible. */
3719 static tree
3720 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3722 /* If this is the "or" of two comparisons, we can do something if
3723 the comparisons are NE_EXPR. If this is the "and", we can do something
3724 if the comparisons are EQ_EXPR. I.e.,
3725 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3727 WANTED_CODE is this operation code. For single bit fields, we can
3728 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3729 comparison for one-bit fields. */
3731 enum tree_code wanted_code;
3732 enum tree_code lcode, rcode;
3733 tree ll_arg, lr_arg, rl_arg, rr_arg;
3734 tree ll_inner, lr_inner, rl_inner, rr_inner;
3735 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3736 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3737 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3738 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3739 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3740 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3741 enum machine_mode lnmode, rnmode;
3742 tree ll_mask, lr_mask, rl_mask, rr_mask;
3743 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3744 tree l_const, r_const;
3745 tree lntype, rntype, result;
3746 int first_bit, end_bit;
3747 int volatilep;
3749 /* Start by getting the comparison codes. Fail if anything is volatile.
3750 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3751 it were surrounded with a NE_EXPR. */
3753 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3754 return 0;
3756 lcode = TREE_CODE (lhs);
3757 rcode = TREE_CODE (rhs);
3759 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3760 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3762 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3763 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3765 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3766 return 0;
3768 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3769 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3771 ll_arg = TREE_OPERAND (lhs, 0);
3772 lr_arg = TREE_OPERAND (lhs, 1);
3773 rl_arg = TREE_OPERAND (rhs, 0);
3774 rr_arg = TREE_OPERAND (rhs, 1);
3776 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3777 if (simple_operand_p (ll_arg)
3778 && simple_operand_p (lr_arg)
3779 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3781 int compcode;
3783 if (operand_equal_p (ll_arg, rl_arg, 0)
3784 && operand_equal_p (lr_arg, rr_arg, 0))
3786 int lcompcode, rcompcode;
3788 lcompcode = comparison_to_compcode (lcode);
3789 rcompcode = comparison_to_compcode (rcode);
3790 compcode = (code == TRUTH_AND_EXPR)
3791 ? lcompcode & rcompcode
3792 : lcompcode | rcompcode;
3794 else if (operand_equal_p (ll_arg, rr_arg, 0)
3795 && operand_equal_p (lr_arg, rl_arg, 0))
3797 int lcompcode, rcompcode;
3799 rcode = swap_tree_comparison (rcode);
3800 lcompcode = comparison_to_compcode (lcode);
3801 rcompcode = comparison_to_compcode (rcode);
3802 compcode = (code == TRUTH_AND_EXPR)
3803 ? lcompcode & rcompcode
3804 : lcompcode | rcompcode;
3806 else
3807 compcode = -1;
3809 if (compcode == COMPCODE_TRUE)
3810 return convert (truth_type, integer_one_node);
3811 else if (compcode == COMPCODE_FALSE)
3812 return convert (truth_type, integer_zero_node);
3813 else if (compcode != -1)
3814 return build (compcode_to_comparison (compcode),
3815 truth_type, ll_arg, lr_arg);
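/* Editor's note -- illustrative sketch, not part of the original file.
   Assuming the three-bit encoding used by comparison_to_compcode
   (LT = 1, EQ = 2, GT = 4, hence LE = 3, NE = 5, GE = 6, TRUE = 7 and
   FALSE = 0), the merge above is plain bit arithmetic on the codes:

     (x < y) || (x == y)   =>   1 | 2 == 3 (LE)     =>   x <= y
     (x < y) && (x == y)   =>   1 & 2 == 0 (FALSE)  =>   constant 0
     (x < y) || (x != y)   =>   1 | 5 == 5 (NE)     =>   x != y  */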
3818 /* If the RHS can be evaluated unconditionally and its operands are
3819 simple, it wins to evaluate the RHS unconditionally on machines
3820 with expensive branches. In this case, this isn't a comparison
3821 that can be merged. Avoid doing this if the RHS is a floating-point
3822 comparison since those can trap. */
3824 if (BRANCH_COST >= 2
3825 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3826 && simple_operand_p (rl_arg)
3827 && simple_operand_p (rr_arg))
3829 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3830 if (code == TRUTH_OR_EXPR
3831 && lcode == NE_EXPR && integer_zerop (lr_arg)
3832 && rcode == NE_EXPR && integer_zerop (rr_arg)
3833 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3834 return build (NE_EXPR, truth_type,
3835 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3836 ll_arg, rl_arg),
3837 integer_zero_node);
3839 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3840 if (code == TRUTH_AND_EXPR
3841 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3842 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3843 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3844 return build (EQ_EXPR, truth_type,
3845 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3846 ll_arg, rl_arg),
3847 integer_zero_node);
3849 return build (code, truth_type, lhs, rhs);
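/* Editor's note -- illustrative sketch, not part of the original file.
   The two rewrites above trade a short-circuit branch for one bitwise
   OR, which is profitable when branches are expensive:

     (a != 0) || (b != 0)   =>   (a | b) != 0
     (a == 0) && (b == 0)   =>   (a | b) == 0

   Both hold for any integers a and b, since a | b is zero exactly
   when both operands are zero.  */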
3852 /* See if the comparisons can be merged. Then get all the parameters for
3853 each side. */
3855 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3856 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3857 return 0;
3859 volatilep = 0;
3860 ll_inner = decode_field_reference (ll_arg,
3861 &ll_bitsize, &ll_bitpos, &ll_mode,
3862 &ll_unsignedp, &volatilep, &ll_mask,
3863 &ll_and_mask);
3864 lr_inner = decode_field_reference (lr_arg,
3865 &lr_bitsize, &lr_bitpos, &lr_mode,
3866 &lr_unsignedp, &volatilep, &lr_mask,
3867 &lr_and_mask);
3868 rl_inner = decode_field_reference (rl_arg,
3869 &rl_bitsize, &rl_bitpos, &rl_mode,
3870 &rl_unsignedp, &volatilep, &rl_mask,
3871 &rl_and_mask);
3872 rr_inner = decode_field_reference (rr_arg,
3873 &rr_bitsize, &rr_bitpos, &rr_mode,
3874 &rr_unsignedp, &volatilep, &rr_mask,
3875 &rr_and_mask);
3877 /* The inner operation on the lhs of each comparison must be the same
3878 if we are to be able to do anything.
3879 Then see if we have constants. If not, the same must be true for
3880 the rhs's. */
3881 if (volatilep || ll_inner == 0 || rl_inner == 0
3882 || ! operand_equal_p (ll_inner, rl_inner, 0))
3883 return 0;
3885 if (TREE_CODE (lr_arg) == INTEGER_CST
3886 && TREE_CODE (rr_arg) == INTEGER_CST)
3887 l_const = lr_arg, r_const = rr_arg;
3888 else if (lr_inner == 0 || rr_inner == 0
3889 || ! operand_equal_p (lr_inner, rr_inner, 0))
3890 return 0;
3891 else
3892 l_const = r_const = 0;
3894 /* If either comparison code is not correct for our logical operation,
3895 fail. However, we can convert a one-bit comparison against zero into
3896 the opposite comparison against that bit being set in the field. */
3898 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3899 if (lcode != wanted_code)
3901 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3903 /* Make the left operand unsigned, since we are only interested
3904 in the value of one bit. Otherwise we are doing the wrong
3905 thing below. */
3906 ll_unsignedp = 1;
3907 l_const = ll_mask;
3909 else
3910 return 0;
3913 /* This is analogous to the code for l_const above. */
3914 if (rcode != wanted_code)
3916 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3918 rl_unsignedp = 1;
3919 r_const = rl_mask;
3921 else
3922 return 0;
3925 /* After this point all optimizations will generate bit-field
3926 references, which we might not want. */
3927 if (! (*lang_hooks.can_use_bit_fields_p) ())
3928 return 0;
3930 /* See if we can find a mode that contains both fields being compared on
3931 the left. If we can't, fail. Otherwise, update all constants and masks
3932 to be relative to a field of that size. */
3933 first_bit = MIN (ll_bitpos, rl_bitpos);
3934 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3935 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3936 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3937 volatilep);
3938 if (lnmode == VOIDmode)
3939 return 0;
3941 lnbitsize = GET_MODE_BITSIZE (lnmode);
3942 lnbitpos = first_bit & ~ (lnbitsize - 1);
3943 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3944 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3946 if (BYTES_BIG_ENDIAN)
3948 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3949 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3952 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3953 size_int (xll_bitpos), 0);
3954 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3955 size_int (xrl_bitpos), 0);
3957 if (l_const)
3959 l_const = convert (lntype, l_const);
3960 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3961 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3962 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3963 fold (build1 (BIT_NOT_EXPR,
3964 lntype, ll_mask)),
3965 0)))
3967 warning ("comparison is always %d", wanted_code == NE_EXPR);
3969 return convert (truth_type,
3970 wanted_code == NE_EXPR
3971 ? integer_one_node : integer_zero_node);
3974 if (r_const)
3976 r_const = convert (lntype, r_const);
3977 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3978 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3979 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3980 fold (build1 (BIT_NOT_EXPR,
3981 lntype, rl_mask)),
3982 0)))
3984 warning ("comparison is always %d", wanted_code == NE_EXPR);
3986 return convert (truth_type,
3987 wanted_code == NE_EXPR
3988 ? integer_one_node : integer_zero_node);
3992 /* If the right sides are not constant, do the same for them. Also,
3993 disallow this optimization if a size or signedness mismatch occurs
3994 between the left and right sides. */
3995 if (l_const == 0)
3997 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3998 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3999 /* Make sure the two fields on the right
4000 correspond to the left without being swapped. */
4001 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4002 return 0;
4004 first_bit = MIN (lr_bitpos, rr_bitpos);
4005 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4006 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4007 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4008 volatilep);
4009 if (rnmode == VOIDmode)
4010 return 0;
4012 rnbitsize = GET_MODE_BITSIZE (rnmode);
4013 rnbitpos = first_bit & ~ (rnbitsize - 1);
4014 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4015 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4017 if (BYTES_BIG_ENDIAN)
4019 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4020 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4023 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
4024 size_int (xlr_bitpos), 0);
4025 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
4026 size_int (xrr_bitpos), 0);
4028 /* Make a mask that corresponds to both fields being compared.
4029 Do this for both items being compared. If the operands are the
4030 same size and the bits being compared are in the same position
4031 then we can do this by masking both and comparing the masked
4032 results. */
4033 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4034 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4035 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4037 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4038 ll_unsignedp || rl_unsignedp);
4039 if (! all_ones_mask_p (ll_mask, lnbitsize))
4040 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4042 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4043 lr_unsignedp || rr_unsignedp);
4044 if (! all_ones_mask_p (lr_mask, rnbitsize))
4045 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4047 return build (wanted_code, truth_type, lhs, rhs);
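/* Editor's note -- illustrative sketch, not part of the original file.
   For the branch above: with both fields loaded as one wider word,
   p->a == q->a && p->b == q->b needs a single comparison, roughly

     (pword & mask) == (qword & mask)

   where mask has 1-bits at the positions of both fields; the
   BIT_AND is dropped entirely when the fields cover every bit of
   the word.  */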
4050 /* There is still another way we can do something: If both pairs of
4051 fields being compared are adjacent, we may be able to make a wider
4052 field containing them both.
4054 Note that we still must mask the lhs/rhs expressions. Furthermore,
4055 the mask must be shifted to account for the shift done by
4056 make_bit_field_ref. */
4057 if ((ll_bitsize + ll_bitpos == rl_bitpos
4058 && lr_bitsize + lr_bitpos == rr_bitpos)
4059 || (ll_bitpos == rl_bitpos + rl_bitsize
4060 && lr_bitpos == rr_bitpos + rr_bitsize))
4062 tree type;
4064 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4065 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4066 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4067 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4069 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4070 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4071 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4072 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4074 /* Convert to the smaller type before masking out unwanted bits. */
4075 type = lntype;
4076 if (lntype != rntype)
4078 if (lnbitsize > rnbitsize)
4080 lhs = convert (rntype, lhs);
4081 ll_mask = convert (rntype, ll_mask);
4082 type = rntype;
4084 else if (lnbitsize < rnbitsize)
4086 rhs = convert (lntype, rhs);
4087 lr_mask = convert (lntype, lr_mask);
4088 type = lntype;
4092 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4093 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4095 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4096 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4098 return build (wanted_code, truth_type, lhs, rhs);
4101 return 0;
4104 /* Handle the case of comparisons with constants. If there is something in
4105 common between the masks, those bits of the constants must be the same.
4106 If not, the condition is always false. Test for this to avoid generating
4107 incorrect code below. */
4108 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4109 if (! integer_zerop (result)
4110 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4111 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4113 if (wanted_code == NE_EXPR)
4115 warning ("`or' of unmatched not-equal tests is always 1");
4116 return convert (truth_type, integer_one_node);
4118 else
4120 warning ("`and' of mutually exclusive equal-tests is always 0");
4121 return convert (truth_type, integer_zero_node);
4125 /* Construct the expression we will return. First get the component
4126 reference we will make. Unless the mask is all ones the width of
4127 that field, perform the mask operation. Then compare with the
4128 merged constant. */
4129 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4130 ll_unsignedp || rl_unsignedp);
4132 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4133 if (! all_ones_mask_p (ll_mask, lnbitsize))
4134 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4136 return build (wanted_code, truth_type, result,
4137 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
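/* Editor's note -- illustrative sketch, not part of the original file.
   A plain-C model of the constant case just handled: p->a == 2 &&
   p->b == 4 becomes one wider load compared against the OR of the two
   shifted constants.  The struct, the memcpy (standing in for the
   single bit-field reference) and the little-endian layout are all
   assumptions made for illustration; memcpy is declared via the
   <string.h> pulled in by system.h.  */

struct example_pair { unsigned char a, b; };

static int
example_merged_constant_compare (const struct example_pair *p)
{
  unsigned short word;
  memcpy (&word, p, sizeof word);                  /* one 16-bit load */
  return word == (unsigned short) (2 | (4 << 8));  /* merged constant */
}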
4140 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4141 constant. */
4143 static tree
4144 optimize_minmax_comparison (tree t)
4146 tree type = TREE_TYPE (t);
4147 tree arg0 = TREE_OPERAND (t, 0);
4148 enum tree_code op_code;
4149 tree comp_const = TREE_OPERAND (t, 1);
4150 tree minmax_const;
4151 int consts_equal, consts_lt;
4152 tree inner;
4154 STRIP_SIGN_NOPS (arg0);
4156 op_code = TREE_CODE (arg0);
4157 minmax_const = TREE_OPERAND (arg0, 1);
4158 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4159 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4160 inner = TREE_OPERAND (arg0, 0);
4162 /* If something does not permit us to optimize, return the original tree. */
4163 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4164 || TREE_CODE (comp_const) != INTEGER_CST
4165 || TREE_CONSTANT_OVERFLOW (comp_const)
4166 || TREE_CODE (minmax_const) != INTEGER_CST
4167 || TREE_CONSTANT_OVERFLOW (minmax_const))
4168 return t;
4170 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4171 and GT_EXPR, doing the rest with recursive calls using logical
4172 simplifications. */
4173 switch (TREE_CODE (t))
4175 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4176 return
4177 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4179 case GE_EXPR:
4180 return
4181 fold (build (TRUTH_ORIF_EXPR, type,
4182 optimize_minmax_comparison
4183 (build (EQ_EXPR, type, arg0, comp_const)),
4184 optimize_minmax_comparison
4185 (build (GT_EXPR, type, arg0, comp_const))));
4187 case EQ_EXPR:
4188 if (op_code == MAX_EXPR && consts_equal)
4189 /* MAX (X, 0) == 0 -> X <= 0 */
4190 return fold (build (LE_EXPR, type, inner, comp_const));
4192 else if (op_code == MAX_EXPR && consts_lt)
4193 /* MAX (X, 0) == 5 -> X == 5 */
4194 return fold (build (EQ_EXPR, type, inner, comp_const));
4196 else if (op_code == MAX_EXPR)
4197 /* MAX (X, 0) == -1 -> false */
4198 return omit_one_operand (type, integer_zero_node, inner);
4200 else if (consts_equal)
4201 /* MIN (X, 0) == 0 -> X >= 0 */
4202 return fold (build (GE_EXPR, type, inner, comp_const));
4204 else if (consts_lt)
4205 /* MIN (X, 0) == 5 -> false */
4206 return omit_one_operand (type, integer_zero_node, inner);
4208 else
4209 /* MIN (X, 0) == -1 -> X == -1 */
4210 return fold (build (EQ_EXPR, type, inner, comp_const));
4212 case GT_EXPR:
4213 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4214 /* MAX (X, 0) > 0 -> X > 0
4215 MAX (X, 0) > 5 -> X > 5 */
4216 return fold (build (GT_EXPR, type, inner, comp_const));
4218 else if (op_code == MAX_EXPR)
4219 /* MAX (X, 0) > -1 -> true */
4220 return omit_one_operand (type, integer_one_node, inner);
4222 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4223 /* MIN (X, 0) > 0 -> false
4224 MIN (X, 0) > 5 -> false */
4225 return omit_one_operand (type, integer_zero_node, inner);
4227 else
4228 /* MIN (X, 0) > -1 -> X > -1 */
4229 return fold (build (GT_EXPR, type, inner, comp_const));
4231 default:
4232 return t;
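/* Editor's note -- illustrative sketch, not part of the original file.
   The MAX case of the fold above, checked in plain C: MAX (x, 0) > 0
   simplifies to x > 0, because when x <= 0 the MAX yields 0, which is
   not greater than 0, and otherwise the MAX yields x itself.  */

static int
example_minmax_fold_holds (int x)
{
  int unfolded = (x > 0 ? x : 0) > 0;   /* MAX (x, 0) > 0 */
  int folded = x > 0;                   /* the simplified form */
  return unfolded == folded;            /* 1 for every x */
}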
4236 /* T is an integer expression that is being multiplied or divided by, or
4237 taken modulo, a constant C (CODE says which operation and what kind of
4238 divide or modulus). See if we can eliminate that operation by folding it with
4239 other operations already in T. WIDE_TYPE, if non-null, is a type that
4240 should be used for the computation if wider than our type.
4242 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4243 (X * 2) + (Y * 4). We must, however, be assured that either the original
4244 expression would not overflow or that overflow is undefined for the type
4245 in the language in question.
4247 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4248 the machine has a multiply-accumulate insn or that this is part of an
4249 addressing calculation.
4251 If we return a non-null expression, it is an equivalent form of the
4252 original computation, but need not be in the original type. */
4254 static tree
4255 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4257 /* To avoid exponential search depth, refuse to allow recursion past
4258 three levels. Beyond that (1) it's highly unlikely that we'll find
4259 something interesting and (2) we've probably processed it before
4260 when we built the inner expression. */
4262 static int depth;
4263 tree ret;
4265 if (depth > 3)
4266 return NULL;
4268 depth++;
4269 ret = extract_muldiv_1 (t, c, code, wide_type);
4270 depth--;
4272 return ret;
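/* Editor's note -- illustrative sketch, not part of the original file.
   The headline case from the comment above, in plain C: dividing
   (x * 8 + y * 16) by 4 distributes over the sum because each term's
   multiplier is a multiple of 4.  The rewrite is only valid when the
   original computation cannot overflow (or, as for signed types in C,
   when overflow is undefined anyway).  */

static long
example_extract_muldiv (long x, long y)
{
  /* (x * 8 + y * 16) / 4 folds to: */
  return x * 2 + y * 4;
}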
4275 static tree
4276 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4278 tree type = TREE_TYPE (t);
4279 enum tree_code tcode = TREE_CODE (t);
4280 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4281 > GET_MODE_SIZE (TYPE_MODE (type)))
4282 ? wide_type : type);
4283 tree t1, t2;
4284 int same_p = tcode == code;
4285 tree op0 = NULL_TREE, op1 = NULL_TREE;
4287 /* Don't deal with constants of zero here; they confuse the code below. */
4288 if (integer_zerop (c))
4289 return NULL_TREE;
4291 if (TREE_CODE_CLASS (tcode) == '1')
4292 op0 = TREE_OPERAND (t, 0);
4294 if (TREE_CODE_CLASS (tcode) == '2')
4295 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4297 /* Note that we need not handle conditional operations here since fold
4298 already handles those cases. So just do arithmetic here. */
4299 switch (tcode)
4301 case INTEGER_CST:
4302 /* For a constant, we can always simplify if we are a multiply
4303 or (for divide and modulus) if it is a multiple of our constant. */
4304 if (code == MULT_EXPR
4305 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4306 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4307 break;
4309 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4310 /* If op0 is an expression ... */
4311 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4312 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4313 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4314 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4315 /* ... and is unsigned, and its type is smaller than ctype,
4316 then we cannot pass through as widening. */
4317 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4318 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4319 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4320 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4321 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4322 /* ... or its type is larger than ctype,
4323 then we cannot pass through this truncation. */
4324 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4325 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4326 /* ... or signedness changes for division or modulus,
4327 then we cannot pass through this conversion. */
4328 || (code != MULT_EXPR
4329 && (TREE_UNSIGNED (ctype)
4330 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4331 break;
4333 /* Pass the constant down and see if we can make a simplification. If
4334 we can, replace this expression with the inner simplification for
4335 possible later conversion to our or some other type. */
4336 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4337 && TREE_CODE (t2) == INTEGER_CST
4338 && ! TREE_CONSTANT_OVERFLOW (t2)
4339 && (0 != (t1 = extract_muldiv (op0, t2, code,
4340 code == MULT_EXPR
4341 ? ctype : NULL_TREE))))
4342 return t1;
4343 break;
4345 case NEGATE_EXPR: case ABS_EXPR:
4346 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4347 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4348 break;
4350 case MIN_EXPR: case MAX_EXPR:
4351 /* If widening the type changes the signedness, then we can't perform
4352 this optimization as that changes the result. */
4353 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4354 break;
4356 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4357 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4358 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4360 if (tree_int_cst_sgn (c) < 0)
4361 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4363 return fold (build (tcode, ctype, convert (ctype, t1),
4364 convert (ctype, t2)));
4366 break;
4368 case WITH_RECORD_EXPR:
4369 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4370 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4371 TREE_OPERAND (t, 1));
4372 break;
4374 case LSHIFT_EXPR: case RSHIFT_EXPR:
4375 /* If the second operand is constant, this is a multiplication
4376 or floor division by a power of two, so we can treat it that
4377 way unless the multiplier or divisor overflows. */
4378 if (TREE_CODE (op1) == INTEGER_CST
4379 /* const_binop may not detect overflow correctly,
4380 so check for it explicitly here. */
4381 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4382 && TREE_INT_CST_HIGH (op1) == 0
4383 && 0 != (t1 = convert (ctype,
4384 const_binop (LSHIFT_EXPR, size_one_node,
4385 op1, 0)))
4386 && ! TREE_OVERFLOW (t1))
4387 return extract_muldiv (build (tcode == LSHIFT_EXPR
4388 ? MULT_EXPR : FLOOR_DIV_EXPR,
4389 ctype, convert (ctype, op0), t1),
4390 c, code, wide_type);
4391 break;
4393 case PLUS_EXPR: case MINUS_EXPR:
4394 /* See if we can eliminate the operation on both sides. If we can, we
4395 can return a new PLUS or MINUS. If we can't, the only remaining
4396 cases where we can do anything are if the second operand is a
4397 constant. */
4398 t1 = extract_muldiv (op0, c, code, wide_type);
4399 t2 = extract_muldiv (op1, c, code, wide_type);
4400 if (t1 != 0 && t2 != 0
4401 && (code == MULT_EXPR
4402 /* If not multiplication, we can only do this if both operands
4403 are divisible by c. */
4404 || (multiple_of_p (ctype, op0, c)
4405 && multiple_of_p (ctype, op1, c))))
4406 return fold (build (tcode, ctype, convert (ctype, t1),
4407 convert (ctype, t2)));
4409 /* If this was a subtraction, negate OP1 and set it to be an addition.
4410 This simplifies the logic below. */
4411 if (tcode == MINUS_EXPR)
4412 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4414 if (TREE_CODE (op1) != INTEGER_CST)
4415 break;
4417 /* If either OP1 or C are negative, this optimization is not safe for
4418 some of the division and remainder types while for others we need
4419 to change the code. */
4420 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4422 if (code == CEIL_DIV_EXPR)
4423 code = FLOOR_DIV_EXPR;
4424 else if (code == FLOOR_DIV_EXPR)
4425 code = CEIL_DIV_EXPR;
4426 else if (code != MULT_EXPR
4427 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4428 break;
4431 /* If it's a multiply or a division/modulus operation of a multiple
4432 of our constant, do the operation and verify it doesn't overflow. */
4433 if (code == MULT_EXPR
4434 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4436 op1 = const_binop (code, convert (ctype, op1),
4437 convert (ctype, c), 0);
4438 /* We allow the constant to overflow with wrapping semantics. */
4439 if (op1 == 0
4440 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4441 break;
4443 else
4444 break;
4446 /* If we have an unsigned type that is not a sizetype, we cannot widen
4447 the operation since it will change the result if the original
4448 computation overflowed. */
4449 if (TREE_UNSIGNED (ctype)
4450 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4451 && ctype != type)
4452 break;
4454 /* If we were able to eliminate our operation from the first side,
4455 apply our operation to the second side and reform the PLUS. */
4456 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4457 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4459 /* The last case is if we are a multiply. In that case, we can
4460 apply the distributive law to commute the multiply and addition
4461 if the multiplication of the constants doesn't overflow. */
4462 if (code == MULT_EXPR)
4463 return fold (build (tcode, ctype, fold (build (code, ctype,
4464 convert (ctype, op0),
4465 convert (ctype, c))),
4466 op1));
4468 break;
4470 case MULT_EXPR:
4471 /* We have a special case here if we are doing something like
4472 (C * 8) % 4 since we know that's zero. */
4473 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4474 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4475 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4476 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4477 return omit_one_operand (type, integer_zero_node, op0);
4479 /* ... fall through ... */
4481 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4482 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4483 /* If we can extract our operation from the LHS, do so and return a
4484 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4485 do something only if the second operand is a constant. */
4486 if (same_p
4487 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4488 return fold (build (tcode, ctype, convert (ctype, t1),
4489 convert (ctype, op1)));
4490 else if (tcode == MULT_EXPR && code == MULT_EXPR
4491 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4492 return fold (build (tcode, ctype, convert (ctype, op0),
4493 convert (ctype, t1)));
4494 else if (TREE_CODE (op1) != INTEGER_CST)
4495 return 0;
4497 /* If these are the same operation types, we can associate them
4498 assuming no overflow. */
4499 if (tcode == code
4500 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4501 convert (ctype, c), 0))
4502 && ! TREE_OVERFLOW (t1))
4503 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4505 /* If these operations "cancel" each other, we have the main
4506 optimizations of this pass, which occur when either constant is a
4507 multiple of the other, in which case we replace this with an
4508 operation of either CODE or TCODE.
4510 If we have an unsigned type that is not a sizetype, we cannot do
4511 this since it will change the result if the original computation
4512 overflowed. */
4513 if ((! TREE_UNSIGNED (ctype)
4514 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4515 && ! flag_wrapv
4516 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4517 || (tcode == MULT_EXPR
4518 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4519 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4521 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4522 return fold (build (tcode, ctype, convert (ctype, op0),
4523 convert (ctype,
4524 const_binop (TRUNC_DIV_EXPR,
4525 op1, c, 0))));
4526 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4527 return fold (build (code, ctype, convert (ctype, op0),
4528 convert (ctype,
4529 const_binop (TRUNC_DIV_EXPR,
4530 c, op1, 0))));
4532 break;
4534 default:
4535 break;
4538 return 0;
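/* Editor's note -- illustrative sketch, not part of the original file.
   The canonicalization mentioned in the comment before extract_muldiv:
   (x + 7) * 4 is rewritten as x * 4 + 28, which may then match a
   multiply-accumulate instruction or an addressing mode.  Both forms
   agree whenever the arithmetic does not overflow.  */

static int
example_distribute_multiply (int x)
{
  /* (x + 7) * 4 folds to: */
  return x * 4 + 28;
}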
4541 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4542 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4543 that we may sometimes modify the tree. */
4545 static tree
4546 strip_compound_expr (tree t, tree s)
4548 enum tree_code code = TREE_CODE (t);
4550 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4551 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4552 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4553 return TREE_OPERAND (t, 1);
4555 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4556 don't bother handling any other types. */
4557 else if (code == COND_EXPR)
4559 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4560 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4561 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4563 else if (TREE_CODE_CLASS (code) == '1')
4564 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4565 else if (TREE_CODE_CLASS (code) == '<'
4566 || TREE_CODE_CLASS (code) == '2')
4568 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4569 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4572 return t;
4575 /* Return a node which has the indicated constant VALUE (either 0 or
4576 1), and is of the indicated TYPE. */
4578 static tree
4579 constant_boolean_node (int value, tree type)
4581 if (type == integer_type_node)
4582 return value ? integer_one_node : integer_zero_node;
4583 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4584 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4585 integer_zero_node);
4586 else
4588 tree t = build_int_2 (value, 0);
4590 TREE_TYPE (t) = type;
4591 return t;
4595 /* Utility function for the following routine, to see how complex a nesting of
4596 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4597 we don't care (to avoid spending too much time on complex expressions). */
4599 static int
4600 count_cond (tree expr, int lim)
4602 int ctrue, cfalse;
4604 if (TREE_CODE (expr) != COND_EXPR)
4605 return 0;
4606 else if (lim <= 0)
4607 return 0;
4609 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4610 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4611 return MIN (lim, 1 + ctrue + cfalse);
4614 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4615 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4616 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4617 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4618 COND is the first argument to CODE; otherwise (as in the example
4619 given here), it is the second argument. TYPE is the type of the
4620 original expression. */
4622 static tree
4623 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4624 tree cond, tree arg, int cond_first_p)
4626 tree test, true_value, false_value;
4627 tree lhs = NULL_TREE;
4628 tree rhs = NULL_TREE;
4629 /* In the end, we'll produce a COND_EXPR. Both arms of the
4630 conditional expression will be binary operations. The left-hand
4631 side of the expression to be executed if the condition is true
4632 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4633 of the expression to be executed if the condition is true will be
4634 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4635 but apply to the expression to be executed if the conditional is
4636 false. */
4637 tree *true_lhs;
4638 tree *true_rhs;
4639 tree *false_lhs;
4640 tree *false_rhs;
4641 /* These are the codes to use for the left-hand side and right-hand
4642 side of the COND_EXPR. Normally, they are the same as CODE. */
4643 enum tree_code lhs_code = code;
4644 enum tree_code rhs_code = code;
4645 /* And these are the types of the expressions. */
4646 tree lhs_type = type;
4647 tree rhs_type = type;
4648 int save = 0;
4650 if (cond_first_p)
4652 true_rhs = false_rhs = &arg;
4653 true_lhs = &true_value;
4654 false_lhs = &false_value;
4656 else
4658 true_lhs = false_lhs = &arg;
4659 true_rhs = &true_value;
4660 false_rhs = &false_value;
4663 if (TREE_CODE (cond) == COND_EXPR)
4665 test = TREE_OPERAND (cond, 0);
4666 true_value = TREE_OPERAND (cond, 1);
4667 false_value = TREE_OPERAND (cond, 2);
4668 /* If this operand is an expression that throws, then it does not make
4669 sense to try to perform a logical or arithmetic operation
4670 involving it. Instead of building `a + throw 3' for example,
4671 we simply build `a, throw 3'. */
4672 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4674 if (! cond_first_p)
4676 lhs_code = COMPOUND_EXPR;
4677 lhs_type = void_type_node;
4679 else
4680 lhs = true_value;
4682 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4684 if (! cond_first_p)
4686 rhs_code = COMPOUND_EXPR;
4687 rhs_type = void_type_node;
4689 else
4690 rhs = false_value;
4693 else
4695 tree testtype = TREE_TYPE (cond);
4696 test = cond;
4697 true_value = convert (testtype, integer_one_node);
4698 false_value = convert (testtype, integer_zero_node);
4701 /* If ARG is complex we want to make sure we only evaluate it once. Though
4702 this is only required if it is volatile, it might be more efficient even
4703 if it is not. However, if we succeed in folding one part to a constant,
4704 we do not need to make this SAVE_EXPR. Since we do this optimization
4705 primarily to see if we do end up with a constant and this SAVE_EXPR
4706 interferes with later optimizations, suppressing it when we can is
4707 important.
4709 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4710 do so. Don't try to see if the result is a constant if an arm is a
4711 COND_EXPR since we get exponential behavior in that case. */
4713 if (saved_expr_p (arg))
4714 save = 1;
4715 else if (lhs == 0 && rhs == 0
4716 && !TREE_CONSTANT (arg)
4717 && (*lang_hooks.decls.global_bindings_p) () == 0
4718 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4719 || TREE_SIDE_EFFECTS (arg)))
4721 if (TREE_CODE (true_value) != COND_EXPR)
4722 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4724 if (TREE_CODE (false_value) != COND_EXPR)
4725 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4727 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4728 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4730 arg = save_expr (arg);
4731 lhs = rhs = 0;
4732 save = 1;
4736 if (lhs == 0)
4737 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4738 if (rhs == 0)
4739 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4741 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4743 if (save)
4744 return build (COMPOUND_EXPR, type,
4745 convert (void_type_node, arg),
4746 strip_compound_expr (test, arg));
4747 else
4748 return convert (type, test);
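/* Editor's note -- illustrative sketch, not part of the original file.
   The basic rewrite performed above, in plain C: the binary operation
   is pushed into both arms of the conditional, where each arm can then
   fold further (for instance when A and one arm are constants).  */

static int
example_push_op_into_cond (int a, int b, int x, int y)
{
  /* a + (b ? x : y) folds to: */
  return b ? (a + x) : (a + y);
}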
4752 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4754 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4755 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4756 ADDEND is the same as X.
4758 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4759 and finite. The problematic cases are when X is zero, and its mode
4760 has signed zeros. In the case of rounding towards -infinity,
4761 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4762 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4764 static bool
4765 fold_real_zero_addition_p (tree type, tree addend, int negate)
4767 if (!real_zerop (addend))
4768 return false;
4770 /* Don't allow the fold with -fsignaling-nans. */
4771 if (HONOR_SNANS (TYPE_MODE (type)))
4772 return false;
4774 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4775 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4776 return true;
4778 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4779 if (TREE_CODE (addend) == REAL_CST
4780 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4781 negate = !negate;
4783 /* The mode has signed zeros, and we have to honor their sign.
4784 In this situation, there is only one case we can return true for.
4785 X - 0 is the same as X unless rounding towards -infinity is
4786 supported. */
4787 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
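/* Editor's note -- illustrative sketch, not part of the original file.
   Why X + 0.0 cannot be folded to X when signed zeros are honored:
   under the default rounding mode -0.0 + 0.0 yields +0.0, so the fold
   would lose the sign bit, whereas X - 0.0 preserves -0.0.  Uses the
   C99 signbit macro from <math.h>.  */

static int
example_signed_zero_pitfall (void)
{
  double x = -0.0;
  /* signbit (x) is nonzero but signbit (x + 0.0) is zero: the
     addition is not a no-op for negative zero.  Returns 1.  */
  return (signbit (x) != 0) != (signbit (x + 0.0) != 0);
}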
4790 /* Subroutine of fold() that checks comparisons of built-in math
4791 functions against real constants.
4793 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4794 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4795 is the type of the result and ARG0 and ARG1 are the operands of the
4796 comparison. ARG1 must be a TREE_REAL_CST.
4798 The function returns the constant folded tree if a simplification
4799 can be made, and NULL_TREE otherwise. */
4801 static tree
4802 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4803 tree type, tree arg0, tree arg1)
4805 REAL_VALUE_TYPE c;
4807 if (fcode == BUILT_IN_SQRT
4808 || fcode == BUILT_IN_SQRTF
4809 || fcode == BUILT_IN_SQRTL)
4811 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4812 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4814 c = TREE_REAL_CST (arg1);
4815 if (REAL_VALUE_NEGATIVE (c))
4817 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always false, if y is negative. */
4818 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4819 return omit_one_operand (type,
4820 convert (type, integer_zero_node),
4821 arg);
4823 /* sqrt(x) > y is always true, if y is negative and we
4824 don't care about NaNs, i.e. negative values of x. */
4825 if (code == NE_EXPR || !HONOR_NANS (mode))
4826 return omit_one_operand (type,
4827 convert (type, integer_one_node),
4828 arg);
4830 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4831 return fold (build (GE_EXPR, type, arg,
4832 build_real (TREE_TYPE (arg), dconst0)));
4834 else if (code == GT_EXPR || code == GE_EXPR)
4836 REAL_VALUE_TYPE c2;
4838 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4839 real_convert (&c2, mode, &c2);
4841 if (REAL_VALUE_ISINF (c2))
4843 /* sqrt(x) > y is x == +Inf, when y is very large. */
4844 if (HONOR_INFINITIES (mode))
4845 return fold (build (EQ_EXPR, type, arg,
4846 build_real (TREE_TYPE (arg), c2)));
4848 /* sqrt(x) > y is always false, when y is very large
4849 and we don't care about infinities. */
4850 return omit_one_operand (type,
4851 convert (type, integer_zero_node),
4852 arg);
4855 /* sqrt(x) > c is the same as x > c*c. */
4856 return fold (build (code, type, arg,
4857 build_real (TREE_TYPE (arg), c2)));
4859 else if (code == LT_EXPR || code == LE_EXPR)
4861 REAL_VALUE_TYPE c2;
4863 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4864 real_convert (&c2, mode, &c2);
4866 if (REAL_VALUE_ISINF (c2))
4868 /* sqrt(x) < y is always true, when y is a very large
4869 value and we don't care about NaNs or Infinities. */
4870 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4871 return omit_one_operand (type,
4872 convert (type, integer_one_node),
4873 arg);
4875 /* sqrt(x) < y is x != +Inf when y is very large and we
4876 don't care about NaNs. */
4877 if (! HONOR_NANS (mode))
4878 return fold (build (NE_EXPR, type, arg,
4879 build_real (TREE_TYPE (arg), c2)));
4881 /* sqrt(x) < y is x >= 0 when y is very large and we
4882 don't care about Infinities. */
4883 if (! HONOR_INFINITIES (mode))
4884 return fold (build (GE_EXPR, type, arg,
4885 build_real (TREE_TYPE (arg), dconst0)));
4887 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
4888 if ((*lang_hooks.decls.global_bindings_p) () != 0
4889 || CONTAINS_PLACEHOLDER_P (arg))
4890 return NULL_TREE;
4892 arg = save_expr (arg);
4893 return fold (build (TRUTH_ANDIF_EXPR, type,
4894 fold (build (GE_EXPR, type, arg,
4895 build_real (TREE_TYPE (arg),
4896 dconst0))),
4897 fold (build (NE_EXPR, type, arg,
4898 build_real (TREE_TYPE (arg),
4899 c2)))));
4902 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4903 if (! HONOR_NANS (mode))
4904 return fold (build (code, type, arg,
4905 build_real (TREE_TYPE (arg), c2)));
4907 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4908 if ((*lang_hooks.decls.global_bindings_p) () == 0
4909 && ! CONTAINS_PLACEHOLDER_P (arg))
4911 arg = save_expr (arg);
4912 return fold (build (TRUTH_ANDIF_EXPR, type,
4913 fold (build (GE_EXPR, type, arg,
4914 build_real (TREE_TYPE (arg),
4915 dconst0))),
4916 fold (build (code, type, arg,
4917 build_real (TREE_TYPE (arg),
4918 c2)))));
4923 return NULL_TREE;
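/* Editor's note -- illustrative sketch, not part of the original file.
   The core identity used above: for c >= 0, sqrt(x) > c is equivalent
   to x > c*c, because squaring is monotonic on nonnegative values.
   The surrounding code exists to guard the cases this sketch ignores:
   negative c, NaNs, and c*c overflowing to +Inf.  */

static int
example_sqrt_compare (double x, double c)
{
  /* sqrt (x) > c folds to (assuming c >= 0, no NaNs, c*c finite): */
  return x > c * c;
}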
4926 /* Subroutine of fold() that optimizes comparisons against Infinities,
4927 either +Inf or -Inf.
4929 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4930 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4931 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4933 The function returns the constant folded tree if a simplification
4934 can be made, and NULL_TREE otherwise. */
4936 static tree
4937 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
4939 enum machine_mode mode;
4940 REAL_VALUE_TYPE max;
4941 tree temp;
4942 bool neg;
4944 mode = TYPE_MODE (TREE_TYPE (arg0));
4946 /* For negative infinity swap the sense of the comparison. */
4947 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
4948 if (neg)
4949 code = swap_tree_comparison (code);
4951 switch (code)
4953 case GT_EXPR:
4954 /* x > +Inf is always false, if we ignore sNaNs. */
4955 if (HONOR_SNANS (mode))
4956 return NULL_TREE;
4957 return omit_one_operand (type,
4958 convert (type, integer_zero_node),
4959 arg0);
4961 case LE_EXPR:
4962 /* x <= +Inf is always true, if we don't care about NaNs. */
4963 if (! HONOR_NANS (mode))
4964 return omit_one_operand (type,
4965 convert (type, integer_one_node),
4966 arg0);
4968 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
4969 if ((*lang_hooks.decls.global_bindings_p) () == 0
4970 && ! CONTAINS_PLACEHOLDER_P (arg0))
4972 arg0 = save_expr (arg0);
4973 return fold (build (EQ_EXPR, type, arg0, arg0));
4975 break;
4977 case EQ_EXPR:
4978 case GE_EXPR:
4979 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
4980 real_maxval (&max, neg, mode);
4981 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
4982 arg0, build_real (TREE_TYPE (arg0), max)));
4984 case LT_EXPR:
4985 /* x < +Inf is always equal to x <= DBL_MAX. */
4986 real_maxval (&max, neg, mode);
4987 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4988 arg0, build_real (TREE_TYPE (arg0), max)));
4990 case NE_EXPR:
4991 /* x != +Inf is always equal to !(x > DBL_MAX). */
4992 real_maxval (&max, neg, mode);
4993 if (! HONOR_NANS (mode))
4994 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4995 arg0, build_real (TREE_TYPE (arg0), max)));
4996 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
4997 arg0, build_real (TREE_TYPE (arg0), max)));
4998 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5000 default:
5001 break;
5004 return NULL_TREE;
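/* Editor's note -- illustrative sketch, not part of the original file.
   The LT case above, in plain C: x < +Inf holds exactly when
   x <= DBL_MAX (both sides are false for NaN), so the fold avoids
   materializing an infinity.  Uses DBL_MAX from <float.h>.  */

static int
example_lt_infinity (double x)
{
  /* x < +Inf folds to: */
  return x <= DBL_MAX;
}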
5007 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5008 equality/inequality test, then return a simplified form of
5009 the test using shifts and logical operations. Otherwise return
5010 NULL. TYPE is the desired result type. */
5012 tree
5013 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5014 tree result_type)
5016 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5017 operand 0. */
5018 if (code == TRUTH_NOT_EXPR)
5020 code = TREE_CODE (arg0);
5021 if (code != NE_EXPR && code != EQ_EXPR)
5022 return NULL_TREE;
5024 /* Extract the arguments of the EQ/NE. */
5025 arg1 = TREE_OPERAND (arg0, 1);
5026 arg0 = TREE_OPERAND (arg0, 0);
5028 /* This requires us to invert the code. */
5029 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5032 /* If this is testing a single bit, we can optimize the test. */
5033 if ((code == NE_EXPR || code == EQ_EXPR)
5034 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5035 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5037 tree inner = TREE_OPERAND (arg0, 0);
5038 tree type = TREE_TYPE (arg0);
5039 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5040 enum machine_mode operand_mode = TYPE_MODE (type);
5041 int ops_unsigned;
5042 tree signed_type, unsigned_type, intermediate_type;
5043 tree arg00;
5045 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5046 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5047 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5048 if (arg00 != NULL_TREE)
5050 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5051 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5052 convert (stype, arg00),
5053 convert (stype, integer_zero_node)));
5056 /* At this point, we know that arg0 is not testing the sign bit. */
5057 if (TYPE_PRECISION (type) - 1 == bitnum)
5058 abort ();
5060 /* Otherwise we have (A & C) != 0 where C is a single bit,
5061 convert that into ((A >> C2) & 1), where C2 = log2(C).
5062 Similarly for (A & C) == 0. */
5064 /* If INNER is a right shift of a constant and it plus BITNUM does
5065 not overflow, adjust BITNUM and INNER. */
5066 if (TREE_CODE (inner) == RSHIFT_EXPR
5067 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5068 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5069 && bitnum < TYPE_PRECISION (type)
5070 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5071 bitnum - TYPE_PRECISION (type)))
5073 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5074 inner = TREE_OPERAND (inner, 0);
5077 /* If we are going to be able to omit the AND below, we must do our
5078 operations as unsigned. If we must use the AND, we have a choice.
5079 Normally unsigned is faster, but for some machines signed is. */
5080 #ifdef LOAD_EXTEND_OP
5081 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5082 #else
5083 ops_unsigned = 1;
5084 #endif
5086 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5087 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5088 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5089 inner = convert (intermediate_type, inner);
5091 if (bitnum != 0)
5092 inner = build (RSHIFT_EXPR, intermediate_type,
5093 inner, size_int (bitnum));
5095 if (code == EQ_EXPR)
5096 inner = build (BIT_XOR_EXPR, intermediate_type,
5097 inner, integer_one_node);
5099 /* Put the AND last so it can combine with more things. */
5100 inner = build (BIT_AND_EXPR, intermediate_type,
5101 inner, integer_one_node);
5103 /* Make sure to return the proper type. */
5104 inner = convert (result_type, inner);
5106 return inner;
5108 return NULL_TREE;
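/* Editor's note -- illustrative sketch, not part of the original file.
   The shape of the rewrite above for a concrete mask: testing bit 3
   becomes a shift and an AND.  */

static int
example_single_bit_test (unsigned int a)
{
  /* (a & 8) != 0 folds to the expression below; the == 0 form gets
     an extra XOR with 1 before the final AND.  */
  return (a >> 3) & 1;
}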
5111 /* Check whether we are allowed to reorder operands arg0 and arg1,
5112 such that the evaluation of arg1 occurs before arg0. */
5114 static bool
5115 reorder_operands_p (tree arg0, tree arg1)
5117 if (! flag_evaluation_order)
5118 return true;
5119 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5120 return true;
5121 return ! TREE_SIDE_EFFECTS (arg0)
5122 && ! TREE_SIDE_EFFECTS (arg1);
5125 /* Test whether it is preferable to swap two operands, ARG0 and
5126 ARG1, for example because ARG0 is an integer constant and ARG1
5127 isn't. If REORDER is true, only recommend swapping if we can
5128 evaluate the operands in reverse order. */
5130 static bool
5131 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5133 STRIP_SIGN_NOPS (arg0);
5134 STRIP_SIGN_NOPS (arg1);
5136 if (TREE_CODE (arg1) == INTEGER_CST)
5137 return 0;
5138 if (TREE_CODE (arg0) == INTEGER_CST)
5139 return 1;
5141 if (TREE_CODE (arg1) == REAL_CST)
5142 return 0;
5143 if (TREE_CODE (arg0) == REAL_CST)
5144 return 1;
5146 if (TREE_CODE (arg1) == COMPLEX_CST)
5147 return 0;
5148 if (TREE_CODE (arg0) == COMPLEX_CST)
5149 return 1;
5151 if (TREE_CONSTANT (arg1))
5152 return 0;
5153 if (TREE_CONSTANT (arg0))
5154 return 1;
5156 if (optimize_size)
5157 return 0;
5159 if (reorder && flag_evaluation_order
5160 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5161 return 0;
5163 if (DECL_P (arg1))
5164 return 0;
5165 if (DECL_P (arg0))
5166 return 1;
5168 return 0;
5171 /* Perform constant folding and related simplification of EXPR.
5172 The related simplifications include x*1 => x, x*0 => 0, etc.,
5173 and application of the associative law.
5174 NOP_EXPR conversions may be removed freely (as long as we
5175 are careful not to change the C type of the overall expression).
5176 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5177 but we can constant-fold them if they have constant operands. */
5179 #ifdef ENABLE_FOLD_CHECKING
5180 # define fold(x) fold_1 (x)
5181 static tree fold_1 (tree);
5182 static
5183 #endif
5184 tree
5185 fold (tree expr)
5187 tree t = expr, orig_t;
5188 tree t1 = NULL_TREE;
5189 tree tem;
5190 tree type = TREE_TYPE (expr);
5191 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5192 enum tree_code code = TREE_CODE (t);
5193 int kind = TREE_CODE_CLASS (code);
5194 int invert;
5195 /* WINS will be nonzero when the switch is done
5196 if all operands are constant. */
5197 int wins = 1;
5199 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5200 Likewise for a SAVE_EXPR that's already been evaluated. */
5201 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5202 return t;
5204 /* Return right away if a constant. */
5205 if (kind == 'c')
5206 return t;
5208 orig_t = t;
5210 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5212 tree subop;
5214 /* Special case for conversion ops that can have fixed point args. */
5215 arg0 = TREE_OPERAND (t, 0);
5217 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5218 if (arg0 != 0)
5219 STRIP_SIGN_NOPS (arg0);
5221 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5222 subop = TREE_REALPART (arg0);
5223 else
5224 subop = arg0;
5226 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5227 && TREE_CODE (subop) != REAL_CST)
5228 /* Note that TREE_CONSTANT isn't enough:
5229 static var addresses are constant but we can't
5230 do arithmetic on them. */
5231 wins = 0;
5233 else if (IS_EXPR_CODE_CLASS (kind))
5235 int len = first_rtl_op (code);
5236 int i;
5237 for (i = 0; i < len; i++)
5239 tree op = TREE_OPERAND (t, i);
5240 tree subop;
5242 if (op == 0)
5243 continue; /* Valid for CALL_EXPR, at least. */
5245 if (kind == '<' || code == RSHIFT_EXPR)
5247 /* Signedness matters here. Perhaps we can refine this
5248 later. */
5249 STRIP_SIGN_NOPS (op);
5251 else
5252 /* Strip any conversions that don't change the mode. */
5253 STRIP_NOPS (op);
5255 if (TREE_CODE (op) == COMPLEX_CST)
5256 subop = TREE_REALPART (op);
5257 else
5258 subop = op;
5260 if (TREE_CODE (subop) != INTEGER_CST
5261 && TREE_CODE (subop) != REAL_CST)
5262 /* Note that TREE_CONSTANT isn't enough:
5263 static var addresses are constant but we can't
5264 do arithmetic on them. */
5265 wins = 0;
5267 if (i == 0)
5268 arg0 = op;
5269 else if (i == 1)
5270 arg1 = op;
5274 /* If this is a commutative operation, and ARG0 is a constant, move it
5275 to ARG1 to reduce the number of tests below. */
5276 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5277 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5278 || code == BIT_AND_EXPR)
5279 && tree_swap_operands_p (arg0, arg1, true))
5280 return fold (build (code, type, arg1, arg0));
5282 /* Now WINS is set as described above,
5283 ARG0 is the first operand of EXPR,
5284 and ARG1 is the second operand (if it has more than one operand).
5286 First check for cases where an arithmetic operation is applied to a
5287 compound, conditional, or comparison operation. Push the arithmetic
5288 operation inside the compound or conditional to see if any folding
5289 can then be done. Convert comparison to conditional for this purpose.
5290 This also optimizes non-constant cases that used to be done in
5291 expand_expr.
5293 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5294 where one of the operands is a truth value and the other is a truth
5295 value or a BIT_AND_EXPR with the constant 1. In that case, the
5296 code below would make the expression more complex. Change it to a
5297 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5298 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5300 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5301 || code == EQ_EXPR || code == NE_EXPR)
5302 && ((truth_value_p (TREE_CODE (arg0))
5303 && (truth_value_p (TREE_CODE (arg1))
5304 || (TREE_CODE (arg1) == BIT_AND_EXPR
5305 && integer_onep (TREE_OPERAND (arg1, 1)))))
5306 || (truth_value_p (TREE_CODE (arg1))
5307 && (truth_value_p (TREE_CODE (arg0))
5308 || (TREE_CODE (arg0) == BIT_AND_EXPR
5309 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5311 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5312 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5313 : TRUTH_XOR_EXPR,
5314 type, arg0, arg1));
5316 if (code == EQ_EXPR)
5317 t = invert_truthvalue (t);
5319 return t;
5322 if (TREE_CODE_CLASS (code) == '1')
5324 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5325 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5326 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5327 else if (TREE_CODE (arg0) == COND_EXPR)
5329 tree arg01 = TREE_OPERAND (arg0, 1);
5330 tree arg02 = TREE_OPERAND (arg0, 2);
5331 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5332 arg01 = fold (build1 (code, type, arg01));
5333 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5334 arg02 = fold (build1 (code, type, arg02));
5335 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5336 arg01, arg02));
5338 /* If this was a conversion, and all we did was to move it
5339 inside the COND_EXPR, bring it back out. But leave it if
5340 it is a conversion from integer to integer and the
5341 result precision is no wider than a word since such a
5342 conversion is cheap and may be optimized away by combine,
5343 while it couldn't if it were outside the COND_EXPR. Then return
5344 so we don't get into an infinite recursion loop taking the
5345 conversion out and then back in. */
5347 if ((code == NOP_EXPR || code == CONVERT_EXPR
5348 || code == NON_LVALUE_EXPR)
5349 && TREE_CODE (t) == COND_EXPR
5350 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5351 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5352 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5353 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5354 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5355 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5356 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5357 && (INTEGRAL_TYPE_P
5358 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5359 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5360 t = build1 (code, type,
5361 build (COND_EXPR,
5362 TREE_TYPE (TREE_OPERAND
5363 (TREE_OPERAND (t, 1), 0)),
5364 TREE_OPERAND (t, 0),
5365 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5366 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5367 return t;
5369 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5370 return fold (build (COND_EXPR, type, arg0,
5371 fold (build1 (code, type, integer_one_node)),
5372 fold (build1 (code, type, integer_zero_node))));
5374 else if (TREE_CODE_CLASS (code) == '<'
5375 && TREE_CODE (arg0) == COMPOUND_EXPR)
5376 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5377 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5378 else if (TREE_CODE_CLASS (code) == '<'
5379 && TREE_CODE (arg1) == COMPOUND_EXPR)
5380 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5381 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5382 else if (TREE_CODE_CLASS (code) == '2'
5383 || TREE_CODE_CLASS (code) == '<')
5385 if (TREE_CODE (arg1) == COMPOUND_EXPR
5386 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5387 && ! TREE_SIDE_EFFECTS (arg0))
5388 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5389 fold (build (code, type,
5390 arg0, TREE_OPERAND (arg1, 1))));
5391 else if ((TREE_CODE (arg1) == COND_EXPR
5392 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5393 && TREE_CODE_CLASS (code) != '<'))
5394 && (TREE_CODE (arg0) != COND_EXPR
5395 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5396 && (! TREE_SIDE_EFFECTS (arg0)
5397 || ((*lang_hooks.decls.global_bindings_p) () == 0
5398 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5399 return
5400 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5401 /*cond_first_p=*/0);
5402 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5403 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5404 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5405 else if ((TREE_CODE (arg0) == COND_EXPR
5406 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5407 && TREE_CODE_CLASS (code) != '<'))
5408 && (TREE_CODE (arg1) != COND_EXPR
5409 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5410 && (! TREE_SIDE_EFFECTS (arg1)
5411 || ((*lang_hooks.decls.global_bindings_p) () == 0
5412 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5413 return
5414 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5415 /*cond_first_p=*/1);
5418 switch (code)
5420 case INTEGER_CST:
5421 case REAL_CST:
5422 case VECTOR_CST:
5423 case STRING_CST:
5424 case COMPLEX_CST:
5425 case CONSTRUCTOR:
5426 return t;
5428 case CONST_DECL:
5429 return fold (DECL_INITIAL (t));
5431 case NOP_EXPR:
5432 case FLOAT_EXPR:
5433 case CONVERT_EXPR:
5434 case FIX_TRUNC_EXPR:
5435 /* Other kinds of FIX are not handled properly by fold_convert. */
5437 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5438 return TREE_OPERAND (t, 0);
5440 /* Handle cases of two conversions in a row. */
5441 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5442 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5444 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5445 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5446 tree final_type = TREE_TYPE (t);
5447 int inside_int = INTEGRAL_TYPE_P (inside_type);
5448 int inside_ptr = POINTER_TYPE_P (inside_type);
5449 int inside_float = FLOAT_TYPE_P (inside_type);
5450 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5451 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5452 int inter_int = INTEGRAL_TYPE_P (inter_type);
5453 int inter_ptr = POINTER_TYPE_P (inter_type);
5454 int inter_float = FLOAT_TYPE_P (inter_type);
5455 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5456 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5457 int final_int = INTEGRAL_TYPE_P (final_type);
5458 int final_ptr = POINTER_TYPE_P (final_type);
5459 int final_float = FLOAT_TYPE_P (final_type);
5460 unsigned int final_prec = TYPE_PRECISION (final_type);
5461 int final_unsignedp = TREE_UNSIGNED (final_type);
5463 /* In addition to the cases of two conversions in a row
5464 handled below, if we are converting something to its own
5465 type via an object of identical or wider precision, neither
5466 conversion is needed. */
5467 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5468 && ((inter_int && final_int) || (inter_float && final_float))
5469 && inter_prec >= final_prec)
5470 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5472 /* Likewise, if the intermediate and final types are either both
5473 float or both integer, we don't need the middle conversion if
5474 it is wider than the final type and doesn't change the signedness
5475 (for integers). Avoid this if the final type is a pointer
5476 since then we sometimes need the inner conversion. Likewise if
5477 the outer has a precision not equal to the size of its mode. */
5478 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5479 || (inter_float && inside_float))
5480 && inter_prec >= inside_prec
5481 && (inter_float || inter_unsignedp == inside_unsignedp)
5482 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5483 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5484 && ! final_ptr)
5485 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5487 /* If we have a sign-extension of a zero-extended value, we can
5488 replace that by a single zero-extension. */
5489 if (inside_int && inter_int && final_int
5490 && inside_prec < inter_prec && inter_prec < final_prec
5491 && inside_unsignedp && !inter_unsignedp)
5492 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5494 /* Two conversions in a row are not needed unless:
5495 - some conversion is floating-point (overstrict for now), or
5496 - the intermediate type is narrower than both initial and
5497 final, or
5498 - the intermediate type and innermost type differ in signedness,
5499 and the outermost type is wider than the intermediate, or
5500 - the initial type is a pointer type and the precisions of the
5501 intermediate and final types differ, or
5502 - the final type is a pointer type and the precisions of the
5503 initial and intermediate types differ. */
5504 if (! inside_float && ! inter_float && ! final_float
5505 && (inter_prec > inside_prec || inter_prec > final_prec)
5506 && ! (inside_int && inter_int
5507 && inter_unsignedp != inside_unsignedp
5508 && inter_prec < final_prec)
5509 && ((inter_unsignedp && inter_prec > inside_prec)
5510 == (final_unsignedp && final_prec > inter_prec))
5511 && ! (inside_ptr && inter_prec != final_prec)
5512 && ! (final_ptr && inside_prec != inter_prec)
5513 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5514 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5515 && ! final_ptr)
5516 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
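/* Editorial example (not part of the original source): with the usual
   16-bit short and a long at least as wide as int,

       int f (short s) { return (int) (long) s; }

   drops the intermediate widening and becomes (int) s, while
   (int) (char) s keeps both conversions because the intermediate type
   is narrower than the initial and final ones.  */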
5519 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5520 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5521 /* Detect assigning a bitfield. */
5522 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5523 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5525 /* Don't leave an assignment inside a conversion
5526 unless assigning a bitfield. */
5527 tree prev = TREE_OPERAND (t, 0);
5528 if (t == orig_t)
5529 t = copy_node (t);
5530 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5531 /* First do the assignment, then return converted constant. */
5532 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5533 TREE_USED (t) = 1;
5534 return t;
5537 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5538 constant (if x has signed type, the sign bit cannot be set
5539 in c). This folds extension into the BIT_AND_EXPR. */
5540 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5541 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5542 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5543 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5545 tree and = TREE_OPERAND (t, 0);
5546 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5547 int change = 0;
5549 if (TREE_UNSIGNED (TREE_TYPE (and))
5550 || (TYPE_PRECISION (TREE_TYPE (t))
5551 <= TYPE_PRECISION (TREE_TYPE (and))))
5552 change = 1;
5553 else if (TYPE_PRECISION (TREE_TYPE (and1))
5554 <= HOST_BITS_PER_WIDE_INT
5555 && host_integerp (and1, 1))
5557 unsigned HOST_WIDE_INT cst;
5559 cst = tree_low_cst (and1, 1);
5560 cst &= (HOST_WIDE_INT) -1
5561 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5562 change = (cst == 0);
5563 #ifdef LOAD_EXTEND_OP
5564 if (change
5565 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5566 == ZERO_EXTEND))
5568 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5569 and0 = convert (uns, and0);
5570 and1 = convert (uns, and1);
5572 #endif
5574 if (change)
5575 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5576 convert (TREE_TYPE (t), and0),
5577 convert (TREE_TYPE (t), and1)));
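/* Editorial example (not part of the original source): assuming
   32-bit int and 64-bit unsigned long,

       unsigned long f (int x) { return (unsigned long) (x & 0xff); }

   cannot set the sign bit through the mask, so the widening is folded
   into the mask: ((unsigned long) x) & 0xff.  */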
5580 if (!wins)
5582 if (TREE_CONSTANT (t) != TREE_CONSTANT (arg0))
5584 if (t == orig_t)
5585 t = copy_node (t);
5586 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5588 return t;
5590 return fold_convert (t, arg0);
5592 case VIEW_CONVERT_EXPR:
5593 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5594 return build1 (VIEW_CONVERT_EXPR, type,
5595 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5596 return t;
5598 case COMPONENT_REF:
5599 if (TREE_CODE (arg0) == CONSTRUCTOR
5600 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5602 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5603 if (m)
5604 t = TREE_VALUE (m);
5606 return t;
5608 case RANGE_EXPR:
5609 if (TREE_CONSTANT (t) != wins)
5611 if (t == orig_t)
5612 t = copy_node (t);
5613 TREE_CONSTANT (t) = wins;
5615 return t;
5617 case NEGATE_EXPR:
5618 if (negate_expr_p (arg0))
5619 return negate_expr (arg0);
5620 return t;
5622 case ABS_EXPR:
5623 if (wins)
5625 if (TREE_CODE (arg0) == INTEGER_CST)
5627 /* If the value is unsigned, then the absolute value is
5628 the same as the ordinary value. */
5629 if (TREE_UNSIGNED (type))
5630 return arg0;
5631 /* Similarly, if the value is non-negative. */
5632 else if (INT_CST_LT (integer_minus_one_node, arg0))
5633 return arg0;
5634 /* If the value is negative, then the absolute value is
5635 its negation. */
5636 else
5638 unsigned HOST_WIDE_INT low;
5639 HOST_WIDE_INT high;
5640 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5641 TREE_INT_CST_HIGH (arg0),
5642 &low, &high);
5643 t = build_int_2 (low, high);
5644 TREE_TYPE (t) = type;
5645 TREE_OVERFLOW (t)
5646 = (TREE_OVERFLOW (arg0)
5647 | force_fit_type (t, overflow));
5648 TREE_CONSTANT_OVERFLOW (t)
5649 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5652 else if (TREE_CODE (arg0) == REAL_CST)
5654 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5655 t = build_real (type,
5656 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5659 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5660 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5661 /* Convert fabs((double)float) into (double)fabsf(float). */
5662 else if (TREE_CODE (arg0) == NOP_EXPR
5663 && TREE_CODE (type) == REAL_TYPE)
5665 tree targ0 = strip_float_extensions (arg0);
5666 if (targ0 != arg0)
5667 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5668 targ0)));
5670 else if (tree_expr_nonnegative_p (arg0))
5671 return arg0;
5672 return t;
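/* Editorial example (not part of the original source): mirroring the
   comment above,

       double f (float x) { return fabs ((double) x); }

   is rewritten so the absolute value is taken in float and only then
   widened, i.e. (double) fabsf (x); fabs (-x) likewise folds straight
   to fabs (x) via the NEGATE_EXPR branch.  */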
5674 case CONJ_EXPR:
5675 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5676 return convert (type, arg0);
5677 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5678 return build (COMPLEX_EXPR, type,
5679 TREE_OPERAND (arg0, 0),
5680 negate_expr (TREE_OPERAND (arg0, 1)));
5681 else if (TREE_CODE (arg0) == COMPLEX_CST)
5682 return build_complex (type, TREE_REALPART (arg0),
5683 negate_expr (TREE_IMAGPART (arg0)));
5684 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5685 return fold (build (TREE_CODE (arg0), type,
5686 fold (build1 (CONJ_EXPR, type,
5687 TREE_OPERAND (arg0, 0))),
5688 fold (build1 (CONJ_EXPR,
5689 type, TREE_OPERAND (arg0, 1)))));
5690 else if (TREE_CODE (arg0) == CONJ_EXPR)
5691 return TREE_OPERAND (arg0, 0);
5692 return t;
5694 case BIT_NOT_EXPR:
5695 if (wins)
5697 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5698 ~ TREE_INT_CST_HIGH (arg0));
5699 TREE_TYPE (t) = type;
5700 force_fit_type (t, 0);
5701 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5702 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5704 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5705 return TREE_OPERAND (arg0, 0);
5706 return t;
5708 case PLUS_EXPR:
5709 /* A + (-B) -> A - B */
5710 if (TREE_CODE (arg1) == NEGATE_EXPR)
5711 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5712 /* (-A) + B -> B - A */
5713 if (TREE_CODE (arg0) == NEGATE_EXPR)
5714 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5715 else if (! FLOAT_TYPE_P (type))
5717 if (integer_zerop (arg1))
5718 return non_lvalue (convert (type, arg0));
5720 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5721 with a constant, and the two constants have no bits in common,
5722 we should treat this as a BIT_IOR_EXPR since this may produce more
5723 simplifications. */
5724 if (TREE_CODE (arg0) == BIT_AND_EXPR
5725 && TREE_CODE (arg1) == BIT_AND_EXPR
5726 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5727 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5728 && integer_zerop (const_binop (BIT_AND_EXPR,
5729 TREE_OPERAND (arg0, 1),
5730 TREE_OPERAND (arg1, 1), 0)))
5732 code = BIT_IOR_EXPR;
5733 goto bit_ior;
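/* Editorial example (not part of the original source): the masks in

       int f (int x, int y) { return (x & 0x0f) + (y & 0xf0); }

   share no bits, so no carry can occur and the addition is handled as
   (x & 0x0f) | (y & 0xf0) by the BIT_IOR_EXPR code.  */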
5736 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5737 (plus (plus (mult) (mult)) (foo)) so that we can
5738 take advantage of the factoring cases below. */
5739 if ((TREE_CODE (arg0) == PLUS_EXPR
5740 && TREE_CODE (arg1) == MULT_EXPR)
5741 || (TREE_CODE (arg1) == PLUS_EXPR
5742 && TREE_CODE (arg0) == MULT_EXPR))
5744 tree parg0, parg1, parg, marg;
5746 if (TREE_CODE (arg0) == PLUS_EXPR)
5747 parg = arg0, marg = arg1;
5748 else
5749 parg = arg1, marg = arg0;
5750 parg0 = TREE_OPERAND (parg, 0);
5751 parg1 = TREE_OPERAND (parg, 1);
5752 STRIP_NOPS (parg0);
5753 STRIP_NOPS (parg1);
5755 if (TREE_CODE (parg0) == MULT_EXPR
5756 && TREE_CODE (parg1) != MULT_EXPR)
5757 return fold (build (PLUS_EXPR, type,
5758 fold (build (PLUS_EXPR, type,
5759 convert (type, parg0),
5760 convert (type, marg))),
5761 convert (type, parg1)));
5762 if (TREE_CODE (parg0) != MULT_EXPR
5763 && TREE_CODE (parg1) == MULT_EXPR)
5764 return fold (build (PLUS_EXPR, type,
5765 fold (build (PLUS_EXPR, type,
5766 convert (type, parg1),
5767 convert (type, marg))),
5768 convert (type, parg0)));
5771 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5773 tree arg00, arg01, arg10, arg11;
5774 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5776 /* (A * C) + (B * C) -> (A+B) * C.
5777 We are most concerned about the case where C is a constant,
5778 but other combinations show up during loop reduction. Since
5779 it is not difficult, try all four possibilities. */
5781 arg00 = TREE_OPERAND (arg0, 0);
5782 arg01 = TREE_OPERAND (arg0, 1);
5783 arg10 = TREE_OPERAND (arg1, 0);
5784 arg11 = TREE_OPERAND (arg1, 1);
5785 same = NULL_TREE;
5787 if (operand_equal_p (arg01, arg11, 0))
5788 same = arg01, alt0 = arg00, alt1 = arg10;
5789 else if (operand_equal_p (arg00, arg10, 0))
5790 same = arg00, alt0 = arg01, alt1 = arg11;
5791 else if (operand_equal_p (arg00, arg11, 0))
5792 same = arg00, alt0 = arg01, alt1 = arg10;
5793 else if (operand_equal_p (arg01, arg10, 0))
5794 same = arg01, alt0 = arg00, alt1 = arg11;
5796 /* No identical multiplicands; see if we can find a common
5797 power-of-two factor in non-power-of-two multiplies. This
5798 can help in multi-dimensional array access. */
5799 else if (TREE_CODE (arg01) == INTEGER_CST
5800 && TREE_CODE (arg11) == INTEGER_CST
5801 && TREE_INT_CST_HIGH (arg01) == 0
5802 && TREE_INT_CST_HIGH (arg11) == 0)
5804 HOST_WIDE_INT int01, int11, tmp;
5805 int01 = TREE_INT_CST_LOW (arg01);
5806 int11 = TREE_INT_CST_LOW (arg11);
5808 /* Move min of absolute values to int11. */
5809 if ((int01 >= 0 ? int01 : -int01)
5810 < (int11 >= 0 ? int11 : -int11))
5812 tmp = int01, int01 = int11, int11 = tmp;
5813 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5814 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5817 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5819 alt0 = fold (build (MULT_EXPR, type, arg00,
5820 build_int_2 (int01 / int11, 0)));
5821 alt1 = arg10;
5822 same = arg11;
5826 if (same)
5827 return fold (build (MULT_EXPR, type,
5828 fold (build (PLUS_EXPR, type, alt0, alt1)),
5829 same));
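/* Editorial example (not part of the original source): with a common
   multiplicand,

       int f (int a, int b, int c) { return a * c + b * c; }

   folds to (a + b) * c, and the power-of-two rule above turns
   a * 12 + b * 4 into (a * 3 + b) * 4, a pattern that shows up in
   multi-dimensional array indexing.  */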
5832 else
5834 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5835 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5836 return non_lvalue (convert (type, arg0));
5838 /* Likewise if the operands are reversed. */
5839 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5840 return non_lvalue (convert (type, arg1));
5842 /* Convert x+x into x*2.0. */
5843 if (operand_equal_p (arg0, arg1, 0)
5844 && SCALAR_FLOAT_TYPE_P (type))
5845 return fold (build (MULT_EXPR, type, arg0,
5846 build_real (type, dconst2)));
5848 /* Convert x*c+x into x*(c+1). */
5849 if (flag_unsafe_math_optimizations
5850 && TREE_CODE (arg0) == MULT_EXPR
5851 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5852 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5853 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
5855 REAL_VALUE_TYPE c;
5857 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5858 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5859 return fold (build (MULT_EXPR, type, arg1,
5860 build_real (type, c)));
5863 /* Convert x+x*c into x*(c+1). */
5864 if (flag_unsafe_math_optimizations
5865 && TREE_CODE (arg1) == MULT_EXPR
5866 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5867 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5868 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
5870 REAL_VALUE_TYPE c;
5872 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5873 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5874 return fold (build (MULT_EXPR, type, arg0,
5875 build_real (type, c)));
5878 /* Convert x*c1+x*c2 into x*(c1+c2). */
5879 if (flag_unsafe_math_optimizations
5880 && TREE_CODE (arg0) == MULT_EXPR
5881 && TREE_CODE (arg1) == MULT_EXPR
5882 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5883 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5884 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5885 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5886 && operand_equal_p (TREE_OPERAND (arg0, 0),
5887 TREE_OPERAND (arg1, 0), 0))
5889 REAL_VALUE_TYPE c1, c2;
5891 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5892 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5893 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
5894 return fold (build (MULT_EXPR, type,
5895 TREE_OPERAND (arg0, 0),
5896 build_real (type, c1)));
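/* Editorial example (not part of the original source): under
   -funsafe-math-optimizations the three folds above combine constant
   multipliers, e.g.

       double f (double x) { return x * 3.0 + x * 4.0; }

   becomes x * 7.0; the x + x -> x * 2.0 fold just above needs no flag
   because doubling is always exact.  */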
5900 bit_rotate:
5901 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5902 is a rotate of A by C1 bits. */
5903 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5904 is a rotate of A by B bits. */
5906 enum tree_code code0, code1;
5907 code0 = TREE_CODE (arg0);
5908 code1 = TREE_CODE (arg1);
5909 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5910 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5911 && operand_equal_p (TREE_OPERAND (arg0, 0),
5912 TREE_OPERAND (arg1, 0), 0)
5913 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5915 tree tree01, tree11;
5916 enum tree_code code01, code11;
5918 tree01 = TREE_OPERAND (arg0, 1);
5919 tree11 = TREE_OPERAND (arg1, 1);
5920 STRIP_NOPS (tree01);
5921 STRIP_NOPS (tree11);
5922 code01 = TREE_CODE (tree01);
5923 code11 = TREE_CODE (tree11);
5924 if (code01 == INTEGER_CST
5925 && code11 == INTEGER_CST
5926 && TREE_INT_CST_HIGH (tree01) == 0
5927 && TREE_INT_CST_HIGH (tree11) == 0
5928 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5929 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5930 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5931 code0 == LSHIFT_EXPR ? tree01 : tree11);
5932 else if (code11 == MINUS_EXPR)
5934 tree tree110, tree111;
5935 tree110 = TREE_OPERAND (tree11, 0);
5936 tree111 = TREE_OPERAND (tree11, 1);
5937 STRIP_NOPS (tree110);
5938 STRIP_NOPS (tree111);
5939 if (TREE_CODE (tree110) == INTEGER_CST
5940 && 0 == compare_tree_int (tree110,
5941 TYPE_PRECISION
5942 (TREE_TYPE (TREE_OPERAND
5943 (arg0, 0))))
5944 && operand_equal_p (tree01, tree111, 0))
5945 return build ((code0 == LSHIFT_EXPR
5946 ? LROTATE_EXPR
5947 : RROTATE_EXPR),
5948 type, TREE_OPERAND (arg0, 0), tree01);
5950 else if (code01 == MINUS_EXPR)
5952 tree tree010, tree011;
5953 tree010 = TREE_OPERAND (tree01, 0);
5954 tree011 = TREE_OPERAND (tree01, 1);
5955 STRIP_NOPS (tree010);
5956 STRIP_NOPS (tree011);
5957 if (TREE_CODE (tree010) == INTEGER_CST
5958 && 0 == compare_tree_int (tree010,
5959 TYPE_PRECISION
5960 (TREE_TYPE (TREE_OPERAND
5961 (arg0, 0))))
5962 && operand_equal_p (tree11, tree011, 0))
5963 return build ((code0 != LSHIFT_EXPR
5964 ? LROTATE_EXPR
5965 : RROTATE_EXPR),
5966 type, TREE_OPERAND (arg0, 0), tree11);
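/* Editorial example (not part of the original source): assuming
   32-bit unsigned int, the classic portable rotate idiom

       unsigned rot (unsigned x, unsigned n)
       { return (x << n) | (x >> (32 - n)); }

   matches the MINUS_EXPR form of the test above and is folded to a
   single LROTATE_EXPR, which most targets expand to one rotate
   instruction.  */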
5971 associate:
5972 /* In most languages, we can't associate operations on floats through
5973 parentheses. Rather than remember where the parentheses were, we
5974 don't associate floats at all, unless the user has specified
5975 -funsafe-math-optimizations. */
5977 if (! wins
5978 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5980 tree var0, con0, lit0, minus_lit0;
5981 tree var1, con1, lit1, minus_lit1;
5983 /* Split both trees into variables, constants, and literals. Then
5984 associate each group together, the constants with literals,
5985 then the result with variables. This increases the chances of
5986 literals being recombined later and of generating relocatable
5987 expressions for the sum of a constant and literal. */
5988 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5989 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5990 code == MINUS_EXPR);
5992 /* Only do something if we found more than two objects. Otherwise,
5993 nothing has changed and we risk infinite recursion. */
5994 if (2 < ((var0 != 0) + (var1 != 0)
5995 + (con0 != 0) + (con1 != 0)
5996 + (lit0 != 0) + (lit1 != 0)
5997 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5999 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6000 if (code == MINUS_EXPR)
6001 code = PLUS_EXPR;
6003 var0 = associate_trees (var0, var1, code, type);
6004 con0 = associate_trees (con0, con1, code, type);
6005 lit0 = associate_trees (lit0, lit1, code, type);
6006 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6008 /* Preserve the MINUS_EXPR if the negative part of the literal is
6009 greater than the positive part. Otherwise, the multiplicative
6010 folding code (i.e. extract_muldiv) may be fooled when
6011 unsigned constants are subtracted, as in the following
6012 example: ((X*2 + 4) - 8U)/2. */
6013 if (minus_lit0 && lit0)
6015 if (TREE_CODE (lit0) == INTEGER_CST
6016 && TREE_CODE (minus_lit0) == INTEGER_CST
6017 && tree_int_cst_lt (lit0, minus_lit0))
6019 minus_lit0 = associate_trees (minus_lit0, lit0,
6020 MINUS_EXPR, type);
6021 lit0 = 0;
6023 else
6025 lit0 = associate_trees (lit0, minus_lit0,
6026 MINUS_EXPR, type);
6027 minus_lit0 = 0;
6030 if (minus_lit0)
6032 if (con0 == 0)
6033 return convert (type, associate_trees (var0, minus_lit0,
6034 MINUS_EXPR, type));
6035 else
6037 con0 = associate_trees (con0, minus_lit0,
6038 MINUS_EXPR, type);
6039 return convert (type, associate_trees (var0, con0,
6040 PLUS_EXPR, type));
6044 con0 = associate_trees (con0, lit0, code, type);
6045 return convert (type, associate_trees (var0, con0, code, type));
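/* Editorial example (not part of the original source): for integral
   types,

       int f (int x, int y) { return (x + 1) + (y + 2); }

   splits into variables x, y and literals 1, 2, which reassociate to
   (x + y) + 3; floats get the same treatment only under
   -funsafe-math-optimizations.  */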
6049 binary:
6050 if (wins)
6051 t1 = const_binop (code, arg0, arg1, 0);
6052 if (t1 != NULL_TREE)
6054 /* The return value should always have
6055 the same type as the original expression. */
6056 if (TREE_TYPE (t1) != TREE_TYPE (t))
6057 t1 = convert (TREE_TYPE (t), t1);
6059 return t1;
6061 return t;
6063 case MINUS_EXPR:
6064 /* A - (-B) -> A + B */
6065 if (TREE_CODE (arg1) == NEGATE_EXPR)
6066 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6067 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6068 if (TREE_CODE (arg0) == NEGATE_EXPR
6069 && (FLOAT_TYPE_P (type)
6070 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6071 && negate_expr_p (arg1)
6072 && reorder_operands_p (arg0, arg1))
6073 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6074 TREE_OPERAND (arg0, 0)));
6076 if (! FLOAT_TYPE_P (type))
6078 if (! wins && integer_zerop (arg0))
6079 return negate_expr (convert (type, arg1));
6080 if (integer_zerop (arg1))
6081 return non_lvalue (convert (type, arg0));
6083 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
6084 about the case where C is a constant, just try one of the
6085 four possibilities. */
6087 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
6088 && operand_equal_p (TREE_OPERAND (arg0, 1),
6089 TREE_OPERAND (arg1, 1), 0))
6090 return fold (build (MULT_EXPR, type,
6091 fold (build (MINUS_EXPR, type,
6092 TREE_OPERAND (arg0, 0),
6093 TREE_OPERAND (arg1, 0))),
6094 TREE_OPERAND (arg0, 1)));
6096 /* Fold A - (A & B) into ~B & A. */
6097 if (!TREE_SIDE_EFFECTS (arg0)
6098 && TREE_CODE (arg1) == BIT_AND_EXPR)
6100 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6101 return fold (build (BIT_AND_EXPR, type,
6102 fold (build1 (BIT_NOT_EXPR, type,
6103 TREE_OPERAND (arg1, 0))),
6104 arg0));
6105 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6106 return fold (build (BIT_AND_EXPR, type,
6107 fold (build1 (BIT_NOT_EXPR, type,
6108 TREE_OPERAND (arg1, 1))),
6109 arg0));
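/* Editorial example (not part of the original source):

       int f (int a, int b) { return a - (a & b); }

   folds to ~b & a: the bits kept by the BIT_AND_EXPR are a subset of
   the bits of a, so the subtraction simply clears them.  */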
6112 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6113 any power of 2 minus 1. */
6114 if (TREE_CODE (arg0) == BIT_AND_EXPR
6115 && TREE_CODE (arg1) == BIT_AND_EXPR
6116 && operand_equal_p (TREE_OPERAND (arg0, 0),
6117 TREE_OPERAND (arg1, 0), 0))
6119 tree mask0 = TREE_OPERAND (arg0, 1);
6120 tree mask1 = TREE_OPERAND (arg1, 1);
6121 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6123 if (operand_equal_p (tem, mask1, 0))
6125 tem = fold (build (BIT_XOR_EXPR, type,
6126 TREE_OPERAND (arg0, 0), mask1));
6127 return fold (build (MINUS_EXPR, type, tem, mask1));
6132 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6133 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6134 return non_lvalue (convert (type, arg0));
6136 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6137 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6138 (-ARG1 + ARG0) reduces to -ARG1. */
6139 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6140 return negate_expr (convert (type, arg1));
6142 /* Fold &x - &x. This can happen from &x.foo - &x.
6143 This is unsafe for certain floats even in non-IEEE formats.
6144 In IEEE, it is unsafe because it does wrong for NaNs.
6145 Also note that operand_equal_p is always false if an operand
6146 is volatile. */
6148 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6149 && operand_equal_p (arg0, arg1, 0))
6150 return convert (type, integer_zero_node);
6152 goto associate;
6154 case MULT_EXPR:
6155 /* (-A) * (-B) -> A * B */
6156 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6157 return fold (build (MULT_EXPR, type,
6158 TREE_OPERAND (arg0, 0),
6159 negate_expr (arg1)));
6160 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6161 return fold (build (MULT_EXPR, type,
6162 negate_expr (arg0),
6163 TREE_OPERAND (arg1, 0)));
6165 if (! FLOAT_TYPE_P (type))
6167 if (integer_zerop (arg1))
6168 return omit_one_operand (type, arg1, arg0);
6169 if (integer_onep (arg1))
6170 return non_lvalue (convert (type, arg0));
6172 /* (a * (1 << b)) is (a << b) */
6173 if (TREE_CODE (arg1) == LSHIFT_EXPR
6174 && integer_onep (TREE_OPERAND (arg1, 0)))
6175 return fold (build (LSHIFT_EXPR, type, arg0,
6176 TREE_OPERAND (arg1, 1)));
6177 if (TREE_CODE (arg0) == LSHIFT_EXPR
6178 && integer_onep (TREE_OPERAND (arg0, 0)))
6179 return fold (build (LSHIFT_EXPR, type, arg1,
6180 TREE_OPERAND (arg0, 1)));
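/* Editorial example (not part of the original source):

       int f (int a, int b) { return a * (1 << b); }

   becomes a << b, replacing the multiply with a shift; the commuted
   form (1 << b) * a is caught by the second test.  */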
6182 if (TREE_CODE (arg1) == INTEGER_CST
6183 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6184 convert (type, arg1),
6185 code, NULL_TREE)))
6186 return convert (type, tem);
6189 else
6191 /* Maybe fold x * 0 to 0. The expressions aren't the same
6192 when x is NaN, since x * 0 is also NaN. Nor are they the
6193 same in modes with signed zeros, since multiplying a
6194 negative value by 0 gives -0, not +0. */
6195 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6196 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6197 && real_zerop (arg1))
6198 return omit_one_operand (type, arg1, arg0);
6199 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6200 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6201 && real_onep (arg1))
6202 return non_lvalue (convert (type, arg0));
6204 /* Transform x * -1.0 into -x. */
6205 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6206 && real_minus_onep (arg1))
6207 return fold (build1 (NEGATE_EXPR, type, arg0));
6209 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6210 if (flag_unsafe_math_optimizations
6211 && TREE_CODE (arg0) == RDIV_EXPR
6212 && TREE_CODE (arg1) == REAL_CST
6213 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6215 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6216 arg1, 0);
6217 if (tem)
6218 return fold (build (RDIV_EXPR, type, tem,
6219 TREE_OPERAND (arg0, 1)));
6222 if (flag_unsafe_math_optimizations)
6224 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6225 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6227 /* Optimizations of sqrt(...)*sqrt(...). */
6228 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6229 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6230 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6232 tree sqrtfn, arg, arglist;
6233 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6234 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6236 /* Optimize sqrt(x)*sqrt(x) as x. */
6237 if (operand_equal_p (arg00, arg10, 0)
6238 && ! HONOR_SNANS (TYPE_MODE (type)))
6239 return arg00;
6241 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6242 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6243 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6244 arglist = build_tree_list (NULL_TREE, arg);
6245 return build_function_call_expr (sqrtfn, arglist);
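/* Editorial example (not part of the original source): under
   -funsafe-math-optimizations,

       double f (double x, double y) { return sqrt (x) * sqrt (y); }

   is emitted as sqrt (x * y), one call instead of two, and
   sqrt (x) * sqrt (x) collapses to x outright when signaling NaNs
   need not be honored.  */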
6248 /* Optimize expN(x)*expN(y) as expN(x+y). */
6249 if (fcode0 == fcode1
6250 && (fcode0 == BUILT_IN_EXP
6251 || fcode0 == BUILT_IN_EXPF
6252 || fcode0 == BUILT_IN_EXPL
6253 || fcode0 == BUILT_IN_EXP2
6254 || fcode0 == BUILT_IN_EXP2F
6255 || fcode0 == BUILT_IN_EXP2L
6256 || fcode0 == BUILT_IN_EXP10
6257 || fcode0 == BUILT_IN_EXP10F
6258 || fcode0 == BUILT_IN_EXP10L
6259 || fcode0 == BUILT_IN_POW10
6260 || fcode0 == BUILT_IN_POW10F
6261 || fcode0 == BUILT_IN_POW10L))
6263 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6264 tree arg = build (PLUS_EXPR, type,
6265 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6266 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6267 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6268 return build_function_call_expr (expfn, arglist);
6271 /* Optimizations of pow(...)*pow(...). */
6272 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6273 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6274 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6276 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6277 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6278 1)));
6279 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6280 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6281 1)));
6283 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6284 if (operand_equal_p (arg01, arg11, 0))
6286 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6287 tree arg = build (MULT_EXPR, type, arg00, arg10);
6288 tree arglist = tree_cons (NULL_TREE, fold (arg),
6289 build_tree_list (NULL_TREE,
6290 arg01));
6291 return build_function_call_expr (powfn, arglist);
6294 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6295 if (operand_equal_p (arg00, arg10, 0))
6297 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6298 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6299 tree arglist = tree_cons (NULL_TREE, arg00,
6300 build_tree_list (NULL_TREE,
6301 arg));
6302 return build_function_call_expr (powfn, arglist);
6306 /* Optimize tan(x)*cos(x) as sin(x). */
6307 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6308 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6309 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6310 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6311 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6312 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6313 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6314 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6316 tree sinfn;
6318 switch (fcode0)
6320 case BUILT_IN_TAN:
6321 case BUILT_IN_COS:
6322 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6323 break;
6324 case BUILT_IN_TANF:
6325 case BUILT_IN_COSF:
6326 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6327 break;
6328 case BUILT_IN_TANL:
6329 case BUILT_IN_COSL:
6330 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6331 break;
6332 default:
6333 sinfn = NULL_TREE;
6336 if (sinfn != NULL_TREE)
6337 return build_function_call_expr (sinfn,
6338 TREE_OPERAND (arg0, 1));
6341 /* Optimize x*pow(x,c) as pow(x,c+1). */
6342 if (fcode1 == BUILT_IN_POW
6343 || fcode1 == BUILT_IN_POWF
6344 || fcode1 == BUILT_IN_POWL)
6346 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6347 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6348 1)));
6349 if (TREE_CODE (arg11) == REAL_CST
6350 && ! TREE_CONSTANT_OVERFLOW (arg11)
6351 && operand_equal_p (arg0, arg10, 0))
6353 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6354 REAL_VALUE_TYPE c;
6355 tree arg, arglist;
6357 c = TREE_REAL_CST (arg11);
6358 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6359 arg = build_real (type, c);
6360 arglist = build_tree_list (NULL_TREE, arg);
6361 arglist = tree_cons (NULL_TREE, arg0, arglist);
6362 return build_function_call_expr (powfn, arglist);
6366 /* Optimize pow(x,c)*x as pow(x,c+1). */
6367 if (fcode0 == BUILT_IN_POW
6368 || fcode0 == BUILT_IN_POWF
6369 || fcode0 == BUILT_IN_POWL)
6371 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6372 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6373 1)));
6374 if (TREE_CODE (arg01) == REAL_CST
6375 && ! TREE_CONSTANT_OVERFLOW (arg01)
6376 && operand_equal_p (arg1, arg00, 0))
6378 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6379 REAL_VALUE_TYPE c;
6380 tree arg, arglist;
6382 c = TREE_REAL_CST (arg01);
6383 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6384 arg = build_real (type, c);
6385 arglist = build_tree_list (NULL_TREE, arg);
6386 arglist = tree_cons (NULL_TREE, arg1, arglist);
6387 return build_function_call_expr (powfn, arglist);
6391 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6392 if (! optimize_size
6393 && operand_equal_p (arg0, arg1, 0))
6395 tree powfn;
6397 if (type == double_type_node)
6398 powfn = implicit_built_in_decls[BUILT_IN_POW];
6399 else if (type == float_type_node)
6400 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6401 else if (type == long_double_type_node)
6402 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6403 else
6404 powfn = NULL_TREE;
6406 if (powfn)
6408 tree arg = build_real (type, dconst2);
6409 tree arglist = build_tree_list (NULL_TREE, arg);
6410 arglist = tree_cons (NULL_TREE, arg0, arglist);
6411 return build_function_call_expr (powfn, arglist);
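/* Editorial example (not part of the original source): under
   -funsafe-math-optimizations and when not optimizing for size,

       double f (double x) { return x * x; }

   is canonicalized to pow (x, 2.0); as noted above, the pow expander
   turns that right back into x * x, so the effect is to normalize the
   tree for later folds.  */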
6416 goto associate;
6418 case BIT_IOR_EXPR:
6419 bit_ior:
6420 if (integer_all_onesp (arg1))
6421 return omit_one_operand (type, arg1, arg0);
6422 if (integer_zerop (arg1))
6423 return non_lvalue (convert (type, arg0));
6424 t1 = distribute_bit_expr (code, type, arg0, arg1);
6425 if (t1 != NULL_TREE)
6426 return t1;
6428 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6430 This results in more efficient code for machines without a NAND
6431 instruction. Combine will canonicalize to the first form
6432 which will allow use of NAND instructions provided by the
6433 backend if they exist. */
6434 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6435 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6437 return fold (build1 (BIT_NOT_EXPR, type,
6438 build (BIT_AND_EXPR, type,
6439 TREE_OPERAND (arg0, 0),
6440 TREE_OPERAND (arg1, 0))));
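/* Editorial example (not part of the original source):

       int f (int a, int b) { return ~a | ~b; }

   is rewritten by De Morgan's law as ~(a & b), one inversion instead
   of two, and a single NAND on targets that provide it.  */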
6443 /* See if this can be simplified into a rotate first. If that
6444 is unsuccessful continue in the association code. */
6445 goto bit_rotate;
6447 case BIT_XOR_EXPR:
6448 if (integer_zerop (arg1))
6449 return non_lvalue (convert (type, arg0));
6450 if (integer_all_onesp (arg1))
6451 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6453 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6454 with a constant, and the two constants have no bits in common,
6455 we should treat this as a BIT_IOR_EXPR since this may produce more
6456 simplifications. */
6457 if (TREE_CODE (arg0) == BIT_AND_EXPR
6458 && TREE_CODE (arg1) == BIT_AND_EXPR
6459 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6460 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6461 && integer_zerop (const_binop (BIT_AND_EXPR,
6462 TREE_OPERAND (arg0, 1),
6463 TREE_OPERAND (arg1, 1), 0)))
6465 code = BIT_IOR_EXPR;
6466 goto bit_ior;
6469 /* See if this can be simplified into a rotate first. If that
6470 is unsuccessful continue in the association code. */
6471 goto bit_rotate;
6473 case BIT_AND_EXPR:
6474 if (integer_all_onesp (arg1))
6475 return non_lvalue (convert (type, arg0));
6476 if (integer_zerop (arg1))
6477 return omit_one_operand (type, arg1, arg0);
6478 t1 = distribute_bit_expr (code, type, arg0, arg1);
6479 if (t1 != NULL_TREE)
6480 return t1;
6481 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6482 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6483 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6485 unsigned int prec
6486 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6488 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6489 && (~TREE_INT_CST_LOW (arg1)
6490 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6491 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
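/* Editorial example (not part of the original source): assuming an
   8-bit unsigned char,

       int f (unsigned char c) { return (int) c & 0377; }

   keeps exactly the bits the zero-extension can produce, so the
   BIT_AND_EXPR is redundant and (int) c remains.  */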
6494 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6496 This results in more efficient code for machines without a NOR
6497 instruction. Combine will canonicalize to the first form
6498 which will allow use of NOR instructions provided by the
6499 backend if they exist. */
6500 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6501 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6503 return fold (build1 (BIT_NOT_EXPR, type,
6504 build (BIT_IOR_EXPR, type,
6505 TREE_OPERAND (arg0, 0),
6506 TREE_OPERAND (arg1, 0))));
6509 goto associate;
6511 case RDIV_EXPR:
6512 /* Don't touch a floating-point divide by zero unless the mode
6513 of the constant can represent infinity. */
6514 if (TREE_CODE (arg1) == REAL_CST
6515 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6516 && real_zerop (arg1))
6517 return t;
6519 /* (-A) / (-B) -> A / B */
6520 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6521 return fold (build (RDIV_EXPR, type,
6522 TREE_OPERAND (arg0, 0),
6523 negate_expr (arg1)));
6524 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6525 return fold (build (RDIV_EXPR, type,
6526 negate_expr (arg0),
6527 TREE_OPERAND (arg1, 0)));
6529 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6530 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6531 && real_onep (arg1))
6532 return non_lvalue (convert (type, arg0));
6534 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6535 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6536 && real_minus_onep (arg1))
6537 return non_lvalue (convert (type, negate_expr (arg0)));
6539 /* If ARG1 is a constant, we can convert this to a multiply by the
6540 reciprocal. This does not have the same rounding properties,
6541 so only do this if -funsafe-math-optimizations. We can actually
6542 always safely do it if ARG1 is a power of two, but it's hard to
6543 tell if it is or not in a portable manner. */
6544 if (TREE_CODE (arg1) == REAL_CST)
6546 if (flag_unsafe_math_optimizations
6547 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6548 arg1, 0)))
6549 return fold (build (MULT_EXPR, type, arg0, tem));
6550 /* Find the reciprocal if optimizing and the result is exact. */
6551 if (optimize)
6553 REAL_VALUE_TYPE r;
6554 r = TREE_REAL_CST (arg1);
6555 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6557 tem = build_real (type, r);
6558 return fold (build (MULT_EXPR, type, arg0, tem));
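/* Editorial example (not part of the original source): when
   optimizing,

       double f (double x) { return x / 4.0; }

   becomes x * 0.25 because the reciprocal is exact; x / 3.0 is
   rewritten as a multiply only under -funsafe-math-optimizations,
   since 1.0 / 3.0 rounds.  */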
6562 /* Convert A/B/C to A/(B*C). */
6563 if (flag_unsafe_math_optimizations
6564 && TREE_CODE (arg0) == RDIV_EXPR)
6565 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6566 fold (build (MULT_EXPR, type,
6567 TREE_OPERAND (arg0, 1), arg1))));
6569 /* Convert A/(B/C) to (A/B)*C. */
6570 if (flag_unsafe_math_optimizations
6571 && TREE_CODE (arg1) == RDIV_EXPR)
6572 return fold (build (MULT_EXPR, type,
6573 fold (build (RDIV_EXPR, type, arg0,
6574 TREE_OPERAND (arg1, 0))),
6575 TREE_OPERAND (arg1, 1)));
6577 /* Convert C1/(X*C2) into (C1/C2)/X. */
6578 if (flag_unsafe_math_optimizations
6579 && TREE_CODE (arg1) == MULT_EXPR
6580 && TREE_CODE (arg0) == REAL_CST
6581 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6583 tree tem = const_binop (RDIV_EXPR, arg0,
6584 TREE_OPERAND (arg1, 1), 0);
6585 if (tem)
6586 return fold (build (RDIV_EXPR, type, tem,
6587 TREE_OPERAND (arg1, 0)));
6590 if (flag_unsafe_math_optimizations)
6592 enum built_in_function fcode = builtin_mathfn_code (arg1);
6593 /* Optimize x/expN(y) into x*expN(-y). */
6594 if (fcode == BUILT_IN_EXP
6595 || fcode == BUILT_IN_EXPF
6596 || fcode == BUILT_IN_EXPL
6597 || fcode == BUILT_IN_EXP2
6598 || fcode == BUILT_IN_EXP2F
6599 || fcode == BUILT_IN_EXP2L
6600 || fcode == BUILT_IN_EXP10
6601 || fcode == BUILT_IN_EXP10F
6602 || fcode == BUILT_IN_EXP10L
6603 || fcode == BUILT_IN_POW10
6604 || fcode == BUILT_IN_POW10F
6605 || fcode == BUILT_IN_POW10L)
6607 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6608 tree arg = build1 (NEGATE_EXPR, type,
6609 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6610 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6611 arg1 = build_function_call_expr (expfn, arglist);
6612 return fold (build (MULT_EXPR, type, arg0, arg1));
6615 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6616 if (fcode == BUILT_IN_POW
6617 || fcode == BUILT_IN_POWF
6618 || fcode == BUILT_IN_POWL)
6620 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6621 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6622 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6623 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6624 tree arglist = tree_cons(NULL_TREE, arg10,
6625 build_tree_list (NULL_TREE, neg11));
6626 arg1 = build_function_call_expr (powfn, arglist);
6627 return fold (build (MULT_EXPR, type, arg0, arg1));
6631 if (flag_unsafe_math_optimizations)
6633 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6634 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6636 /* Optimize sin(x)/cos(x) as tan(x). */
6637 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6638 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6639 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6640 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6641 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6643 tree tanfn;
6645 if (fcode0 == BUILT_IN_SIN)
6646 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6647 else if (fcode0 == BUILT_IN_SINF)
6648 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6649 else if (fcode0 == BUILT_IN_SINL)
6650 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6651 else
6652 tanfn = NULL_TREE;
6654 if (tanfn != NULL_TREE)
6655 return build_function_call_expr (tanfn,
6656 TREE_OPERAND (arg0, 1));
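/* Editorial example (not part of the original source): under
   -funsafe-math-optimizations,

       double f (double x) { return sin (x) / cos (x); }

   is emitted as tan (x); the reverse quotient cos (x) / sin (x) is
   handled just below as 1.0 / tan (x).  */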
6659 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6660 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6661 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6662 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6663 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6664 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6666 tree tanfn;
6668 if (fcode0 == BUILT_IN_COS)
6669 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6670 else if (fcode0 == BUILT_IN_COSF)
6671 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6672 else if (fcode0 == BUILT_IN_COSL)
6673 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6674 else
6675 tanfn = NULL_TREE;
6677 if (tanfn != NULL_TREE)
6679 tree tmp = TREE_OPERAND (arg0, 1);
6680 tmp = build_function_call_expr (tanfn, tmp);
6681 return fold (build (RDIV_EXPR, type,
6682 build_real (type, dconst1),
6683 tmp));
6687 /* Optimize pow(x,c)/x as pow(x,c-1). */
6688 if (fcode0 == BUILT_IN_POW
6689 || fcode0 == BUILT_IN_POWF
6690 || fcode0 == BUILT_IN_POWL)
6692 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6693 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6694 if (TREE_CODE (arg01) == REAL_CST
6695 && ! TREE_CONSTANT_OVERFLOW (arg01)
6696 && operand_equal_p (arg1, arg00, 0))
6698 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6699 REAL_VALUE_TYPE c;
6700 tree arg, arglist;
6702 c = TREE_REAL_CST (arg01);
6703 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6704 arg = build_real (type, c);
6705 arglist = build_tree_list (NULL_TREE, arg);
6706 arglist = tree_cons (NULL_TREE, arg1, arglist);
6707 return build_function_call_expr (powfn, arglist);
6711 goto binary;
6713 case TRUNC_DIV_EXPR:
6714 case ROUND_DIV_EXPR:
6715 case FLOOR_DIV_EXPR:
6716 case CEIL_DIV_EXPR:
6717 case EXACT_DIV_EXPR:
6718 if (integer_onep (arg1))
6719 return non_lvalue (convert (type, arg0));
6720 if (integer_zerop (arg1))
6721 return t;
6723 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6724 operation, EXACT_DIV_EXPR.
6726 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6727 At one time others generated faster code; it's not clear if they do
6728 after the last round of changes to the DIV code in expmed.c. */
6729 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6730 && multiple_of_p (type, arg0, arg1))
6731 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
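/* Editorial example (not part of the original source): CEIL_DIV_EXPR
   and FLOOR_DIV_EXPR arise internally, e.g. in size and offset
   calculations; when the dividend is provably a multiple of the
   divisor, as in (x * 8) divided by 8, the rounding direction is
   irrelevant and the cheaper EXACT_DIV_EXPR is used instead.  */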
6733 if (TREE_CODE (arg1) == INTEGER_CST
6734 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6735 code, NULL_TREE)))
6736 return convert (type, tem);
6738 goto binary;
6740 case CEIL_MOD_EXPR:
6741 case FLOOR_MOD_EXPR:
6742 case ROUND_MOD_EXPR:
6743 case TRUNC_MOD_EXPR:
6744 if (integer_onep (arg1))
6745 return omit_one_operand (type, integer_zero_node, arg0);
6746 if (integer_zerop (arg1))
6747 return t;
6749 if (TREE_CODE (arg1) == INTEGER_CST
6750 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6751 code, NULL_TREE)))
6752 return convert (type, tem);
6754 goto binary;
6756 case LROTATE_EXPR:
6757 case RROTATE_EXPR:
6758 if (integer_all_onesp (arg0))
6759 return omit_one_operand (type, arg0, arg1);
6760 goto shift;
6762 case RSHIFT_EXPR:
6763 /* Optimize -1 >> x for arithmetic right shifts. */
6764 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6765 return omit_one_operand (type, arg0, arg1);
6766 /* ... fall through ... */
6768 case LSHIFT_EXPR:
6769 shift:
6770 if (integer_zerop (arg1))
6771 return non_lvalue (convert (type, arg0));
6772 if (integer_zerop (arg0))
6773 return omit_one_operand (type, arg0, arg1);
6775 /* Since a negative shift count is not well-defined,
6776 don't try to compute it in the compiler. */
6777 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6778 return t;
6779 /* Rewrite an LROTATE_EXPR by a constant into an
6780 RROTATE_EXPR by a new constant. */
6781 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6783 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6784 tem = convert (TREE_TYPE (arg1), tem);
6785 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6786 return fold (build (RROTATE_EXPR, type, arg0, tem));
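/* Editorial example (not part of the original source): assuming
   32-bit unsigned int, the LROTATE_EXPR produced for

       unsigned f (unsigned x) { return (x << 8) | (x >> 24); }

   is rewritten here as a rotate right by 24, so later code sees only
   one rotate direction for constant counts.  */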
6789 /* If we have a rotate of a bit operation with the rotate count and
6790 the second operand of the bit operation both constant,
6791 permute the two operations. */
6792 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6793 && (TREE_CODE (arg0) == BIT_AND_EXPR
6794 || TREE_CODE (arg0) == BIT_IOR_EXPR
6795 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6796 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6797 return fold (build (TREE_CODE (arg0), type,
6798 fold (build (code, type,
6799 TREE_OPERAND (arg0, 0), arg1)),
6800 fold (build (code, type,
6801 TREE_OPERAND (arg0, 1), arg1))));
6803 /* Two consecutive rotates adding up to the width of the mode can
6804 be ignored. */
6805 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6806 && TREE_CODE (arg0) == RROTATE_EXPR
6807 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6808 && TREE_INT_CST_HIGH (arg1) == 0
6809 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6810 && ((TREE_INT_CST_LOW (arg1)
6811 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6812 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6813 return TREE_OPERAND (arg0, 0);
6815 goto binary;
6817 case MIN_EXPR:
6818 if (operand_equal_p (arg0, arg1, 0))
6819 return omit_one_operand (type, arg0, arg1);
6820 if (INTEGRAL_TYPE_P (type)
6821 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6822 return omit_one_operand (type, arg1, arg0);
6823 goto associate;
6825 case MAX_EXPR:
6826 if (operand_equal_p (arg0, arg1, 0))
6827 return omit_one_operand (type, arg0, arg1);
6828 if (INTEGRAL_TYPE_P (type)
6829 && TYPE_MAX_VALUE (type)
6830 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6831 return omit_one_operand (type, arg1, arg0);
6832 goto associate;
6834 case TRUTH_NOT_EXPR:
6835 /* Note that the operand of this must be an int
6836 and its values must be 0 or 1.
6837 ("true" is a fixed value perhaps depending on the language,
6838 but we don't handle values other than 1 correctly yet.) */
6839 tem = invert_truthvalue (arg0);
6840 /* Avoid infinite recursion. */
6841 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6843 tem = fold_single_bit_test (code, arg0, arg1, type);
6844 if (tem)
6845 return tem;
6846 return t;
6848 return convert (type, tem);
6850 case TRUTH_ANDIF_EXPR:
6851 /* Note that the operands of this must be ints
6852 and their values must be 0 or 1.
6853 ("true" is a fixed value perhaps depending on the language.) */
6854 /* If first arg is constant zero, return it. */
6855 if (integer_zerop (arg0))
6856 return convert (type, arg0);
6857 case TRUTH_AND_EXPR:
6858 /* If either arg is constant true, drop it. */
6859 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6860 return non_lvalue (convert (type, arg1));
6861 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6862 /* Preserve sequence points. */
6863 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6864 return non_lvalue (convert (type, arg0));
6865 /* If second arg is constant zero, result is zero, but first arg
6866 must be evaluated. */
6867 if (integer_zerop (arg1))
6868 return omit_one_operand (type, arg1, arg0);
6869 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6870 case will be handled here. */
6871 if (integer_zerop (arg0))
6872 return omit_one_operand (type, arg0, arg1);
6874 truth_andor:
6875 /* We only do these simplifications if we are optimizing. */
6876 if (!optimize)
6877 return t;
6879 /* Check for things like (A || B) && (A || C). We can convert this
6880 to A || (B && C). Note that either operator can be any of the four
6881 truth and/or operations and the transformation will still be
6882 valid. Also note that we only care about order for the
6883 ANDIF and ORIF operators. If B contains side effects, this
6884 might change the truth-value of A. */
6885 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6886 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6887 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6888 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6889 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6890 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6892 tree a00 = TREE_OPERAND (arg0, 0);
6893 tree a01 = TREE_OPERAND (arg0, 1);
6894 tree a10 = TREE_OPERAND (arg1, 0);
6895 tree a11 = TREE_OPERAND (arg1, 1);
6896 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6897 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6898 && (code == TRUTH_AND_EXPR
6899 || code == TRUTH_OR_EXPR));
6901 if (operand_equal_p (a00, a10, 0))
6902 return fold (build (TREE_CODE (arg0), type, a00,
6903 fold (build (code, type, a01, a11))));
6904 else if (commutative && operand_equal_p (a00, a11, 0))
6905 return fold (build (TREE_CODE (arg0), type, a00,
6906 fold (build (code, type, a01, a10))));
6907 else if (commutative && operand_equal_p (a01, a10, 0))
6908 return fold (build (TREE_CODE (arg0), type, a01,
6909 fold (build (code, type, a00, a11))));
6911 /* This case is tricky because we must either have commutative
6912 operators or else A10 must not have side-effects. */
6914 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6915 && operand_equal_p (a01, a11, 0))
6916 return fold (build (TREE_CODE (arg0), type,
6917 fold (build (code, type, a00, a10)),
6918 a01));
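/* Editorial example (not part of the original source):

       int f (int a, int b, int c) { return (a || b) && (a || c); }

   shares its left operand, so it folds to a || (b && c); the test
   above requires the non-shared operand of the left-hand side to be
   free of side effects, so the rewrite cannot change what is
   evaluated.  */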
6921 /* See if we can build a range comparison. */
6922 if (0 != (tem = fold_range_test (t)))
6923 return tem;
6925 /* Check for the possibility of merging component references. If our
6926 lhs is another similar operation, try to merge its rhs with our
6927 rhs. Then try to merge our lhs and rhs. */
6928 if (TREE_CODE (arg0) == code
6929 && 0 != (tem = fold_truthop (code, type,
6930 TREE_OPERAND (arg0, 1), arg1)))
6931 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6933 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6934 return tem;
6936 return t;
6938 case TRUTH_ORIF_EXPR:
6939 /* Note that the operands of this must be ints
6940 and their values must be 0 or true.
6941 ("true" is a fixed value perhaps depending on the language.) */
6942 /* If first arg is constant true, return it. */
6943 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6944 return convert (type, arg0);
6945 case TRUTH_OR_EXPR:
6946 /* If either arg is constant zero, drop it. */
6947 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6948 return non_lvalue (convert (type, arg1));
6949 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6950 /* Preserve sequence points. */
6951 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6952 return non_lvalue (convert (type, arg0));
6953 /* If second arg is constant true, result is true, but we must
6954 evaluate first arg. */
6955 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6956 return omit_one_operand (type, arg1, arg0);
6957 /* Likewise for first arg, but note this only occurs here for
6958 TRUTH_OR_EXPR. */
6959 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6960 return omit_one_operand (type, arg0, arg1);
6961 goto truth_andor;
6963 case TRUTH_XOR_EXPR:
6964 /* If either arg is constant zero, drop it. */
6965 if (integer_zerop (arg0))
6966 return non_lvalue (convert (type, arg1));
6967 if (integer_zerop (arg1))
6968 return non_lvalue (convert (type, arg0));
6969 /* If either arg is constant true, this is a logical inversion. */
6970 if (integer_onep (arg0))
6971 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6972 if (integer_onep (arg1))
6973 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6974 return t;
6976 case EQ_EXPR:
6977 case NE_EXPR:
6978 case LT_EXPR:
6979 case GT_EXPR:
6980 case LE_EXPR:
6981 case GE_EXPR:
6982 /* If one arg is a real or integer constant, put it last. */
6983 if (tree_swap_operands_p (arg0, arg1, true))
6984 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
6986 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6988 tree targ0 = strip_float_extensions (arg0);
6989 tree targ1 = strip_float_extensions (arg1);
6990 tree newtype = TREE_TYPE (targ0);
6992 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6993 newtype = TREE_TYPE (targ1);
6995 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6996 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6997 return fold (build (code, type, convert (newtype, targ0),
6998 convert (newtype, targ1)));
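/* Editorial example (not part of the original source):

       int f (float a, float b) { return (double) a < (double) b; }

   compares identically in float, so both extensions are stripped and
   the comparison runs in the narrower mode.  */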
7000 /* (-a) CMP (-b) -> b CMP a */
7001 if (TREE_CODE (arg0) == NEGATE_EXPR
7002 && TREE_CODE (arg1) == NEGATE_EXPR)
7003 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7004 TREE_OPERAND (arg0, 0)));
7006 if (TREE_CODE (arg1) == REAL_CST)
7008 REAL_VALUE_TYPE cst;
7009 cst = TREE_REAL_CST (arg1);
7011 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7012 if (TREE_CODE (arg0) == NEGATE_EXPR)
7013 return
7014 fold (build (swap_tree_comparison (code), type,
7015 TREE_OPERAND (arg0, 0),
7016 build_real (TREE_TYPE (arg1),
7017 REAL_VALUE_NEGATE (cst))));
7019 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7020 /* a CMP (-0) -> a CMP 0 */
7021 if (REAL_VALUE_MINUS_ZERO (cst))
7022 return fold (build (code, type, arg0,
7023 build_real (TREE_TYPE (arg1), dconst0)));
7025 /* x != NaN is always true, other ops are always false. */
7026 if (REAL_VALUE_ISNAN (cst)
7027 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7029 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7030 return omit_one_operand (type, convert (type, t), arg0);
7033 /* Fold comparisons against infinity. */
7034 if (REAL_VALUE_ISINF (cst))
7036 tem = fold_inf_compare (code, type, arg0, arg1);
7037 if (tem != NULL_TREE)
7038 return tem;
7042 /* If this is a comparison of a real constant with a PLUS_EXPR
7043 or a MINUS_EXPR of a real constant, we can convert it into a
7044 comparison with a revised real constant, provided that
7045 unsafe_math_optimizations are enabled and no overflow occurs. */
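/* For example, with unsafe_math_optimizations enabled, "x + 2.0 > 5.0"
   folds to "x > 3.0", because 5.0 - 2.0 is computed exactly and does
   not overflow.  */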
7046 if (flag_unsafe_math_optimizations
7047 && TREE_CODE (arg1) == REAL_CST
7048 && (TREE_CODE (arg0) == PLUS_EXPR
7049 || TREE_CODE (arg0) == MINUS_EXPR)
7050 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7051 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7052 ? MINUS_EXPR : PLUS_EXPR,
7053 arg1, TREE_OPERAND (arg0, 1), 0))
7054 && ! TREE_CONSTANT_OVERFLOW (tem))
7055 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7057 /* Likewise, we can simplify a comparison of a real constant with
7058 a MINUS_EXPR whose first operand is also a real constant, i.e.
7059 (c1 - x) < c2 becomes x > c1-c2. */
7060 if (flag_unsafe_math_optimizations
7061 && TREE_CODE (arg1) == REAL_CST
7062 && TREE_CODE (arg0) == MINUS_EXPR
7063 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7064 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7065 arg1, 0))
7066 && ! TREE_CONSTANT_OVERFLOW (tem))
7067 return fold (build (swap_tree_comparison (code), type,
7068 TREE_OPERAND (arg0, 1), tem));
7070 /* Fold comparisons against built-in math functions. */
7071 if (TREE_CODE (arg1) == REAL_CST
7072 && flag_unsafe_math_optimizations
7073 && ! flag_errno_math)
7075 enum built_in_function fcode = builtin_mathfn_code (arg0);
7077 if (fcode != END_BUILTINS)
7079 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7080 if (tem != NULL_TREE)
7081 return tem;
7086 /* Convert foo++ == CONST into ++foo == CONST + INCR.
7087 First, see if one arg is constant; find the constant arg
7088 and the other one. */
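/* For example, "i++ == 5" becomes "++i == 6"; the increment is folded
   into the constant, so both sides see the post-increment value.  */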
7090 tree constop = 0, varop = NULL_TREE;
7091 int constopnum = -1;
7093 if (TREE_CONSTANT (arg1))
7094 constopnum = 1, constop = arg1, varop = arg0;
7095 if (TREE_CONSTANT (arg0))
7096 constopnum = 0, constop = arg0, varop = arg1;
7098 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
7100 /* This optimization is invalid for ordered comparisons
7101 if CONST+INCR overflows or if foo+incr might overflow.
7102 This optimization is invalid for floating point due to rounding.
7103 For pointer types we assume overflow doesn't happen. */
7104 if (POINTER_TYPE_P (TREE_TYPE (varop))
7105 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7106 && (code == EQ_EXPR || code == NE_EXPR)))
7108 tree newconst
7109 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
7110 constop, TREE_OPERAND (varop, 1)));
7112 /* Do not overwrite the current varop to be a preincrement,
7113 create a new node so that we won't confuse our caller who
7114 might create trees and throw them away, reusing the
7115 arguments that they passed to build. This shows up in
7116 the THEN or ELSE parts of ?: being postincrements. */
7117 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
7118 TREE_OPERAND (varop, 0),
7119 TREE_OPERAND (varop, 1));
7121 /* If VAROP is a reference to a bitfield, we must mask
7122 the constant by the width of the field. */
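/* E.g. for a 3-bit field, the all-ones value of the field's unsigned
   type is shifted right to leave SIZE low bits, giving the mask 0x7
   that is ANDed into NEWCONST below.  */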
7123 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7124 && DECL_BIT_FIELD (TREE_OPERAND
7125 (TREE_OPERAND (varop, 0), 1)))
7127 int size
7128 = TREE_INT_CST_LOW (DECL_SIZE
7129 (TREE_OPERAND
7130 (TREE_OPERAND (varop, 0), 1)));
7131 tree mask, unsigned_type;
7132 unsigned int precision;
7133 tree folded_compare;
7135 /* First check whether the comparison would come out
7136 always the same.  If we didn't check, the masking
7137 below could change its meaning.  */
7138 if (constopnum == 0)
7139 folded_compare = fold (build (code, type, constop,
7140 TREE_OPERAND (varop, 0)));
7141 else
7142 folded_compare = fold (build (code, type,
7143 TREE_OPERAND (varop, 0),
7144 constop));
7145 if (integer_zerop (folded_compare)
7146 || integer_onep (folded_compare))
7147 return omit_one_operand (type, folded_compare, varop);
7149 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
7150 precision = TYPE_PRECISION (unsigned_type);
7151 mask = build_int_2 (~0, ~0);
7152 TREE_TYPE (mask) = unsigned_type;
7153 force_fit_type (mask, 0);
7154 mask = const_binop (RSHIFT_EXPR, mask,
7155 size_int (precision - size), 0);
7156 newconst = fold (build (BIT_AND_EXPR,
7157 TREE_TYPE (varop), newconst,
7158 convert (TREE_TYPE (varop),
7159 mask)));
7162 t = build (code, type,
7163 (constopnum == 0) ? newconst : varop,
7164 (constopnum == 1) ? newconst : varop);
7165 return t;
7168 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7170 if (POINTER_TYPE_P (TREE_TYPE (varop))
7171 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7172 && (code == EQ_EXPR || code == NE_EXPR)))
7174 tree newconst
7175 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7176 constop, TREE_OPERAND (varop, 1)));
7178 /* Do not overwrite the current varop to be a predecrement,
7179 create a new node so that we won't confuse our caller who
7180 might create trees and throw them away, reusing the
7181 arguments that they passed to build. This shows up in
7182 the THEN or ELSE parts of ?: being postdecrements. */
7183 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7184 TREE_OPERAND (varop, 0),
7185 TREE_OPERAND (varop, 1));
7187 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7188 && DECL_BIT_FIELD (TREE_OPERAND
7189 (TREE_OPERAND (varop, 0), 1)))
7191 int size
7192 = TREE_INT_CST_LOW (DECL_SIZE
7193 (TREE_OPERAND
7194 (TREE_OPERAND (varop, 0), 1)));
7195 tree mask, unsigned_type;
7196 unsigned int precision;
7197 tree folded_compare;
7199 if (constopnum == 0)
7200 folded_compare = fold (build (code, type, constop,
7201 TREE_OPERAND (varop, 0)));
7202 else
7203 folded_compare = fold (build (code, type,
7204 TREE_OPERAND (varop, 0),
7205 constop));
7206 if (integer_zerop (folded_compare)
7207 || integer_onep (folded_compare))
7208 return omit_one_operand (type, folded_compare, varop);
7210 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
7211 precision = TYPE_PRECISION (unsigned_type);
7212 mask = build_int_2 (~0, ~0);
7213 TREE_TYPE (mask) = unsigned_type;
7214 force_fit_type (mask, 0);
7215 mask = const_binop (RSHIFT_EXPR, mask,
7216 size_int (precision - size), 0);
7217 newconst = fold (build (BIT_AND_EXPR,
7218 TREE_TYPE (varop), newconst,
7219 convert (TREE_TYPE (varop),
7220 mask)));
7223 t = build (code, type,
7224 (constopnum == 0) ? newconst : varop,
7225 (constopnum == 1) ? newconst : varop);
7226 return t;
7231 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7232 This transformation affects the cases which are handled in later
7233 optimizations involving comparisons with non-negative constants. */
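/* For example, "x >= 1" becomes "x > 0", which the boundary-value
   cases below can then recognize directly; for unsigned x it further
   becomes "x != 0".  */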
7234 if (TREE_CODE (arg1) == INTEGER_CST
7235 && TREE_CODE (arg0) != INTEGER_CST
7236 && tree_int_cst_sgn (arg1) > 0)
7238 switch (code)
7240 case GE_EXPR:
7241 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7242 return fold (build (GT_EXPR, type, arg0, arg1));
7244 case LT_EXPR:
7245 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7246 return fold (build (LE_EXPR, type, arg0, arg1));
7248 default:
7249 break;
7253 /* Comparisons with the highest or lowest possible integer of
7254 the specified size will have known values. */
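/* E.g. if ARG1 has type unsigned char (width 8), "x > 255" is always
   false, "x <= 255" is always true, and "x >= 255" and "x < 255"
   reduce to equality tests against 255.  */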
7256 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7258 if (TREE_CODE (arg1) == INTEGER_CST
7259 && ! TREE_CONSTANT_OVERFLOW (arg1)
7260 && width <= HOST_BITS_PER_WIDE_INT
7261 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7262 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7264 unsigned HOST_WIDE_INT signed_max;
7265 unsigned HOST_WIDE_INT max, min;
7267 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7269 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7271 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7272 min = 0;
7274 else
7276 max = signed_max;
7277 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7280 if (TREE_INT_CST_HIGH (arg1) == 0
7281 && TREE_INT_CST_LOW (arg1) == max)
7282 switch (code)
7284 case GT_EXPR:
7285 return omit_one_operand (type,
7286 convert (type, integer_zero_node),
7287 arg0);
7288 case GE_EXPR:
7289 return fold (build (EQ_EXPR, type, arg0, arg1));
7291 case LE_EXPR:
7292 return omit_one_operand (type,
7293 convert (type, integer_one_node),
7294 arg0);
7295 case LT_EXPR:
7296 return fold (build (NE_EXPR, type, arg0, arg1));
7298 /* The GE_EXPR and LT_EXPR cases above are not normally
7299 reached because of previous transformations. */
7301 default:
7302 break;
7304 else if (TREE_INT_CST_HIGH (arg1) == 0
7305 && TREE_INT_CST_LOW (arg1) == max - 1)
7306 switch (code)
7308 case GT_EXPR:
7309 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7310 return fold (build (EQ_EXPR, type, arg0, arg1));
7311 case LE_EXPR:
7312 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7313 return fold (build (NE_EXPR, type, arg0, arg1));
7314 default:
7315 break;
7317 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7318 && TREE_INT_CST_LOW (arg1) == min)
7319 switch (code)
7321 case LT_EXPR:
7322 return omit_one_operand (type,
7323 convert (type, integer_zero_node),
7324 arg0);
7325 case LE_EXPR:
7326 return fold (build (EQ_EXPR, type, arg0, arg1));
7328 case GE_EXPR:
7329 return omit_one_operand (type,
7330 convert (type, integer_one_node),
7331 arg0);
7332 case GT_EXPR:
7333 return fold (build (NE_EXPR, type, arg0, arg1));
7335 default:
7336 break;
7338 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7339 && TREE_INT_CST_LOW (arg1) == min + 1)
7340 switch (code)
7342 case GE_EXPR:
7343 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7344 return fold (build (NE_EXPR, type, arg0, arg1));
7345 case LT_EXPR:
7346 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7347 return fold (build (EQ_EXPR, type, arg0, arg1));
7348 default:
7349 break;
7352 else if (TREE_INT_CST_HIGH (arg1) == 0
7353 && TREE_INT_CST_LOW (arg1) == signed_max
7354 && TREE_UNSIGNED (TREE_TYPE (arg1))
7355 /* signed_type does not work on pointer types. */
7356 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7358 /* The following case also applies to X < signed_max+1
7359 and X >= signed_max+1 because of previous transformations. */
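/* E.g. for 32-bit int, "(unsigned) x <= 2147483647" becomes
   "(int) x >= 0", a plain sign test.  */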
7360 if (code == LE_EXPR || code == GT_EXPR)
7362 tree st0, st1;
7363 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7364 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7365 return fold
7366 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7367 type, convert (st0, arg0),
7368 convert (st1, integer_zero_node)));
7374 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7375 a MINUS_EXPR of a constant, we can convert it into a comparison with
7376 a revised constant as long as no overflow occurs. */
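/* For example, "x + 3 == 7" becomes "x == 4" and "x - 3 == 7"
   becomes "x == 10".  */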
7377 if ((code == EQ_EXPR || code == NE_EXPR)
7378 && TREE_CODE (arg1) == INTEGER_CST
7379 && (TREE_CODE (arg0) == PLUS_EXPR
7380 || TREE_CODE (arg0) == MINUS_EXPR)
7381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7382 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7383 ? MINUS_EXPR : PLUS_EXPR,
7384 arg1, TREE_OPERAND (arg0, 1), 0))
7385 && ! TREE_CONSTANT_OVERFLOW (tem))
7386 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7388 /* Similarly for a NEGATE_EXPR. */
7389 else if ((code == EQ_EXPR || code == NE_EXPR)
7390 && TREE_CODE (arg0) == NEGATE_EXPR
7391 && TREE_CODE (arg1) == INTEGER_CST
7392 && 0 != (tem = negate_expr (arg1))
7393 && TREE_CODE (tem) == INTEGER_CST
7394 && ! TREE_CONSTANT_OVERFLOW (tem))
7395 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7397 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7398 for !=. Don't do this for ordered comparisons due to overflow. */
7399 else if ((code == NE_EXPR || code == EQ_EXPR)
7400 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7401 return fold (build (code, type,
7402 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7404 /* If we are widening one operand of an integer comparison,
7405 see if the other operand is similarly being widened. Perhaps we
7406 can do the comparison in the narrower type. */
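/* For example, if C has type char, "(int) c == 100" can be done as
   "c == 100", since 100 fits in char.  */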
7407 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7408 && TREE_CODE (arg0) == NOP_EXPR
7409 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7410 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7411 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7412 || (TREE_CODE (t1) == INTEGER_CST
7413 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7414 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
7416 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7417 constant, we can simplify it. */
7418 else if (TREE_CODE (arg1) == INTEGER_CST
7419 && (TREE_CODE (arg0) == MIN_EXPR
7420 || TREE_CODE (arg0) == MAX_EXPR)
7421 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7422 return optimize_minmax_comparison (t);
7424 /* If we are comparing an ABS_EXPR with a constant, we can
7425 convert all the cases into explicit comparisons, but they may
7426 well not be faster than doing the ABS and one comparison.
7427 But ABS (X) <= C is a range comparison, which becomes a subtraction
7428 and a comparison, and is probably faster. */
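/* For example, "abs (x) <= 5" becomes "x >= -5 && x <= 5", built as
   a TRUTH_ANDIF_EXPR of the two comparisons.  */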
7429 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7430 && TREE_CODE (arg0) == ABS_EXPR
7431 && ! TREE_SIDE_EFFECTS (arg0)
7432 && (0 != (tem = negate_expr (arg1)))
7433 && TREE_CODE (tem) == INTEGER_CST
7434 && ! TREE_CONSTANT_OVERFLOW (tem))
7435 return fold (build (TRUTH_ANDIF_EXPR, type,
7436 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7437 build (LE_EXPR, type,
7438 TREE_OPERAND (arg0, 0), arg1)));
7440 /* If this is an EQ or NE comparison with zero and ARG0 is
7441 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7442 two operations, but the latter can be done in one less insn
7443 on machines that have only two-operand insns or on which a
7444 constant cannot be the first operand. */
7445 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7446 && TREE_CODE (arg0) == BIT_AND_EXPR)
7448 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7449 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7450 return
7451 fold (build (code, type,
7452 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7453 build (RSHIFT_EXPR,
7454 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7455 TREE_OPERAND (arg0, 1),
7456 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7457 convert (TREE_TYPE (arg0),
7458 integer_one_node)),
7459 arg1));
7460 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7461 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7462 return
7463 fold (build (code, type,
7464 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7465 build (RSHIFT_EXPR,
7466 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7467 TREE_OPERAND (arg0, 0),
7468 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7469 convert (TREE_TYPE (arg0),
7470 integer_one_node)),
7471 arg1));
7474 /* If this is an NE or EQ comparison of zero against the result of a
7475 signed MOD operation whose second operand is a power of 2, make
7476 the MOD operation unsigned since it is simpler and equivalent. */
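/* For example, for signed x, "x % 4 == 0" becomes
   "(unsigned) x % 4 == 0"; divisibility by a power of 2 does not
   depend on the sign.  */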
7477 if ((code == NE_EXPR || code == EQ_EXPR)
7478 && integer_zerop (arg1)
7479 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7480 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7481 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7482 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7483 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7484 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7486 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7487 tree newmod = build (TREE_CODE (arg0), newtype,
7488 convert (newtype, TREE_OPERAND (arg0, 0)),
7489 convert (newtype, TREE_OPERAND (arg0, 1)));
7491 return build (code, type, newmod, convert (newtype, arg1));
7494 /* If this is an NE comparison of zero with an AND of one, remove the
7495 comparison since the AND will give the correct value. */
7496 if (code == NE_EXPR && integer_zerop (arg1)
7497 && TREE_CODE (arg0) == BIT_AND_EXPR
7498 && integer_onep (TREE_OPERAND (arg0, 1)))
7499 return convert (type, arg0);
7501 /* If we have (A & C) == C where C is a power of 2, convert this into
7502 (A & C) != 0. Similarly for NE_EXPR. */
7503 if ((code == EQ_EXPR || code == NE_EXPR)
7504 && TREE_CODE (arg0) == BIT_AND_EXPR
7505 && integer_pow2p (TREE_OPERAND (arg0, 1))
7506 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7507 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7508 arg0, integer_zero_node));
7510 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7511 2, then fold the expression into shifts and logical operations. */
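/* E.g. "(x & 4) != 0" can be lowered to something like
   "(x >> 2) & 1", avoiding a separate comparison.  */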
7512 tem = fold_single_bit_test (code, arg0, arg1, type);
7513 if (tem)
7514 return tem;
7516 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7517 Similarly for NE_EXPR. */
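/* For example, "(x & 6) == 1" is always false, because 1 has a bit
   set outside the mask 6.  */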
7518 if ((code == EQ_EXPR || code == NE_EXPR)
7519 && TREE_CODE (arg0) == BIT_AND_EXPR
7520 && TREE_CODE (arg1) == INTEGER_CST
7521 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7523 tree dandnotc
7524 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7525 arg1, build1 (BIT_NOT_EXPR,
7526 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7527 TREE_OPERAND (arg0, 1))));
7528 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7529 if (integer_nonzerop (dandnotc))
7530 return omit_one_operand (type, rslt, arg0);
7533 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7534 Similarly for NE_EXPR. */
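/* For example, "(x | 4) == 3" is always false, because ORing in 4
   sets a bit that 3 lacks.  */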
7535 if ((code == EQ_EXPR || code == NE_EXPR)
7536 && TREE_CODE (arg0) == BIT_IOR_EXPR
7537 && TREE_CODE (arg1) == INTEGER_CST
7538 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7540 tree candnotd
7541 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7542 TREE_OPERAND (arg0, 1),
7543 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7544 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7545 if (integer_nonzerop (candnotd))
7546 return omit_one_operand (type, rslt, arg0);
7549 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7550 and similarly for >= into !=. */
7551 if ((code == LT_EXPR || code == GE_EXPR)
7552 && TREE_UNSIGNED (TREE_TYPE (arg0))
7553 && TREE_CODE (arg1) == LSHIFT_EXPR
7554 && integer_onep (TREE_OPERAND (arg1, 0)))
7555 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7556 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7557 TREE_OPERAND (arg1, 1)),
7558 convert (TREE_TYPE (arg0), integer_zero_node));
7560 else if ((code == LT_EXPR || code == GE_EXPR)
7561 && TREE_UNSIGNED (TREE_TYPE (arg0))
7562 && (TREE_CODE (arg1) == NOP_EXPR
7563 || TREE_CODE (arg1) == CONVERT_EXPR)
7564 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7565 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7566 return
7567 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7568 convert (TREE_TYPE (arg0),
7569 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7570 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7571 convert (TREE_TYPE (arg0), integer_zero_node));
7573 /* Simplify comparison of something with itself. (For IEEE
7574 floating-point, we can only do some of these simplifications.) */
7575 if (operand_equal_p (arg0, arg1, 0))
7577 switch (code)
7579 case EQ_EXPR:
7580 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7581 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7582 return constant_boolean_node (1, type);
7583 break;
7585 case GE_EXPR:
7586 case LE_EXPR:
7587 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7588 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7589 return constant_boolean_node (1, type);
7590 return fold (build (EQ_EXPR, type, arg0, arg1));
7592 case NE_EXPR:
7593 /* For NE, we can only do this simplification if integer
7594 or we don't honor IEEE floating point NaNs. */
7595 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7596 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7597 break;
7598 /* ... fall through ... */
7599 case GT_EXPR:
7600 case LT_EXPR:
7601 return constant_boolean_node (0, type);
7602 default:
7603 abort ();
7607 /* If we are comparing an expression that just has comparisons
7608 of two integer values, arithmetic expressions of those comparisons,
7609 and constants, we can simplify it. There are only three cases
7610 to check: the two values can either be equal, the first can be
7611 greater, or the second can be greater. Fold the expression for
7612 those three values. Since each value must be 0 or 1, we have
7613 eight possibilities, each of which corresponds to the constant 0
7614 or 1 or one of the six possible comparisons.
7616 This handles common cases like (a > b) == 0 but also handles
7617 expressions like ((x > y) - (y > x)) > 0, which supposedly
7618 occur in macroized code. */
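/* E.g. for "((x > y) - (y > x)) > 0" the three trial foldings below
   yield 1, 0 and 0 respectively, i.e. mask 4, so the whole
   expression reduces to "x > y".  */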
7620 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7622 tree cval1 = 0, cval2 = 0;
7623 int save_p = 0;
7625 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7626 /* Don't handle degenerate cases here; they should already
7627 have been handled anyway. */
7628 && cval1 != 0 && cval2 != 0
7629 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7630 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7631 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7632 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7633 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7634 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7635 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7637 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7638 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7640 /* We can't just pass T to eval_subst in case cval1 or cval2
7641 was the same as ARG1. */
7643 tree high_result
7644 = fold (build (code, type,
7645 eval_subst (arg0, cval1, maxval, cval2, minval),
7646 arg1));
7647 tree equal_result
7648 = fold (build (code, type,
7649 eval_subst (arg0, cval1, maxval, cval2, maxval),
7650 arg1));
7651 tree low_result
7652 = fold (build (code, type,
7653 eval_subst (arg0, cval1, minval, cval2, maxval),
7654 arg1));
7656 /* All three of these results should be 0 or 1. Confirm they
7657 are. Then use those values to select the proper code
7658 to use. */
7660 if ((integer_zerop (high_result)
7661 || integer_onep (high_result))
7662 && (integer_zerop (equal_result)
7663 || integer_onep (equal_result))
7664 && (integer_zerop (low_result)
7665 || integer_onep (low_result)))
7667 /* Make a 3-bit mask with the high-order bit being the
7668 value for `>', the next for '=', and the low for '<'. */
7669 switch ((integer_onep (high_result) * 4)
7670 + (integer_onep (equal_result) * 2)
7671 + integer_onep (low_result))
7673 case 0:
7674 /* Always false. */
7675 return omit_one_operand (type, integer_zero_node, arg0);
7676 case 1:
7677 code = LT_EXPR;
7678 break;
7679 case 2:
7680 code = EQ_EXPR;
7681 break;
7682 case 3:
7683 code = LE_EXPR;
7684 break;
7685 case 4:
7686 code = GT_EXPR;
7687 break;
7688 case 5:
7689 code = NE_EXPR;
7690 break;
7691 case 6:
7692 code = GE_EXPR;
7693 break;
7694 case 7:
7695 /* Always true. */
7696 return omit_one_operand (type, integer_one_node, arg0);
7699 t = build (code, type, cval1, cval2);
7700 if (save_p)
7701 return save_expr (t);
7702 else
7703 return fold (t);
7708 /* If this is a comparison of a field, we may be able to simplify it. */
7709 if (((TREE_CODE (arg0) == COMPONENT_REF
7710 && (*lang_hooks.can_use_bit_fields_p) ())
7711 || TREE_CODE (arg0) == BIT_FIELD_REF)
7712 && (code == EQ_EXPR || code == NE_EXPR)
7713 /* Handle the constant case even without -O
7714 to make sure the warnings are given. */
7715 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7717 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7718 if (t1)
7719 return t1;
7722 /* If this is a comparison of complex values and either or both sides
7723 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7724 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7725 This may prevent needless evaluations. */
7726 if ((code == EQ_EXPR || code == NE_EXPR)
7727 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7728 && (TREE_CODE (arg0) == COMPLEX_EXPR
7729 || TREE_CODE (arg1) == COMPLEX_EXPR
7730 || TREE_CODE (arg0) == COMPLEX_CST
7731 || TREE_CODE (arg1) == COMPLEX_CST))
7733 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7734 tree real0, imag0, real1, imag1;
7736 arg0 = save_expr (arg0);
7737 arg1 = save_expr (arg1);
7738 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7739 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7740 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7741 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7743 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7744 : TRUTH_ORIF_EXPR),
7745 type,
7746 fold (build (code, type, real0, real1)),
7747 fold (build (code, type, imag0, imag1))));
7750 /* Optimize comparisons of strlen vs zero to a compare of the
7751 first character of the string vs zero. To wit,
7752 strlen(ptr) == 0 => *ptr == 0
7753 strlen(ptr) != 0 => *ptr != 0
7754 Other cases should reduce to one of these two (or a constant)
7755 due to the return value of strlen being unsigned. */
7756 if ((code == EQ_EXPR || code == NE_EXPR)
7757 && integer_zerop (arg1)
7758 && TREE_CODE (arg0) == CALL_EXPR)
7760 tree fndecl = get_callee_fndecl (arg0);
7761 tree arglist;
7763 if (fndecl
7764 && DECL_BUILT_IN (fndecl)
7765 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7766 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7767 && (arglist = TREE_OPERAND (arg0, 1))
7768 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7769 && ! TREE_CHAIN (arglist))
7770 return fold (build (code, type,
7771 build1 (INDIRECT_REF, char_type_node,
7772 TREE_VALUE (arglist)),
7773 integer_zero_node));
7776 /* From here on, the only cases we handle are when the result is
7777 known to be a constant.
7779 To compute GT, swap the arguments and do LT.
7780 To compute GE, do LT and invert the result.
7781 To compute LE, swap the arguments, do LT and invert the result.
7782 To compute NE, do EQ and invert the result.
7784 Therefore, the code below must handle only EQ and LT. */
7786 if (code == LE_EXPR || code == GT_EXPR)
7788 tem = arg0, arg0 = arg1, arg1 = tem;
7789 code = swap_tree_comparison (code);
7792 /* Note that it is safe to invert for real values here because we
7793 will check below in the one case that it matters. */
7795 t1 = NULL_TREE;
7796 invert = 0;
7797 if (code == NE_EXPR || code == GE_EXPR)
7799 invert = 1;
7800 code = invert_tree_comparison (code);
7803 /* Compute a result for LT or EQ if args permit;
7804 otherwise return T. */
7805 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7807 if (code == EQ_EXPR)
7808 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7809 else
7810 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7811 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7812 : INT_CST_LT (arg0, arg1)),
7813 0);
7816 #if 0 /* This is no longer useful, but breaks some real code. */
7817 /* Assume a nonexplicit constant cannot equal an explicit one,
7818 since such code would be undefined anyway.
7819 Exception: on sysvr4, using #pragma weak,
7820 a label can come out as 0. */
7821 else if (TREE_CODE (arg1) == INTEGER_CST
7822 && !integer_zerop (arg1)
7823 && TREE_CONSTANT (arg0)
7824 && TREE_CODE (arg0) == ADDR_EXPR
7825 && code == EQ_EXPR)
7826 t1 = build_int_2 (0, 0);
7827 #endif
7828 /* Two real constants can be compared explicitly. */
7829 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7831 /* If either operand is a NaN, the result is false with two
7832 exceptions: First, an NE_EXPR is true on NaNs, but that case
7833 is already handled correctly since we will be inverting the
7834 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7835 or a GE_EXPR into a LT_EXPR, we must return true so that it
7836 will be inverted into false. */
7838 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7839 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7840 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7842 else if (code == EQ_EXPR)
7843 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7844 TREE_REAL_CST (arg1)),
7845 0);
7846 else
7847 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7848 TREE_REAL_CST (arg1)),
7849 0);
7852 if (t1 == NULL_TREE)
7853 return t;
7855 if (invert)
7856 TREE_INT_CST_LOW (t1) ^= 1;
7858 TREE_TYPE (t1) = type;
7859 if (TREE_CODE (type) == BOOLEAN_TYPE)
7860 return (*lang_hooks.truthvalue_conversion) (t1);
7861 return t1;
7863 case COND_EXPR:
7864 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7865 so all simple results must be passed through pedantic_non_lvalue. */
7866 if (TREE_CODE (arg0) == INTEGER_CST)
7867 return pedantic_non_lvalue
7868 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7869 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7870 return pedantic_omit_one_operand (type, arg1, arg0);
7872 /* If we have A op B ? A : C, we may be able to convert this to a
7873 simpler expression, depending on the operation and the values
7874 of B and C. Signed zeros prevent all of these transformations,
7875 for reasons given above each one. */
7877 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7878 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7879 arg1, TREE_OPERAND (arg0, 1))
7880 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7882 tree arg2 = TREE_OPERAND (t, 2);
7883 enum tree_code comp_code = TREE_CODE (arg0);
7885 STRIP_NOPS (arg2);
7887 /* If we have A op 0 ? A : -A, consider applying the following
7888 transformations:
7890 A == 0? A : -A same as -A
7891 A != 0? A : -A same as A
7892 A >= 0? A : -A same as abs (A)
7893 A > 0? A : -A same as abs (A)
7894 A <= 0? A : -A same as -abs (A)
7895 A < 0? A : -A same as -abs (A)
7897 None of these transformations work for modes with signed
7898 zeros. If A is +/-0, the first two transformations will
7899 change the sign of the result (from +0 to -0, or vice
7900 versa). The last four will fix the sign of the result,
7901 even though the original expressions could be positive or
7902 negative, depending on the sign of A.
7904 Note that all these transformations are correct if A is
7905 NaN, since the two alternatives (A and -A) are also NaNs. */
7906 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7907 ? real_zerop (TREE_OPERAND (arg0, 1))
7908 : integer_zerop (TREE_OPERAND (arg0, 1)))
7909 && TREE_CODE (arg2) == NEGATE_EXPR
7910 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7911 switch (comp_code)
7913 case EQ_EXPR:
7914 return
7915 pedantic_non_lvalue
7916 (convert (type,
7917 negate_expr
7918 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7919 arg1))));
7920 case NE_EXPR:
7921 return pedantic_non_lvalue (convert (type, arg1));
7922 case GE_EXPR:
7923 case GT_EXPR:
7924 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7925 arg1 = convert ((*lang_hooks.types.signed_type)
7926 (TREE_TYPE (arg1)), arg1);
7927 return pedantic_non_lvalue
7928 (convert (type, fold (build1 (ABS_EXPR,
7929 TREE_TYPE (arg1), arg1))));
7930 case LE_EXPR:
7931 case LT_EXPR:
7932 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7933 arg1 = convert ((*lang_hooks.types.signed_type)
7934 (TREE_TYPE (arg1)), arg1);
7935 return pedantic_non_lvalue
7936 (negate_expr (convert (type,
7937 fold (build1 (ABS_EXPR,
7938 TREE_TYPE (arg1),
7939 arg1)))));
7940 default:
7941 abort ();
7944 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7945 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7946 both transformations are correct when A is NaN: A != 0
7947 is then true, and A == 0 is false. */
7949 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7951 if (comp_code == NE_EXPR)
7952 return pedantic_non_lvalue (convert (type, arg1));
7953 else if (comp_code == EQ_EXPR)
7954 return pedantic_non_lvalue (convert (type, integer_zero_node));
7957 /* Try some transformations of A op B ? A : B.
7959 A == B? A : B same as B
7960 A != B? A : B same as A
7961 A >= B? A : B same as max (A, B)
7962 A > B? A : B same as max (B, A)
7963 A <= B? A : B same as min (A, B)
7964 A < B? A : B same as min (B, A)
7966 As above, these transformations don't work in the presence
7967 of signed zeros. For example, if A and B are zeros of
7968 opposite sign, the first two transformations will change
7969 the sign of the result. In the last four, the original
7970 expressions give different results for (A=+0, B=-0) and
7971 (A=-0, B=+0), but the transformed expressions do not.
7973 The first two transformations are correct if either A or B
7974 is a NaN. In the first transformation, the condition will
7975 be false, and B will indeed be chosen. In the case of the
7976 second transformation, the condition A != B will be true,
7977 and A will be chosen.
7979 The conversions to max() and min() are not correct if B is
7980 a number and A is not. The conditions in the original
7981 expressions will be false, so all four give B. The min()
7982 and max() versions would give a NaN instead. */
7983 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7984 arg2, TREE_OPERAND (arg0, 0)))
7986 tree comp_op0 = TREE_OPERAND (arg0, 0);
7987 tree comp_op1 = TREE_OPERAND (arg0, 1);
7988 tree comp_type = TREE_TYPE (comp_op0);
7990 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7991 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7993 comp_type = type;
7994 comp_op0 = arg1;
7995 comp_op1 = arg2;
7998 switch (comp_code)
8000 case EQ_EXPR:
8001 return pedantic_non_lvalue (convert (type, arg2));
8002 case NE_EXPR:
8003 return pedantic_non_lvalue (convert (type, arg1));
8004 case LE_EXPR:
8005 case LT_EXPR:
8006 /* In C++ a ?: expression can be an lvalue, so put the
8007 operand which will be used if they are equal first
8008 so that we can convert this back to the
8009 corresponding COND_EXPR. */
8010 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8011 return pedantic_non_lvalue
8012 (convert (type, fold (build (MIN_EXPR, comp_type,
8013 (comp_code == LE_EXPR
8014 ? comp_op0 : comp_op1),
8015 (comp_code == LE_EXPR
8016 ? comp_op1 : comp_op0)))));
8017 break;
8018 case GE_EXPR:
8019 case GT_EXPR:
8020 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8021 return pedantic_non_lvalue
8022 (convert (type, fold (build (MAX_EXPR, comp_type,
8023 (comp_code == GE_EXPR
8024 ? comp_op0 : comp_op1),
8025 (comp_code == GE_EXPR
8026 ? comp_op1 : comp_op0)))));
8027 break;
8028 default:
8029 abort ();
8033 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8034 we might still be able to simplify this. For example,
8035 if C1 is one less or one more than C2, this might have started
8036 out as a MIN or MAX and been transformed by this function.
8037 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
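/* For example, "x < 4 ? x : 3" is min (x, 3), since here C1 is
   C2 + 1.  */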
8039 if (INTEGRAL_TYPE_P (type)
8040 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8041 && TREE_CODE (arg2) == INTEGER_CST)
8042 switch (comp_code)
8044 case EQ_EXPR:
8045 /* We can replace A with C1 in this case. */
8046 arg1 = convert (type, TREE_OPERAND (arg0, 1));
8047 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8048 TREE_OPERAND (t, 2)));
8050 case LT_EXPR:
8051 /* If C1 is C2 + 1, this is min(A, C2). */
8052 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8053 && operand_equal_p (TREE_OPERAND (arg0, 1),
8054 const_binop (PLUS_EXPR, arg2,
8055 integer_one_node, 0), 1))
8056 return pedantic_non_lvalue
8057 (fold (build (MIN_EXPR, type, arg1, arg2)));
8058 break;
8060 case LE_EXPR:
8061 /* If C1 is C2 - 1, this is min(A, C2). */
8062 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8063 && operand_equal_p (TREE_OPERAND (arg0, 1),
8064 const_binop (MINUS_EXPR, arg2,
8065 integer_one_node, 0), 1))
8066 return pedantic_non_lvalue
8067 (fold (build (MIN_EXPR, type, arg1, arg2)));
8068 break;
8070 case GT_EXPR:
8071 /* If C1 is C2 - 1, this is max(A, C2). */
8072 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8073 && operand_equal_p (TREE_OPERAND (arg0, 1),
8074 const_binop (MINUS_EXPR, arg2,
8075 integer_one_node, 0), 1))
8076 return pedantic_non_lvalue
8077 (fold (build (MAX_EXPR, type, arg1, arg2)));
8078 break;
8080 case GE_EXPR:
8081 /* If C1 is C2 + 1, this is max(A, C2). */
8082 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8083 && operand_equal_p (TREE_OPERAND (arg0, 1),
8084 const_binop (PLUS_EXPR, arg2,
8085 integer_one_node, 0), 1))
8086 return pedantic_non_lvalue
8087 (fold (build (MAX_EXPR, type, arg1, arg2)));
8088 break;
8089 case NE_EXPR:
8090 break;
8091 default:
8092 abort ();
8096 /* If the second operand is simpler than the third, swap them
8097 since that produces better jump optimization results. */
8098 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8099 TREE_OPERAND (t, 2), false))
8101 /* See if this can be inverted. If it can't, possibly because
8102 it was a floating-point inequality comparison, don't do
8103 anything. */
8104 tem = invert_truthvalue (arg0);
8106 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8107 return fold (build (code, type, tem,
8108 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8111 /* Convert A ? 1 : 0 to simply A. */
8112 if (integer_onep (TREE_OPERAND (t, 1))
8113 && integer_zerop (TREE_OPERAND (t, 2))
8114 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8115 call to fold will try to move the conversion inside
8116 a COND, which will recurse. In that case, the COND_EXPR
8117 is probably the best choice, so leave it alone. */
8118 && type == TREE_TYPE (arg0))
8119 return pedantic_non_lvalue (arg0);
8121 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8122 over COND_EXPR in cases such as floating point comparisons. */
8123 if (integer_zerop (TREE_OPERAND (t, 1))
8124 && integer_onep (TREE_OPERAND (t, 2))
8125 && truth_value_p (TREE_CODE (arg0)))
8126 return pedantic_non_lvalue (convert (type,
8127 invert_truthvalue (arg0)));
8129 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8130 operation is simply A & 2. */
8132 if (integer_zerop (TREE_OPERAND (t, 2))
8133 && TREE_CODE (arg0) == NE_EXPR
8134 && integer_zerop (TREE_OPERAND (arg0, 1))
8135 && integer_pow2p (arg1)
8136 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8137 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8138 arg1, 1))
8139 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
8141 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8142 if (integer_zerop (TREE_OPERAND (t, 2))
8143 && truth_value_p (TREE_CODE (arg0))
8144 && truth_value_p (TREE_CODE (arg1)))
8145 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8146 arg0, arg1)));
8148 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8149 if (integer_onep (TREE_OPERAND (t, 2))
8150 && truth_value_p (TREE_CODE (arg0))
8151 && truth_value_p (TREE_CODE (arg1)))
8153 /* Only perform transformation if ARG0 is easily inverted. */
8154 tem = invert_truthvalue (arg0);
8155 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8156 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8157 tem, arg1)));
8160 return t;
8162 case COMPOUND_EXPR:
8163 /* When pedantic, a compound expression can be neither an lvalue
8164 nor an integer constant expression. */
8165 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8166 return t;
8167 /* Don't let (0, 0) be a null pointer constant. */
8168 if (integer_zerop (arg1))
8169 return build1 (NOP_EXPR, type, arg1);
8170 return convert (type, arg1);
8172 case COMPLEX_EXPR:
8173 if (wins)
8174 return build_complex (type, arg0, arg1);
8175 return t;
8177 case REALPART_EXPR:
8178 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8179 return t;
8180 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8181 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8182 TREE_OPERAND (arg0, 1));
8183 else if (TREE_CODE (arg0) == COMPLEX_CST)
8184 return TREE_REALPART (arg0);
8185 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8186 return fold (build (TREE_CODE (arg0), type,
8187 fold (build1 (REALPART_EXPR, type,
8188 TREE_OPERAND (arg0, 0))),
8189 fold (build1 (REALPART_EXPR,
8190 type, TREE_OPERAND (arg0, 1)))));
8191 return t;
8193 case IMAGPART_EXPR:
8194 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8195 return convert (type, integer_zero_node);
8196 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8197 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8198 TREE_OPERAND (arg0, 0));
8199 else if (TREE_CODE (arg0) == COMPLEX_CST)
8200 return TREE_IMAGPART (arg0);
8201 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8202 return fold (build (TREE_CODE (arg0), type,
8203 fold (build1 (IMAGPART_EXPR, type,
8204 TREE_OPERAND (arg0, 0))),
8205 fold (build1 (IMAGPART_EXPR, type,
8206 TREE_OPERAND (arg0, 1)))));
8207 return t;
8209 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8210 appropriate. */
8211 case CLEANUP_POINT_EXPR:
8212 if (! has_cleanups (arg0))
8213 return TREE_OPERAND (t, 0);
8216 enum tree_code code0 = TREE_CODE (arg0);
8217 int kind0 = TREE_CODE_CLASS (code0);
8218 tree arg00 = TREE_OPERAND (arg0, 0);
8219 tree arg01;
8221 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8222 return fold (build1 (code0, type,
8223 fold (build1 (CLEANUP_POINT_EXPR,
8224 TREE_TYPE (arg00), arg00))));
8226 if (kind0 == '<' || kind0 == '2'
8227 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8228 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8229 || code0 == TRUTH_XOR_EXPR)
8231 arg01 = TREE_OPERAND (arg0, 1);
8233 if (TREE_CONSTANT (arg00)
8234 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8235 && ! has_cleanups (arg00)))
8236 return fold (build (code0, type, arg00,
8237 fold (build1 (CLEANUP_POINT_EXPR,
8238 TREE_TYPE (arg01), arg01))));
8240 if (TREE_CONSTANT (arg01))
8241 return fold (build (code0, type,
8242 fold (build1 (CLEANUP_POINT_EXPR,
8243 TREE_TYPE (arg00), arg00)),
8244 arg01));
8247 return t;
8250 case CALL_EXPR:
8251 /* Check for a built-in function. */
8252 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8253 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8254 == FUNCTION_DECL)
8255 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8257 tree tmp = fold_builtin (expr);
8258 if (tmp)
8259 return tmp;
8261 return t;
8263 default:
8264 return t;
8265 } /* switch (code) */
8268 #ifdef ENABLE_FOLD_CHECKING
8269 #undef fold
8271 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8272 static void fold_check_failed (tree, tree);
8273 void print_fold_checksum (tree);
8275 /* When configured with --enable-checking=fold, compute a digest of
8276 expr before and after the actual fold call, to verify that fold
8277 did not accidentally change the original expr. */
8279 tree
8280 fold (tree expr)
8282 tree ret;
8283 struct md5_ctx ctx;
8284 unsigned char checksum_before[16], checksum_after[16];
8285 htab_t ht;
8287 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8288 md5_init_ctx (&ctx);
8289 fold_checksum_tree (expr, &ctx, ht);
8290 md5_finish_ctx (&ctx, checksum_before);
8291 htab_empty (ht);
8293 ret = fold_1 (expr);
8295 md5_init_ctx (&ctx);
8296 fold_checksum_tree (expr, &ctx, ht);
8297 md5_finish_ctx (&ctx, checksum_after);
8298 htab_delete (ht);
8300 if (memcmp (checksum_before, checksum_after, 16))
8301 fold_check_failed (expr, ret);
8303 return ret;
8306 void
8307 print_fold_checksum (tree expr)
8309 struct md5_ctx ctx;
8310 unsigned char checksum[16], cnt;
8311 htab_t ht;
8313 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8314 md5_init_ctx (&ctx);
8315 fold_checksum_tree (expr, &ctx, ht);
8316 md5_finish_ctx (&ctx, checksum);
8317 htab_delete (ht);
8318 for (cnt = 0; cnt < 16; ++cnt)
8319 fprintf (stderr, "%02x", checksum[cnt]);
8320 putc ('\n', stderr);
8323 static void
8324 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8326 internal_error ("fold check: original tree changed by fold");
8329 static void
8330 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8332 void **slot;
8333 enum tree_code code;
8334 char buf[sizeof (struct tree_decl)];
8335 int i, len;
8337 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8338 > sizeof (struct tree_decl)
8339 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8340 abort ();
8341 if (expr == NULL)
8342 return;
8343 slot = htab_find_slot (ht, expr, INSERT);
8344 if (*slot != NULL)
8345 return;
8346 *slot = expr;
8347 code = TREE_CODE (expr);
8348 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8350 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8351 memcpy (buf, expr, tree_size (expr));
8352 expr = (tree) buf;
8353 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8355 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8357 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8358 memcpy (buf, expr, tree_size (expr));
8359 expr = (tree) buf;
8360 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8362 else if (TREE_CODE_CLASS (code) == 't'
8363 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8365 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8366 memcpy (buf, expr, tree_size (expr));
8367 expr = (tree) buf;
8368 TYPE_POINTER_TO (expr) = NULL;
8369 TYPE_REFERENCE_TO (expr) = NULL;
8371 md5_process_bytes (expr, tree_size (expr), ctx);
8372 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8373 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8374 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8375 len = TREE_CODE_LENGTH (code);
8376 switch (TREE_CODE_CLASS (code))
8378 case 'c':
8379 switch (code)
8381 case STRING_CST:
8382 md5_process_bytes (TREE_STRING_POINTER (expr),
8383 TREE_STRING_LENGTH (expr), ctx);
8384 break;
8385 case COMPLEX_CST:
8386 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8387 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8388 break;
8389 case VECTOR_CST:
8390 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8391 break;
8392 default:
8393 break;
8395 break;
8396 case 'x':
8397 switch (code)
8399 case TREE_LIST:
8400 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8401 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8402 break;
8403 case TREE_VEC:
8404 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8405 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8406 break;
8407 default:
8408 break;
8410 break;
8411 case 'e':
8412 switch (code)
8414 case SAVE_EXPR: len = 2; break;
8415 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8416 case RTL_EXPR: len = 0; break;
8417 case WITH_CLEANUP_EXPR: len = 2; break;
8418 default: break;
8420 /* FALLTHROUGH */
8421 case 'r':
8422 case '<':
8423 case '1':
8424 case '2':
8425 case 's':
8426 for (i = 0; i < len; ++i)
8427 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8428 break;
8429 case 'd':
8430 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8431 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8432 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8433 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8434 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8435 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8436 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8437 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8438 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8439 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8440 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8441 break;
8442 case 't':
8443 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8444 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8445 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8446 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8447 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8448 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8449 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8450 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8451 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8452 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8453 break;
8454 default:
8455 break;
8459 #endif
8461 /* Perform constant folding and related simplification of initializer
8462 expression EXPR. This behaves identically to "fold" but ignores
8463 potential run-time traps and exceptions that fold must preserve. */
8465 tree
8466 fold_initializer (tree expr)
8468 int saved_signaling_nans = flag_signaling_nans;
8469 int saved_trapping_math = flag_trapping_math;
8470 int saved_trapv = flag_trapv;
8471 tree result;
8473 flag_signaling_nans = 0;
8474 flag_trapping_math = 0;
8475 flag_trapv = 0;
8477 result = fold (expr);
8479 flag_signaling_nans = saved_signaling_nans;
8480 flag_trapping_math = saved_trapping_math;
8481 flag_trapv = saved_trapv;
8483 return result;
8486 /* Determine if the first argument is a multiple of the second argument.
8487 Return 0 if it is not, or if we cannot easily determine it to be.
8489 An example of the sort of thing we care about (at this point; this routine
8490 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8491 fold cases do now) is discovering that
8493 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8495 is a multiple of
8497 SAVE_EXPR (J * 8)
8499 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8501 This code also handles discovering that
8503 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8505 is a multiple of 8 so we don't have to worry about dealing with a
8506 possible remainder.
8508 Note that we *look* inside a SAVE_EXPR only to determine how it was
8509 calculated; it is not safe for fold to do much of anything else with the
8510 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8511 at run time. For example, the latter example above *cannot* be implemented
8512 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8513 evaluation time of the original SAVE_EXPR is not necessarily the same at
8514 the time the new expression is evaluated. The only optimization of this
8515 sort that would be valid is changing
8517 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8519 divided by 8 to
8521 SAVE_EXPR (I) * SAVE_EXPR (J)
8523 (where the same SAVE_EXPR (J) is used in the original and the
8524 transformed version). */
8526 static int
8527 multiple_of_p (tree type, tree top, tree bottom)
8529 if (operand_equal_p (top, bottom, 0))
8530 return 1;
8532 if (TREE_CODE (type) != INTEGER_TYPE)
8533 return 0;
8535 switch (TREE_CODE (top))
8537 case MULT_EXPR:
8538 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8539 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8541 case PLUS_EXPR:
8542 case MINUS_EXPR:
8543 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8544 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8546 case LSHIFT_EXPR:
8547 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8549 tree op1, t1;
8551 op1 = TREE_OPERAND (top, 1);
8552 /* const_binop may not detect overflow correctly,
8553 so check for it explicitly here. */
8554 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8555 > TREE_INT_CST_LOW (op1)
8556 && TREE_INT_CST_HIGH (op1) == 0
8557 && 0 != (t1 = convert (type,
8558 const_binop (LSHIFT_EXPR, size_one_node,
8559 op1, 0)))
8560 && ! TREE_OVERFLOW (t1))
8561 return multiple_of_p (type, t1, bottom);
8563 return 0;
8565 case NOP_EXPR:
8566 /* Can't handle conversions from non-integral or wider integral type. */
8567 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8568 || (TYPE_PRECISION (type)
8569 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8570 return 0;
8572 /* ... fall through ... */
8574 case SAVE_EXPR:
8575 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8577 case INTEGER_CST:
8578 if (TREE_CODE (bottom) != INTEGER_CST
8579 || (TREE_UNSIGNED (type)
8580 && (tree_int_cst_sgn (top) < 0
8581 || tree_int_cst_sgn (bottom) < 0)))
8582 return 0;
8583 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8584 top, bottom, 0));
8586 default:
8587 return 0;
8591 /* Return true if `t' is known to be non-negative. */
8593 int
8594 tree_expr_nonnegative_p (tree t)
8596 switch (TREE_CODE (t))
8598 case ABS_EXPR:
8599 return 1;
8601 case INTEGER_CST:
8602 return tree_int_cst_sgn (t) >= 0;
8604 case REAL_CST:
8605 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8607 case PLUS_EXPR:
8608 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8609 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8610 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8612 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8613 both unsigned and at least 2 bits shorter than the result. */
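/* E.g. the sum of two 8-bit unsigned values is at most 510, which
   fits in 9 bits, well below the sign bit of a 32-bit int.  */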
8614 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8615 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8616 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8618 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8619 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8620 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8621 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8623 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8624 TYPE_PRECISION (inner2)) + 1;
8625 return prec < TYPE_PRECISION (TREE_TYPE (t));
8628 break;
8630 case MULT_EXPR:
8631 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8633 /* x * x for floating point x is always non-negative. */
8634 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8635 return 1;
8636 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8637 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8640 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8641 both unsigned and the total of their widths is less than that of the result. */
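/* E.g. the product of two 8-bit unsigned values fits in 16 bits and
   is therefore non-negative as a 32-bit int.  */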
8642 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8643 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8644 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8646 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8647 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8648 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8649 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8650 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8651 < TYPE_PRECISION (TREE_TYPE (t));
8653 return 0;
8655 case TRUNC_DIV_EXPR:
8656 case CEIL_DIV_EXPR:
8657 case FLOOR_DIV_EXPR:
8658 case ROUND_DIV_EXPR:
8659 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8660 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8662 case TRUNC_MOD_EXPR:
8663 case CEIL_MOD_EXPR:
8664 case FLOOR_MOD_EXPR:
8665 case ROUND_MOD_EXPR:
8666 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8668 case RDIV_EXPR:
8669 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8670 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8672 case NOP_EXPR:
8674 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8675 tree outer_type = TREE_TYPE (t);
8677 if (TREE_CODE (outer_type) == REAL_TYPE)
8679 if (TREE_CODE (inner_type) == REAL_TYPE)
8680 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8681 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8683 if (TREE_UNSIGNED (inner_type))
8684 return 1;
8685 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8688 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8690 if (TREE_CODE (inner_type) == REAL_TYPE)
8691 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8692 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8693 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8694 && TREE_UNSIGNED (inner_type);
8697 break;
8699 case COND_EXPR:
8700 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8701 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8702 case COMPOUND_EXPR:
8703 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8704 case MIN_EXPR:
8705 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8706 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8707 case MAX_EXPR:
8708 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8709 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8710 case MODIFY_EXPR:
8711 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8712 case BIND_EXPR:
8713 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8714 case SAVE_EXPR:
8715 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8716 case NON_LVALUE_EXPR:
8717 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8718 case FLOAT_EXPR:
8719 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8720 case RTL_EXPR:
8721 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8723 case CALL_EXPR:
8725 tree fndecl = get_callee_fndecl (t);
8726 tree arglist = TREE_OPERAND (t, 1);
8727 if (fndecl
8728 && DECL_BUILT_IN (fndecl)
8729 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8730 switch (DECL_FUNCTION_CODE (fndecl))
8732 case BUILT_IN_CABS:
8733 case BUILT_IN_CABSL:
8734 case BUILT_IN_CABSF:
8735 case BUILT_IN_EXP:
8736 case BUILT_IN_EXPF:
8737 case BUILT_IN_EXPL:
8738 case BUILT_IN_EXP2:
8739 case BUILT_IN_EXP2F:
8740 case BUILT_IN_EXP2L:
8741 case BUILT_IN_EXP10:
8742 case BUILT_IN_EXP10F:
8743 case BUILT_IN_EXP10L:
8744 case BUILT_IN_FABS:
8745 case BUILT_IN_FABSF:
8746 case BUILT_IN_FABSL:
8747 case BUILT_IN_FFS:
8748 case BUILT_IN_FFSL:
8749 case BUILT_IN_FFSLL:
8750 case BUILT_IN_PARITY:
8751 case BUILT_IN_PARITYL:
8752 case BUILT_IN_PARITYLL:
8753 case BUILT_IN_POPCOUNT:
8754 case BUILT_IN_POPCOUNTL:
8755 case BUILT_IN_POPCOUNTLL:
8756 case BUILT_IN_POW10:
8757 case BUILT_IN_POW10F:
8758 case BUILT_IN_POW10L:
8759 case BUILT_IN_SQRT:
8760 case BUILT_IN_SQRTF:
8761 case BUILT_IN_SQRTL:
8762 return 1;
8764 case BUILT_IN_ATAN:
8765 case BUILT_IN_ATANF:
8766 case BUILT_IN_ATANL:
8767 case BUILT_IN_CEIL:
8768 case BUILT_IN_CEILF:
8769 case BUILT_IN_CEILL:
8770 case BUILT_IN_FLOOR:
8771 case BUILT_IN_FLOORF:
8772 case BUILT_IN_FLOORL:
8773 case BUILT_IN_NEARBYINT:
8774 case BUILT_IN_NEARBYINTF:
8775 case BUILT_IN_NEARBYINTL:
8776 case BUILT_IN_ROUND:
8777 case BUILT_IN_ROUNDF:
8778 case BUILT_IN_ROUNDL:
8779 case BUILT_IN_TRUNC:
8780 case BUILT_IN_TRUNCF:
8781 case BUILT_IN_TRUNCL:
8782 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8784 case BUILT_IN_POW:
8785 case BUILT_IN_POWF:
8786 case BUILT_IN_POWL:
8787 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8789 default:
8790 break;
8794 /* ... fall through ... */
8796 default:
8797 if (truth_value_p (TREE_CODE (t)))
8798 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8799 return 1;
8802 /* We don't know the sign of `t', so be conservative and return false. */
8803 return 0;
8806 /* Return true if `r' is known to be non-negative.
8807 Only handles constants at the moment. */
8809 int
8810 rtl_expr_nonnegative_p (rtx r)
8812 switch (GET_CODE (r))
8814 case CONST_INT:
8815 return INTVAL (r) >= 0;
8817 case CONST_DOUBLE:
8818 if (GET_MODE (r) == VOIDmode)
8819 return CONST_DOUBLE_HIGH (r) >= 0;
8820 return 0;
8822 case CONST_VECTOR:
8824 int units, i;
8825 rtx elt;
8827 units = CONST_VECTOR_NUNITS (r);
8829 for (i = 0; i < units; ++i)
8831 elt = CONST_VECTOR_ELT (r, i);
8832 if (!rtl_expr_nonnegative_p (elt))
8833 return 0;
8836 return 1;
8839 case SYMBOL_REF:
8840 case LABEL_REF:
8841 /* These are always nonnegative. */
8842 return 1;
8844 default:
8845 return 0;
8849 #include "gt-fold-const.h"